diff --git a/.github/workflows/linters.yaml b/.github/workflows/linters.yaml index 94a56e46..86293445 100644 --- a/.github/workflows/linters.yaml +++ b/.github/workflows/linters.yaml @@ -9,80 +9,56 @@ on: - release_* jobs: - bandit: - name: Bandit analyzer for Python ${{ matrix.os.python }} on - ${{ matrix.os.name }}-${{ matrix.os.version }} + flake8: + name: Flake8 for Python ${{ matrix.python-version }} runs-on: ubuntu-latest strategy: - fail-fast: false matrix: - os: - - name: centos - version: 7 - python: 3 - engine: docker - - - name: fedora - version: 34 - python: 3 - engine: docker + python-version: [ "3.8" ] steps: - - uses: actions/checkout@v1 - - - run: ./test.sh - env: - OS: ${{ matrix.os.name }} - OS_VERSION: ${{ matrix.os.version }} - PYTHON_VERSION: ${{ matrix.os.python }} - ENGINE: ${{ matrix.os.engine }} - ACTION: bandit - - flake8: - name: Flake8 for Python 3 - runs-on: ubuntu-latest - - steps: - - name: Check out repo - uses: actions/checkout@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools tox - - name: Run flake8 for Python 3 - uses: containerbuildsystem/actions/flake8@master + - name: Run flake8 on python${{ matrix.python-version }} + run: python -m tox -e flake8 - # markdownlint: - # name: Markdownlint - # runs-on: ubuntu-latest + markdownlint: + name: Markdownlint + runs-on: ubuntu-latest - # steps: - # - name: Check out repo - # uses: actions/checkout@v2 + steps: + - name: Check out repo + uses: actions/checkout@v2 - # - name: Run markdownlint - # uses: containerbuildsystem/actions/markdownlint@master + - name: Run markdownlint + uses: containerbuildsystem/actions/markdownlint@master pylint: - name: Pylint analyzer for Python ${{ matrix.os.python }} + name: Pylint analyzer for Python ${{ matrix.python-version }} runs-on: ubuntu-latest strategy: matrix: - os: - - name: fedora - version: 34 - python: 3 - engine: docker + python-version: [ "3.8" ] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools tox - - run: ./test.sh - env: - OS: ${{ matrix.os.name }} - OS_VERSION: ${{ matrix.os.version }} - PYTHON_VERSION: ${{ matrix.os.python }} - ENGINE: ${{ matrix.os.engine }} - ACTION: pylint + - name: Run pylint on python${{ matrix.python-version }} + run: python -m tox -e pylint shellcheck: name: Shellcheck @@ -94,3 +70,43 @@ jobs: - name: Run ShellCheck uses: containerbuildsystem/actions/shellcheck@master + +# mypy: +# name: mypy type checker for Python ${{ matrix.python-version }} +# runs-on: ubuntu-latest +# +# strategy: +# matrix: +# python-version: [ "3.8" ] +# +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# - name: Install dependencies +# run: | +# python -m pip install --upgrade pip setuptools tox +# +# - name: Run mypy on python${{ matrix.python-version }} +# run: python -m tox -e mypy + + bandit: + name: Bandit analyzer for Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [ "3.8" ] + + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install 
--upgrade pip setuptools tox
+
+      - name: Run bandit analyzer on python${{ matrix.python-version }}
+        run: python -m tox -e bandit
diff --git a/.github/workflows/unittests.yaml b/.github/workflows/unittests.yaml
index 4c45b551..3140088e 100644
--- a/.github/workflows/unittests.yaml
+++ b/.github/workflows/unittests.yaml
@@ -10,68 +10,51 @@ on:

 jobs:
   test:
-    name: Python ${{ matrix.os.python }} tests on ${{ matrix.os.name }}-${{ matrix.os.version }}
-    runs-on: ubuntu-18.04
+    name: Python ${{ matrix.python-version }} tests
+    runs-on: ubuntu-latest

     strategy:
       fail-fast: false
       matrix:
-        os:
-          - name: centos
-            version: 7
-            python: 3
-            engine: docker
-
-          - name: fedora
-            version: 33
-            python: 3
-            engine: docker
-
-          - name: fedora
-            version: 34
-            python: 3
-            engine: docker
+        python-version: ["3.7", "3.8", "3.9"]

     steps:
-      - name: Check out repo
-        uses: actions/checkout@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools tox

-      - name: pytests via test.sh
-        env:
-          OS: ${{ matrix.os.name }}
-          OS_VERSION: ${{ matrix.os.version }}
-          PYTHON_VERSION: ${{ matrix.os.python }}
-          ENGINE: ${{ matrix.os.engine }}
-        run: ./test.sh
+      - name: Run unittests on python${{ matrix.python-version }}
+        run: python -m tox -e test

       - name: Upload pytest html report
         uses: actions/upload-artifact@v2
         if: failure()
         with:
-          path: __pytest_reports/atomic-reactor-unit-tests.html
-          name: atomic-reactor-unit-tests_${{ matrix.os.name }}_${{ matrix.os.version }}.python${{ matrix.os.python }}.html
+          path: __pytest_reports/charon-unit-tests.html
+          name: charon-unit-tests_python${{ matrix.python-version }}.html
+
       - name: Run coveralls-python
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          COVERALLS_FLAG_NAME: ${{ matrix.os.name }}-${{ matrix.os.version }}-python${{ matrix.os.python }}
+          COVERALLS_FLAG_NAME: python${{ matrix.python-version }}
           COVERALLS_PARALLEL: true
         run: |
-          pip3 install --upgrade pip
-          pip3 install --upgrade setuptools
-          pip3 install --upgrade coveralls
-          /home/runner/.local/bin/coveralls --service=github
+          python -m pip install --upgrade coveralls
+          python -m coveralls --service=github

   coveralls-finish:
     name: Finish coveralls-python
     needs: test
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-latest

     steps:
       - name: Finished
         run: |
-          pip3 install --upgrade pip
-          pip3 install --upgrade setuptools
-          pip3 install --upgrade coveralls
+          pip3 install --upgrade pip setuptools coveralls
           /home/runner/.local/bin/coveralls --finish --service=github
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/README.md b/README.md
index 406de866..cdd1608d 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,10 @@ future. And Ronda service will be hosted in AWS S3.

 See [AWS CLi V2 installation](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2-linux.html#cliv2-linux-install)

+### [Optional] rpm-sign or GnuPG CLI tool
+
+Charon can be configured to use rpm-sign or any similar command to generate .asc signature files.
+
 ## Installation

 ### From git
@@ -49,7 +53,7 @@ to configure AWS access credentials.

 ### charon-upload: upload a repo to S3

 ```bash
-usage: charon upload $tarball --product/-p ${prod} --version/-v ${ver} [--root_path] [--ignore_patterns] [--debug]
+usage: charon upload $tarball --product/-p ${prod} --version/-v ${ver} [--root_path] [--ignore_patterns] [--debug] [--contain_signature] [--sign_key]
 ```

 This command will upload the repo in tarball to S3.
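The new `--contain_signature` flow renders the configured `detach_signature_command` with jinja2 before running it. A minimal sketch of that rendering, mirroring `charon/pkgs/signature.py` further down in this patch (the artifact path is a made-up example; `redhatdevel` is the default key name from the diff):

```python
# Sketch: how detach_signature_command is rendered before being executed.
# Mirrors Template(...).render(...) + shlex.split(...) in charon/pkgs/signature.py.
# The artifact path below is a hypothetical example.
import shlex
from jinja2 import Template

command = "rpm-sign --detach-sign --key {{ key }} {{ file }}"
rendered = Template(command).render(key="redhatdevel", file="/tmp/repo/org/foo/foo-1.0.pom")
print(shlex.split(rendered))
# ['rpm-sign', '--detach-sign', '--key', 'redhatdevel', '/tmp/repo/org/foo/foo-1.0.pom']
```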
diff --git a/charon.spec b/charon.spec index a1888098..abe7d7e5 100644 --- a/charon.spec +++ b/charon.spec @@ -1,89 +1,90 @@ -%global binaries_py_version %{python3_version} %global owner Commonjava -%global project charon +%global modulename charon + +%global charon_version 1.1.2 +%global sdist_tar_name %{modulename}-%{charon_version} + +%global python3_pkgversion 3 + +Name: %{modulename} +Summary: Charon CLI +Version: %{charon_version} +Release: 1%{?dist} +URL: https://github.com/%{owner}/%{modulename} +Source0: %{url}/archive/%{charon_version}.tar.gz +Provides: %{modulename} = %{version}-%{release} + +Group: Development/Tools +License: APLv2 + +# Build Requirements +BuildArch: x86_64 + +BuildRequires: python%{python3_pkgversion}-setuptools +BuildRequires: python%{python3_pkgversion}-devel + +Requires: python%{python3_pkgversion}-boto3 +Requires: python%{python3_pkgversion}-botocore +Requires: python%{python3_pkgversion}-jinja2 +Requires: python%{python3_pkgversion}-markupsafe +Requires: python%{python3_pkgversion}-dateutil +Requires: python%{python3_pkgversion}-six +Requires: python%{python3_pkgversion}-jmespath +Requires: python%{python3_pkgversion}-urllib3 +Requires: python%{python3_pkgversion}-s3transfer +Requires: python%{python3_pkgversion}-click +Requires: python%{python3_pkgversion}-requests +Requires: python%{python3_pkgversion}-idna +Requires: python%{python3_pkgversion}-chardet +Requires: python%{python3_pkgversion}-cryptography +Requires: python%{python3_pkgversion}-cffi +Requires: python%{python3_pkgversion}-pycparser +Requires: python%{python3_pkgversion}-certifi +Requires: python%{python3_pkgversion}-pyOpenSSL +Requires: python%{python3_pkgversion}-ruamel-yaml +Requires: python%{python3_pkgversion}-defusedxml +Requires: python%{python3_pkgversion}-semantic-version +Requires: python%{python3_pkgversion}-subresource-integrity +Requires: python%{python3_pkgversion}-jsonschema +Requires: python%{python3_pkgversion}-importlib-metadata +Requires: python%{python3_pkgversion}-zipp +Requires: python%{python3_pkgversion}-attrs +Requires: python%{python3_pkgversion}-pyrsistent -Name: %{project} -Version: 1.0.0 -Release: 1%{?dist} - -Summary: Charon CLI -Group: Development/Tools -License: APLv2 -URL: https://github.com/%{owner}/%{project} -Source0: https://github.com/%{owner}/%{project}/archive/%{version}.tar.gz - -BuildArch: noarch - -Requires: python3-charon = %{version}-%{release} -Requires: git >= 1.7.10 - -BuildRequires: python3-devel -BuildRequires: python3-setuptools %description Simple Python tool with command line interface for charon init, upload, delete, gen and ls functions. -%package -n python3-charon -Summary: Python 3 CHARON library -Group: Development/Tools -License: APLv2 -Requires: python3-requests -Requires: python3-setuptools -Requires: python3-rpm -%{?python_provide:%python_provide python3-charon} - -%description -n python3-charon -Simple Python 3 library for CHARON functions. 
-
 %prep
-%setup -q
+%autosetup -p1 -n %{sdist_tar_name}

 %build
+# Disable debuginfo packages
+%define _enable_debug_package 0
+%define debug_package %{nil}
 %py3_build

 %install
+export LANG=en_US.UTF-8 LANGUAGE=en_US.en LC_ALL=en_US.UTF-8
 %py3_install
-mv %{buildroot}%{_bindir}/charon %{buildroot}%{_bindir}/charon-%{python3_version}
-ln -s %{_bindir}/charon-%{python3_version} %{buildroot}%{_bindir}/charon-3
-
-ln -s %{_bindir}/charon-%{binaries_py_version} %{buildroot}%{_bindir}/charon
-
-# ship charon in form of tarball so it can be installed within build image
-mkdir -p %{buildroot}/%{_datadir}/%{name}/
-cp -a %{sources} %{buildroot}/%{_datadir}/%{name}/charon.tar.gz
-
-# setup docs
-#mkdir -p %{buildroot}%{_mandir}/man1
-#cp -a docs/manpage/charon.1 %{buildroot}%{_mandir}/man1/

 %files
+%defattr(-,root,root)
 %doc README.md
-#%{_mandir}/man1/charon.1*
+%{_bindir}/%{modulename}*
+%{python3_sitelib}/*
 %{!?_licensedir:%global license %doc}
 %license LICENSE
-%{_bindir}/charon
-
-%files -n python3-charon
-%doc README.md
-%{!?_licensedir:%global license %doc}
-%license LICENSE
-%{_bindir}/charon-%{python3_version}
-%{_bindir}/charon-3
-#%{_mandir}/man1/charon.1*
-%dir %{python3_sitelib}/charon
-%dir %{python3_sitelib}/charon/__pycache__
-%{python3_sitelib}/charon/*.*
-%{python3_sitelib}/charon/cmd
-%{python3_sitelib}/charon/pkgs
-%{python3_sitelib}/charon/utils
-%{python3_sitelib}/charon/__pycache__/*.py*
-%{python3_sitelib}/charon_*.egg-info
-%dir %{_datadir}/%{name}
-# ship charon in form of tarball so it can be installed within build image
-%{_datadir}/%{name}/charon.tar.gz

 %changelog
+* Tue Sep 20 2022 Harsh Modi
+- 1.1.2 release
+- add configuration schema and validation
+- allow specifying multiple target buckets
+
+* Thu Aug 25 2022 Harsh Modi
+- 1.1.1 release
diff --git a/charon/cmd/command.py b/charon/cmd/command.py
index cbb4e110..2ef88aed 100644
--- a/charon/cmd/command.py
+++ b/charon/cmd/command.py
@@ -16,6 +16,7 @@
 from typing import List, Tuple

 from charon.config import CharonConfig, get_config
+from charon.constants import DEFAULT_REGISTRY
 from charon.utils.logs import set_logging
 from charon.utils.archive import detect_npm_archive, download_archive, NpmArchiveType
 from charon.pkgs.maven import handle_maven_uploading, handle_maven_del
@@ -60,6 +61,7 @@
 @option(
     "--target",
     "-t",
+    'targets',
     help="""
     The target to do the uploading, which will decide which s3 bucket
     and what root path where all files will be uploaded to.
@@ -94,6 +96,22 @@
     be extracted, when needed.
     """,
 )
+@option(
+    "--contain_signature",
+    "-s",
+    is_flag=True,
+    help="""
+    Toggle signature generation and upload feature in charon.
+    """
+)
+@option(
+    "--sign_key",
+    "-k",
+    help="""
+    rpm-sign key to be used, will replace {{ key }} in default configuration for signature.
+    Does nothing if detach_signature_command does not contain the {{ key }} field.
+ """, +) @option( "--debug", "-D", @@ -114,10 +132,12 @@ def upload( repo: str, product: str, version: str, - target: List[str], + targets: List[str], root_path="maven-repository", ignore_patterns: List[str] = None, work_dir: str = None, + contain_signature: bool = False, + sign_key: str = "redhatdevel", debug=False, quiet=False, dryrun=False @@ -147,15 +167,17 @@ def upload( npm_archive_type = detect_npm_archive(archive_path) product_key = f"{product}-{version}" manifest_bucket_name = conf.get_manifest_bucket() - targets_ = __get_targets(target, conf) + buckets = __get_buckets(targets, conf) if npm_archive_type != NpmArchiveType.NOT_NPM: logger.info("This is a npm archive") tmp_dir, succeeded = handle_npm_uploading( archive_path, product_key, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, + gen_sign=contain_signature, + key=sign_key, dry_run=dryrun, manifest_bucket_name=manifest_bucket_name ) @@ -173,9 +195,11 @@ def upload( product_key, ignore_patterns_list, root=root_path, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, + gen_sign=contain_signature, + key=sign_key, dry_run=dryrun, manifest_bucket_name=manifest_bucket_name ) @@ -217,6 +241,7 @@ def upload( @option( "--target", "-t", + 'targets', help=""" The target to do the deletion, which will decide which s3 bucket and what root path where all files will be deleted from. @@ -270,7 +295,7 @@ def delete( repo: str, product: str, version: str, - target: List[str], + targets: List[str], root_path="maven-repository", ignore_patterns: List[str] = None, work_dir: str = None, @@ -303,13 +328,13 @@ def delete( npm_archive_type = detect_npm_archive(archive_path) product_key = f"{product}-{version}" manifest_bucket_name = conf.get_manifest_bucket() - targets_ = __get_targets(target, conf) + buckets = __get_buckets(targets, conf) if npm_archive_type != NpmArchiveType.NOT_NPM: logger.info("This is a npm archive") tmp_dir, succeeded = handle_npm_del( archive_path, product_key, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, dry_run=dryrun, @@ -329,7 +354,7 @@ def delete( product_key, ignore_patterns_list, root=root_path, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, dry_run=dryrun, @@ -345,22 +370,15 @@ def delete( __safe_delete(tmp_dir) -def __get_targets(target: List[str], conf: CharonConfig) -> List[Tuple[str, str, str, str]]: - targets_ = [] - for tgt in target: - aws_bucket = conf.get_aws_bucket(tgt) - if not aws_bucket: - continue - prefix = conf.get_bucket_prefix(tgt) - registry = conf.get_bucket_registry(tgt) - targets_.append([tgt, aws_bucket, prefix, registry]) - if len(targets_) == 0: - logger.error( - "All targets are not valid or configured, " - "please check your charon configurations." - ) - sys.exit(1) - return targets_ +def __get_buckets(targets: List[str], conf: CharonConfig) -> List[Tuple[str, str, str, str]]: + buckets = [] + for target in targets: + for bucket in conf.get_target(target): + aws_bucket = bucket.get('bucket') + prefix = bucket.get('prefix', '') + registry = bucket.get('registry', DEFAULT_REGISTRY) + buckets.append((target, aws_bucket, prefix, registry)) + return buckets def __safe_delete(tmp_dir: str): diff --git a/charon/config.py b/charon/config.py index bb7308fa..8f128617 100644 --- a/charon/config.py +++ b/charon/config.py @@ -13,14 +13,11 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from typing import Dict, List -from ruamel.yaml import YAML -from pathlib import Path -import os import logging +import os +from typing import Dict, List, Optional -from charon.utils.strings import remove_prefix -from charon.constants import DEFAULT_REGISTRY +from charon.utils.yaml import read_yaml_from_file_path CONFIG_FILE = "charon.yaml" @@ -33,82 +30,49 @@ class CharonConfig(object): The configuration file will be named as charon.yaml, and will be stored in $HOME/.charon/ folder by default. """ + def __init__(self, data: Dict): self.__ignore_patterns: List[str] = data.get("ignore_patterns", None) self.__aws_profile: str = data.get("aws_profile", None) self.__targets: Dict = data.get("targets", None) - if not self.__targets or not isinstance(self.__targets, Dict): - raise TypeError("Charon configuration is not correct: targets is invalid.") self.__manifest_bucket: str = data.get("manifest_bucket", None) + self.__ignore_signature_suffix: Dict = data.get("ignore_signature_suffix", None) + self.__signature_command: str = data.get("detach_signature_command", None) def get_ignore_patterns(self) -> List[str]: return self.__ignore_patterns + def get_target(self, target: str) -> List[Dict]: + target_: List = self.__targets.get(target, []) + if not target_: + logger.error("The target %s is not found in charon configuration.", target) + return target_ + def get_aws_profile(self) -> str: return self.__aws_profile - def get_aws_bucket(self, target: str) -> str: - target_: Dict = self.__targets.get(target, None) - if not target_ or not isinstance(target_, Dict): - logger.error("The target %s is not found in charon configuration.", target) - return None - bucket = target_.get("bucket", None) - if not bucket: - logger.error("The bucket is not found for target %s " - "in charon configuration.", target) - return bucket - - def get_bucket_prefix(self, target: str) -> str: - target_: Dict = self.__targets.get(target, None) - if not target_ or not isinstance(target_, Dict): - logger.error("The target %s is not found in charon " - "configuration.", target) - return None - prefix = target_.get("prefix", None) - if not prefix: - logger.warning("The prefix is not found for target %s " - "in charon configuration, so no prefix will " - "be used", target) - prefix = "" - # removing first slash as it is not needed. 
- prefix = remove_prefix(prefix, "/") - return prefix - - def get_bucket_registry(self, target: str) -> str: - target_: Dict = self.__targets.get(target, None) - if not target_ or not isinstance(target_, Dict): - logger.error("The target %s is not found in charon configuration.", target) - return None - registry = target_.get("registry", None) - if not registry: - registry = DEFAULT_REGISTRY - logger.error("The registry is not found for target %s " - "in charon configuration, so DEFAULT_REGISTRY(localhost) will be used.", - target) - return registry - def get_manifest_bucket(self) -> str: return self.__manifest_bucket + def get_ignore_signature_suffix(self, package_type: str) -> List[str]: + xartifact_list: List = self.__ignore_signature_suffix.get(package_type) + if not xartifact_list: + logger.error("package type %s does not have ignore artifact config.", package_type) + return xartifact_list + + def get_detach_signature_command(self) -> str: + return self.__signature_command + -def get_config() -> CharonConfig: - config_file = os.path.join(os.getenv("HOME"), ".charon", CONFIG_FILE) - try: - yaml = YAML(typ='safe') - data = yaml.load(stream=Path(config_file)) - except Exception as e: - logger.error("Can not load charon config file due to error: %s", e) - return None - try: - return CharonConfig(data) - except TypeError as e: - logger.error(e) - return None +def get_config() -> Optional[CharonConfig]: + config_file_path = os.path.join(os.getenv("HOME"), ".charon", CONFIG_FILE) + data = read_yaml_from_file_path(config_file_path, 'schemas/charon.json') + return CharonConfig(data) def get_template(template_file: str) -> str: template = os.path.join( - os.getenv("HOME"), ".charon/template", template_file + os.getenv("HOME", ''), ".charon/template", template_file ) if os.path.isfile(template): with open(template, encoding="utf-8") as file_: diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index cabc87ef..9fd57422 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -15,12 +15,13 @@ """ from charon.utils.files import HashType import charon.pkgs.indexing as indexing +import charon.pkgs.signature as signature from charon.utils.files import overwrite_file, digest, write_manifest from charon.utils.archive import extract_zip_all from charon.utils.strings import remove_prefix from charon.storage import S3Client from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process -from charon.config import get_template +from charon.config import CharonConfig, get_template, get_config from charon.constants import (META_FILE_GEN_KEY, META_FILE_DEL_KEY, META_FILE_FAILED, MAVEN_METADATA_TEMPLATE, ARCHETYPE_CATALOG_TEMPLATE, ARCHETYPE_CATALOG_FILENAME, @@ -256,10 +257,12 @@ def handle_maven_uploading( prod_key: str, ignore_patterns=None, root="maven-repository", - targets: List[Tuple[str, str, str, str]] = None, + buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, + gen_sign=False, + key=None, dry_run=False, manifest_bucket_name=None ) -> Tuple[str, bool]: @@ -304,10 +307,10 @@ def handle_maven_uploading( # 4. 
Do uploading s3_client = S3Client(aws_profile=aws_profile, dry_run=dry_run) - targets_ = [(target[1], remove_prefix(target[2], "/")) for target in targets] + targets_ = [(bucket[1], remove_prefix(bucket[2], "/")) for bucket in buckets] logger.info( "Start uploading files to s3 buckets: %s", - [target[1] for target in targets] + [bucket[1] for bucket in buckets] ) failed_files = s3_client.upload_files( file_paths=valid_mvn_paths, @@ -317,7 +320,8 @@ def handle_maven_uploading( ) logger.info("Files uploading done\n") succeeded = True - for target in targets: + generated_signs = [] + for bucket in buckets: # 5. Do manifest uploading if not manifest_bucket_name: logger.warning( @@ -325,7 +329,7 @@ def handle_maven_uploading( 'uploading\n') else: logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name) - manifest_folder = target[1] + manifest_folder = bucket[1] manifest_name, manifest_full_path = write_manifest(valid_mvn_paths, top_level, prod_key) s3_client.upload_manifest( manifest_name, manifest_full_path, @@ -334,39 +338,38 @@ def handle_maven_uploading( logger.info("Manifest uploading is done\n") # 6. Use uploaded poms to scan s3 for metadata refreshment - bucket_ = target[1] - prefix__ = remove_prefix(target[2], "/") - failed_metas = [] - logger.info("Start generating maven-metadata.xml files for bucket %s", bucket_) + bucket_name = bucket[1] + prefix = remove_prefix(bucket[2], "/") + logger.info("Start generating maven-metadata.xml files for bucket %s", bucket_name) meta_files = _generate_metadatas( - s3=s3_client, bucket=bucket_, + s3=s3_client, bucket=bucket_name, poms=valid_poms, root=top_level, - prefix=prefix__ + prefix=prefix ) logger.info("maven-metadata.xml files generation done\n") failed_metas = meta_files.get(META_FILE_FAILED, []) # 7. Upload all maven-metadata.xml if META_FILE_GEN_KEY in meta_files: - logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket_) + logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket_name) _failed_metas = s3_client.upload_metadatas( meta_file_paths=meta_files[META_FILE_GEN_KEY], - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=top_level ) failed_metas.extend(_failed_metas) - logger.info("maven-metadata.xml updating done in bucket %s\n", bucket_) + logger.info("maven-metadata.xml updating done in bucket %s\n", bucket_name) # 8. Determine refreshment of archetype-catalog.xml if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")): - logger.info("Start generating archetype-catalog.xml for bucket %s", bucket_) + logger.info("Start generating archetype-catalog.xml for bucket %s", bucket_name) upload_archetype_file = _generate_upload_archetype_catalog( - s3=s3_client, bucket=bucket_, + s3=s3_client, bucket=bucket_name, root=top_level, - prefix=prefix__ + prefix=prefix ) - logger.info("archetype-catalog.xml files generation done in bucket %s\n", bucket_) + logger.info("archetype-catalog.xml files generation done in bucket %s\n", bucket_name) # 9. 
Upload archetype-catalog.xml if it has changed
@@ -374,31 +377,60 @@
         if upload_archetype_file:
             archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)]
             archetype_files.extend(
                 __hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME)
             )
-            logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket_)
+            logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket_name)
             _failed_metas = s3_client.upload_metadatas(
                 meta_file_paths=archetype_files,
-                target=(bucket_, prefix__),
+                target=(bucket_name, prefix),
                 product=None,
                 root=top_level
             )
             failed_metas.extend(_failed_metas)
-            logger.info("archetype-catalog.xml updating done in bucket %s\n", bucket_)
+            logger.info("archetype-catalog.xml updating done in bucket %s\n", bucket_name)
+
+        # 10. Generate signature file if contain_signature is set to True
+        if gen_sign:
+            conf = get_config()
+            if not conf:
+                sys.exit(1)
+            suffix_list = __get_suffix(PACKAGE_TYPE_MAVEN, conf)
+            command = conf.get_detach_signature_command()
+            artifacts = [s for s in valid_mvn_paths if not s.endswith(tuple(suffix_list))]
+            logger.info("Start generating signature for s3 bucket %s\n", bucket_name)
+            (_failed_metas, _generated_signs) = signature.generate_sign(
+                PACKAGE_TYPE_MAVEN, artifacts,
+                top_level, prefix,
+                s3_client, bucket_name,
+                key, command
+            )
+            failed_metas.extend(_failed_metas)
+            generated_signs.extend(_generated_signs)
+            logger.info("Signature generation done.\n")
+
+            logger.info("Start uploading signature files to s3 bucket %s\n", bucket_name)
+            _failed_metas = s3_client.upload_signatures(
+                meta_file_paths=generated_signs,
+                target=(bucket_name, prefix),
+                product=None,
+                root=top_level
+            )
+            failed_metas.extend(_failed_metas)
+            logger.info("Signature uploading done.\n")

         # this step generates index.html for each dir and add them to file list
         # index is similar to metadata, it will be overwritten everytime
         if do_index:
-            logger.info("Start generating index files to s3 bucket %s", bucket_)
+            logger.info("Start generating index files to s3 bucket %s", bucket_name)
             created_indexes = indexing.generate_indexes(
                 PACKAGE_TYPE_MAVEN, top_level, valid_dirs,
-                s3_client, bucket_, prefix__
+                s3_client, bucket_name, prefix
             )
             logger.info("Index files generation done.\n")

-            logger.info("Start updating index files to s3 bucket %s", bucket_)
+            logger.info("Start updating index files to s3 bucket %s", bucket_name)
             _failed_metas = s3_client.upload_metadatas(
                 meta_file_paths=created_indexes,
-                target=(bucket_, prefix__),
+                target=(bucket_name, prefix),
                 product=None,
                 root=top_level
             )
@@ -407,7 +439,7 @@
         else:
             logger.info("Bypass indexing")

-        upload_post_process(failed_files, failed_metas, prod_key, bucket_)
+        upload_post_process(failed_files, failed_metas, prod_key, bucket_name)
         succeeded = succeeded and len(failed_files) <= 0 and len(failed_metas) <= 0

     return (tmp_root, succeeded)
@@ -418,7 +450,7 @@
     prod_key: str,
     ignore_patterns=None,
     root="maven-repository",
-    targets: List[Tuple[str, str, str, str]] = None,
+    buckets: List[Tuple[str, str, str, str]] = None,
     aws_profile=None,
     dir_=None,
     do_index=True,
@@ -433,7 +465,7 @@
        need to upload in the tarball
     * root is a prefix in the tarball to identify which path is the
       beginning of the maven GAV path
-    * targets contains the target name with its bucket name and prefix
+    * buckets contains the target name with its bucket name and prefix
       for the bucket, which will be used to store artifacts with the
       prefix.
See target definition in Charon configuration for details * dir is base dir for extracting the tarball, will use system @@ -454,21 +486,21 @@ def handle_maven_del( # 3. Delete all valid_paths from s3 logger.debug("Valid poms: %s", valid_poms) succeeded = True - for target in targets: - prefix_ = remove_prefix(target[2], "/") + for bucket in buckets: + prefix = remove_prefix(bucket[2], "/") s3_client = S3Client(aws_profile=aws_profile, dry_run=dry_run) - bucket = target[1] - logger.info("Start deleting files from s3 bucket %s", bucket) + bucket_name = bucket[1] + logger.info("Start deleting files from s3 bucket %s", bucket_name) failed_files = s3_client.delete_files( valid_mvn_paths, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=prod_key, root=top_level ) logger.info("Files deletion done\n") # 4. Delete related manifest from s3 - manifest_folder = target[1] + manifest_folder = bucket[1] logger.info( "Start deleting manifest from s3 bucket %s in folder %s", manifest_bucket_name, manifest_folder @@ -479,25 +511,25 @@ def handle_maven_del( # 5. Use changed GA to scan s3 for metadata refreshment logger.info( "Start generating maven-metadata.xml files for all changed GAs in s3 bucket %s", - bucket + bucket_name ) meta_files = _generate_metadatas( - s3=s3_client, bucket=bucket, + s3=s3_client, bucket=bucket_name, poms=valid_poms, root=top_level, - prefix=prefix_ + prefix=prefix ) logger.info("maven-metadata.xml files generation done\n") # 6. Upload all maven-metadata.xml. We need to delete metadata files # firstly for all affected GA, and then replace the theirs content. - logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket) + logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket_name) all_meta_files = [] for _, files in meta_files.items(): all_meta_files.extend(files) s3_client.delete_files( file_paths=all_meta_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -505,7 +537,7 @@ def handle_maven_del( if META_FILE_GEN_KEY in meta_files: _failed_metas = s3_client.upload_metadatas( meta_file_paths=meta_files[META_FILE_GEN_KEY], - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -517,9 +549,9 @@ def handle_maven_del( if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")): logger.info("Start generating archetype-catalog.xml") archetype_action = _generate_rollback_archetype_catalog( - s3=s3_client, bucket=bucket, + s3=s3_client, bucket=bucket_name, root=top_level, - prefix=prefix_ + prefix=prefix ) logger.info("archetype-catalog.xml files generation done\n") @@ -527,10 +559,10 @@ def handle_maven_del( archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)] archetype_files.extend(__hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME)) if archetype_action < 0: - logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket) + logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket_name) _failed_metas = s3_client.delete_files( file_paths=archetype_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -539,7 +571,7 @@ def handle_maven_del( elif archetype_action > 0: _failed_metas = s3_client.upload_metadatas( meta_file_paths=archetype_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -550,14 +582,14 @@ def handle_maven_del( if do_index: logger.info("Start generating index files for all 
changed entries") created_indexes = indexing.generate_indexes( - PACKAGE_TYPE_MAVEN, top_level, valid_dirs, s3_client, bucket, prefix_ + PACKAGE_TYPE_MAVEN, top_level, valid_dirs, s3_client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index to s3 bucket %s", bucket) + logger.info("Start updating index to s3 bucket %s", bucket_name) _failed_index_files = s3_client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -567,7 +599,7 @@ def handle_maven_del( else: logger.info("Bypassing indexing") - rollback_post_process(failed_files, failed_metas, prod_key, bucket) + rollback_post_process(failed_files, failed_metas, prod_key, bucket_name) succeeded = succeeded and len(failed_files) == 0 and len(failed_metas) == 0 return (tmp_root, succeeded) @@ -1012,6 +1044,12 @@ def _handle_error(err_msgs: List[str]): pass +def __get_suffix(package_type: str, conf: CharonConfig) -> List[str]: + if package_type: + return conf.get_ignore_signature_suffix(package_type) + return [] + + class VersionCompareKey: 'Used as key function for version sorting' def __init__(self, obj): diff --git a/charon/pkgs/npm.py b/charon/pkgs/npm.py index 70c45a77..7c160673 100644 --- a/charon/pkgs/npm.py +++ b/charon/pkgs/npm.py @@ -24,6 +24,8 @@ from semantic_version import compare import charon.pkgs.indexing as indexing +import charon.pkgs.signature as signature +from charon.config import CharonConfig, get_config from charon.constants import META_FILE_GEN_KEY, META_FILE_DEL_KEY, PACKAGE_TYPE_NPM from charon.storage import S3Client from charon.utils.archive import extract_npm_tarball @@ -66,10 +68,12 @@ def __init__(self, metadata, is_version): def handle_npm_uploading( tarball_path: str, product: str, - targets: List[Tuple[str, str, str, str]] = None, + buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, + gen_sign=False, + key=None, dry_run=False, manifest_bucket_name=None ) -> Tuple[str, bool]: @@ -79,7 +83,7 @@ def handle_npm_uploading( * tarball_path is the location of the tarball in filesystem * product is used to identify which product this repo tar belongs to - * targets contains the target name with its bucket name and prefix + * buckets contains the target name with its bucket name and prefix for the bucket, which will be used to store artifacts with the prefix. 
      See target definition in Charon configuration for details
     * dir_ is base dir for extracting the tarball, will use system
@@ -88,22 +92,23 @@
     Returns the directory used for archive processing and if uploading is successful
     """
     client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
-    for target in targets:
-        bucket_ = target[1]
-        prefix__ = remove_prefix(target[2], "/")
-        registry__ = target[3]
+    generated_signs = []
+    for bucket in buckets:
+        bucket_name = bucket[1]
+        prefix = remove_prefix(bucket[2], "/")
+        registry = bucket[3]
         target_dir, valid_paths, package_metadata = _scan_metadata_paths_from_archive(
-            tarball_path, registry__, prod=product, dir__=dir_
+            tarball_path, registry, prod=product, dir__=dir_
         )
         if not os.path.isdir(target_dir):
             logger.error("Error: the extracted target_dir path %s does not exist.", target_dir)
             sys.exit(1)
         valid_dirs = __get_path_tree(valid_paths, target_dir)

-        logger.info("Start uploading files to s3 buckets: %s", bucket_)
+        logger.info("Start uploading files to s3 bucket: %s", bucket_name)
         failed_files = client.upload_files(
             file_paths=[valid_paths[0]],
-            targets=[(bucket_, prefix__)],
+            targets=[(bucket_name, prefix)],
             product=product,
             root=target_dir
         )
@@ -117,7 +122,7 @@
                 'uploading\n')
         else:
             logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
-            manifest_folder = bucket_
+            manifest_folder = bucket_name
             manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)
             client.upload_manifest(
@@ -128,13 +133,13 @@
         logger.info(
             "Start generating version-level package.json for package: %s in s3 bucket %s",
-            package_metadata.name, bucket_
+            package_metadata.name, bucket_name
         )
         failed_metas = []
         _version_metadata_path = valid_paths[1]
         _failed_metas = client.upload_metadatas(
             meta_file_paths=[_version_metadata_path],
-            target=(bucket_, prefix__),
+            target=(bucket_name, prefix),
             product=product,
             root=target_dir
         )
@@ -143,36 +148,66 @@
         logger.info(
             "Start generating package.json for package: %s in s3 bucket %s",
-            package_metadata.name, bucket_
+            package_metadata.name, bucket_name
         )
         meta_files = _gen_npm_package_metadata_for_upload(
-            client, bucket_, target_dir, package_metadata, prefix__
+            client, bucket_name, target_dir, package_metadata, prefix
         )
         logger.info("package.json generation done\n")
         if META_FILE_GEN_KEY in meta_files:
             _failed_metas = client.upload_metadatas(
                 meta_file_paths=[meta_files[META_FILE_GEN_KEY]],
-                target=(bucket_, prefix__),
+                target=(bucket_name, prefix),
                 product=None,
                 root=target_dir
             )
             failed_metas.extend(_failed_metas)
             logger.info("package.json uploading done")

+        if gen_sign:
+            conf = get_config()
+            if not conf:
+                sys.exit(1)
+            suffix_list = __get_suffix(PACKAGE_TYPE_NPM, conf)
+            command = conf.get_detach_signature_command()
+            artifacts = [s for s in valid_paths if not s.endswith(tuple(suffix_list))]
+            if META_FILE_GEN_KEY in meta_files:
+                artifacts.append(meta_files[META_FILE_GEN_KEY])
+            logger.info("Start generating signature for s3 bucket %s\n", bucket_name)
+            (_failed_metas, _generated_signs) = signature.generate_sign(
+                PACKAGE_TYPE_NPM, artifacts,
+                target_dir, prefix,
+                client, bucket_name,
+                key, command
+            )
+            failed_metas.extend(_failed_metas)
+            generated_signs.extend(_generated_signs)
+            logger.info("Signature generation done.\n")
+
+            logger.info("Start uploading signature files to s3 bucket %s\n", bucket_name)
+            _failed_metas = client.upload_signatures(
meta_file_paths=generated_signs, + target=(bucket_name, prefix), + product=None, + root=target_dir + ) + failed_metas.extend(_failed_metas) + logger.info("Signature uploading done.\n") + # this step generates index.html for each dir and add them to file list # index is similar to metadata, it will be overwritten everytime if do_index: - logger.info("Start generating index files to s3 bucket %s", bucket_) + logger.info("Start generating index files to s3 bucket %s", bucket_name) created_indexes = indexing.generate_indexes( - PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket_, prefix__ + PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index files to s3 bucket %s", bucket_) + logger.info("Start updating index files to s3 bucket %s", bucket_name) _failed_metas = client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=target_dir ) @@ -181,7 +216,7 @@ def handle_npm_uploading( else: logger.info("Bypass indexing\n") - upload_post_process(failed_files, failed_metas, product, bucket_) + upload_post_process(failed_files, failed_metas, product, bucket_name) succeeded = succeeded and len(failed_files) == 0 and len(failed_metas) == 0 return (target_dir, succeeded) @@ -190,7 +225,7 @@ def handle_npm_uploading( def handle_npm_del( tarball_path: str, product: str, - targets: List[Tuple[str, str, str, str]] = None, + buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, @@ -201,7 +236,7 @@ def handle_npm_del( * tarball_path is the location of the tarball in filesystem * product is used to identify which product this repo tar belongs to - * targets contains the target name with its bucket name and prefix + * buckets contains the target name with its bucket name and prefix for the bucket, which will be used to store artifacts with the prefix. 
See target definition in Charon configuration for details * dir is base dir for extracting the tarball, will use system @@ -217,19 +252,19 @@ def handle_npm_del( client = S3Client(aws_profile=aws_profile, dry_run=dry_run) succeeded = True - for target in targets: - bucket = target[1] - prefix_ = remove_prefix(target[2], "/") - logger.info("Start deleting files from s3 bucket %s", bucket) + for bucket in buckets: + bucket_name = bucket[1] + prefix = remove_prefix(bucket[2], "/") + logger.info("Start deleting files from s3 bucket %s", bucket_name) failed_files = client.delete_files( file_paths=valid_paths, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=product, root=target_dir ) logger.info("Files deletion done\n") if manifest_bucket_name: - manifest_folder = target[1] + manifest_folder = bucket[1] logger.info( "Start deleting manifest from s3 bucket %s in folder %s", manifest_bucket_name, manifest_folder @@ -243,27 +278,27 @@ def handle_npm_del( logger.info( "Start generating package.json for package: %s in bucket %s", - package_name_path, bucket + package_name_path, bucket_name ) meta_files = _gen_npm_package_metadata_for_del( - client, bucket, target_dir, package_name_path, prefix_ + client, bucket_name, target_dir, package_name_path, prefix ) logger.info("package.json generation done\n") - logger.info("Start uploading package.json to s3 bucket %s", bucket) + logger.info("Start uploading package.json to s3 bucket %s", bucket_name) all_meta_files = [] for _, file in meta_files.items(): all_meta_files.append(file) client.delete_files( file_paths=all_meta_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=target_dir ) failed_metas = [] if META_FILE_GEN_KEY in meta_files: _failed_metas = client.upload_metadatas( meta_file_paths=[meta_files[META_FILE_GEN_KEY]], - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=target_dir ) @@ -273,17 +308,17 @@ def handle_npm_del( if do_index: logger.info( "Start generating index files for all changed entries for bucket %s", - bucket + bucket_name ) created_indexes = indexing.generate_indexes( - PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket, prefix_ + PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index to s3 bucket %s", bucket) + logger.info("Start updating index to s3 bucket %s", bucket_name) _failed_index_files = client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=target_dir ) @@ -292,7 +327,7 @@ def handle_npm_del( else: logger.info("Bypassing indexing\n") - rollback_post_process(failed_files, failed_metas, product, bucket) + rollback_post_process(failed_files, failed_metas, product, bucket_name) succeeded = succeeded and len(failed_files) <= 0 and len(failed_metas) <= 0 return (target_dir, succeeded) @@ -533,3 +568,9 @@ def __get_path_tree(paths: str, prefix: str) -> Set[str]: if dir_.startswith("@"): valid_dirs.add(dir_.split("/")[0]) return valid_dirs + + +def __get_suffix(package_type: str, conf: CharonConfig) -> List[str]: + if package_type: + return conf.get_ignore_signature_suffix(package_type) + return [] diff --git a/charon/pkgs/signature.py b/charon/pkgs/signature.py new file mode 100644 index 00000000..412aeba8 --- /dev/null +++ b/charon/pkgs/signature.py @@ -0,0 +1,131 @@ +""" +Copyright (C) 2022 Red Hat, Inc. 
(https://github.com/Commonjava/charon)
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+import subprocess
+import asyncio
+import logging
+import shlex
+from jinja2 import Template
+from typing import Awaitable, Callable, List, Tuple
+from charon.storage import S3Client
+
+logger = logging.getLogger(__name__)
+
+
+def generate_sign(
+    package_type: str,
+    artifact_path: List[str],
+    top_level: str,
+    prefix: str,
+    s3_client: S3Client,
+    bucket: str,
+    key: str = None,
+    command: str = None
+) -> Tuple[List[str], List[str]]:
+    """ Generate detached signature (.asc) files for a list of artifacts by
+    running the configured detach-sign command (e.g. rpm-sign or gpg), for
+    uploading to an Amazon S3 bucket.
+
+    * Does not regenerate signature files that already exist locally or in the bucket
+    * key: name of the sign key, rendered inside the command template to
+      replace the {{ key }} field in the command string.
+    * command: A string representing the subprocess command to run.
+
+    It returns a tuple containing two lists: one with the paths that failed to
+    be signed due to errors and another with the generated signature files.
+    """
+
+    async def sign_file(
+        filename: str, failed_paths: List[str], generated_signs: List[str],
+        sem: asyncio.BoundedSemaphore
+    ):
+        async with sem:
+            signature_file = filename + ".asc"
+            if prefix:
+                remote = os.path.join(prefix, signature_file)
+            else:
+                remote = signature_file
+            local = os.path.join(top_level, signature_file)
+            artifact = os.path.join(top_level, filename)
+
+            if not os.path.isfile(artifact):
+                logger.warning("Artifact that needs signature is missing, please check again")
+                return
+
+            # skip sign if file already exists locally
+            if os.path.isfile(local):
+                logger.debug(".asc file %s existed, skipping", local)
+                return
+            # skip sign if file already exists in bucket
+            try:
+                existed = s3_client.file_exists_in_bucket(bucket, remote)
+            except ValueError as e:
+                logger.error(
+                    "Error: Can not check signature file status due to: %s", e
+                )
+                return
+            if existed:
+                logger.debug(".asc file %s existed, skipping", remote)
+                return
+
+            run_command = Template(command).render(key=key, file=artifact)
+            result = await __run_cmd_async(shlex.split(run_command))
+
+            if result.returncode == 0:
+                generated_signs.append(local)
+                logger.debug("Generated signature file: %s", local)
+            else:
+                failed_paths.append(local)
+
+    return __do_path_cut_and(
+        file_paths=artifact_path,
+        path_handler=sign_file,
+        root=top_level
+    )
+
+
+def __do_path_cut_and(
+    file_paths: List[str],
+    path_handler: Callable[[str, List[str], List[str], asyncio.Semaphore], Awaitable[None]],
+    root="/"
+) -> Tuple[List[str], List[str]]:
+    slash_root = root
+    if not root.endswith("/"):
+        slash_root = slash_root + "/"
+    failed_paths = []
+    generated_signs = []
+    tasks = []
+    sem = asyncio.BoundedSemaphore(10)
+    for full_path in file_paths:
+        path = full_path
+        if path.startswith(slash_root):
+            path = path[len(slash_root):]
+        tasks.append(
+            asyncio.ensure_future(
+                path_handler(path, failed_paths, generated_signs, sem)
+            )
+        )
+
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(asyncio.gather(*tasks))
+    return (failed_paths, generated_signs)
+
+
+async def __run_cmd_async(cmd):
+    loop = asyncio.get_event_loop()
+    result = await loop.run_in_executor(None, subprocess.run, cmd)
+    return result
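For reference, a minimal sketch of how the package handlers drive `generate_sign` (the bucket, prefix, profile, and artifact paths below are hypothetical examples; a working AWS configuration is assumed):

```python
# Hypothetical driver for generate_sign, mirroring the calls added to
# charon/pkgs/maven.py and charon/pkgs/npm.py. All concrete values are examples.
from charon.constants import PACKAGE_TYPE_MAVEN
from charon.pkgs import signature
from charon.storage import S3Client

s3_client = S3Client(aws_profile="example-profile", dry_run=True)
failed, generated = signature.generate_sign(
    PACKAGE_TYPE_MAVEN,
    ["/tmp/repo/org/foo/foo-1.0.pom"],   # artifacts to sign
    "/tmp/repo",                         # top_level of the extracted tarball
    "ga",                                # bucket prefix
    s3_client,
    "stage-maven-ga",                    # bucket name
    key="redhatdevel",
    command="rpm-sign --detach-sign --key {{ key }} {{ file }}",
)
# "failed" holds paths whose sign command returned non-zero; "generated" holds
# the local .asc files to pass on to s3_client.upload_signatures(...).
```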
diff --git a/charon/schemas/charon.json b/charon/schemas/charon.json
new file mode 100644
index 00000000..bf745f9a
--- /dev/null
+++ b/charon/schemas/charon.json
@@ -0,0 +1,79 @@
+{
+    "$schema": "https://json-schema.org/draft-07/schema#",
+    "title": "charon configuration",
+    "type": "object",
+    "properties": {
+        "ignore_patterns": {
+            "type": "array",
+            "description": "Filename patterns to ignore",
+            "minItems": 1,
+            "items": {
+                "type": "string",
+                "description": "pattern to ignore"
+            }
+        },
+        "ignore_signature_suffix": {
+            "type": "object",
+            "patternProperties": {
+                "^[a-z].*$": {
+                    "type": "array",
+                    "description": "artifact type",
+                    "minItems": 1,
+                    "items": {
+                        "type": "string",
+                        "description": "signed artifact suffix to exclude"
+                    }
+                }
+            }
+        },
+        "detach_signature_command": {
+            "type": "string",
+            "description": "command used to generate detached signatures"
+        },
+        "targets": {
+            "type": "object",
+            "patternProperties": {
+                "^[a-z].*$": {
+                    "type": "array",
+                    "description": "charon targets",
+                    "minItems": 1,
+                    "items": {
+                        "type": "object",
+                        "description": "target bucket",
+                        "properties": {
+                            "bucket": {
+                                "description": "bucket name",
+                                "type": "string"
+                            },
+                            "prefix": {
+                                "description": "prefix for destination path inside the bucket",
+                                "type": "string"
+                            },
+                            "registry": {
+                                "description": "npm registry",
+                                "type": "string"
+                            }
+                        },
+                        "required": [
+                            "bucket"
+                        ],
+                        "additionalProperties": false
+                    }
+                }
+            }
+        },
+        "aws_profile": {
+            "type": "string",
+            "description": "aws profile to use with S3"
+        },
+        "manifest_bucket": {
+            "type": "string",
+            "description": "which bucket to use for storing manifests"
+        }
+    },
+    "additionalProperties": false,
+    "required": [
+        "targets"
+    ]
+}
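To illustrate the new schema, a small sketch validating a minimal config with the helpers added in `charon/utils/yaml.py` further below (the target and bucket names are made up; assumes charon is installed so the schema ships as package data):

```python
# Validate a minimal hypothetical config against charon/schemas/charon.json.
from charon.utils.yaml import read_yaml

minimal_config = """
targets:
  example:
    - bucket: "example-bucket"
      prefix: ga
"""
data = read_yaml(minimal_config, 'schemas/charon.json', package='charon')
print(data["targets"]["example"][0]["bucket"])  # -> example-bucket
```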
diff --git a/charon/storage.py b/charon/storage.py
index d6177d03..e040b5a6 100644
--- a/charon/storage.py
+++ b/charon/storage.py
@@ -15,7 +15,6 @@
 """
 import asyncio
 import threading
-from boto3_type_annotations.s3.service_resource import Object

 from charon.utils.files import read_sha1
 from charon.constants import PROD_INFO_SUFFIX, MANIFEST_SUFFIX
@@ -24,7 +23,6 @@
 from botocore.exceptions import HTTPClientError
 from boto3.exceptions import S3UploadFailedError
 from botocore.config import Config
-from boto3_type_annotations import s3
 from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple
 import os
 import logging
@@ -57,15 +55,15 @@ def __init__(
         aws_profile=None, extra_conf=None,
         con_limit=25, dry_run=False
     ) -> None:
-        self.__client: s3.ServiceResource = self.__init_aws_client(aws_profile, extra_conf)
-        self.__buckets: Dict[str, s3.Bucket] = {}
+        self.__client = self.__init_aws_client(aws_profile, extra_conf)
+        self.__buckets: Dict[str, Any] = {}
         self.__dry_run = dry_run
         self.__con_sem = asyncio.BoundedSemaphore(con_limit)
         self.__lock = threading.Lock()

     def __init_aws_client(
         self, aws_profile=None, extra_conf=None
-    ) -> s3.ServiceResource:
+    ):
         if aws_profile:
             logger.debug("Using aws profile: %s", aws_profile)
             s3_session = session.Session(profile_name=aws_profile)
@@ -130,7 +128,7 @@ def upload_files(
         main_bucket = self.__get_bucket(main_bucket_name)
         key_prefix = main_target[1]
         extra_targets = targets[1:] if len(targets) > 1 else []
-        extra_prefixed_buckets: List[Tuple[s3.Bucket, str]] = []
+        extra_prefixed_buckets: List[Tuple[Any, str]] = []
         if len(extra_targets) > 0:
             for target in extra_targets:
                 extra_prefixed_buckets.append((self.__get_bucket(target[0]), target[1]))
@@ -153,7 +151,7 @@ async def path_upload_handler(
                     index, total, full_file_path, main_bucket_name
                 )
                 main_path_key = os.path.join(key_prefix, path) if key_prefix else path
-                main_file_object: s3.Object = main_bucket.Object(main_path_key)
+                main_file_object = main_bucket.Object(main_path_key)
                 existed = False
                 try:
                     existed = await self.__run_async(self.__file_exists, main_file_object)
@@ -218,7 +216,7 @@ async def path_upload_handler(
                             'Copyinging %s from bucket %s to bucket %s',
                             full_file_path, main_bucket_name, extra_bucket
                         )
-                        file_object: s3.Object = extra_bucket.Object(extra_path_key)
+                        file_object = extra_bucket.Object(extra_path_key)
                         existed = await self.__run_async(self.__file_exists, file_object)
                         if not existed:
                             if not self.__dry_run:
@@ -285,7 +283,7 @@ async def handle_existed(

     async def __copy_between_bucket(
         self, source: str, source_key: str,
-        target: s3.Bucket, target_key: str
+        target, target_key: str
     ) -> bool:
         logger.debug(
             "Copying file %s from bucket %s to target %s as %s",
@@ -345,7 +343,7 @@ async def path_upload_handler(

                 key_prefix = target[1]
                 path_key = os.path.join(key_prefix, path) if key_prefix else path
-                file_object: s3.Object = bucket.Object(path_key)
+                file_object = bucket.Object(path_key)
                 existed = False
                 try:
                     existed = await self.__run_async(self.__file_exists, file_object)
@@ -416,6 +414,100 @@ async def path_upload_handler(
             root=root
         )

+    def upload_signatures(
+        self, meta_file_paths: List[str],
+        target: Tuple[str, str],
+        product: Optional[str] = None, root="/"
+    ) -> List[str]:
+        """ Upload a list of signature files to s3 bucket. This function is very
+        similar to upload_metadatas, except:
+        * The signature files will not be overwritten if they already exist
+        """
+        bucket_name = target[0]
+        bucket = self.__get_bucket(bucket_name)
+
+        async def path_upload_handler(
+            full_file_path: str, path: str, index: int,
+            total: int, failed: List[str]
+        ):
+            async with self.__con_sem:
+                if not os.path.isfile(full_file_path):
+                    logger.warning(
+                        'Warning: file %s does not exist during uploading. Product: %s',
+                        full_file_path, product
+                    )
+                    failed.append(full_file_path)
+                    return
+
+                logger.debug(
+                    '(%d/%d) Updating signature %s to bucket %s',
+                    index, total, path, bucket_name
+                )
+
+                key_prefix = target[1]
+                path_key = os.path.join(key_prefix, path) if key_prefix else path
+                file_object = bucket.Object(path_key)
+                existed = False
+                try:
+                    existed = await self.__run_async(self.__file_exists, file_object)
+                except (ClientError, HTTPClientError) as e:
+                    logger.error(
+                        "Error: file existence check failed due to error: %s", e
+                    )
+                    failed.append(full_file_path)
+                    return
+                (content_type, _) = mimetypes.guess_type(full_file_path)
+                if not content_type:
+                    content_type = DEFAULT_MIME_TYPE
+
+                try:
+                    if not self.__dry_run:
+                        if not existed:
+                            await self.__run_async(
+                                functools.partial(
+                                    file_object.put,
+                                    Body=open(full_file_path, "rb"),
+                                    Metadata={},
+                                    ContentType=content_type
+                                )
+                            )
+                        elif product:
+                            # NOTE: This should not happen for most cases, as most
+                            # of the metadata file does not have product info.
Just + # leave for requirement change in future + # This is now used for npm version-level package.json + prods = [product] + if existed: + (prods, no_error) = await self.__run_async( + self.__get_prod_info, + path_key, bucket_name + ) + if not no_error: + failed.append(full_file_path) + return + if no_error and product not in prods: + prods.append(product) + updated = await self.__update_prod_info( + path_key, bucket_name, prods + ) + if not updated: + failed.append(full_file_path) + return + logger.debug('Updated signature %s to bucket %s', path, bucket_name) + except (ClientError, HTTPClientError) as e: + logger.error( + "ERROR: file %s not uploaded to bucket" + " %s due to error: %s ", + full_file_path, bucket_name, e + ) + failed.append(full_file_path) + + return self.__do_path_cut_and( + file_paths=meta_file_paths, + path_handler=self.__path_handler_count_wrapper(path_upload_handler), + root=root + ) + def upload_manifest( self, manifest_name: str, manifest_full_path: str, target: str, manifest_bucket_name: str @@ -424,7 +516,7 @@ def upload_manifest( path_key = os.path.join(target, manifest_name) manifest_bucket = self.__get_bucket(manifest_bucket_name) try: - file_object: s3.Object = manifest_bucket.Object(path_key) + file_object = manifest_bucket.Object(path_key) file_object.upload_file( Filename=manifest_full_path, ExtraArgs={'ContentType': DEFAULT_MIME_TYPE} @@ -556,7 +648,7 @@ def delete_manifest(self, product_key: str, target: str, manifest_bucket_name: s path_key = os.path.join(target, manifest_name) manifest_bucket = self.__get_bucket(manifest_bucket_name) - file_object: s3.Object = manifest_bucket.Object(path_key) + file_object = manifest_bucket.Object(path_key) existed = False try: existed = self.__file_exists(file_object) @@ -641,7 +733,7 @@ def file_exists_in_bucket( file_object = bucket.Object(path) return self.__file_exists(file_object) - def __get_bucket(self, bucket_name: str) -> s3.Bucket: + def __get_bucket(self, bucket_name: str): self.__lock.acquire() try: bucket = self.__buckets.get(bucket_name) @@ -654,7 +746,7 @@ def __get_bucket(self, bucket_name: str) -> s3.Bucket: finally: self.__lock.release() - def __file_exists(self, file_object: Object) -> bool: + def __file_exists(self, file_object) -> bool: try: file_object.load() return True diff --git a/charon/utils/yaml.py b/charon/utils/yaml.py new file mode 100644 index 00000000..ee9b4a98 --- /dev/null +++ b/charon/utils/yaml.py @@ -0,0 +1,90 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+"""
+import codecs
+import json
+import logging
+
+import jsonschema
+import yaml
+from pkg_resources import resource_stream
+
+logger = logging.getLogger(__name__)
+
+
+def read_yaml_from_file_path(file_path, schema, package='charon'):
+    """
+    :param file_path: string, yaml file to read
+    :param schema: string, file path to the JSON schema
+    :param package: string, package name containing the schema
+    """
+    with open(file_path) as f:
+        yaml_data = f.read()
+    return read_yaml(yaml_data, schema, package)
+
+
+def read_yaml(yaml_data, schema, package=None):
+    """
+    :param yaml_data: string, yaml content
+    :param schema: string, file path to the JSON schema
+    :param package: string, package name containing the schema
+    """
+    package = package or 'charon'
+    data = yaml.safe_load(yaml_data)
+    schema = load_schema(package, schema)
+    validate_with_schema(data, schema)
+    return data
+
+
+def load_schema(package, schema):
+    """
+    :param package: string, package name containing the schema
+    :param schema: string, file path to the JSON schema
+    """
+    # Read schema from file
+    try:
+        resource = resource_stream(package, schema)
+        schema = codecs.getreader('utf-8')(resource)
+    except ImportError:
+        logger.error('Unable to find package %s', package)
+        raise
+    except (IOError, TypeError):
+        logger.error('unable to extract JSON schema, cannot validate')
+        raise
+
+    # Load schema into Dict
+    try:
+        schema = json.load(schema)
+    except ValueError:
+        logger.error('unable to decode JSON schema, cannot validate')
+        raise
+    return schema
+
+
+def validate_with_schema(data, schema):
+    """
+    :param data: dict, data to be validated
+    :param schema: dict, schema to validate with
+    """
+    validator = jsonschema.Draft7Validator(schema=schema)
+    try:
+        jsonschema.Draft7Validator.check_schema(schema)
+        validator.validate(data)
+    except jsonschema.SchemaError:
+        logger.error('invalid schema, cannot validate')
+        raise
+    except jsonschema.ValidationError as exc:
+        logger.error("schema validation error: %s", exc)
+        raise
diff --git a/config/charon.yaml b/config/charon.yaml
new file mode 100644
index 00000000..97c37d4a
--- /dev/null
+++ b/config/charon.yaml
@@ -0,0 +1,32 @@
+ignore_patterns:
+  - ".*^(redhat).*"
+  - ".*snapshot.*"
+
+ignore_signature_suffix:
+  maven:
+    - ".sha1"
+    - ".sha256"
+    - ".md5"
+    - "maven-metadata.xml"
+    - "archetype-catalog.xml"
+  npm:
+    - "package.json"
+
+detach_signature_command: "rpm-sign --detach-sign --key {{ key }} {{ file }}"
+
+targets:
+  stage-ga:
+    - bucket: "stage-maven-ga"
+      prefix: ga
+  stage-ea:
+    - bucket: "stage-maven-ea"
+      prefix: earlyaccess/all
+  stage-maven: # collection of stage-ea and stage-ga
+    - bucket: "stage-maven-ga"
+      prefix: ga
+    - bucket: "stage-maven-ea"
+      prefix: earlyaccess/all
+  stage-npm:
+    - bucket: "stage-npm-npmjs"
+      prefix: /
+      registry: "npm.stage.registry.redhat.com"
diff --git a/config/charon.yml b/config/charon.yml
deleted file mode 100644
index ab3bb6e4..00000000
--- a/config/charon.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-ignore_patterns:
-  - ".*^(redhat).*"
-  - ".*snapshot.*"
-
-targets:
-  ga:
-    bucket: "maven-prod-ga"
-    prefix: ga
-  ea:
-    bucket: "maven-prod-ea"
-    prefix: earlyaccess/all
-  npm:
-    bucket: "npm-prod"
-    prefix: npmjs
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index fe70ffdf..00000000
--- a/pytest.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[pytest]
-addopts = -ra --color=auto --html=__pytest_reports/atomic-reactor-unit-tests.html --self-contained-html
-render_collapsed = True
diff --git a/requirements.txt b/requirements.txt
index
e63a211a..043083ce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,9 +2,9 @@ setuptools-rust==1.1.2 jinja2==3.0.3 boto3==1.20.45 botocore==1.23.45 -boto3_type_annotations==0.3.1 click==8.0.3 requests==2.27.1 -ruamel.yaml==0.17.20 +PyYAML==6.0 defusedxml==0.7.1 subresource-integrity==0.2 +jsonschema==3.2.0 diff --git a/setup.py b/setup.py index f26e196e..c9301386 100755 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ from setuptools import setup, find_packages -version = "1.1.1" +version = "1.1.2" # f = open('README.md') # long_description = f.read().strip() @@ -63,6 +63,7 @@ def _get_requirements(path): license="APLv2", packages=find_packages(exclude=["ez_setup", "examples", "tests"]), install_requires=_get_requirements('requirements.txt'), + package_data={'charon': ['schemas/*.json']}, test_suite="tests", entry_points={ "console_scripts": ["charon = charon:cli"], diff --git a/test-coverage.sh b/test-coverage.sh deleted file mode 100755 index 050feacd..00000000 --- a/test-coverage.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -pytest --cov charon -v --cov-report html diff --git a/test.sh b/test.sh deleted file mode 100755 index a359cfb7..00000000 --- a/test.sh +++ /dev/null @@ -1,109 +0,0 @@ -#!/bin/bash -set -eux - -# Prepare env vars -ENGINE=${ENGINE:="podman"} -OS=${OS:="centos"} -OS_VERSION=${OS_VERSION:="7"} -PYTHON_VERSION=${PYTHON_VERSION:="3"} -ACTION=${ACTION:="test"} -IMAGE="$OS:$OS_VERSION" -CONTAINER_NAME="charon-$OS-$OS_VERSION-py$PYTHON_VERSION" - -# Use arrays to prevent globbing and word splitting -engine_mounts=(-v "$PWD":"$PWD":z) -for dir in ${EXTRA_MOUNT:-}; do - engine_mounts=("${engine_mounts[@]}" -v "$dir":"$dir":z) -done - -# Create or resurrect container if needed -if [[ $($ENGINE ps -qa -f name="$CONTAINER_NAME" | wc -l) -eq 0 ]]; then - $ENGINE run --name "$CONTAINER_NAME" -d "${engine_mounts[@]}" -w "$PWD" -ti "$IMAGE" sleep infinity -elif [[ $($ENGINE ps -q -f name="$CONTAINER_NAME" | wc -l) -eq 0 ]]; then - echo found stopped existing container, restarting. volume mounts cannot be updated. - $ENGINE container start "$CONTAINER_NAME" -fi - -function setup_charon() { - RUN="$ENGINE exec -i $CONTAINER_NAME" - PYTHON="python$PYTHON_VERSION" - PIP_PKG="$PYTHON-pip" - PIP="pip$PYTHON_VERSION" - PKG="dnf" - PKG_EXTRA=(dnf-plugins-core git "$PYTHON"-pylint) - BUILDDEP=(dnf builddep) - if [[ $OS == "centos" ]]; then - PKG="yum" - PKG_EXTRA=(yum-utils git "$PYTHON"-pylint) - BUILDDEP=(yum-builddep) - ENABLE_REPO= - else - ENABLE_REPO="--enablerepo=updates-testing" - fi - - - PIP_INST=("$PIP" install --index-url "${PYPI_INDEX:-https://pypi.org/simple}") - - if [[ $OS == "centos" ]]; then - # Don't let builddep enable *-source repos since they give 404 errors - $RUN rm -f /etc/yum.repos.d/CentOS-Sources.repo - # $RUN rm -f /etc/yum.repos.d/CentOS-Linux-AppStream.repo - # $RUN rm -f /etc/yum.repos.d/CentOS-Linux-BaseOS.repo - # This has to run *before* we try installing anything from EPEL - $RUN $PKG $ENABLE_REPO install -y epel-release - fi - - # RPM install basic dependencies - $RUN $PKG $ENABLE_REPO install -y "${PKG_EXTRA[@]}" - # RPM install build dependencies for charon - $RUN "${BUILDDEP[@]}" -y charon.spec - - # Install package - $RUN $PKG install -y $PIP_PKG - - # Upgrade pip to provide latest features for successful installation - $RUN "${PIP_INST[@]}" --upgrade pip - - if [[ $OS == centos ]]; then - # Pip install/upgrade setuptools. 
Older versions of setuptools don't understand the
-        # environment markers used by docker-squash's requirements, also
-        # CentOS needs to have setuptools updates to make pytest-cov work
-        $RUN "${PIP_INST[@]}" --upgrade setuptools
-    fi
-
-    # install with RPM_PY_SYS=true to avoid error caused by installing on system python
-    #$RUN sh -c "RPM_PY_SYS=true ${PIP_INST[*]} rpm-py-installer"
-    # Setuptools install charon from source
-    $RUN $PYTHON setup.py install
-
-    # Pip install packages for unit tests
-    $RUN "${PIP_INST[@]}" -r tests/requirements.txt
-}
-
-case ${ACTION} in
-"test")
-    setup_charon
-    TEST_CMD="coverage run --source=charon -m pytest tests"
-    ;;
-"pylint")
-    setup_charon
-    PACKAGES='charon tests'
-    TEST_CMD="${PYTHON} -m pylint ${PACKAGES}"
-    ;;
-"bandit")
-    setup_charon
-    $RUN "${PIP_INST[@]}" bandit
-    TEST_CMD="bandit-baseline -r charon -ll -ii"
-    ;;
-*)
-    echo "Unknown action: ${ACTION}"
-    exit 2
-    ;;
-esac
-
-# Run tests
-# shellcheck disable=SC2086
-$RUN ${TEST_CMD} "$@"
-
-echo "To run tests again:"
-echo "$RUN ${TEST_CMD}"
diff --git a/tests/base.py b/tests/base.py
index 4e89c9ac..49cd2f1e 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -25,9 +25,9 @@
 from charon.pkgs.pkg_utils import is_metadata
 from charon.storage import PRODUCT_META_KEY, CHECKSUM_META_KEY
 from tests.commons import TEST_BUCKET, TEST_MANIFEST_BUCKET
-from boto3_type_annotations import s3
 from moto import mock_s3

+from tests.constants import HERE

 SHORT_TEST_PREFIX = "ga"
 LONG_TEST_PREFIX = "earlyaccess/all"
@@ -43,18 +43,32 @@ def setUp(self):
         - ".*^(redhat).*"
         - ".*snapshot.*"

+ignore_signature_suffix:
+  maven:
+    - ".sha1"
+    - ".sha256"
+    - ".md5"
+    - "maven-metadata.xml"
+    - "archetype-catalog.xml"
+  npm:
+    - "package.json"
+
+detach_signature_command: "touch {{ file }}.asc"
+
 targets:
     ga:
-        bucket: "charon-test"
-        prefix: ga
+        - bucket: "charon-test"
+          prefix: ga
     ea:
-        bucket: "charon-test-ea"
-        prefix: earlyaccess/all
+        - bucket: "charon-test-ea"
+          prefix: earlyaccess/all
     npm:
-        bucket: "charon-test-npm"
-        registry: "npm1.registry.redhat.com"
-    """
+        - bucket: "charon-test-npm"
+          registry: "npm1.registry.redhat.com"
+aws_profile: "test"
+manifest_bucket: "manifest"
+    """
         self.prepare_config(config_base, default_config_content)

     def tearDown(self):
@@ -70,7 +84,7 @@ def change_home(self):
     def __prepare_template(self, config_base):
         template_path = os.path.join(config_base, 'template')
         os.mkdir(config_base)
-        shutil.copytree(os.path.join(os.getcwd(), "template"), template_path)
+        shutil.copytree(os.path.join(HERE, "../template"), template_path)
         if not os.path.isdir(template_path):
             self.fail("Template initialization failed!")

@@ -115,7 +129,7 @@ def cleanBuckets(self, buckets: List[str]):
     def __prepare_s3(self):
         return boto3.resource('s3')

-    def check_product(self, file: str, prods: List[str], bucket: s3.Bucket = None, msg=None):
+    def check_product(self, file: str, prods: List[str], bucket=None, msg=None):
         prod_file = file + PROD_INFO_SUFFIX
         test_bucket = bucket
         if not test_bucket:
@@ -128,7 +142,7 @@ def check_product(self, file: str, prods: List[str], bucket: s3.Bucket = None, m
             msg=msg
         )

-    def check_content(self, objs: List[s3.ObjectSummary], products: List[str], msg=None):
+    def check_content(self, objs: List, products: List[str], msg=None):
         for obj in objs:
             file_obj = obj.Object()
             test_bucket = self.mock_s3.Bucket(file_obj.bucket_name)
diff --git a/tests/commons.py b/tests/commons.py
index 2e5f670b..fdb3ae8b 100644
--- a/tests/commons.py
+++ b/tests/commons.py
@@ -89,6 +89,19 @@
COMMONS_CLIENT_456_INDEX = "org/apache/httpcomponents/httpclient/4.5.6/index.html" COMMONS_LOGGING_INDEX = "commons-logging/commons-logging/index.html" COMMONS_ROOT_INDEX = "index.html" +COMMONS_LOGGING_SIGNS = [ + "commons-logging/commons-logging/1.2/commons-logging-1.2.jar.asc", + "commons-logging/commons-logging/1.2/commons-logging-1.2-sources.jar.asc", + "commons-logging/commons-logging/1.2/commons-logging-1.2.pom.asc", +] +COMMONS_CLIENT_456_SIGNS = [ + "org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar.asc", + "org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.pom.asc", +] +COMMONS_CLIENT_459_SIGNS = [ + "org/apache/httpcomponents/httpclient/4.5.9/httpclient-4.5.9.jar.asc", + "org/apache/httpcomponents/httpclient/4.5.9/httpclient-4.5.9.pom.asc", +] # For npm diff --git a/tests/constants.py b/tests/constants.py new file mode 100644 index 00000000..2e6d111f --- /dev/null +++ b/tests/constants.py @@ -0,0 +1,19 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import os + +HERE = os.path.dirname(__file__) +INPUTS = os.path.join(HERE, 'input') diff --git a/tests/requirements.txt b/tests/requirements.txt index de68f7b1..af22ba64 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,9 +1,9 @@ flexmock>=0.10.6 responses>=0.9.0,<0.10.8 -pytest>=5.0 +pytest<=7.1.3 pytest-cov pytest-html flake8 requests-mock moto==3.0.2.dev12 - +python-gnupg==0.5.0 diff --git a/tests/test_archive.py b/tests/test_archive.py index 9e303028..0e2ac09a 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -2,12 +2,14 @@ from charon.utils.archive import NpmArchiveType, detect_npm_archive import os +from tests.constants import INPUTS + class ArchiveTest(BaseTest): def test_detect_package(self): - mvn_tarball = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + mvn_tarball = os.path.join(INPUTS, "commons-client-4.5.6.zip") self.assertEqual(NpmArchiveType.NOT_NPM, detect_npm_archive(mvn_tarball)) - npm_tarball = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + npm_tarball = os.path.join(INPUTS, "code-frame-7.14.5.tgz") self.assertEqual(NpmArchiveType.TAR_FILE, detect_npm_archive(npm_tarball)) def test_download_archive(self): diff --git a/tests/test_config.py b/tests/test_config.py index 41ce25ad..25bd4649 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -16,8 +16,14 @@ from typing import List import unittest import os + +import pytest +from jsonschema.exceptions import ValidationError + import charon.config as config import re + +from charon.constants import DEFAULT_REGISTRY from tests.base import BaseTest @@ -32,16 +38,16 @@ def test_config(self): self.__base.setUp() conf = config.get_config() self.assertEqual([".*^(redhat).*", ".*snapshot.*"], conf.get_ignore_patterns()) - self.assertEqual('charon-test', conf.get_aws_bucket("ga")) - self.assertEqual('ga', conf.get_bucket_prefix("ga")) - self.assertEqual('charon-test-ea', conf.get_aws_bucket("ea")) - 
self.assertEqual('earlyaccess/all', conf.get_bucket_prefix("ea")) - self.assertEqual('npm1.registry.redhat.com', conf.get_bucket_registry("npm")) + self.assertEqual([{'bucket': 'charon-test', 'prefix': 'ga'}], conf.get_target('ga')) + self.assertEqual([{'bucket': 'charon-test-ea', 'prefix': 'earlyaccess/all'}], + conf.get_target('ea')) + self.assertEqual([{'bucket': 'charon-test-npm', 'registry': 'npm1.registry.redhat.com'}], + conf.get_target('npm')) def test_no_config(self): self.__base.change_home() - conf = config.get_config() - self.assertIsNone(conf) + with pytest.raises(FileNotFoundError): + config.get_config() def test_config_missing_targets(self): content_missing_targets = """ @@ -50,8 +56,9 @@ def test_config_missing_targets(self): - ".*snapshot.*" """ self.__change_config_content(content_missing_targets) - conf = config.get_config() - self.assertIsNone(conf) + msg = "'targets' is a required property" + with pytest.raises(ValidationError, match=msg): + config.get_config() def test_config_missing_bucket(self): content_missing_targets = """ @@ -61,13 +68,12 @@ def test_config_missing_bucket(self): targets: ga: - prefix: ga + - prefix: ga """ self.__change_config_content(content_missing_targets) - conf = config.get_config() - self.assertIsNotNone(conf) - self.assertEqual("ga", conf.get_bucket_prefix("ga")) - self.assertIsNone(conf.get_aws_bucket("ga")) + msg = "'bucket' is a required property" + with pytest.raises(ValidationError, match=msg): + config.get_config() def test_config_missing_prefix(self): content_missing_targets = """ @@ -77,13 +83,13 @@ def test_config_missing_prefix(self): targets: ga: - bucket: charon-test + - bucket: charon-test """ self.__change_config_content(content_missing_targets) conf = config.get_config() self.assertIsNotNone(conf) - self.assertEqual("charon-test", conf.get_aws_bucket("ga")) - self.assertEqual("", conf.get_bucket_prefix("ga")) + self.assertEqual("charon-test", conf.get_target("ga")[0].get('bucket', '')) + self.assertEqual("", conf.get_target("ga")[0].get('prefix', '')) def test_config_missing_registry(self): content_missing_registry = """ @@ -93,13 +99,13 @@ def test_config_missing_registry(self): targets: npm: - bucket: charon-npm-test + - bucket: charon-npm-test """ self.__change_config_content(content_missing_registry) conf = config.get_config() self.assertIsNotNone(conf) - self.assertEqual("charon-npm-test", conf.get_aws_bucket("npm")) - self.assertEqual("localhost", conf.get_bucket_registry("npm")) + self.assertEqual("charon-npm-test", conf.get_target("npm")[0].get('bucket', '')) + self.assertEqual("localhost", conf.get_target("npm")[0].get('registry', DEFAULT_REGISTRY)) def test_ignore_patterns(self): # pylint: disable=anomalous-backslash-in-string @@ -113,7 +119,7 @@ def test_ignore_patterns(self): targets: ga: - bucket: charon-test + - bucket: charon-test """ self.__change_config_content(content_missing_targets) conf = config.get_config() diff --git a/tests/test_manifest_del.py b/tests/test_manifest_del.py index bea22072..fc5ff35c 100644 --- a/tests/test_manifest_del.py +++ b/tests/test_manifest_del.py @@ -25,6 +25,7 @@ TEST_BUCKET, TEST_MANIFEST_BUCKET, TEST_TARGET, COMMONS_CLIENT_456_MANIFEST, CODE_FRAME_7_14_5_MANIFEST ) +from tests.constants import INPUTS @mock_s3 @@ -38,11 +39,11 @@ def test_maven_manifest_delete(self): self.assertEqual(1, len(manifests)) self.assertIn(COMMONS_CLIENT_456_MANIFEST, manifests) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, 
"commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_del( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET @@ -59,11 +60,11 @@ def test_npm_manifest_delete(self): self.assertEqual(1, len(manifests)) self.assertIn(CODE_FRAME_7_14_5_MANIFEST, manifests) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product = "code-frame-7.14.5" handle_npm_del( test_tgz, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET @@ -73,22 +74,22 @@ def test_npm_manifest_delete(self): self.assertEqual(0, len(manifests)) def __prepare_maven_content(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET ) def __prepare_npm_content(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET diff --git a/tests/test_manifest_upload.py b/tests/test_manifest_upload.py index 0f7251dd..e6aa43e9 100644 --- a/tests/test_manifest_upload.py +++ b/tests/test_manifest_upload.py @@ -26,17 +26,18 @@ COMMONS_CLIENT_META_NUM, COMMONS_CLIENT_456_MANIFEST, COMMONS_CLIENT_456_FILES, COMMONS_LOGGING_FILES, CODE_FRAME_7_14_5_MANIFEST, CODE_FRAME_7_14_5_FILES ) +from tests.constants import INPUTS @mock_s3 class ManifestUploadTest(PackageBaseTest): def test_maven_manifest_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET @@ -62,11 +63,11 @@ def test_maven_manifest_upload(self): self.assertIn(f, manifest_content) def test_npm_manifest_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_zip = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product = "code-frame-7.14.5" handle_npm_uploading( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET diff --git a/tests/test_maven_del.py b/tests/test_maven_del.py index 712132ff..c26e6d4a 100644 --- a/tests/test_maven_del.py +++ b/tests/test_maven_del.py @@ -27,6 +27,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenDeleteTest(PackageBaseTest): @@ -48,12 +50,12 @@ def test_ignore_del(self): product_459 = "commons-client-4.5.9" 
product_mix = [product_456, product_459] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") handle_maven_del( test_zip, product_456, ignore_patterns=[".*.sha1"], - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -98,11 +100,11 @@ def test_ignore_del(self): def __test_prefix_deletion(self, prefix: str): self.__prepare_content(prefix) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) @@ -173,10 +175,10 @@ def __test_prefix_deletion(self, prefix: str): self.assertIn("1.2", meta_content_logging) self.assertIn("1.2", meta_content_logging) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) @@ -185,20 +187,20 @@ def __test_prefix_deletion(self, prefix: str): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix=None): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_maven_del_multi_tgts.py b/tests/test_maven_del_multi_tgts.py index abf324bb..ffc60954 100644 --- a/tests/test_maven_del_multi_tgts.py +++ b/tests/test_maven_del_multi_tgts.py @@ -27,6 +27,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenDeleteMultiTgtsTest(PackageBaseTest): @@ -58,12 +60,12 @@ def test_ignore_del(self): product_459 = "commons-client-4.5.9" product_mix = [product_456, product_459] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") handle_maven_del( test_zip, product_456, ignore_patterns=[".*.sha1"], - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -108,12 +110,12 @@ def test_ignore_del(self): def __test_prefix_deletion(self, prefix: str): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -238,10 
+240,10 @@ def __test_prefix_deletion(self, prefix: str): msg=f'{bucket_name}' ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -253,21 +255,21 @@ def __test_prefix_deletion(self, prefix: str): self.assertEqual(0, len(objs), msg=f'{bucket_name}') def __prepare_content(self, prefix=None): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_maven_index.py b/tests/test_maven_index.py index bc8ccb9d..d5647ecd 100644 --- a/tests/test_maven_index.py +++ b/tests/test_maven_index.py @@ -26,16 +26,18 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenFileIndexTest(PackageBaseTest): def test_uploading_index(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -75,19 +77,19 @@ def test_uploading_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content) def test_overlap_upload_index(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -135,11 +137,11 @@ def test_upload_index_with_root_prefix(self): self.__test_upload_index_with_prefix("/") def __test_upload_index_with_prefix(self, prefix: str): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -187,11 +189,11 @@ def __test_upload_index_with_prefix(self, prefix: str): def test_deletion_index(self): self.__prepare_content() - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( 
test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -236,10 +238,10 @@ def test_deletion_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content) product_459 = "commons-client-4.5.9" - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -258,11 +260,11 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_index_with_prefix(self, prefix: str): self.__prepare_content(prefix) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -306,10 +308,10 @@ def __test_deletion_index_with_prefix(self, prefix: str): self.assertNotIn("../", index_content) self.assertNotIn(PROD_INFO_SUFFIX, index_content) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -317,18 +319,18 @@ def __test_deletion_index_with_prefix(self, prefix: str): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix=None): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) diff --git a/tests/test_maven_index_multi_tgts.py b/tests/test_maven_index_multi_tgts.py index 47eb079d..a02707f2 100644 --- a/tests/test_maven_index_multi_tgts.py +++ b/tests/test_maven_index_multi_tgts.py @@ -26,6 +26,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenFileIndexMultiTgtsTest(PackageBaseTest): @@ -40,12 +42,12 @@ def tearDown(self): super().tearDown() def test_uploading_index(self): - targets_ = [(None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -100,20 +102,20 @@ def test_uploading_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content, msg=f'{bucket_name}') def test_overlap_upload_index(self): - targets_ = [(None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', 
TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -188,12 +190,12 @@ def test_upload_index_with_root_prefix(self): self.__test_upload_index_with_prefix("/") def __test_upload_index_with_prefix(self, prefix: str): - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -256,11 +258,11 @@ def __test_upload_index_with_prefix(self, prefix: str): def test_deletion_index(self): self.__prepare_content() - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -305,10 +307,10 @@ def test_deletion_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content) product_459 = "commons-client-4.5.9" - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -326,12 +328,12 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_index_with_prefix(self, prefix: str): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -397,10 +399,10 @@ def __test_deletion_index_with_prefix(self, prefix: str): ) self.assertNotIn(PROD_INFO_SUFFIX, index_content, msg=f'{bucket_name}') - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -411,19 +413,19 @@ def __test_deletion_index_with_prefix(self, prefix: str): self.assertEqual(0, len(objs), msg=f'{bucket_name}') def __prepare_content(self, prefix=None): - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" 
handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) diff --git a/tests/test_maven_meta.py b/tests/test_maven_meta.py index 7c86c6c4..a905c17d 100644 --- a/tests/test_maven_meta.py +++ b/tests/test_maven_meta.py @@ -21,6 +21,7 @@ import charon.pkgs.maven as mvn import charon.utils.archive as archive from tests.base import BaseTest +from tests.constants import INPUTS class MavenMetadataTest(BaseTest): @@ -58,7 +59,7 @@ def test_parse_gavs(self): def test_gen_meta_file(self): test_zip = zipfile.ZipFile( - os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + os.path.join(INPUTS, "commons-lang3.zip") ) temp_root = os.path.join(self.tempdir, "tmp_zip") os.mkdir(temp_root) diff --git a/tests/test_maven_sign.py b/tests/test_maven_sign.py new file mode 100644 index 00000000..41cab15e --- /dev/null +++ b/tests/test_maven_sign.py @@ -0,0 +1,96 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
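+
+These tests drive handle_maven_uploading with gen_sign=True; the expected
+.asc objects are produced by the fake detach_signature_command
+("touch {{ file }}.asc") configured in tests.base, so no real rpm-sign or
+GnuPG tooling is required.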
+"""
+from charon.pkgs.maven import handle_maven_uploading
+from tests.base import PackageBaseTest
+from tests.commons import (
+    TEST_BUCKET, COMMONS_CLIENT_456_SIGNS, COMMONS_LOGGING_SIGNS, COMMONS_CLIENT_456_INDEX,
+    COMMONS_CLIENT_459_SIGNS
+)
+from moto import mock_s3
+import os
+
+from tests.constants import INPUTS
+
+
+@mock_s3
+class MavenFileSignTest(PackageBaseTest):
+
+    def test_uploading_sign(self):
+        test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip")
+        product = "commons-client-4.5.6"
+        handle_maven_uploading(
+            test_zip, product,
+            buckets=[('', TEST_BUCKET, '', '')],
+            dir_=self.tempdir,
+            gen_sign=True,
+            key="random"
+        )
+
+        test_bucket = self.mock_s3.Bucket(TEST_BUCKET)
+        objs = list(test_bucket.objects.all())
+        actual_files = [obj.key for obj in objs]
+
+        self.assertEqual(46, len(actual_files))
+
+        for f in COMMONS_LOGGING_SIGNS:
+            self.assertIn(f, actual_files)
+
+        for f in COMMONS_CLIENT_456_SIGNS:
+            self.assertIn(f, actual_files)
+
+        index_obj = test_bucket.Object(COMMONS_CLIENT_456_INDEX)
+        index_content = str(index_obj.get()["Body"].read(), "utf-8")
+        self.assertIn(
+            "httpclient-4.5.6.jar.asc",
+            index_content
+        )
+
+    def test_overlap_upload_sign(self):
+        test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip")
+        product_456 = "commons-client-4.5.6"
+        handle_maven_uploading(
+            test_zip, product_456,
+            buckets=[('', TEST_BUCKET, '', '')],
+            dir_=self.tempdir,
+            gen_sign=True,
+            key="random"
+        )
+
+        test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip")
+        product_459 = "commons-client-4.5.9"
+        handle_maven_uploading(
+            test_zip, product_459,
+            buckets=[('', TEST_BUCKET, '', '')],
+            dir_=self.tempdir,
+            gen_sign=True,
+            key="random"
+        )
+
+        test_bucket = self.mock_s3.Bucket(TEST_BUCKET)
+        objs = list(test_bucket.objects.all())
+        actual_files = [obj.key for obj in objs]
+
+        self.assertEqual(57, len(objs))
+
+        for f in COMMONS_LOGGING_SIGNS:
+            self.assertIn(f, actual_files)
+
+        for f in COMMONS_CLIENT_456_SIGNS:
+            self.assertIn(f, actual_files)
+
+        for f in COMMONS_CLIENT_459_SIGNS:
+            self.assertIn(f, actual_files)
diff --git a/tests/test_maven_upload.py b/tests/test_maven_upload.py
index b6165850..431475a8 100644
--- a/tests/test_maven_upload.py
+++ b/tests/test_maven_upload.py
@@ -26,6 +26,8 @@
 from moto import mock_s3
 import os

+from tests.constants import INPUTS
+

 @mock_s3
 class MavenUploadTest(PackageBaseTest):
@@ -42,19 +44,19 @@ def test_root_prefix_upload(self):
         self.__test_prefix_upload("/")

     def test_overlap_upload(self):
-        test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip")
+        test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip")
         product_456 = "commons-client-4.5.6"
         handle_maven_uploading(
             test_zip, product_456,
-            targets=[(None, TEST_BUCKET, None, None)],
+            buckets=[('', TEST_BUCKET, '', '')],
             dir_=self.tempdir, do_index=False
         )
-        test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip")
+        test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip")
         product_459 = "commons-client-4.5.9"
         handle_maven_uploading(
             test_zip, product_459,
-            targets=[(None, TEST_BUCKET, None, None)],
+            buckets=[('', TEST_BUCKET, '', '')],
             dir_=self.tempdir, do_index=False
         )
@@ -109,11 +111,11 @@ def test_overlap_upload(self):
         self.assertIn("org.apache.httpcomponents", cat_content)

     def test_ignore_upload(self):
-        test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip")
+        test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip")
         product_456 = "commons-client-4.5.6"
         handle_maven_uploading(
test_zip, product_456, [".*.sha1"], - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -138,11 +140,11 @@ def test_ignore_upload(self): self.assertNotIn(f, actual_files) def __test_prefix_upload(self, prefix: str): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_maven_upload_multi_tgts.py b/tests/test_maven_upload_multi_tgts.py index 85aea608..ffb41d20 100644 --- a/tests/test_maven_upload_multi_tgts.py +++ b/tests/test_maven_upload_multi_tgts.py @@ -27,6 +27,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenUploadMultiTgtsTest(PackageBaseTest): @@ -42,40 +44,40 @@ def tearDown(self): def test_fresh_upload(self): self.__test_prefix_upload( - [(None, TEST_BUCKET, ""), (None, TEST_BUCKET_2, "", None)] + [('', TEST_BUCKET, ""), ('', TEST_BUCKET_2, "", '')] ) def test_short_prefix_upload(self): self.__test_prefix_upload( - [(None, TEST_BUCKET, SHORT_TEST_PREFIX), (None, TEST_BUCKET_2, SHORT_TEST_PREFIX, None)] + [('', TEST_BUCKET, SHORT_TEST_PREFIX), ('', TEST_BUCKET_2, SHORT_TEST_PREFIX, '')] ) def test_long_prefix_upload(self): self.__test_prefix_upload( - [(None, TEST_BUCKET, LONG_TEST_PREFIX), (None, TEST_BUCKET_2, LONG_TEST_PREFIX, None)] + [('', TEST_BUCKET, LONG_TEST_PREFIX), ('', TEST_BUCKET_2, LONG_TEST_PREFIX, '')] ) def test_root_prefix_upload(self): - self.__test_prefix_upload([(None, TEST_BUCKET, "/", None), - (None, TEST_BUCKET_2, "/", None)]) + self.__test_prefix_upload([('', TEST_BUCKET, "/", ''), + ('', TEST_BUCKET_2, "/", '')]) def test_overlap_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" targets_ = [ - (None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None) + ('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '') ] handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -178,14 +180,14 @@ def test_overlap_upload(self): ) def test_ignore_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" targets_ = [ - (None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None) + ('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '') ] handle_maven_uploading( test_zip, product_456, [".*.sha1"], - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -216,11 +218,11 @@ def test_ignore_upload(self): self.assertNotIn(f, actual_files, msg=f'{bucket_name}') def __test_prefix_upload(self, targets: List[Tuple[str, str, str, str]]): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = 
"commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=targets, + buckets=targets, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_del.py b/tests/test_npm_del.py index b2b1e332..ad2b1f8e 100644 --- a/tests/test_npm_del.py +++ b/tests/test_npm_del.py @@ -20,6 +20,7 @@ from charon.storage import CHECKSUM_META_KEY from tests.base import LONG_TEST_PREFIX, SHORT_TEST_PREFIX, PackageBaseTest from tests.commons import TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_META +from tests.constants import INPUTS @mock_s3 @@ -39,11 +40,11 @@ def test_npm_deletion_with_root_prefix(self): def __test_prefix(self, prefix: str = None): self.__prepare_content(prefix) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) @@ -84,28 +85,28 @@ def __test_prefix(self, prefix: str = None): self.assertIn("\"license\": \"MIT\"", meta_content_client) self.assertIn("\"dist_tags\": {\"latest\": \"7.15.8\"}", meta_content_client) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) objs = list(test_bucket.objects.all()) self.assertEqual(0, len(objs)) def __prepare_content(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_del_multi_tgts.py b/tests/test_npm_del_multi_tgts.py index 918d168a..1746fba5 100644 --- a/tests/test_npm_del_multi_tgts.py +++ b/tests/test_npm_del_multi_tgts.py @@ -20,6 +20,7 @@ from charon.storage import CHECKSUM_META_KEY from tests.base import LONG_TEST_PREFIX, SHORT_TEST_PREFIX, PackageBaseTest from tests.commons import TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_META, TEST_BUCKET_2 +from tests.constants import INPUTS @mock_s3 @@ -48,12 +49,12 @@ def test_npm_deletion_with_root_prefix(self): def __test_prefix(self, prefix: str = None): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -120,10 +121,10 @@ def __test_prefix(self, prefix: str = None): meta_content_client, 
msg=f'{bucket_name}' ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) for target in targets_: @@ -133,20 +134,20 @@ def __test_prefix(self, prefix: str = None): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_dist_gen.py b/tests/test_npm_dist_gen.py index e7721e41..438cc094 100644 --- a/tests/test_npm_dist_gen.py +++ b/tests/test_npm_dist_gen.py @@ -23,6 +23,7 @@ TEST_BUCKET, TEST_BUCKET_2, CODE_FRAME_META, CODE_FRAME_7_14_5_META ) +from tests.constants import INPUTS @mock_s3 @@ -33,12 +34,12 @@ def setUp(self): self.test_bucket_2 = self.mock_s3.Bucket(TEST_BUCKET_2) def test_dist_gen_in_single_target(self): - targets_ = [(None, TEST_BUCKET, None, "npm1.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', "npm1.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) test_bucket = self.mock_s3.Bucket(TEST_BUCKET) @@ -75,13 +76,13 @@ def test_dist_gen_in_single_target(self): "+vOtCS5ndmJicPJhKAwYRI6UfFw==\"", merged_meta_content_client) def test_dist_gen_in_multi_targets(self): - targets_ = [(None, TEST_BUCKET, None, "npm1.registry.redhat.com"), - (None, TEST_BUCKET_2, None, "npm2.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', "npm1.registry.redhat.com"), + ('', TEST_BUCKET_2, '', "npm2.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) test_bucket_1 = self.mock_s3.Bucket(TEST_BUCKET) @@ -111,12 +112,12 @@ def test_dist_gen_in_multi_targets(self): "-frame-7.14.5.tgz\"", merged_meta_content_client) def test_overlapping_registry_dist_gen(self): - targets_ = [(None, TEST_BUCKET, None, "npm1.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', "npm1.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) test_bucket = 
self.mock_s3.Bucket(TEST_BUCKET) @@ -132,12 +133,12 @@ def test_overlapping_registry_dist_gen(self): self.assertIn("\"tarball\": \"https://npm1.registry.redhat.com/@babel/code-frame/-/code" "-frame-7.14.5.tgz\"", merged_meta_content_client) - targets_overlapping_ = [(None, TEST_BUCKET, None, "npm1.overlapping.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_overlapping_ = [('', TEST_BUCKET, '', "npm1.overlapping.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_overlapping_, + buckets=targets_overlapping_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_index.py b/tests/test_npm_index.py index 31a0e71c..fa0ebc3a 100644 --- a/tests/test_npm_index.py +++ b/tests/test_npm_index.py @@ -24,6 +24,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + NAMESPACE_BABEL_INDEX = "@babel/index.html" @@ -42,11 +44,11 @@ def test_uploding_index_with_root_prefix(self): self.__test_upload_prefix("/") def __test_upload_prefix(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, ) @@ -122,11 +124,11 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_prefix(self, prefix: str = None): self.__prepare_content(prefix) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -154,10 +156,10 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertNotIn(PROD_INFO_SUFFIX, index_content) product_7_15_8 = "code-frame-7.15.8" - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -165,18 +167,18 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir ) diff --git a/tests/test_npm_index_multi_tgts.py b/tests/test_npm_index_multi_tgts.py index 65f3e206..ef653303 100644 --- a/tests/test_npm_index_multi_tgts.py +++ b/tests/test_npm_index_multi_tgts.py @@ -25,6 +25,8 @@ 
from moto import mock_s3 import os +from tests.constants import INPUTS + NAMESPACE_BABEL_INDEX = "@babel/index.html" @@ -53,13 +55,13 @@ def test_uploding_index_with_root_prefix(self): self.__test_upload_prefix("/") def __test_upload_prefix(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, ) @@ -118,7 +120,7 @@ def __test_upload_prefix(self, prefix: str = None): def test_overlap_upload_index(self): self.__prepare_content() - targets_ = [(None, TEST_BUCKET, None), (None, TEST_BUCKET_2, None)] + targets_ = [('', TEST_BUCKET, ''), ('', TEST_BUCKET_2, '')] for target in targets_: bucket_name = target[1] bucket = self.mock_s3.Bucket(bucket_name) @@ -162,12 +164,12 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_prefix(self, prefix: str = None): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -206,10 +208,10 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertNotIn(PROD_INFO_SUFFIX, index_content, msg=f'{bucket_name}') product_7_15_8 = "code-frame-7.15.8" - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -220,20 +222,20 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertEqual(0, len(objs), msg=f'{bucket_name}') def __prepare_content(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) diff --git a/tests/test_npm_meta.py b/tests/test_npm_meta.py index 6737a9e4..df660492 100644 --- a/tests/test_npm_meta.py +++ b/tests/test_npm_meta.py @@ -22,6 +22,7 @@ from charon.storage import S3Client from charon.constants import DEFAULT_REGISTRY from tests.base import BaseTest +from tests.constants import INPUTS MY_BUCKET = "npm_bucket" @@ -63,13 +64,10 @@ def test_handle_npm_uploading_for_old_version(self): 
Key='@redhat/kogito-tooling-workspace/package.json', Body=str(original_version_0_5_8_package_json) ) - tarball_test_path = os.path.join( - os.getcwd(), - 'tests/input/kogito-tooling-workspace-0.9.0-3.tgz' - ) + tarball_test_path = os.path.join(INPUTS, 'kogito-tooling-workspace-0.9.0-3.tgz') handle_npm_uploading( tarball_test_path, "kogito-tooling-workspace-0.9.0-3", - targets=[(None, MY_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', MY_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) (files, _) = self.s3_client.get_files( @@ -116,13 +114,10 @@ def test_handle_npm_uploading_for_new_version(self): Key='@redhat/kogito-tooling-workspace/package.json', Body=str(original_version_1_0_1_package_json) ) - tarball_test_path = os.path.join( - os.getcwd(), - 'tests/input/kogito-tooling-workspace-0.9.0-3.tgz' - ) + tarball_test_path = os.path.join(INPUTS, 'kogito-tooling-workspace-0.9.0-3.tgz') handle_npm_uploading( tarball_test_path, "kogito-tooling-workspace-0.9.0-3", - targets=[(None, MY_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', MY_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) (files, _) = self.s3_client.get_files( diff --git a/tests/test_npm_upload.py b/tests/test_npm_upload.py index 9bbbb861..1130b4d0 100644 --- a/tests/test_npm_upload.py +++ b/tests/test_npm_upload.py @@ -26,6 +26,7 @@ TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_7_15_8_FILES, CODE_FRAME_META ) +from tests.constants import INPUTS @mock_s3 @@ -44,18 +45,18 @@ def test_upload_with_root_prefix(self): self.__test_prefix("/") def test_double_uploads(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) test_bucket = self.mock_s3.Bucket(TEST_BUCKET) @@ -88,11 +89,11 @@ def test_double_uploads(self): self.assertIn("\"dist_tags\": {\"latest\": \"7.15.8\"}", meta_content_client) def __test_prefix(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_upload_multi_tgts.py b/tests/test_npm_upload_multi_tgts.py index 3a3b7aa5..d95868bd 100644 --- a/tests/test_npm_upload_multi_tgts.py +++ b/tests/test_npm_upload_multi_tgts.py @@ -26,6 +26,7 @@ TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_7_15_8_FILES, CODE_FRAME_META, TEST_BUCKET_2 ) +from tests.constants import INPUTS @mock_s3 @@ -53,20 +54,20 @@ def test_upload_with_root_prefix(self): self.__test_prefix("/") def test_double_uploads(self): - targets_ = [(None, TEST_BUCKET, None, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, None, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + 
targets_ = [('', TEST_BUCKET, '', DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, '', DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -124,13 +125,13 @@ def test_double_uploads(self): ) def __test_prefix(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_pkgs_dryrun.py b/tests/test_pkgs_dryrun.py index 8eff2b36..7f2b004e 100644 --- a/tests/test_pkgs_dryrun.py +++ b/tests/test_pkgs_dryrun.py @@ -21,15 +21,17 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class PkgsDryRunTest(PackageBaseTest): def test_maven_upload_dry_run(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True ) @@ -41,11 +43,11 @@ def test_maven_upload_dry_run(self): def test_maven_delete_dry_run(self): self.__prepare_maven_content() - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True ) @@ -55,11 +57,11 @@ def test_maven_delete_dry_run(self): self.assertEqual(50, len(objs)) def test_npm_upload_dry_run(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, dry_run=True ) @@ -71,11 +73,11 @@ def test_npm_upload_dry_run(self): def test_npm_deletion_dry_run(self): self.__prepare_npm_content() - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True ) @@ -85,35 +87,35 @@ def test_npm_deletion_dry_run(self): self.assertEqual(11, len(objs)) def __prepare_maven_content(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, 
"commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) def __prepare_npm_content(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) diff --git a/tests/test_s3client.py b/tests/test_s3client.py index dde8ab49..1c78db2b 100644 --- a/tests/test_s3client.py +++ b/tests/test_s3client.py @@ -14,7 +14,6 @@ limitations under the License. """ from typing import List -from boto3_type_annotations import s3 from charon.storage import S3Client, CHECKSUM_META_KEY from charon.utils.archive import extract_zip_all from charon.utils.files import overwrite_file, read_sha1 @@ -27,6 +26,7 @@ import zipfile import shutil +from tests.constants import INPUTS MY_BUCKET = "my_bucket" MY_PREFIX = "mock_folder" @@ -149,11 +149,11 @@ def test_upload_and_delete_files(self): bucket = self.mock_s3.Bucket(MY_BUCKET) # test upload existed files with the product. The product will be added to metadata self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons", root=root ) - def content_check(products: List[str], objs: List[s3.ObjectSummary]): + def content_check(products: List[str], objs: List): self.assertEqual(COMMONS_LANG3_ZIP_ENTRY, len(objs)) for o in objs: obj = o.Object() @@ -171,7 +171,7 @@ def content_check(products: List[str], objs: List[s3.ObjectSummary]): # test upload existed files with extra product. The extra product will be added to metadata self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="commons-lang3", root=root ) objects = list(bucket.objects.all()) @@ -179,14 +179,14 @@ def content_check(products: List[str], objs: List[s3.ObjectSummary]): # test delete files with one product. The file will not be deleted, but the product will # be removed from metadata. - self.s3_client.delete_files(all_files, target=(MY_BUCKET, None), product="apache-commons", + self.s3_client.delete_files(all_files, target=(MY_BUCKET, ''), product="apache-commons", root=root) objects = list(bucket.objects.all()) content_check(["commons-lang3"], objects) # test delete files with left product. The file will be deleted, because all products # have been removed from metadata. 
- self.s3_client.delete_files(all_files, target=(MY_BUCKET, None), product="commons-lang3", + self.s3_client.delete_files(all_files, target=(MY_BUCKET, ''), product="commons-lang3", root=root) self.assertEqual(0, len(list(bucket.objects.all()))) @@ -231,7 +231,7 @@ def test_upload_file_with_checksum(self): overwrite_file(file, content1) sha1_1 = read_sha1(file) self.s3_client.upload_files( - [file], targets=[(MY_BUCKET, None)], + [file], targets=[(MY_BUCKET, '')], product="foo-bar-1.0", root=temp_root ) objects = list(bucket.objects.all()) @@ -252,7 +252,7 @@ def test_upload_file_with_checksum(self): sha1_2 = read_sha1(file) self.assertNotEqual(sha1_1, sha1_2) self.s3_client.upload_files( - [file], targets=[(MY_BUCKET, None)], + [file], targets=[(MY_BUCKET, '')], product="foo-bar-1.0-2", root=temp_root ) objects = list(bucket.objects.all()) @@ -290,7 +290,7 @@ def test_upload_metadata_with_checksum(self): overwrite_file(file, content1) sha1_1 = read_sha1(file) self.s3_client.upload_metadatas( - [file], target=(MY_BUCKET, None), root=temp_root + [file], target=(MY_BUCKET, ''), root=temp_root ) objects = list(bucket.objects.all()) self.assertEqual(1, len(objects)) @@ -305,7 +305,7 @@ def test_upload_metadata_with_checksum(self): self.assertEqual(sha1_1, sha1_1_repeated) self.s3_client.upload_metadatas( [file], - target=(MY_BUCKET, None), + target=(MY_BUCKET, ''), root=temp_root, ) objects = list(bucket.objects.all()) @@ -335,7 +335,7 @@ def test_upload_metadata_with_checksum(self): sha1_2 = read_sha1(file) self.assertNotEqual(sha1_1, sha1_2) self.s3_client.upload_metadatas( - [file], target=(MY_BUCKET, None), root=temp_root + [file], target=(MY_BUCKET, ''), root=temp_root ) objects = list(bucket.objects.all()) self.assertEqual(1, len(objects)) @@ -361,7 +361,7 @@ def test_failed_paths(self): shutil.rmtree(root) failed_paths = self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons", root=temp_root ) @@ -370,7 +370,7 @@ def test_failed_paths(self): def test_exists_override_failing(self): (temp_root, _, all_files) = self.__prepare_files() failed_paths = self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons", root=temp_root ) self.assertEqual(0, len(failed_paths)) @@ -383,7 +383,7 @@ def test_exists_override_failing(self): sha1_changed = read_sha1(all_files[0]) self.assertNotEqual(sha1, sha1_changed) failed_paths = self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons-2", root=temp_root ) bucket = self.mock_s3.Bucket(MY_BUCKET) @@ -392,7 +392,7 @@ def test_exists_override_failing(self): def __prepare_files(self): test_zip = zipfile.ZipFile( - os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + os.path.join(INPUTS, "commons-lang3.zip") ) temp_root = os.path.join(self.tempdir, "tmp_zip") os.mkdir(temp_root) diff --git a/tests/test_util.py b/tests/test_util.py index 584920c2..35c9deff 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -17,10 +17,12 @@ import os import unittest +from tests.constants import INPUTS + class UtilTest(unittest.TestCase): def test_digest(self): - test_file = os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + test_file = os.path.join(INPUTS, "commons-lang3.zip") self.assertEqual("bd4fe0a8111df64430b6b419a91e4218ddf44734", digest(test_file)) self.assertEqual( 
"61ff1d38cfeb281b05fcd6b9a2318ed47cd62c7f99b8a9d3e819591c03fe6804", @@ -28,7 +30,7 @@ def test_digest(self): ) def test_read_sha1(self): - test_file = os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + test_file = os.path.join(INPUTS, "commons-lang3.zip") # read the real sha1 hash self.assertEqual("bd4fe0a8111df64430b6b419a91e4218ddf44734", digest(test_file)) # read hash from .sha1 file @@ -37,5 +39,5 @@ def test_read_sha1(self): ) # For .sha1 file itself, will use digest directly - test_file = os.path.join(os.getcwd(), "tests/input/commons-lang3.zip.sha1") + test_file = os.path.join(INPUTS, "commons-lang3.zip.sha1") self.assertEqual(digest(test_file), read_sha1(test_file)) diff --git a/tests/utils/test_yaml.py b/tests/utils/test_yaml.py new file mode 100644 index 00000000..cb36cd14 --- /dev/null +++ b/tests/utils/test_yaml.py @@ -0,0 +1,202 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from __future__ import absolute_import + +import json +import os + +import jsonschema +import pkg_resources +import pytest +import yaml +from flexmock import flexmock + +from charon.utils.yaml import (read_yaml, + read_yaml_from_file_path, + load_schema, + validate_with_schema) + + +def test_read_yaml_file_ioerrors(tmpdir): + config_path = os.path.join(str(tmpdir), 'nosuchfile.yaml') + with pytest.raises(IOError): + read_yaml_from_file_path(config_path, 'schemas/nosuchfile.json') + + +@pytest.mark.parametrize('from_file', [True, False]) +@pytest.mark.parametrize('config', [ + ("""\ + targets: + ga: + - bucket: test_bucket + """), +]) +def test_read_yaml_file_or_yaml(tmpdir, from_file, config): + expected = yaml.safe_load(config) + + if from_file: + config_path = os.path.join(str(tmpdir), 'config.yaml') + with open(config_path, 'w') as fp: + fp.write(config) + output = read_yaml_from_file_path(config_path, 'schemas/charon.json') + else: + output = read_yaml(config, 'schemas/charon.json') + + assert output == expected + + +def test_read_yaml_bad_package(caplog): + with pytest.raises(ImportError): + read_yaml("", 'schemas/charon.json', package='bad_package') + assert 'Unable to find package bad_package' in caplog.text + + +def test_read_yaml_file_bad_extract(tmpdir, caplog): + class FakeProvider(object): + def get_resource_stream(self, pkg, rsc): + raise IOError + + # pkg_resources.resource_stream() cannot be mocked directly + # Instead mock the module-level function it calls. 
+ (flexmock(pkg_resources) + .should_receive('get_provider') + .and_return(FakeProvider())) + + config_path = os.path.join(str(tmpdir), 'config.yaml') + with open(config_path, 'w'): + pass + + with pytest.raises(IOError): + read_yaml_from_file_path(config_path, 'schemas/charon.json') + assert "unable to extract JSON schema, cannot validate" in caplog.text + + +def test_read_yaml_file_bad_decode(tmpdir, caplog): + (flexmock(json) + .should_receive('load') + .and_raise(ValueError)) + + config_path = os.path.join(str(tmpdir), 'config.yaml') + with open(config_path, 'w'): + pass + + with pytest.raises(ValueError): + read_yaml_from_file_path(config_path, 'schemas/charon.json') + assert "unable to decode JSON schema, cannot validate" in caplog.text + + +@pytest.mark.parametrize(('config', 'expected'), [ + ("""\ + ignore_patterns: + - test """, + "'targets' is a required property"), + ("""\ + tests: ga """, + "Additional properties are not allowed ('tests' was unexpected)"), +]) +def test_read_yaml_validation_error(config, expected, caplog): + with pytest.raises(jsonschema.ValidationError) as exc_info: + read_yaml(config, 'schemas/charon.json') + + assert "schema validation error" in caplog.text + assert expected in str(exc_info.value) + + +@pytest.mark.parametrize(('package', 'package_pass'), [ + ('charon', True), + ('FOO', False) +]) +def test_load_schema_package(package, package_pass, caplog): + schema = 'schemas/charon.json' + if not package_pass: + with pytest.raises(ImportError): + load_schema(package, schema) + assert "Unable to find package FOO" in caplog.text + else: + assert isinstance(load_schema(package, schema), dict) + + +@pytest.mark.parametrize(('schema', 'schema_pass'), [ + ('schemas/charon.json', True), + ('schemas/charon.json', False) +]) +def test_load_schema_schema(schema, schema_pass, caplog): + package = 'charon' + if not schema_pass: + (flexmock(json) + .should_receive('load') + .and_raise(ValueError)) + with pytest.raises(ValueError): + load_schema(package, schema) + assert "unable to decode JSON schema, cannot validate" in caplog.text + else: + assert isinstance(load_schema(package, schema), dict) + + +@pytest.mark.parametrize(('config', 'validation_pass', 'expected'), [ + ({ + 'name': 1 + }, False, + "1 is not of type 'string" + ), + ( + { + 'name': 'foo', + 'module': 'bar' + }, + False, + "'module' was unexpected", + ), ({ + 'name': 'foo' + }, True, '') +]) +def test_validate_with_schema_validation(config, validation_pass, expected, caplog): + schema = { + 'type': 'object', + 'required': ['name'], + 'properties': { + 'name': { + 'type': 'string' + } + }, + 'additionalProperties': False + } + if not validation_pass: + with pytest.raises(jsonschema.ValidationError) as exc_info: + validate_with_schema(config, schema) + assert 'schema validation error' in caplog.text + assert expected in str(exc_info.value) + else: + validate_with_schema(config, schema) + assert expected == '' + + +def test_validate_with_schema_bad_schema(caplog): + config = { + 'name': 'foo' + } + schema = { + 'type': 'bakagaki', # Nonexistent type + 'properties': { + 'name': { + 'type': 'string' + } + } + } + with pytest.raises(jsonschema.SchemaError): + validate_with_schema(config, schema) + assert 'invalid schema, cannot validate' in caplog.text diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..218f0a61 --- /dev/null +++ b/tox.ini @@ -0,0 +1,40 @@ +[tox] +envlist = test,flake8,pylint,bandit + +[testenv] +basepython=python3 +skip_install = true + +[testenv:test] +sitepackages = true 
+deps = -r requirements-dev.txt +commands = python3 -m pytest --cov=charon {posargs:"tests"} + +[testenv:pylint] +deps = pylint==2.9.6 +commands = python3 -m pylint charon tests + +[testenv:flake8] +deps = flake8 +commands = python3 -m flake8 charon tests + +[testenv:bandit] +deps = bandit +commands = bandit-baseline -r charon -ll -ii + +[testenv:mypy] +deps = mypy==0.910 +commands = + mypy \ + --install-types \ + --non-interactive \ + --ignore-missing-imports \ + --package {posargs:"charon"} + +[coverage:report] +skip_covered = true +sort = Cover + +[pytest] +addopts = -ra --color=auto --html=__pytest_reports/charon-unit-tests.html --self-contained-html +render_collapsed = True
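
A note on the pattern running through the test changes above: every call site swaps the old `targets=[(None, BUCKET, None, REGISTRY)]` keyword for `buckets=[('', BUCKET, '', REGISTRY)]` (empty strings replacing the former `None` placeholders in the 4-tuple, whose second slot is the bucket name, third the key prefix, and fourth the npm registry host), and every hard-coded `tests/input/...` path is rebuilt from the shared `INPUTS` constant in `tests/constants.py`. A minimal sketch of the updated call shape, under stated assumptions — the `charon.pkgs.npm` import path, the bucket name, and the work directory are illustrative, since these hunks only show call sites:

```python
import os

from charon.constants import DEFAULT_REGISTRY
from charon.pkgs.npm import handle_npm_uploading  # import path assumed; the hunks above only show call sites
from tests.constants import INPUTS  # shared constant pointing at the test input directory

# One 4-tuple per target bucket: (?, bucket_name, prefix, registry).
# The patch replaces the old None placeholders with empty strings; the
# first slot's meaning is not spelled out in these hunks, so it stays ''.
buckets = [('', 'test_bucket', '', DEFAULT_REGISTRY)]

handle_npm_uploading(
    os.path.join(INPUTS, "code-frame-7.14.5.tgz"),  # tarball to upload
    "code-frame-7.14.5",                            # product identifier
    buckets=buckets,
    dir_="/tmp/charon-work",   # illustrative working directory
    do_index=False,            # skip index regeneration, as the upload tests do
)
```

The new `tox.ini` makes the same checks reproducible locally: `python -m tox -e test` runs pytest with coverage and the self-contained HTML report configured under `[pytest]`, while `-e flake8`, `-e pylint`, `-e bandit`, and `-e mypy` run the corresponding analyzers — the same environments the updated workflows invoke.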