diff --git a/charon/cmd/command.py b/charon/cmd/command.py
index 36ca99ec..357785c2 100644
--- a/charon/cmd/command.py
+++ b/charon/cmd/command.py
@@ -148,6 +148,7 @@ def upload(
     npm_archive_type = detect_npm_archive(archive_path)
     product_key = f"{product}-{version}"
    prefix_ = conf.get_bucket_prefix(target)
+    manifest_bucket_name = conf.get_manifest_bucket()
     if npm_archive_type != NpmArchiveType.NOT_NPM:
         logger.info("This is a npm archive")
         tmp_dir = handle_npm_uploading(
@@ -157,7 +158,9 @@
             prefix=prefix_,
             aws_profile=aws_profile,
             dir_=work_dir,
-            dry_run=dryrun
+            dry_run=dryrun,
+            target=target,
+            manifest_bucket_name=manifest_bucket_name
         )
     else:
         ignore_patterns_list = None
@@ -175,7 +178,9 @@
             aws_profile=aws_profile,
             prefix=prefix_,
             dir_=work_dir,
-            dry_run=dryrun
+            dry_run=dryrun,
+            target=target,
+            manifest_bucket_name=manifest_bucket_name
         )
     except Exception:
         print(traceback.format_exc())
@@ -301,6 +306,7 @@ def delete(
     npm_archive_type = detect_npm_archive(archive_path)
     product_key = f"{product}-{version}"
     prefix_ = conf.get_bucket_prefix(target)
+    manifest_bucket_name = conf.get_manifest_bucket()
     if npm_archive_type != NpmArchiveType.NOT_NPM:
         logger.info("This is a npm archive")
         tmp_dir = handle_npm_del(
@@ -310,7 +316,9 @@
             prefix=prefix_,
             aws_profile=aws_profile,
             dir_=work_dir,
-            dry_run=dryrun
+            dry_run=dryrun,
+            target=target,
+            manifest_bucket_name=manifest_bucket_name
         )
     else:
         ignore_patterns_list = None
@@ -328,7 +336,9 @@
             aws_profile=aws_profile,
             prefix=prefix_,
             dir_=work_dir,
-            dry_run=dryrun
+            dry_run=dryrun,
+            target=target,
+            manifest_bucket_name=manifest_bucket_name
         )
     except Exception:
         print(traceback.format_exc())
diff --git a/charon/config.py b/charon/config.py
index 0311967d..c4324250 100644
--- a/charon/config.py
+++ b/charon/config.py
@@ -38,6 +38,7 @@ def __init__(self, data: Dict):
         self.__targets: Dict = data.get("targets", None)
         if not self.__targets or not isinstance(self.__targets, Dict):
             raise TypeError("Charon configuration is not correct: targets is invalid.")
+        self.__manifest_bucket: str = data.get("manifest_bucket", None)

     def get_ignore_patterns(self) -> List[str]:
         return self.__ignore_patterns
@@ -72,6 +73,9 @@ def get_bucket_prefix(self, target: str) -> str:
         prefix = remove_prefix(prefix, "/")
         return prefix

+    def get_manifest_bucket(self) -> str:
+        return self.__manifest_bucket
+

 def get_config() -> CharonConfig:
     config_file = os.path.join(os.getenv("HOME"), ".charon", CONFIG_FILE)
diff --git a/charon/constants.py b/charon/constants.py
index 2b560eb7..1a642465 100644
--- a/charon/constants.py
+++ b/charon/constants.py
@@ -175,5 +175,5 @@
 '''

 PROD_INFO_SUFFIX = ".prodinfo"
-
+MANIFEST_SUFFIX = ".txt"
 DEFAULT_ERRORS_LOG = "errors.log"
diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py
index fcadc67c..4a49e8e7 100644
--- a/charon/pkgs/maven.py
+++ b/charon/pkgs/maven.py
@@ -15,7 +15,7 @@
 """
 from charon.utils.files import HashType
 import charon.pkgs.indexing as indexing
-from charon.utils.files import overwrite_file, digest
+from charon.utils.files import overwrite_file, digest, write_manifest
 from charon.utils.archive import extract_zip_all
 from charon.utils.strings import remove_prefix
 from charon.storage import S3Client
@@ -261,7 +261,9 @@ def handle_maven_uploading(
         prefix=None,
         dir_=None,
         do_index=True,
-        dry_run=False
+        dry_run=False,
+        target=None,
+        manifest_bucket_name=None
 ) -> str:
     """ Handle the maven product release tarball uploading process.
     * repo is the location of the tarball in filesystem
@@ -302,6 +304,7 @@ def handle_maven_uploading(
         # Question: should we exit here?

     prefix_ = remove_prefix(prefix, "/")
+
     # 4. Do uploading
     logger.info("Start uploading files to s3")
     s3_client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
@@ -312,7 +315,18 @@
     )
     logger.info("Files uploading done\n")

-    # 5. Use uploaded poms to scan s3 for metadata refreshment
+    # 5. Do manifest uploading
+    logger.info("Start uploading manifest to s3")
+    if not manifest_bucket_name:
+        logger.warning(
+            'Warning: No manifest bucket is provided, will ignore the process of manifest '
+            'uploading')
+    else:
+        manifest_name, manifest_full_path = write_manifest(valid_mvn_paths, top_level, prod_key)
+        s3_client.upload_manifest(manifest_name, manifest_full_path, target, manifest_bucket_name)
+        logger.info("Manifest uploading is done\n")
+
+    # 6. Use uploaded poms to scan s3 for metadata refreshment
     logger.info("Start generating maven-metadata.xml files for all artifacts")
     meta_files = _generate_metadatas(
         s3=s3_client, bucket=bucket,
@@ -322,7 +336,7 @@
     logger.info("maven-metadata.xml files generation done\n")
     failed_metas = meta_files.get(META_FILE_FAILED, [])

-    # 6. Upload all maven-metadata.xml
+    # 7. Upload all maven-metadata.xml
     if META_FILE_GEN_KEY in meta_files:
         logger.info("Start updating maven-metadata.xml to s3")
         (_, _failed_metas) = s3_client.upload_metadatas(
@@ -335,7 +349,7 @@
         failed_metas.extend(_failed_metas)
         logger.info("maven-metadata.xml updating done\n")

-    # 7. Determine refreshment of archetype-catalog.xml
+    # 8. Determine refreshment of archetype-catalog.xml
     if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")):
         logger.info("Start generating archetype-catalog.xml")
         upload_archetype_file = _generate_upload_archetype_catalog(
@@ -345,7 +359,7 @@
         )
         logger.info("archetype-catalog.xml files generation done\n")

-    # 8. Upload archetype-catalog.xml if it has changed
+    # 9. Upload archetype-catalog.xml if it has changed
     if upload_archetype_file:
         archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)]
         archetype_files.extend(__hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME))
@@ -397,7 +411,9 @@ def handle_maven_del(
         prefix=None,
         dir_=None,
         do_index=True,
-        dry_run=False
+        dry_run=False,
+        target=None,
+        manifest_bucket_name=None
 ) -> str:
     """ Handle the maven product release tarball deletion process.
     * repo is the location of the tarball in filesystem
@@ -425,7 +441,6 @@
         valid_dirs) = _scan_paths(tmp_root, ignore_patterns, root)

     # 3. Delete all valid_paths from s3
-    logger.info("Start generating maven-metadata.xml files for all artifacts")
     logger.debug("Valid poms: %s", valid_poms)
     prefix_ = remove_prefix(prefix, "/")
     logger.info("Start deleting files from s3")
@@ -440,7 +455,12 @@
     )
     logger.info("Files deletion done\n")

-    # 4. Use changed GA to scan s3 for metadata refreshment
+    # 4. Delete related manifest from s3
+    logger.info("Start deleting manifest from s3")
+    s3_client.delete_manifest(prod_key, target, manifest_bucket_name)
+    logger.info("Manifest deletion is done\n")
+
+    # 5. Use changed GA to scan s3 for metadata refreshment
     logger.info("Start generating maven-metadata.xml files for all changed GAs")
     meta_files = _generate_metadatas(
         s3=s3_client, bucket=bucket,
@@ -450,7 +470,7 @@

     logger.info("maven-metadata.xml files generation done\n")

-    # 5. Upload all maven-metadata.xml. We need to delete metadata files
+    # 6. Upload all maven-metadata.xml. We need to delete metadata files
     # first for all affected GAs, and then replace their content.
     logger.info("Start updating maven-metadata.xml to s3")
     all_meta_files = []
@@ -475,7 +495,7 @@
         failed_metas.extend(_failed_metas)
         logger.info("maven-metadata.xml updating done\n")

-    # 6. Determine refreshment of archetype-catalog.xml
+    # 7. Determine refreshment of archetype-catalog.xml
     if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")):
         logger.info("Start generating archetype-catalog.xml")
         archetype_action = _generate_rollback_archetype_catalog(
@@ -485,7 +505,7 @@
         )
         logger.info("archetype-catalog.xml files generation done\n")

-    # 7. Upload or delete archetype-catalog.xml if it has changed
+    # 8. Upload or delete archetype-catalog.xml if it has changed
     archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)]
     archetype_files.extend(__hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME))
     if archetype_action < 0:
diff --git a/charon/pkgs/npm.py b/charon/pkgs/npm.py
index 53a226d8..e49c31f9 100644
--- a/charon/pkgs/npm.py
+++ b/charon/pkgs/npm.py
@@ -29,6 +29,7 @@
 from charon.utils.archive import extract_npm_tarball
 from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process
 from charon.utils.strings import remove_prefix
+from charon.utils.files import write_manifest

 logger = logging.getLogger(__name__)

@@ -62,11 +63,16 @@ def __init__(self, metadata, is_version):


 def handle_npm_uploading(
-        tarball_path: str, product: str,
-        bucket_name=None, prefix=None,
+        tarball_path: str,
+        product: str,
+        bucket_name=None,
+        prefix=None,
         aws_profile=None,
-        dir_=None, do_index=True,
-        dry_run=False
+        dir_=None,
+        do_index=True,
+        dry_run=False,
+        target=None,
+        manifest_bucket_name=None
 ) -> str:
     """ Handle the npm product release tarball uploading process.
     For NPM uploading, tgz file and version metadata will be relocated based
@@ -90,6 +96,7 @@
     valid_dirs = __get_path_tree(valid_paths, target_dir)

     prefix_ = remove_prefix(prefix, "/")
+
     logger.info("Start uploading files to s3")
     client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
     bucket = bucket_name
@@ -102,6 +109,16 @@
     )
     logger.info("Files uploading done\n")

+    logger.info("Start uploading manifest to s3")
+    if not manifest_bucket_name:
+        logger.warning(
+            'Warning: No manifest bucket is provided, will ignore the process of manifest '
+            'uploading')
+    else:
+        manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)
+        client.upload_manifest(manifest_name, manifest_full_path, target, manifest_bucket_name)
+        logger.info("Manifest uploading is done\n")
+
     logger.info("Start generating package.json for package: %s", package_metadata.name)
     meta_files = _gen_npm_package_metadata_for_upload(
         client, bucket, target_dir, package_metadata, prefix_
@@ -145,10 +162,16 @@


 def handle_npm_del(
-        tarball_path: str, product: str,
-        bucket_name=None, prefix=None,
-        aws_profile=None, dir_=None,
-        do_index=True, dry_run=False
+        tarball_path: str,
+        product: str,
+        bucket_name=None,
+        prefix=None,
+        aws_profile=None,
+        dir_=None,
+        do_index=True,
+        dry_run=False,
+        target=None,
+        manifest_bucket_name=None
 ) -> str:
     """ Handle the npm product release tarball deletion process.
     * tarball_path is the location of the tarball in filesystem
@@ -177,6 +200,10 @@
     )
     logger.info("Files deletion done\n")

+    logger.info("Start deleting manifest from s3")
+    client.delete_manifest(product, target, manifest_bucket_name)
+    logger.info("Manifest deletion is done\n")
+
     logger.info("Start generating package.json for package: %s", package_name_path)
     meta_files = _gen_npm_package_metadata_for_del(
         client, bucket, target_dir, package_name_path, prefix_
@@ -251,8 +278,10 @@
     package_metadata_key = os.path.join(source_package.name, PACKAGE_JSON)
     if prefix and prefix != "/":
         package_metadata_key = os.path.join(prefix, package_metadata_key)
-    (package_json_files, success) = client.get_files(bucket_name=bucket,
-                                                     prefix=package_metadata_key)
+    (package_json_files, success) = client.get_files(
+        bucket_name=bucket,
+        prefix=package_metadata_key
+    )
     if not success:
         logger.warning("Error to get remote metadata files for %s", package_metadata_key)
         result = source_package
@@ -319,8 +348,8 @@
     return meta_files


-def _scan_metadata_paths_from_archive(path: str, prod="", dir__=None) -> Tuple[
-        str, list, NPMPackageMetadata]:
+def _scan_metadata_paths_from_archive(path: str, prod="", dir__=None) -> Tuple[str, list,
+                                                                               NPMPackageMetadata]:
     tmp_root = mkdtemp(prefix=f"npm-charon-{prod}-", dir=dir__)
     try:
         _, valid_paths = extract_npm_tarball(path, tmp_root, True)
diff --git a/charon/storage.py b/charon/storage.py
index c8cabb72..b387a675 100644
--- a/charon/storage.py
+++ b/charon/storage.py
@@ -14,9 +14,11 @@
 limitations under the License.
 """
 import asyncio
+
+from boto3.exceptions import S3UploadFailedError
 from boto3_type_annotations.s3.service_resource import Object
 from charon.utils.files import read_sha1
-from charon.constants import PROD_INFO_SUFFIX
+from charon.constants import PROD_INFO_SUFFIX, MANIFEST_SUFFIX
 from boto3 import session
 from botocore.errorfactory import ClientError

@@ -301,6 +303,25 @@ async def path_upload_handler(
             file_paths=meta_file_paths, path_handler=path_upload_handler, root=root
         ))

+    def upload_manifest(
+        self, manifest_name: str, manifest_full_path: str, target: str,
+        manifest_bucket_name: str
+    ):
+        target = target if target else "default"
+        env_folder = "-".join([target, "charon-metadata"])
+        path_key = os.path.join(env_folder, manifest_name)
+        manifest_bucket = self.__get_bucket(manifest_bucket_name)
+        try:
+            file_object: s3.Object = manifest_bucket.Object(path_key)
+            file_object.upload_file(
+                Filename=manifest_full_path,
+                ExtraArgs={'ContentType': DEFAULT_MIME_TYPE}
+            )
+        except S3UploadFailedError:
+            logger.warning(
+                'Warning: Manifest bucket %s does not exist in S3, will ignore uploading of '
+                'manifest file %s', manifest_bucket_name, manifest_name)
+
     def delete_files(
         self, file_paths: List[str], bucket_name: str, product: Optional[str],
         root="/", key_prefix: str = None
@@ -398,6 +419,26 @@
         return (deleted_files, failed_files)

+    def delete_manifest(self, product_key: str, target: str, manifest_bucket_name: str):
+        if not manifest_bucket_name:
+            logger.warning(
+                'Warning: No manifest bucket is provided, will ignore the process of manifest '
+                'deleting')
+            return
+        manifest_name = product_key + MANIFEST_SUFFIX
+        target = target if target else "default"
+        env_folder = "-".join([target, "charon-metadata"])
+        path_key = os.path.join(env_folder, manifest_name)
+
+        manifest_bucket = self.__get_bucket(manifest_bucket_name)
+        file_object: s3.Object = manifest_bucket.Object(path_key)
+        if self.__file_exists(file_object):
+            manifest_bucket.delete_objects(Delete={"Objects": [{"Key": path_key}]})
+        else:
+            logger.warning(
+                'Warning: Manifest %s does not exist in S3 bucket %s, will ignore its deleting',
+                manifest_name, manifest_bucket_name)
+
     def get_files(self, bucket_name: str, prefix=None, suffix=None) -> Tuple[List[str], bool]:
         """Get the file names from s3 bucket. Can use prefix and suffix to filter the
            files wanted.
            If some error happened, will return an empty file list and false result
diff --git a/charon/utils/files.py b/charon/utils/files.py
index d2889b79..ffe08bef 100644
--- a/charon/utils/files.py
+++ b/charon/utils/files.py
@@ -17,6 +17,8 @@
 import os
 import hashlib
 import errno
+from typing import List, Tuple
+from charon.constants import MANIFEST_SUFFIX


 class HashType(Enum):
@@ -77,3 +79,20 @@ def digest(file: str, hash_type=HashType.SHA1) -> str:
             hash_obj.update(data)

     return hash_obj.hexdigest()
+
+
+def write_manifest(paths: List[str], root: str, product_key: str) -> Tuple[str, str]:
+    manifest_name = product_key + MANIFEST_SUFFIX
+    manifest_path = os.path.join(root, manifest_name)
+    artifacts = []
+    for path in paths:
+        if path.startswith(root):
+            path = path[len(root):]
+        if path.startswith("/"):
+            path = path[1:]
+        artifacts.append(path)
+
+    # "w" mode creates the manifest file when missing and truncates stale content
+    with open(manifest_path, mode="w", encoding="utf-8") as f:
+        f.write('\n'.join(artifacts))
+    return manifest_name, manifest_path
diff --git a/tests/base.py b/tests/base.py
index 2249d4e6..d2dfe94c 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -24,7 +24,7 @@
 from charon.constants import PROD_INFO_SUFFIX
 from charon.pkgs.pkg_utils import is_metadata
 from charon.storage import PRODUCT_META_KEY, CHECKSUM_META_KEY
-from tests.commons import TEST_BUCKET
+from tests.commons import TEST_BUCKET, TEST_MANIFEST_BUCKET
 from boto3_type_annotations import s3
 from moto import mock_s3

@@ -90,13 +90,16 @@ def setUp(self):
         # mock_s3 is used to generate expected content
         self.mock_s3 = self.__prepare_s3()
         self.mock_s3.create_bucket(Bucket=TEST_BUCKET)
+        self.mock_s3.create_bucket(Bucket=TEST_MANIFEST_BUCKET)
         self.test_bucket = self.mock_s3.Bucket(TEST_BUCKET)
+        self.test_manifest_bucket = self.mock_s3.Bucket(TEST_MANIFEST_BUCKET)

     def tearDown(self):
-        bucket = self.mock_s3.Bucket(TEST_BUCKET)
+        buckets = [self.mock_s3.Bucket(TEST_BUCKET), self.mock_s3.Bucket(TEST_MANIFEST_BUCKET)]
         try:
-            bucket.objects.all().delete()
-            bucket.delete()
+            for bucket in buckets:
+                bucket.objects.all().delete()
+                bucket.delete()
         except ValueError:
             pass
         super().tearDown()
diff --git a/tests/commons.py b/tests/commons.py
index be478e5b..9ae84d1b 100644
--- a/tests/commons.py
+++ b/tests/commons.py
@@ -92,7 +92,6 @@


 # For npm
-TEST_NPM_BUCKET = "npm_bucket"
 CODE_FRAME_7_14_5_FILES = [
     "@babel/code-frame/7.14.5/package.json",
     "@babel/code-frame/-/code-frame-7.14.5.tgz",
@@ -114,3 +113,10 @@
 CODE_FRAME_7_14_5_INDEX = "@babel/code-frame/7.14.5/index.html"
 CODE_FRAME_INDEX = "@babel/code-frame/index.html"
 COMMONS_ROOT_INDEX = "index.html"
+
+
+# For manifest
+TEST_MANIFEST_BUCKET = "test_manifest_bucket"
+TEST_TARGET = "stage"
+COMMONS_CLIENT_456_MANIFEST = "stage-charon-metadata/commons-client-4.5.6.txt"
+CODE_FRAME_7_14_5_MANIFEST = "stage-charon-metadata/code-frame-7.14.5.txt"
diff --git a/tests/test_manifest_del.py b/tests/test_manifest_del.py
new file mode 100644
index 00000000..9806ff13
--- /dev/null
+++ b/tests/test_manifest_del.py
@@ -0,0 +1,98 @@
+"""
+Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon)
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+
+from moto import mock_s3
+
+from charon.pkgs.maven import handle_maven_uploading, handle_maven_del
+from charon.pkgs.npm import handle_npm_uploading, handle_npm_del
+from tests.base import PackageBaseTest
+from tests.commons import (
+    TEST_BUCKET, TEST_MANIFEST_BUCKET, TEST_TARGET, COMMONS_CLIENT_456_MANIFEST,
+    CODE_FRAME_7_14_5_MANIFEST
+)
+
+
+@mock_s3
+class ManifestDeleteTest(PackageBaseTest):
+
+    def test_maven_manifest_delete(self):
+        self.__prepare_maven_content()
+
+        uploaded_manifest = list(self.test_manifest_bucket.objects.all())
+        manifests = [obj.key for obj in uploaded_manifest]
+        self.assertEqual(1, len(manifests))
+        self.assertIn(COMMONS_CLIENT_456_MANIFEST, manifests)
+
+        test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip")
+        product = "commons-client-4.5.6"
+        handle_maven_del(
+            test_zip, product,
+            bucket_name=TEST_BUCKET,
+            dir_=self.tempdir,
+            do_index=False,
+            target=TEST_TARGET,
+            manifest_bucket_name=TEST_MANIFEST_BUCKET
+        )
+        uploaded_manifest = list(self.test_manifest_bucket.objects.all())
+        manifests = [obj.key for obj in uploaded_manifest]
+        self.assertEqual(0, len(manifests))
+
+    def test_npm_manifest_delete(self):
+        self.__prepare_npm_content()
+
+        uploaded_manifest = list(self.test_manifest_bucket.objects.all())
+        manifests = [obj.key for obj in uploaded_manifest]
+        self.assertEqual(1, len(manifests))
+        self.assertIn(CODE_FRAME_7_14_5_MANIFEST, manifests)
+
+        test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz")
+        product = "code-frame-7.14.5"
+        handle_npm_del(
+            test_tgz, product,
+            bucket_name=TEST_BUCKET,
+            dir_=self.tempdir,
+            do_index=False,
+            target=TEST_TARGET,
+            manifest_bucket_name=TEST_MANIFEST_BUCKET
+        )
+        uploaded_manifest = list(self.test_manifest_bucket.objects.all())
+        manifests = [obj.key for obj in uploaded_manifest]
+        self.assertEqual(0, len(manifests))
+
+    def __prepare_maven_content(self):
+        test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip")
+        product = "commons-client-4.5.6"
+        handle_maven_uploading(
+            test_zip, product,
+            bucket_name=TEST_BUCKET,
+            dir_=self.tempdir,
+            do_index=False,
+            target=TEST_TARGET,
+            manifest_bucket_name=TEST_MANIFEST_BUCKET
+        )
+
+    def __prepare_npm_content(self):
+        test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz")
+        product = "code-frame-7.14.5"
+        handle_npm_uploading(
+            test_tgz, product,
+            bucket_name=TEST_BUCKET,
+            dir_=self.tempdir,
+            do_index=False,
+            target=TEST_TARGET,
+            manifest_bucket_name=TEST_MANIFEST_BUCKET
+        )
diff --git a/tests/test_manifest_upload.py b/tests/test_manifest_upload.py
new file mode 100644
index 00000000..a507ab2d
--- /dev/null
+++ b/tests/test_manifest_upload.py
@@ -0,0 +1,91 @@
+"""
+Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon)
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+
+from moto import mock_s3
+
+from charon.pkgs.maven import handle_maven_uploading
+from charon.pkgs.npm import handle_npm_uploading
+from tests.base import PackageBaseTest
+from tests.commons import (
+    TEST_BUCKET, TEST_MANIFEST_BUCKET, TEST_TARGET, COMMONS_CLIENT_456_MVN_NUM,
+    COMMONS_CLIENT_META_NUM, COMMONS_CLIENT_456_MANIFEST, COMMONS_CLIENT_456_FILES,
+    COMMONS_LOGGING_FILES, CODE_FRAME_7_14_5_MANIFEST, CODE_FRAME_7_14_5_FILES
+)
+
+
+@mock_s3
+class ManifestUploadTest(PackageBaseTest):
+
+    def test_maven_manifest_upload(self):
+        test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip")
+        product = "commons-client-4.5.6"
+        handle_maven_uploading(
+            test_zip, product,
+            bucket_name=TEST_BUCKET,
+            dir_=self.tempdir,
+            do_index=False,
+            target=TEST_TARGET,
+            manifest_bucket_name=TEST_MANIFEST_BUCKET
+        )
+
+        uploaded_contents = list(self.test_bucket.objects.all())
+        actual_files = [obj.key for obj in uploaded_contents]
+        self.assertEqual(
+            COMMONS_CLIENT_456_MVN_NUM * 2 + COMMONS_CLIENT_META_NUM,
+            len(actual_files)
+        )
+
+        uploaded_manifest = list(self.test_manifest_bucket.objects.all())
+        manifests = [obj.key for obj in uploaded_manifest]
+        self.assertEqual(1, len(manifests))
+        self.assertIn(COMMONS_CLIENT_456_MANIFEST, manifests)
+
+        manifest_obj = self.test_manifest_bucket.Object(COMMONS_CLIENT_456_MANIFEST)
+        manifest_content = str(manifest_obj.get()["Body"].read(), "utf-8")
+        for f in COMMONS_CLIENT_456_FILES:
+            self.assertIn(f, manifest_content)
+        for f in COMMONS_LOGGING_FILES:
+            self.assertIn(f, manifest_content)
+
+    def test_npm_manifest_upload(self):
+        test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz")
+        product = "code-frame-7.14.5"
+        handle_npm_uploading(
+            test_tgz, product,
+            bucket_name=TEST_BUCKET,
+            dir_=self.tempdir,
+            do_index=False,
+            target=TEST_TARGET,
+            manifest_bucket_name=TEST_MANIFEST_BUCKET
+        )
+
+        uploaded_contents = list(self.test_bucket.objects.all())
+        actual_files = [obj.key for obj in uploaded_contents]
+        self.assertEqual(
+            len(CODE_FRAME_7_14_5_FILES) * 2 + 1,
+            len(actual_files)
+        )
+
+        uploaded_manifest = list(self.test_manifest_bucket.objects.all())
+        manifests = [obj.key for obj in uploaded_manifest]
+        self.assertEqual(1, len(manifests))
+        self.assertIn(CODE_FRAME_7_14_5_MANIFEST, manifests)
+
+        manifest_obj = self.test_manifest_bucket.Object(CODE_FRAME_7_14_5_MANIFEST)
+        manifest_content = str(manifest_obj.get()["Body"].read(), "utf-8")
+        for f in CODE_FRAME_7_14_5_FILES:
+            self.assertIn(f, manifest_content)
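
Note on configuration: the new manifest_bucket key is read straight off the top-level config dict in CharonConfig.__init__, and get_manifest_bucket() simply returns it (it may be None, which the upload and delete paths treat as "skip manifest handling"). A minimal sketch of the wiring, assuming only the fields visible in this diff are required; the "targets" entry here is hypothetical and just mirrors whatever the real ~/.charon config defines:

from charon.config import CharonConfig

# "targets" content is hypothetical; "manifest_bucket" is the key this change adds.
conf = CharonConfig({
    "targets": {
        "stage": {"bucket": "charon-stage", "prefix": ""},
    },
    "manifest_bucket": "charon-stage-manifest",
})
assert conf.get_manifest_bucket() == "charon-stage-manifest"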
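
How the manifest itself is built: write_manifest() makes each valid path relative to the extracted tarball root and writes the entries, one per line, into <product_key>.txt next to the extracted content. A quick usage sketch of the new helper:

import tempfile

from charon.utils.files import write_manifest

root = tempfile.mkdtemp()  # stands in for the extracted tarball's top level
paths = [
    f"{root}/commons-client/4.5.6/commons-client-4.5.6.jar",
    f"{root}/commons-client/4.5.6/commons-client-4.5.6.pom",
]
name, full_path = write_manifest(paths, root, "commons-client-4.5.6")
print(name)  # commons-client-4.5.6.txt
with open(full_path, encoding="utf-8") as f:
    print(f.read())
# commons-client/4.5.6/commons-client-4.5.6.jar
# commons-client/4.5.6/commons-client-4.5.6.pom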
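
Where the manifest lands: upload_manifest and delete_manifest derive the object key the same way, so the manifest for a product under target T is stored at T-charon-metadata/<product_key>.txt in the manifest bucket, falling back to the "default" target when none is given. That is exactly what the new test constants (stage-charon-metadata/...) assert. A standalone helper, hypothetical and for illustration only, that mirrors the derivation:

import os

def manifest_key(product_key: str, target: str = None) -> str:
    # Mirrors the key derivation in S3Client.upload_manifest/delete_manifest.
    target = target if target else "default"
    env_folder = "-".join([target, "charon-metadata"])
    return os.path.join(env_folder, product_key + ".txt")

assert manifest_key("commons-client-4.5.6", "stage") == \
    "stage-charon-metadata/commons-client-4.5.6.txt"
assert manifest_key("code-frame-7.14.5") == \
    "default-charon-metadata/code-frame-7.14.5.txt"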