Skip to content

Commit c32fead

Browse files
committed
Add feature for generating and removing product manifests
1 parent be07efe commit c32fead

File tree

11 files changed

+351
-33
lines changed

11 files changed

+351
-33
lines changed

charon/cmd/command.py

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -148,6 +148,7 @@ def upload(
148148
npm_archive_type = detect_npm_archive(archive_path)
149149
product_key = f"{product}-{version}"
150150
prefix_ = conf.get_bucket_prefix(target)
151+
manifest_bucket_name = conf.get_manifest_bucket()
151152
if npm_archive_type != NpmArchiveType.NOT_NPM:
152153
logger.info("This is a npm archive")
153154
tmp_dir = handle_npm_uploading(
@@ -157,7 +158,9 @@ def upload(
157158
prefix=prefix_,
158159
aws_profile=aws_profile,
159160
dir_=work_dir,
160-
dry_run=dryrun
161+
dry_run=dryrun,
162+
target=target,
163+
manifest_bucket_name=manifest_bucket_name
161164
)
162165
else:
163166
ignore_patterns_list = None
@@ -175,7 +178,9 @@ def upload(
175178
aws_profile=aws_profile,
176179
prefix=prefix_,
177180
dir_=work_dir,
178-
dry_run=dryrun
181+
dry_run=dryrun,
182+
target=target,
183+
manifest_bucket_name=manifest_bucket_name
179184
)
180185
except Exception:
181186
print(traceback.format_exc())
@@ -301,6 +306,7 @@ def delete(
301306
npm_archive_type = detect_npm_archive(archive_path)
302307
product_key = f"{product}-{version}"
303308
prefix_ = conf.get_bucket_prefix(target)
309+
manifest_bucket_name = conf.get_manifest_bucket()
304310
if npm_archive_type != NpmArchiveType.NOT_NPM:
305311
logger.info("This is a npm archive")
306312
tmp_dir = handle_npm_del(
@@ -310,7 +316,9 @@ def delete(
310316
prefix=prefix_,
311317
aws_profile=aws_profile,
312318
dir_=work_dir,
313-
dry_run=dryrun
319+
dry_run=dryrun,
320+
target=target,
321+
manifest_bucket_name=manifest_bucket_name
314322
)
315323
else:
316324
ignore_patterns_list = None
@@ -328,7 +336,9 @@ def delete(
328336
aws_profile=aws_profile,
329337
prefix=prefix_,
330338
dir_=work_dir,
331-
dry_run=dryrun
339+
dry_run=dryrun,
340+
target=target,
341+
manifest_bucket_name=manifest_bucket_name
332342
)
333343
except Exception:
334344
print(traceback.format_exc())

charon/config.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ def __init__(self, data: Dict):
3838
self.__targets: Dict = data.get("targets", None)
3939
if not self.__targets or not isinstance(self.__targets, Dict):
4040
raise TypeError("Charon configuration is not correct: targets is invalid.")
41+
self.__manifest_bucket: str = data.get("manifest_bucket", None)
4142

4243
def get_ignore_patterns(self) -> List[str]:
4344
return self.__ignore_patterns
@@ -72,6 +73,9 @@ def get_bucket_prefix(self, target: str) -> str:
7273
prefix = remove_prefix(prefix, "/")
7374
return prefix
7475

76+
def get_manifest_bucket(self) -> str:
77+
return self.__manifest_bucket
78+
7579

7680
def get_config() -> CharonConfig:
7781
config_file = os.path.join(os.getenv("HOME"), ".charon", CONFIG_FILE)

charon/constants.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -175,5 +175,5 @@
175175
'''
176176

177177
PROD_INFO_SUFFIX = ".prodinfo"
178-
178+
MANIFEST_SUFFIX = ".txt"
179179
DEFAULT_ERRORS_LOG = "errors.log"

charon/pkgs/maven.py

Lines changed: 27 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
"""
1616
from charon.utils.files import HashType
1717
import charon.pkgs.indexing as indexing
18-
from charon.utils.files import overwrite_file, digest
18+
from charon.utils.files import overwrite_file, digest, write_manifest
1919
from charon.utils.archive import extract_zip_all
2020
from charon.utils.strings import remove_prefix
2121
from charon.storage import S3Client
@@ -261,7 +261,9 @@ def handle_maven_uploading(
261261
prefix=None,
262262
dir_=None,
263263
do_index=True,
264-
dry_run=False
264+
dry_run=False,
265+
target=None,
266+
manifest_bucket_name=None
265267
) -> str:
266268
""" Handle the maven product release tarball uploading process.
267269
* repo is the location of the tarball in filesystem
@@ -302,6 +304,8 @@ def handle_maven_uploading(
302304
# Question: should we exit here?
303305

304306
prefix_ = remove_prefix(prefix, "/")
307+
308+
manifest_name, manifest_full_path = write_manifest(valid_mvn_paths, top_level, prod_key)
305309
# 4. Do uploading
306310
logger.info("Start uploading files to s3")
307311
s3_client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
@@ -312,7 +316,12 @@ def handle_maven_uploading(
312316
)
313317
logger.info("Files uploading done\n")
314318

315-
# 5. Use uploaded poms to scan s3 for metadata refreshment
319+
# 5. Do manifest uploading
320+
logger.info("Start uploading manifest to s3")
321+
s3_client.upload_manifest(manifest_name, manifest_full_path, target, manifest_bucket_name)
322+
logger.info("Manifest uploading is done\n")
323+
324+
# 6. Use uploaded poms to scan s3 for metadata refreshment
316325
logger.info("Start generating maven-metadata.xml files for all artifacts")
317326
meta_files = _generate_metadatas(
318327
s3=s3_client, bucket=bucket,
@@ -322,7 +331,7 @@ def handle_maven_uploading(
322331
logger.info("maven-metadata.xml files generation done\n")
323332

324333
failed_metas = meta_files.get(META_FILE_FAILED, [])
325-
# 6. Upload all maven-metadata.xml
334+
# 7. Upload all maven-metadata.xml
326335
if META_FILE_GEN_KEY in meta_files:
327336
logger.info("Start updating maven-metadata.xml to s3")
328337
(_, _failed_metas) = s3_client.upload_metadatas(
@@ -335,7 +344,7 @@ def handle_maven_uploading(
335344
failed_metas.extend(_failed_metas)
336345
logger.info("maven-metadata.xml updating done\n")
337346

338-
# 7. Determine refreshment of archetype-catalog.xml
347+
# 8. Determine refreshment of archetype-catalog.xml
339348
if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")):
340349
logger.info("Start generating archetype-catalog.xml")
341350
upload_archetype_file = _generate_upload_archetype_catalog(
@@ -345,7 +354,7 @@ def handle_maven_uploading(
345354
)
346355
logger.info("archetype-catalog.xml files generation done\n")
347356

348-
# 8. Upload archetype-catalog.xml if it has changed
357+
# 9. Upload archetype-catalog.xml if it has changed
349358
if upload_archetype_file:
350359
archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)]
351360
archetype_files.extend(__hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME))
@@ -397,7 +406,9 @@ def handle_maven_del(
397406
prefix=None,
398407
dir_=None,
399408
do_index=True,
400-
dry_run=False
409+
dry_run=False,
410+
target=None,
411+
manifest_bucket_name=None
401412
) -> str:
402413
""" Handle the maven product release tarball deletion process.
403414
* repo is the location of the tarball in filesystem
@@ -425,7 +436,6 @@ def handle_maven_del(
425436
valid_dirs) = _scan_paths(tmp_root, ignore_patterns, root)
426437

427438
# 3. Delete all valid_paths from s3
428-
logger.info("Start generating maven-metadata.xml files for all artifacts")
429439
logger.debug("Valid poms: %s", valid_poms)
430440
prefix_ = remove_prefix(prefix, "/")
431441
logger.info("Start deleting files from s3")
@@ -440,7 +450,12 @@ def handle_maven_del(
440450
)
441451
logger.info("Files deletion done\n")
442452

443-
# 4. Use changed GA to scan s3 for metadata refreshment
453+
# 4. Delete related manifest from s3
454+
logger.info("Start deleting manifest from s3")
455+
s3_client.delete_manifest(prod_key, target, manifest_bucket_name)
456+
logger.info("Manifest deletion is done\n")
457+
458+
# 5. Use changed GA to scan s3 for metadata refreshment
444459
logger.info("Start generating maven-metadata.xml files for all changed GAs")
445460
meta_files = _generate_metadatas(
446461
s3=s3_client, bucket=bucket,
@@ -450,7 +465,7 @@ def handle_maven_del(
450465

451466
logger.info("maven-metadata.xml files generation done\n")
452467

453-
# 5. Upload all maven-metadata.xml. We need to delete metadata files
468+
# 6. Upload all maven-metadata.xml. We need to delete metadata files
454469
# firstly for all affected GA, and then replace the theirs content.
455470
logger.info("Start updating maven-metadata.xml to s3")
456471
all_meta_files = []
@@ -475,7 +490,7 @@ def handle_maven_del(
475490
failed_metas.extend(_failed_metas)
476491
logger.info("maven-metadata.xml updating done\n")
477492

478-
# 6. Determine refreshment of archetype-catalog.xml
493+
# 7. Determine refreshment of archetype-catalog.xml
479494
if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")):
480495
logger.info("Start generating archetype-catalog.xml")
481496
archetype_action = _generate_rollback_archetype_catalog(
@@ -485,7 +500,7 @@ def handle_maven_del(
485500
)
486501
logger.info("archetype-catalog.xml files generation done\n")
487502

488-
# 7. Upload or Delete archetype-catalog.xml if it has changed
503+
# 8. Upload or Delete archetype-catalog.xml if it has changed
489504
archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)]
490505
archetype_files.extend(__hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME))
491506
if archetype_action < 0:

charon/pkgs/npm.py

Lines changed: 33 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@
2929
from charon.utils.archive import extract_npm_tarball
3030
from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process
3131
from charon.utils.strings import remove_prefix
32+
from charon.utils.files import write_manifest
3233

3334
logger = logging.getLogger(__name__)
3435

@@ -62,11 +63,16 @@ def __init__(self, metadata, is_version):
6263

6364

6465
def handle_npm_uploading(
65-
tarball_path: str, product: str,
66-
bucket_name=None, prefix=None,
66+
tarball_path: str,
67+
product: str,
68+
bucket_name=None,
69+
prefix=None,
6770
aws_profile=None,
68-
dir_=None, do_index=True,
69-
dry_run=False
71+
dir_=None,
72+
do_index=True,
73+
dry_run=False,
74+
target=None,
75+
manifest_bucket_name=None
7076
) -> str:
7177
""" Handle the npm product release tarball uploading process.
7278
For NPM uploading, tgz file and version metadata will be relocated based
@@ -90,6 +96,9 @@ def handle_npm_uploading(
9096
valid_dirs = __get_path_tree(valid_paths, target_dir)
9197

9298
prefix_ = remove_prefix(prefix, "/")
99+
100+
manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)
101+
93102
logger.info("Start uploading files to s3")
94103
client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
95104
bucket = bucket_name
@@ -102,6 +111,10 @@ def handle_npm_uploading(
102111
)
103112
logger.info("Files uploading done\n")
104113

114+
logger.info("Start uploading manifest to s3")
115+
client.upload_manifest(manifest_name, manifest_full_path, target, manifest_bucket_name)
116+
logger.info("Manifest uploading is done\n")
117+
105118
logger.info("Start generating package.json for package: %s", package_metadata.name)
106119
meta_files = _gen_npm_package_metadata_for_upload(
107120
client, bucket, target_dir, package_metadata, prefix_
@@ -145,10 +158,16 @@ def handle_npm_uploading(
145158

146159

147160
def handle_npm_del(
148-
tarball_path: str, product: str,
149-
bucket_name=None, prefix=None,
150-
aws_profile=None, dir_=None,
151-
do_index=True, dry_run=False
161+
tarball_path: str,
162+
product: str,
163+
bucket_name=None,
164+
prefix=None,
165+
aws_profile=None,
166+
dir_=None,
167+
do_index=True,
168+
dry_run=False,
169+
target=None,
170+
manifest_bucket_name=None
152171
) -> str:
153172
""" Handle the npm product release tarball deletion process.
154173
* tarball_path is the location of the tarball in filesystem
@@ -177,6 +196,10 @@ def handle_npm_del(
177196
)
178197
logger.info("Files deletion done\n")
179198

199+
logger.info("Start deleting manifest from s3")
200+
client.delete_manifest(product, target, manifest_bucket_name)
201+
logger.info("Manifest deletion is done\n")
202+
180203
logger.info("Start generating package.json for package: %s", package_name_path)
181204
meta_files = _gen_npm_package_metadata_for_del(
182205
client, bucket, target_dir, package_name_path, prefix_
@@ -252,7 +275,7 @@ def _gen_npm_package_metadata_for_upload(
252275
if prefix and prefix != "/":
253276
package_metadata_key = os.path.join(prefix, package_metadata_key)
254277
(package_json_files, success) = client.get_files(bucket_name=bucket,
255-
prefix=package_metadata_key)
278+
prefix=package_metadata_key)
256279
if not success:
257280
logger.warning("Error to get remote metadata files for %s", package_metadata_key)
258281
result = source_package
@@ -320,7 +343,7 @@ def _gen_npm_package_metadata_for_del(
320343

321344

322345
def _scan_metadata_paths_from_archive(path: str, prod="", dir__=None) -> Tuple[
323-
str, list, NPMPackageMetadata]:
346+
str, list, NPMPackageMetadata]:
324347
tmp_root = mkdtemp(prefix=f"npm-charon-{prod}-", dir=dir__)
325348
try:
326349
_, valid_paths = extract_npm_tarball(path, tmp_root, True)

charon/storage.py

Lines changed: 48 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,11 @@
1414
limitations under the License.
1515
"""
1616
import asyncio
17+
18+
from boto3.exceptions import S3UploadFailedError
1719
from boto3_type_annotations.s3.service_resource import Object
1820
from charon.utils.files import read_sha1
19-
from charon.constants import PROD_INFO_SUFFIX
21+
from charon.constants import PROD_INFO_SUFFIX, MANIFEST_SUFFIX
2022

2123
from boto3 import session
2224
from botocore.errorfactory import ClientError
@@ -301,6 +303,31 @@ async def path_upload_handler(
301303
file_paths=meta_file_paths, path_handler=path_upload_handler, root=root
302304
))
303305

306+
def upload_manifest(
307+
self, manifest_name: str, manifest_full_path: str, target: str,
308+
manifest_bucket_name: str
309+
):
310+
target = target if target else "default"
311+
env_folder = "-".join([target, "charon-metadata"])
312+
path_key = os.path.join(env_folder, manifest_name)
313+
if not manifest_bucket_name:
314+
logger.warning(
315+
'Warning: No manifest bucket provided, will ignore the process of manifest '
316+
'uploading')
317+
return
318+
319+
manifest_bucket = self.__client.Bucket(manifest_bucket_name)
320+
try:
321+
file_object: s3.Object = manifest_bucket.Object(path_key)
322+
file_object.upload_file(
323+
Filename=manifest_full_path,
324+
ExtraArgs={'ContentType': DEFAULT_MIME_TYPE}
325+
)
326+
except S3UploadFailedError:
327+
logger.warning(
328+
'Warning: Manifest bucket %s does not exist in S3, will ignore uploading of '
329+
'manifest file %s', manifest_bucket_name, manifest_name)
330+
304331
def delete_files(
305332
self, file_paths: List[str], bucket_name: str,
306333
product: Optional[str], root="/", key_prefix: str = None
@@ -398,6 +425,26 @@ async def path_delete_handler(
398425

399426
return (deleted_files, failed_files)
400427

428+
def delete_manifest(self, product_key: str, target: str, manifest_bucket_name: str):
429+
manifest_name = product_key + MANIFEST_SUFFIX
430+
target = target if target else "default"
431+
env_folder = "-".join([target, "charon-metadata"])
432+
path_key = os.path.join(env_folder, manifest_name)
433+
if not manifest_bucket_name:
434+
logger.warning(
435+
'Warning: No manifest bucket provided, will ignore the process of manifest '
436+
'deleting')
437+
return
438+
439+
manifest_bucket = self.__client.Bucket(manifest_bucket_name)
440+
file_object: s3.Object = manifest_bucket.Object(path_key)
441+
if self.__file_exists(file_object):
442+
manifest_bucket.delete_objects(Delete={"Objects": [{"Key": path_key}]})
443+
else:
444+
logger.warning(
445+
'Warning: Manifest %s does not exist in S3 bucket %s, will ignore its deleting',
446+
manifest_name, manifest_bucket_name)
447+
401448
def get_files(self, bucket_name: str, prefix=None, suffix=None) -> Tuple[List[str], bool]:
402449
"""Get the file names from s3 bucket. Can use prefix and suffix to filter the
403450
files wanted. If some error happend, will return an empty file list and false result

0 commit comments

Comments
 (0)