 from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process
 from charon.utils.strings import remove_prefix
 from charon.utils.files import write_manifest
+from charon.utils.map import del_none
3334
3435logger = logging .getLogger (__name__ )
3536
@@ -65,7 +66,7 @@ def __init__(self, metadata, is_version):
6566def handle_npm_uploading (
6667 tarball_path : str ,
6768 product : str ,
-    targets: List[Tuple[str, str, str]] = None,
+    targets: List[Tuple[str, str, str, str]] = None,
6970 aws_profile = None ,
7071 dir_ = None ,
7172 do_index = True ,
@@ -86,48 +87,47 @@ def handle_npm_uploading(
8687
8788 Returns the directory used for archive processing and if uploading is successful
8889 """
89- target_dir , valid_paths , package_metadata = _scan_metadata_paths_from_archive (
90- tarball_path , prod = product , dir__ = dir_
91- )
92- if not os .path .isdir (target_dir ):
93- logger .error ("Error: the extracted target_dir path %s does not exist." , target_dir )
94- sys .exit (1 )
90+ for target in targets :
91+ bucket_ = target [1 ]
92+ prefix__ = remove_prefix (target [2 ], "/" )
93+ registry__ = target [3 ]
94+ target_dir , valid_paths , package_metadata = _scan_metadata_paths_from_archive (
95+ tarball_path , registry__ , prod = product , dir__ = dir_
96+ )
97+ if not os .path .isdir (target_dir ):
98+ logger .error ("Error: the extracted target_dir path %s does not exist." , target_dir )
99+ sys .exit (1 )
100+ valid_dirs = __get_path_tree (valid_paths , target_dir )
101+
102+ # main_target = targets[0]
103+ client = S3Client (aws_profile = aws_profile , dry_run = dry_run )
104+ logger .info ("Start uploading files to s3 buckets: %s" , bucket_ )
105+ failed_files = client .upload_files (
106+ file_paths = valid_paths ,
107+ targets = [(bucket_ , prefix__ )],
108+ product = product ,
109+ root = target_dir
110+ )
95111
96- valid_dirs = __get_path_tree ( valid_paths , target_dir )
112+ logger . info ( "Files uploading done \n " )
97113
98- # main_target = targets[0]
99- client = S3Client (aws_profile = aws_profile , dry_run = dry_run )
100- targets_ = [(target [1 ], remove_prefix (target [2 ], "/" )) for target in targets ]
101- logger .info (
102- "Start uploading files to s3 buckets: %s" ,
103- [target [1 ] for target in targets ]
104- )
105- failed_files = client .upload_files (
106- file_paths = valid_paths ,
107- targets = targets_ ,
108- product = product ,
109- root = target_dir
110- )
111- logger .info ("Files uploading done\n " )
114+ succeeded = True
112115
113- succeeded = True
114- for target in targets :
115116 if not manifest_bucket_name :
116117 logger .warning (
117118 'Warning: No manifest bucket is provided, will ignore the process of manifest '
118119 'uploading\n ' )
119120 else :
120121 logger .info ("Start uploading manifest to s3 bucket %s" , manifest_bucket_name )
121- manifest_folder = target [ 1 ]
122+ manifest_folder = bucket_
122123 manifest_name , manifest_full_path = write_manifest (valid_paths , target_dir , product )
124+
123125 client .upload_manifest (
124126 manifest_name , manifest_full_path ,
125127 manifest_folder , manifest_bucket_name
126128 )
127129 logger .info ("Manifest uploading is done\n " )
128130
129- bucket_ = target [1 ]
130- prefix__ = remove_prefix (target [2 ], "/" )
131131 logger .info (
132132 "Start generating package.json for package: %s in s3 bucket %s" ,
133133 package_metadata .name , bucket_
@@ -178,7 +178,7 @@ def handle_npm_uploading(
178178def handle_npm_del (
179179 tarball_path : str ,
180180 product : str ,
-    targets: List[Tuple[str, str, str]] = None,
+    targets: List[Tuple[str, str, str, str]] = None,
182182 aws_profile = None ,
183183 dir_ = None ,
184184 do_index = True ,
@@ -381,11 +381,11 @@ def _gen_npm_package_metadata_for_del(
381381 return meta_files
382382
383383
384- def _scan_metadata_paths_from_archive (path : str , prod = "" , dir__ = None ) -> Tuple [ str , list ,
385- NPMPackageMetadata ]:
384+ def _scan_metadata_paths_from_archive (path : str , registry : str , prod = "" , dir__ = None ) -> \
385+ Tuple [ str , list , NPMPackageMetadata ]:
386386 tmp_root = mkdtemp (prefix = f"npm-charon-{ prod } -" , dir = dir__ )
387387 try :
388- _ , valid_paths = extract_npm_tarball (path , tmp_root , True )
388+ _ , valid_paths = extract_npm_tarball (path , tmp_root , True , registry )
389389 if len (valid_paths ) > 1 :
390390 version = _scan_for_version (valid_paths [1 ])
391391 package = NPMPackageMetadata (version , True )
@@ -502,23 +502,14 @@ def _write_package_metadata_to_file(package_metadata: NPMPackageMetadata, root='
502502 final_package_metadata_path = os .path .join (root , package_metadata .name , PACKAGE_JSON )
503503 try :
504504 with open (final_package_metadata_path , mode = 'w' , encoding = 'utf-8' ) as f :
505- dump (_del_none (package_metadata .__dict__ .copy ()), f )
505+ dump (del_none (package_metadata .__dict__ .copy ()), f )
506506 return final_package_metadata_path
507507 except FileNotFoundError :
508508 logger .error (
509509 'Can not create file %s because of some missing folders' , final_package_metadata_path
510510 )
511511
512512
513- def _del_none (d ):
514- for key , value in list (d .items ()):
515- if value is None :
516- del d [key ]
517- elif isinstance (value , dict ):
518- _del_none (value )
519- return d
520-
521-
522513def __get_path_tree (paths : str , prefix : str ) -> Set [str ]:
523514 valid_dirs = set ()
524515 for f in paths :
0 commit comments