1313See the License for the specific language governing permissions and
1414limitations under the License.
1515"""
16- from charon .utils .files import digest
16+ from charon .utils .files import digest , HashType
17+ from charon .storage import S3Client
1718from typing import Tuple , List , Dict
1819from html .parser import HTMLParser
1920import tempfile
@@ -36,9 +37,8 @@ def handle_checksum_validation_http(
3637 skips : List [str ] = None
3738):
3839 """ Handle the checksum check for maven artifacts.
39- * target contains bucket name and prefix for the bucket, which will
40- be used to store artifacts with the prefix. See target definition
41- in Charon configuration for details.
40+ * target contains the bucket name and the prefix under which artifacts
41+ are stored. See target definition in Charon configuration for details.
4242 * path is the root path where to start the validation in the bucket.
4343 * includes are the file suffixes which will decide the types of files
4444 to do the validation.
@@ -266,3 +266,84 @@ def _decide_root_url(bucket: str) -> str:
266266 if bucket .strip ().startswith ("stage-maven" ):
267267 return "https://maven.stage.repository.redhat.com"
268268 return None
269+
270+
def refresh_checksum(
    target: Tuple[str, str],
    paths: List[str],
    aws_profile: str = None
):
    """Refresh checksum files for maven artifacts in a given bucket.

    * target is a (bucket_name, prefix) tuple; the prefix is joined to
      each path to form the S3 key. See target definition in Charon
      configuration for details.
    * paths are the exact artifact files (relative to the prefix) whose
      checksum files will be recalculated and re-uploaded when stale.
    * aws_profile is the optional AWS profile used to build the S3 client.
    """
    bucket_name = target[0]
    prefix = target[1]
    s3_client = S3Client(aws_profile=aws_profile)
    real_prefix = prefix if prefix.strip() != "/" else ""
    # Metadata/checksum files are never refreshed directly; they are
    # regenerated from their artifact. ".sha512" is included so the
    # filter stays consistent with the checksum types handled below
    # (previously a ".sha512" file was misclassified as an artifact).
    filetype_filter = (".prodinfo", ".md5", ".sha1", ".sha256", ".sha512")
    # Map checksum-file suffix -> hash algorithm used to recompute it.
    # Loop-invariant, so built once instead of once per path.
    checksums = {
        ".md5": HashType.MD5,
        ".sha1": HashType.SHA1,
        ".sha256": HashType.SHA256,
        ".sha512": HashType.SHA512
    }
    for path in paths:
        # str.endswith accepts a tuple of suffixes, replacing the manual
        # flag loop (which used `continue` where `break` was intended).
        if path.strip().endswith(filetype_filter):
            logger.info(
                "%s is not an artifact file for maven products. Skipped.",
                path
            )
            continue
        s3_path = os.path.join(real_prefix, path)
        if not s3_client.file_exists_in_bucket(bucket_name, s3_path):
            logger.warning("File %s does not exist in bucket %s", s3_path, bucket_name)
            continue
        temp_f = os.path.join(tempfile.gettempdir(), path)
        folder = os.path.dirname(temp_f)
        try:
            # exist_ok avoids the check-then-create race of the original.
            os.makedirs(folder, exist_ok=True)
            s3_client.download_file(bucket_name, s3_path, temp_f)
            # Only checksum files that already exist in the bucket are
            # refreshed; missing ones are not created here.
            existed_checksum_types = [
                file_type for file_type in checksums
                if s3_client.file_exists_in_bucket(
                    bucket_name, s3_path + file_type
                )
            ]
            if not existed_checksum_types:
                logger.warning(
                    "No valid checksum files exist for %s, Skipped."
                    " Are you sure it is a valid maven artifact?",
                    path
                )
                continue
            for file_type in existed_checksum_types:
                checksum_path = path + file_type
                s3_checksum_path = s3_path + file_type
                correct_checksum_c = digest(temp_f, checksums[file_type])
                original_checksum_c = s3_client.read_file_content(
                    bucket_name, s3_checksum_path
                )
                if correct_checksum_c == original_checksum_c:
                    logger.info(
                        "Checksum %s matches, no need to refresh.",
                        checksum_path
                    )
                else:
                    logger.info(
                        "Checksum %s does not match, refreshing...",
                        checksum_path
                    )
                    s3_client.simple_upload_file(
                        file_path=checksum_path,
                        file_content=correct_checksum_c,
                        target=(bucket_name, prefix),
                        mime_type="text/plain",
                        force=True
                    )
            # Logged only when checksum files were actually processed;
            # previously this fired even after the "no valid checksum
            # files" warning, which was misleading.
            logger.info("Checksums are refreshed for artifact %s", path)
        finally:
            # Remove the downloaded artifact's temp folder, but never the
            # shared system temp dir itself.
            if folder and folder != tempfile.gettempdir() and os.path.exists(folder):
                shutil.rmtree(folder)
0 commit comments