 import argparse
 import hashlib
 import os
+import subprocess
 import sys
 import tarfile
-import subprocess
 from pathlib import Path
-import boto3
 
+import boto3
 from botocore.exceptions import ClientError, NoCredentialsError, PartialCredentialsError
-# from github import Github, GithubException
+from github import Github, GithubException
+
+from lib.base_logger import logger
 
-GITHUB_REPO = "mongodb/mongodb-kubernetes"
 GITHUB_TOKEN = os.environ.get("GH_TOKEN")
+GITHUB_REPO = "mongodb/mongodb-kubernetes"
+
+AWS_REGION = "eu-north-1"
+
+STAGING_S3_BUCKET_NAME = "mongodb-kubernetes-dev"
+RELEASE_S3_BUCKET_NAME = "mongodb-kubernetes-staging"
+
+KUBECTL_PLUGIN_BINARY_NAME = "kubectl-mongodb"
+S3_BUCKET_KUBECTL_PLUGIN_SUBPATH = KUBECTL_PLUGIN_BINARY_NAME
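+# Promoted artifacts end up under "<S3_BUCKET_KUBECTL_PLUGIN_SUBPATH>/<release_version>/" in the release bucket.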
 
 LOCAL_ARTIFACTS_DIR = "artifacts"
 CHECKSUMS_PATH = f"{LOCAL_ARTIFACTS_DIR}/checksums.txt"
 
-DEV_S3_BUCKET_NAME = "mongodb-kubernetes-dev"
-STAGING_S3_BUCKET_NAME = "mongodb-kubernetes-staging"
-
-S3_BUCKET_KUBECTL_PLUGIN_SUBPATH = "kubectl-mongodb"
-AWS_REGION = "eu-north-1"
 
 def main():
     parser = argparse.ArgumentParser()
@@ -44,57 +49,69 @@ def main():
 
     promote_artifacts(artifacts, args.release_version)
 
-    # upload_assets_to_github_release(artifacts_tar, args.release_version)
+    upload_assets_to_github_release(artifacts, args.release_version)
 
+
+# generate_checksums generates checksums for the artifacts that we are going to upload to the GitHub release as assets.
+# Each line is formatted as: <checksum> <artifact_name>
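+# e.g. "<64-char sha256 hex> kubectl-mongodb_<os>_<arch>.tar.gz" (the filename pattern is illustrative)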
 def generate_checksums(artifacts: list[str]):
     checksums_path = Path(CHECKSUMS_PATH)
 
     with checksums_path.open("w") as out_file:
         for artifact in artifacts:
             artifact_path = Path(artifact)
             if not artifact_path.is_file() or not artifact_path.name.endswith(".tar.gz"):
-                print(f"skipping invalid tar file: {artifact_path}")
+                logger.info(f"skipping invalid tar file: {artifact_path}")
                 continue
 
             sha256 = hashlib.sha256()
             with open(artifact_path, "rb") as f:
-                for chunk in iter(lambda: f.read(8192), b""):
+                # read chunks of 8192 bytes until end of file (b"") is reached
+                for chunk in iter(lambda: f.read(8192), b""):
                     sha256.update(chunk)
 
             checksum_line = f"{sha256.hexdigest()} {artifact_path.name}"
-            out_file.write(checksum_line + "\n")
+            out_file.write(checksum_line + "\n")
 
-    print(f"Checksums written to {checksums_path}")
+    logger.info(f"Checksums written to {checksums_path}")
     all_artifacts = list(artifacts) + [str(checksums_path.resolve())]
-    return all_artifacts
+    return all_artifacts
+
 
+# promote_artifacts promotes (copies) the downloaded staging artifacts to the release S3 bucket.
 def promote_artifacts(artifacts: list[str], release_version: str):
     s3_client = boto3.client("s3", region_name=AWS_REGION)
     for file in artifacts:
-        if not os.path.isfile(file) or not file.endswith(('.tar.gz', '.txt')):
-            print(f"skipping invalid or non-tar file: {file}")
+        if not os.path.isfile(file) or not file.endswith((".tar.gz", ".txt")):
+            logger.info(f"Skipping invalid or non-tar/checksum file: {file}")
             continue
 
         file_name = os.path.basename(file)
         s3_key = os.path.join(S3_BUCKET_KUBECTL_PLUGIN_SUBPATH, release_version, file_name)
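+        # e.g. "kubectl-mongodb/1.2.0/kubectl-mongodb_1.2.0_linux_amd64.tar.gz" (version and filename are illustrative)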
 
         try:
-            s3_client.upload_file(file, STAGING_S3_BUCKET_NAME, s3_key)
+            s3_client.upload_file(file, RELEASE_S3_BUCKET_NAME, s3_key)
         except ClientError as e:
-            print(f"failed to upload the file {file}: {e}")
+            logger.debug(f"failed to upload the file {file}: {e}")
             sys.exit(1)
 
-    print("artifacts were promoted to release bucket successfully")
+    logger.info("Artifacts were promoted to release bucket successfully")
 
 
+# notarize_artifacts notarizes the darwin goreleaser binaries in place.
 def notarize_artifacts(release_version: str):
-    notarize_result = subprocess.run(["scripts/release/kubectl-mongodb/kubectl_mac_notarize.sh", release_version], capture_output=True, text=True)
+    notarize_result = subprocess.run(
+        ["scripts/release/kubectl_mongodb/kubectl_mac_notarize.sh", release_version], capture_output=True, text=True
+    )
     if notarize_result.returncode == 0:
-        print("notarization of artifacts was successful")
+        logger.info("Notarization of artifacts was successful")
     else:
-        print(f"notarization of artifacts failed.\nstdout: {notarize_result.stdout}\nstderr: {notarize_result.stderr}")
+        logger.debug(
+            f"Notarization of artifacts failed.\nstdout: {notarize_result.stdout}\nstderr: {notarize_result.stderr}"
+        )
         sys.exit(1)
 
+
 # sign_and_verify_artifacts iterates over the goreleaser artifacts that have been downloaded from S3, and
 # signs and verifies them.
 def sign_and_verify_artifacts():
@@ -110,18 +127,27 @@ def sign_and_verify_artifacts():
             file_path = os.path.join(subdir_path, file)
 
             if os.path.isfile(file_path):
-                sign_result = subprocess.run(["scripts/release/kubectl-mongodb/sign.sh", file_path], capture_output=True, text=True)
+                # signing an already signed artifact fails with `Signature already exists. Displaying proof`.
+                sign_result = subprocess.run(
+                    ["scripts/release/kubectl_mongodb/sign.sh", file_path], capture_output=True, text=True
+                )
                 if sign_result.returncode == 0:
-                    print(f"artifact {file_path} was signed successfully")
+                    logger.info(f"Artifact {file_path} was signed successfully")
                 else:
-                    print(f"signing the artifact {file_path} failed.\nstdout: {sign_result.stdout}\nstderr: {sign_result.stderr}")
+                    logger.debug(
+                        f"Signing the artifact {file_path} failed.\nstdout: {sign_result.stdout}\nstderr: {sign_result.stderr}"
+                    )
                     sys.exit(1)
 
-                verify_result = subprocess.run(["scripts/release/kubectl-mongodb/verify.sh", file_path], capture_output=True, text=True)
+                verify_result = subprocess.run(
+                    ["scripts/release/kubectl_mongodb/verify.sh", file_path], capture_output=True, text=True
+                )
                 if verify_result.returncode == 0:
-                    print(f"artifact {file_path} was verified successfully")
+                    logger.info(f"Artifact {file_path} was verified successfully")
                 else:
-                    print(f"verification of the artifact {file_path} failed.\nstdout: {verify_result.stdout}\nstderr: {verify_result.stderr}")
+                    logger.debug(
+                        f"Verification of the artifact {file_path} failed.\nstdout: {verify_result.stdout}\nstderr: {verify_result.stderr}"
+                    )
                     sys.exit(1)
 
 
@@ -142,18 +168,19 @@ def s3_artifacts_path_to_local_path(release_version: str, commit_sha: str):
     }
 
 
-# download_artifacts_from_s3 downloads the staging artifacts from S3 and puts them in the local dir LOCAL_ARTIFACTS_DIR
+# download_artifacts_from_s3 downloads the staging artifacts (only the ones that we will later promote) from S3 and puts
+# them in the local dir LOCAL_ARTIFACTS_DIR.
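+# It returns True when every prefix downloads successfully, and False on the first S3 client error.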
 # ToDo: if the artifacts are not present at the correct location, this is going to fail silently; we should instead fail here
 def download_artifacts_from_s3(release_version: str, commit_sha: str):
-    print(f"\nStarting download of artifacts from S3 bucket: {DEV_S3_BUCKET_NAME}")
+    logger.info(f"Starting download of artifacts from staging S3 bucket: {STAGING_S3_BUCKET_NAME}")
 
     try:
         s3_client = boto3.client("s3", region_name=AWS_REGION)
     except (NoCredentialsError, PartialCredentialsError):
-        print("ERROR: AWS credentials were not set.")
+        logger.debug("ERROR: AWS credentials were not set.")
         sys.exit(1)
     except Exception as e:
-        print(f"An error occurred connecting to S3: {e}")
+        logger.debug(f"An error occurred connecting to S3: {e}")
         sys.exit(1)
 
     artifacts_to_promote = s3_artifacts_path_to_local_path(release_version, commit_sha)
@@ -165,7 +192,7 @@ def download_artifacts_from_s3(release_version: str, commit_sha: str):
     for s3_artifact_dir, local_subdir in artifacts_to_promote.items():
         try:
             paginator = s3_client.get_paginator("list_objects_v2")
-            pages = paginator.paginate(Bucket=DEV_S3_BUCKET_NAME, Prefix=s3_artifact_dir)
+            pages = paginator.paginate(Bucket=STAGING_S3_BUCKET_NAME, Prefix=s3_artifact_dir)
             for page in pages:
                 # "Contents" lists the objects found under the prefix in the S3 bucket
                 if "Contents" not in page:
@@ -186,20 +213,21 @@ def download_artifacts_from_s3(release_version: str, commit_sha: str):
                 # Create the local directory structure if it doesn't exist
                 os.makedirs(os.path.dirname(final_local_path), exist_ok=True)
 
-                print(f"Downloading {s3_key} to {final_local_path}")
-                s3_client.download_file(DEV_S3_BUCKET_NAME, s3_key, final_local_path)
+                logger.info(f"Downloading staging artifact {s3_key} to {final_local_path}")
+                s3_client.download_file(STAGING_S3_BUCKET_NAME, s3_key, final_local_path)
                 download_count += 1
 
         except ClientError as e:
-            print(f"ERROR: Failed to list or download from prefix '{s3_artifact_dir}'. S3 Client Error: {e}")
+            logger.debug(f"ERROR: Failed to list or download from prefix '{s3_artifact_dir}'. S3 Client Error: {e}")
             return False
 
-    print("All the artifacts have been downloaded successfully.")
+    logger.info("All the artifacts have been downloaded successfully.")
     return True
 
 
+# create_tarballs creates `.tar.gz` archives for the artifacts before promoting them.
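+# Each subdirectory of LOCAL_ARTIFACTS_DIR is packed into its own archive.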
 def create_tarballs():
-    print(f"\nCreating archives for subdirectories in {LOCAL_ARTIFACTS_DIR}")
+    logger.info(f"Creating archives for subdirectories in {LOCAL_ARTIFACTS_DIR}")
     created_archives = []
     original_cwd = os.getcwd()
     try:
@@ -213,47 +241,51 @@ def create_tarballs():
                 tar.add(dir_name)
 
             full_archive_path = os.path.join(original_cwd, LOCAL_ARTIFACTS_DIR, archive_name)
-            print(f"Successfully created archive at {full_archive_path}")
+            logger.info(f"Successfully created archive at {full_archive_path}")
             created_archives.append(full_archive_path)
 
     except Exception as e:
-        print(f"ERROR: Failed to create tar.gz archives: {e}")
-        return []
+        logger.debug(f"ERROR: Failed to create tar.gz archives: {e}")
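+        # exit (rather than return an empty list) so that a packaging failure stops the release flow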
+        sys.exit(1)
     finally:
         os.chdir(original_cwd)
 
     return created_archives
 
 
-# def upload_assets_to_github_release(asset_paths, release_version: str):
-#     if not GITHUB_TOKEN:
-#         print("ERROR: GITHUB_TOKEN environment variable not set.")
-#         sys.exit(1)
-#
-#     try:
-#         g = Github(GITHUB_TOKEN)
-#         repo = g.get_repo(GITHUB_REPO)
-#     except GithubException as e:
-#         print(f"ERROR: Could not connect to GitHub or find repository '{GITHUB_REPO}', Error {e}.")
-#         sys.exit(1)
-#
-#     try:
-#         release = repo.get_release(release_version)
-#     except GithubException as e:
-#         print(
-#             f"ERROR: Could not find release with tag '{release_version}'. Please ensure release exists already. Error: {e}"
-#         )
-#         return
-#
-#     for asset_path in asset_paths:
-#         asset_name = os.path.basename(asset_path)
-#         print(f"Uploading artifact '{asset_name}' to github release as asset")
-#         try:
-#             release.upload_asset(path=asset_path, name=asset_name, content_type="application/gzip")
-#         except GithubException as e:
-#             print(f"ERROR: Failed to upload asset {asset_name}. Error: {e}")
-#         except Exception as e:
-#             print(f"An unexpected error occurred during upload of {asset_name}: {e}")
+# upload_assets_to_github_release uploads the release artifacts (the downloaded, notarized and signed staging artifacts)
+# to the GitHub release as assets.
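+# The release tagged with release_version must already exist; this function only attaches assets to it.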
+def upload_assets_to_github_release(asset_paths, release_version: str):
+    if not GITHUB_TOKEN:
+        logger.info("ERROR: GH_TOKEN environment variable not set.")
+        sys.exit(1)
+
+    try:
+        g = Github(GITHUB_TOKEN)
+        repo = g.get_repo(GITHUB_REPO)
+    except GithubException as e:
+        logger.info(f"ERROR: Could not connect to GitHub or find repository '{GITHUB_REPO}'. Error: {e}")
+        sys.exit(1)
+
+    try:
+        release = repo.get_release(release_version)
+    except GithubException as e:
+        logger.debug(
+            f"ERROR: Could not find release with tag '{release_version}'. Please ensure the release already exists. Error: {e}"
+        )
+        sys.exit(2)
+
+    for asset_path in asset_paths:
+        asset_name = os.path.basename(asset_path)
+        logger.info(f"Uploading artifact '{asset_name}' to the GitHub release as an asset")
+        try:
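+            # all assets, including checksums.txt, are uploaded with the gzip content type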
+            release.upload_asset(path=asset_path, name=asset_name, content_type="application/gzip")
+        except GithubException as e:
+            logger.debug(f"ERROR: Failed to upload asset {asset_name}. Error: {e}")
+            sys.exit(2)
+        except Exception as e:
+            logger.debug(f"An unexpected error occurred during upload of {asset_name}: {e}")
+            sys.exit(2)
 
 
 if __name__ == "__main__":