Skip to content

Commit 6a17d64

Browse files
Remove - from pkg and add comments
1 parent c6c2c20 commit 6a17d64

File tree

11 files changed

+110
-77
lines changed

11 files changed

+110
-77
lines changed

.evergreen-functions.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -560,7 +560,7 @@ functions:
560560
MACOS_NOTARY_KEY: ${macos_notary_keyid}
561561
MACOS_NOTARY_SECRET: ${macos_notary_secret}
562562
working_dir: src/github.com/mongodb/mongodb-kubernetes
563-
binary: scripts/dev/run_python.sh scripts/release/kubectl-mongodb/python/build_kubectl_plugin.py
563+
binary: scripts/dev/run_python.sh scripts/release/kubectl_mongodb/python/build_kubectl_plugin.py
564564

565565
promote_kubectl_plugin_and_release:
566566
- command: subprocess.exec
@@ -584,7 +584,7 @@ functions:
584584
MACOS_NOTARY_KEY: ${macos_notary_keyid}
585585
MACOS_NOTARY_SECRET: ${macos_notary_secret}
586586
working_dir: src/github.com/mongodb/mongodb-kubernetes
587-
binary: scripts/dev/run_python.sh scripts/release/kubectl-mongodb/python/promote_kubectl_plugin.py --release_version ${release_version} --staging_commit ${staging_commit_sha}
587+
binary: scripts/dev/run_python.sh scripts/release/kubectl_mongodb/python/promote_kubectl_plugin.py --release_version ${release_version} --staging_commit ${staging_commit_sha}
588588

589589
build_and_push_appdb_database:
590590
- command: subprocess.exec

.evergreen.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -421,7 +421,7 @@ tasks:
421421
expansion_name: GH_TOKEN
422422
- func: promote_kubectl_plugin_and_release
423423
vars:
424-
release_version: 6.0.0
424+
release_version: 9.0.9
425425
staging_commit_sha: 68a45e3b9f9b2b000754926f
426426

427427
- name: build_test_image

.goreleaser.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,9 @@ builds:
1919
hooks:
2020
# This will notarize Apple binaries and replace goreleaser bins with the notarized ones
2121
post:
22-
- cmd: ./scripts/release/kubectl-mongodb/kubectl_mac_notarize.sh
22+
- cmd: ./scripts/release/kubectl_mongodb/kubectl_mac_notarize.sh
2323
output: true
24-
- cmd: ./scripts/release/kubectl-mongodb/sign.sh {{ .Path }}
24+
- cmd: ./scripts/release/kubectl_mongodb/sign.sh {{ .Path }}
2525
env:
2626
- GRS_USERNAME={{ .Env.GRS_USERNAME }}
2727
- GRS_PASSWORD={{ .Env.GRS_PASSWORD }}
@@ -30,7 +30,7 @@ builds:
3030
- SIGNING_IMAGE_URI={{ .Env.SIGNING_IMAGE_URI }}
3131
- ARTIFACTORY_USERNAME=mongodb-enterprise-kubernetes-operator
3232
- ARTIFACTORY_PASSWORD={{ .Env.ARTIFACTORY_PASSWORD }}
33-
- cmd: ./scripts/release/kubectl-mongodb/verify.sh {{ .Path }} && echo "VERIFIED OK"
33+
- cmd: ./scripts/release/kubectl_mongodb/verify.sh {{ .Path }} && echo "VERIFIED OK"
3434

3535
archives:
3636
- format: tar.gz

requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ botocore==1.40.7
3535
boto3==1.40.7
3636
python-frontmatter==1.1.0
3737
python-on-whales==0.78.0
38+
PyGithub==2.7.0
3839

3940
# from kubeobject
4041
freezegun==1.5.5

scripts/release/kubectl-mongodb/install_istio_separate_network.sh renamed to scripts/release/kubectl_mongodb/install_istio_separate_network.sh

File renamed without changes.

scripts/release/kubectl-mongodb/kubectl_mac_notarize.sh renamed to scripts/release/kubectl_mongodb/kubectl_mac_notarize.sh

File renamed without changes.

scripts/release/kubectl-mongodb/python/build_kubectl_plugin.py renamed to scripts/release/kubectl_mongodb/python/build_kubectl_plugin.py

File renamed without changes.

scripts/release/kubectl-mongodb/python/promote_kubectl_plugin.py renamed to scripts/release/kubectl_mongodb/python/promote_kubectl_plugin.py

Lines changed: 103 additions & 71 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,31 @@
11
import argparse
22
import hashlib
33
import os
4+
import subprocess
45
import sys
56
import tarfile
6-
import subprocess
77
from pathlib import Path
8-
import boto3
98

9+
import boto3
1010
from botocore.exceptions import ClientError, NoCredentialsError, PartialCredentialsError
11-
# from github import Github, GithubException
11+
from github import Github, GithubException
12+
13+
from lib.base_logger import logger
1214

13-
GITHUB_REPO = "mongodb/mongodb-kubernetes"
1415
GITHUB_TOKEN = os.environ.get("GH_TOKEN")
16+
GITHUB_REPO = "mongodb/mongodb-kubernetes"
17+
18+
AWS_REGION = "eu-north-1"
19+
20+
STAGING_S3_BUCKET_NAME = "mongodb-kubernetes-dev"
21+
RELEASE_S3_BUCKET_NAME = "mongodb-kubernetes-staging"
22+
23+
KUBECTL_PLUGIN_BINARY_NAME = "kubectl-mongodb"
24+
S3_BUCKET_KUBECTL_PLUGIN_SUBPATH = KUBECTL_PLUGIN_BINARY_NAME
1525

1626
LOCAL_ARTIFACTS_DIR = "artifacts"
1727
CHECKSUMS_PATH = f"{LOCAL_ARTIFACTS_DIR}/checksums.txt"
1828

19-
DEV_S3_BUCKET_NAME = "mongodb-kubernetes-dev"
20-
STAGING_S3_BUCKET_NAME = "mongodb-kubernetes-staging"
21-
22-
S3_BUCKET_KUBECTL_PLUGIN_SUBPATH = "kubectl-mongodb"
23-
AWS_REGION = "eu-north-1"
2429

2530
def main():
2631
parser = argparse.ArgumentParser()
@@ -44,57 +49,69 @@ def main():
4449

4550
promote_artifacts(artifacts, args.release_version)
4651

47-
# upload_assets_to_github_release(artifacts_tar, args.release_version)
52+
upload_assets_to_github_release(artifacts, args.release_version)
4853

54+
55+
# generate_checksums generates checksums for the artifacts that we are going to upload to github release as assets.
56+
# It's formatted: checksum artifact_name
4957
def generate_checksums(artifacts: list[str]):
5058
checksums_path = Path(CHECKSUMS_PATH)
5159

5260
with checksums_path.open("w") as out_file:
5361
for artifact in artifacts:
5462
artifact_path = Path(artifact)
5563
if not artifact_path.is_file() or not artifact_path.name.endswith(".tar.gz"):
56-
print(f"skipping invalid tar file: {artifact_path}")
64+
logger.info(f"skipping invalid tar file: {artifact_path}")
5765
continue
5866

5967
sha256 = hashlib.sha256()
6068
with open(artifact_path, "rb") as f:
61-
for chunk in iter(lambda : f.read(8192), b""):
69+
# read chunks of 8192 bytes until end of file (b"") is received
70+
for chunk in iter(lambda: f.read(8192), b""):
6271
sha256.update(chunk)
6372

6473
checksum_line = f"{sha256.hexdigest()} {artifact_path.name}"
65-
out_file.write(checksum_line+"\n")
74+
out_file.write(checksum_line + "\n")
6675

67-
print(f"Checksums written to {checksums_path}")
76+
logger.info(f"Checksums written to {checksums_path}")
6877
all_artifacts = list(artifacts) + [str(checksums_path.resolve())]
69-
return all_artifacts
78+
return all_artifacts
79+
7080

81+
# promote_artifacts promotes (copies) the downloaded staging artifacts to release S3 bucket.
7182
def promote_artifacts(artifacts: list[str], release_version: str):
7283
s3_client = boto3.client("s3", region_name=AWS_REGION)
7384
for file in artifacts:
74-
if not os.path.isfile(file) or not file.endswith(('.tar.gz', '.txt')):
75-
print(f"skipping invalid or non-tar file: {file}")
85+
if not os.path.isfile(file) or not file.endswith((".tar.gz", ".txt")):
86+
logger.info(f"Skipping invalid or non-tar/checksum file: {file}")
7687
continue
7788

7889
file_name = os.path.basename(file)
7990
s3_key = os.path.join(S3_BUCKET_KUBECTL_PLUGIN_SUBPATH, release_version, file_name)
8091

8192
try:
82-
s3_client.upload_file(file, STAGING_S3_BUCKET_NAME, s3_key)
93+
s3_client.upload_file(file, RELEASE_S3_BUCKET_NAME, s3_key)
8394
except ClientError as e:
84-
print(f"failed to upload the file {file}: {e}")
95+
logger.debug(f"failed to upload the file {file}: {e}")
8596
sys.exit(1)
8697

87-
print("artifacts were promoted to release bucket successfully")
98+
logger.info("Artifacts were promoted to release bucket successfully")
8899

89100

101+
# notarize_artifacts notarizes the darwin goreleaser binaries in-place.
90102
def notarize_artifacts(release_version: str):
91-
notarize_result = subprocess.run(["scripts/release/kubectl-mongodb/kubectl_mac_notarize.sh", release_version], capture_output=True, text=True)
103+
notarize_result = subprocess.run(
104+
["scripts/release/kubectl_mongodb/kubectl_mac_notarize.sh", release_version], capture_output=True, text=True
105+
)
92106
if notarize_result.returncode == 0:
93-
print("notarization of artifacts was successful")
107+
logger.info("Notarization of artifacts was successful")
94108
else:
95-
print(f"notarization of artifacts failed. \nstdout: {notarize_result.stdout} \nstderr: {notarize_result.stderr}")
109+
logger.debug(
110+
f"Notarization of artifacts failed. \nstdout: {notarize_result.stdout} \nstderr: {notarize_result.stderr}"
111+
)
96112
sys.exit(1)
97113

114+
98115
# sign_and_verify_artifacts iterates over the goreleaser artifacts, that have been downloaded from S3, and
99116
# signs and verifies them.
100117
def sign_and_verify_artifacts():
@@ -110,18 +127,27 @@ def sign_and_verify_artifacts():
110127
file_path = os.path.join(subdir_path, file)
111128

112129
if os.path.isfile(file_path):
113-
sign_result = subprocess.run(["scripts/release/kubectl-mongodb/sign.sh", file_path], capture_output=True, text=True)
130+
# signing an already signed artifact fails with `Signature already exists. Displaying proof`.
131+
sign_result = subprocess.run(
132+
["scripts/release/kubectl_mongodb/sign.sh", file_path], capture_output=True, text=True
133+
)
114134
if sign_result.returncode == 0:
115-
print(f"artifact {file_path} was signed successfully")
135+
logger.info(f"Artifact {file_path} was signed successfully")
116136
else:
117-
print(f"signing the artifact {file_path} failed. \nstdout: {sign_result.stdout} \nstderr: {sign_result.stderr}")
137+
logger.debug(
138+
f"Signing the artifact {file_path} failed. \nstdout: {sign_result.stdout} \nstderr: {sign_result.stderr}"
139+
)
118140
sys.exit(1)
119141

120-
verify_result = subprocess.run(["scripts/release/kubectl-mongodb/verify.sh", file_path], capture_output=True, text=True)
142+
verify_result = subprocess.run(
143+
["scripts/release/kubectl_mongodb/verify.sh", file_path], capture_output=True, text=True
144+
)
121145
if verify_result.returncode == 0:
122-
print(f"artifact {file_path} was verified successfully")
146+
logger.info(f"Artifact {file_path} was verified successfully")
123147
else:
124-
print(f"verification of the artifact {file_path} failed. \nstdout: {verify_result.stdout} \nstderr: {verify_result.stderr}")
148+
logger.debug(
149+
f"Verification of the artifact {file_path} failed. \nstdout: {verify_result.stdout} \nstderr: {verify_result.stderr}"
150+
)
125151
sys.exit(1)
126152

127153

@@ -142,18 +168,19 @@ def s3_artifacts_path_to_local_path(release_version: str, commit_sha: str):
142168
}
143169

144170

145-
# download_artifacts_from_s3 downloads the staging artifacts from S3 and puts them in the local dir LOCAL_ARTIFACTS_DIR
171+
# download_artifacts_from_s3 downloads the staging artifacts (only the ones that we would later promote) from S3 and puts
172+
# them in the local dir LOCAL_ARTIFACTS_DIR.
146173
# ToDo: if the artifacts are not present at correct location, this is going to fail silently, we should instead fail this
147174
def download_artifacts_from_s3(release_version: str, commit_sha: str):
148-
print(f"\nStarting download of artifacts from S3 bucket: {DEV_S3_BUCKET_NAME}")
175+
logger.info(f"Starting download of artifacts from staging S3 bucket: {STAGING_S3_BUCKET_NAME}")
149176

150177
try:
151178
s3_client = boto3.client("s3", region_name=AWS_REGION)
152179
except (NoCredentialsError, PartialCredentialsError):
153-
print("ERROR: AWS credentials were not set.")
180+
logger.debug("ERROR: AWS credentials were not set.")
154181
sys.exit(1)
155182
except Exception as e:
156-
print(f"An error occurred connecting to S3: {e}")
183+
logger.debug(f"An error occurred connecting to S3: {e}")
157184
sys.exit(1)
158185

159186
artifacts_to_promote = s3_artifacts_path_to_local_path(release_version, commit_sha)
@@ -165,7 +192,7 @@ def download_artifacts_from_s3(release_version: str, commit_sha: str):
165192
for s3_artifact_dir, local_subdir in artifacts_to_promote.items():
166193
try:
167194
paginator = s3_client.get_paginator("list_objects_v2")
168-
pages = paginator.paginate(Bucket=DEV_S3_BUCKET_NAME, Prefix=s3_artifact_dir)
195+
pages = paginator.paginate(Bucket=STAGING_S3_BUCKET_NAME, Prefix=s3_artifact_dir)
169196
for page in pages:
170197
# "Contents" corresponds to the directory in the S3 bucket
171198
if "Contents" not in page:
@@ -186,20 +213,21 @@ def download_artifacts_from_s3(release_version: str, commit_sha: str):
186213
# Create the local directory structure if it doesn't exist
187214
os.makedirs(os.path.dirname(final_local_path), exist_ok=True)
188215

189-
print(f"Downloading {s3_key} to {final_local_path}")
190-
s3_client.download_file(DEV_S3_BUCKET_NAME, s3_key, final_local_path)
216+
logger.info(f"Downloading staging artifact {s3_key} to {final_local_path}")
217+
s3_client.download_file(STAGING_S3_BUCKET_NAME, s3_key, final_local_path)
191218
download_count += 1
192219

193220
except ClientError as e:
194-
print(f"ERROR: Failed to list or download from prefix '{s3_artifact_dir}'. S3 Client Error: {e}")
221+
logger.debug(f"ERROR: Failed to list or download from prefix '{s3_artifact_dir}'. S3 Client Error: {e}")
195222
return False
196223

197-
print("All the artifacts have been downloaded successfully.")
224+
logger.info("All the artifacts have been downloaded successfully.")
198225
return True
199226

200227

228+
# create_tarballs creates `.tar.gz` archives for the artifacts before promoting them.
201229
def create_tarballs():
202-
print(f"\nCreating archives for subdirectories in {LOCAL_ARTIFACTS_DIR}")
230+
logger.info(f"Creating archives for subdirectories in {LOCAL_ARTIFACTS_DIR}")
203231
created_archives = []
204232
original_cwd = os.getcwd()
205233
try:
@@ -213,47 +241,51 @@ def create_tarballs():
213241
tar.add(dir_name)
214242

215243
full_archive_path = os.path.join(original_cwd, LOCAL_ARTIFACTS_DIR, archive_name)
216-
print(f"Successfully created archive at {full_archive_path}")
244+
logger.info(f"Successfully created archive at {full_archive_path}")
217245
created_archives.append(full_archive_path)
218246

219247
except Exception as e:
220-
print(f"ERROR: Failed to create tar.gz archives: {e}")
221-
return []
248+
logger.debug(f"ERROR: Failed to create tar.gz archives: {e}")
249+
sys.exit(1)
222250
finally:
223251
os.chdir(original_cwd)
224252

225253
return created_archives
226254

227255

228-
# def upload_assets_to_github_release(asset_paths, release_version: str):
229-
# if not GITHUB_TOKEN:
230-
# print("ERROR: GITHUB_TOKEN environment variable not set.")
231-
# sys.exit(1)
232-
#
233-
# try:
234-
# g = Github(GITHUB_TOKEN)
235-
# repo = g.get_repo(GITHUB_REPO)
236-
# except GithubException as e:
237-
# print(f"ERROR: Could not connect to GitHub or find repository '{GITHUB_REPO}', Error {e}.")
238-
# sys.exit(1)
239-
#
240-
# try:
241-
# release = repo.get_release(release_version)
242-
# except GithubException as e:
243-
# print(
244-
# f"ERROR: Could not find release with tag '{release_version}'. Please ensure release exists already. Error: {e}"
245-
# )
246-
# return
247-
#
248-
# for asset_path in asset_paths:
249-
# asset_name = os.path.basename(asset_path)
250-
# print(f"Uploading artifact '{asset_name}' to github release as asset")
251-
# try:
252-
# release.upload_asset(path=asset_path, name=asset_name, content_type="application/gzip")
253-
# except GithubException as e:
254-
# print(f"ERROR: Failed to upload asset {asset_name}. Error: {e}")
255-
# except Exception as e:
256-
# print(f"An unexpected error occurred during upload of {asset_name}: {e}")
256+
# upload_assets_to_github_release uploads the release artifacts (downloaded notarized/signed staging artifacts) to
257+
# the github release as assets.
258+
def upload_assets_to_github_release(asset_paths, release_version: str):
259+
if not GITHUB_TOKEN:
260+
logger.info("ERROR: GITHUB_TOKEN environment variable not set.")
261+
sys.exit(1)
262+
263+
try:
264+
g = Github(GITHUB_TOKEN)
265+
repo = g.get_repo(GITHUB_REPO)
266+
except GithubException as e:
267+
logger.info(f"ERROR: Could not connect to GitHub or find repository '{GITHUB_REPO}', Error {e}.")
268+
sys.exit(1)
269+
270+
try:
271+
release = repo.get_release(release_version)
272+
except GithubException as e:
273+
logger.debug(
274+
f"ERROR: Could not find release with tag '{release_version}'. Please ensure release exists already. Error: {e}"
275+
)
276+
sys.exit(2)
277+
278+
for asset_path in asset_paths:
279+
asset_name = os.path.basename(asset_path)
280+
logger.info(f"Uploading artifact '{asset_name}' to github release as asset")
281+
try:
282+
release.upload_asset(path=asset_path, name=asset_name, content_type="application/gzip")
283+
except GithubException as e:
284+
logger.debug(f"ERROR: Failed to upload asset {asset_name}. Error: {e}")
285+
sys.exit(2)
286+
except Exception as e:
287+
logger.debug(f"An unexpected error occurred during upload of {asset_name}: {e}")
288+
sys.exit(2)
257289

258290

259291
if __name__ == "__main__":
File renamed without changes.

0 commit comments

Comments
 (0)