diff --git a/.github/workflows/build-package.yml b/.github/workflows/build-package.yml index 7fea124a..fc0116b4 100644 --- a/.github/workflows/build-package.yml +++ b/.github/workflows/build-package.yml @@ -20,13 +20,6 @@ jobs: with: python-version: '3.x' # Specify your Python version - - name: Install Poetry - run: | - curl -sSL https://install.python-poetry.org | python3 - - - name: Build wheel - run: poetry build -f wheel - - name: Extract version id: get_version run: | @@ -39,6 +32,13 @@ jobs: sed -i "s/COMMIT_HASH = \".*\"/COMMIT_HASH = \"$COMMIT_HASH\"/" src/emd/revision.py echo "SHORT_SHA=$COMMIT_HASH" >> $GITHUB_ENV + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 + + - name: Build wheel + run: poetry build -f wheel + - name: Upload wheel artifact uses: actions/upload-artifact@v4 with: diff --git a/src/emd/cfn/codepipeline/template.yaml b/src/emd/cfn/codepipeline/template.yaml index e63539a5..beea7a70 100644 --- a/src/emd/cfn/codepipeline/template.yaml +++ b/src/emd/cfn/codepipeline/template.yaml @@ -1,5 +1,8 @@ AWSTemplateFormatVersion: '2010-09-09' -Description: CodePipeline for model deployment +Description: | + Easy Model Deployer bootstrap environment. + If you delete this stack, you will not be able to deploy any new models. + Parameters: ArtifactBucketName: Type: String @@ -195,7 +198,7 @@ Resources: phases: pre_build: commands: - - echo Build started on `date` + - echo model build pipeline started on `date` build: commands: - |- @@ -214,21 +217,24 @@ Resources: pip install --upgrade pip pip install -r requirements.txt python pipeline.py --region $region --model_id $model_id --model_tag $ModelTag --framework_type $FrameworkType --service_type $service --backend_type $backend_name --model_s3_bucket $model_s3_bucket --instance_type $instance_type --extra_params "$extra_params" --skip_deploy - cd .. - echo pipeline build completed on `date` + # cd .. 
+            echo model build pipeline completed on `date`
      post_build:
        commands:
          - |-
+            echo post build started on `date`
             SERVICE_TYPE=$(echo "$ServiceType" | tr '[:upper:]' '[:lower:]')
+            if [ -f ../cfn/$ServiceType/post_build.py ]; then
+              # copy post_build.py to pipeline so that the post_build.py can use the same module
+              cp ../cfn/$ServiceType/post_build.py ${ServiceType}_post_build.py
+              python ${ServiceType}_post_build.py --region $region --model_id $model_id --model_tag $ModelTag --framework_type $FrameworkType --service_type $service --backend_type $backend_name --model_s3_bucket $model_s3_bucket --instance_type $instance_type --extra_params "$extra_params"
+            fi
+            cd ..
             cp cfn/$ServiceType/template.yaml template.yaml
             cp pipeline/parameters.json parameters.json
-            if [ -f cfn/$ServiceType/post_build.py ]; then
-              cp cfn/$ServiceType/post_build.py post_build.py
-              python post_build.py --region $region --model_id $model_id --model_tag $ModelTag --framework_type $FrameworkType --service_type $service --backend_type $backend_name --model_s3_bucket $model_s3_bucket --instance_type $instance_type --extra_params "$extra_params"
-            fi
             cat parameters.json
-            echo Build completed on `date`
+            echo post build completed on `date`
  artifacts:
    files:
diff --git a/src/emd/cfn/ecs/post_build.py b/src/emd/cfn/ecs/post_build.py
index a5a335c6..d73458e8 100644
--- a/src/emd/cfn/ecs/post_build.py
+++ b/src/emd/cfn/ecs/post_build.py
@@ -3,160 +3,167 @@
 import json
 import os
 import argparse
+from emd.models.utils.serialize_utils import load_extra_params
 # Post build script for ECS, it will deploy the VPC and ECS cluster. 
- -CFN_ROOT_PATH = 'cfn' +CFN_ROOT_PATH = "../cfn" WAIT_SECONDS = 10 -# CFN_ROOT_PATH = '../../cfn' -JSON_DOUBLE_QUOTE_REPLACE = '' - -def load_extra_params(string): - string = string.replace(JSON_DOUBLE_QUOTE_REPLACE,'"') - try: - return json.loads(string) - except json.JSONDecodeError: - raise argparse.ArgumentTypeError(f"Invalid dictionary format: {string}") -def dump_extra_params(d:dict): - return json.dumps(d).replace('"', JSON_DOUBLE_QUOTE_REPLACE) -def wait_for_stack_completion(client, stack_id, stack_name): +def wait_for_stack_completion(client, stack_name): while True: - stack_status = client.describe_stacks(StackName=stack_id)['Stacks'][0]['StackStatus'] - if stack_status in ['CREATE_COMPLETE', 'UPDATE_COMPLETE']: + response = client.describe_stacks(StackName=stack_name) + stack_status = response["Stacks"][0]["StackStatus"] + while stack_status.endswith("IN_PROGRESS"): + print( + f"Stack {stack_name} is currently {stack_status}. Waiting for completion..." + ) + time.sleep(WAIT_SECONDS) + response = client.describe_stacks(StackName=stack_name) + stack_status = response["Stacks"][0]["StackStatus"] + + if stack_status in ["CREATE_COMPLETE", "UPDATE_COMPLETE"]: print(f"Stack {stack_name} deployment complete") break - elif stack_status in ['CREATE_IN_PROGRESS', 'UPDATE_IN_PROGRESS']: - print(f"Stack {stack_name} is still being deployed...") - time.sleep(WAIT_SECONDS) else: - raise Exception(f"Stack {stack_name} deployment failed with status {stack_status}") + raise Exception( + f"Post build stage failed. The stack {stack_name} is in an unexpected status: {stack_status}. Please visit the AWS CloudFormation Console to delete the stack." 
+ ) + def get_stack_outputs(client, stack_name): response = client.describe_stacks(StackName=stack_name) - return response['Stacks'][0].get('Outputs', []) + return response["Stacks"][0].get("Outputs", []) + def create_or_update_stack(client, stack_name, template_path, parameters=[]): try: + wait_for_stack_completion(client, stack_name) response = client.describe_stacks(StackName=stack_name) - stack_status = response['Stacks'][0]['StackStatus'] - if stack_status in ['ROLLBACK_COMPLETE', 'ROLLBACK_FAILED', 'DELETE_FAILED']: - print(f"Stack {stack_name} is in {stack_status} state. Deleting the stack to allow for recreation.") - client.delete_stack(StackName=stack_name) - while True: - try: - response = client.describe_stacks(StackName=stack_name) - stack_status = response['Stacks'][0]['StackStatus'] - if stack_status == 'DELETE_IN_PROGRESS': - print(f"Stack {stack_name} is being deleted...") - time.sleep(WAIT_SECONDS) - else: - raise Exception(f"Unexpected status {stack_status} while waiting for stack deletion.") - except client.exceptions.ClientError as e: - if 'does not exist' in str(e): - print(f"Stack {stack_name} successfully deleted.") - break - else: - raise - while stack_status not in ['CREATE_COMPLETE', 'UPDATE_COMPLETE']: - if stack_status in ['CREATE_IN_PROGRESS', 'UPDATE_IN_PROGRESS']: - print(f"Stack {stack_name} is currently {stack_status}. Waiting for it to complete...") - time.sleep(WAIT_SECONDS) - response = client.describe_stacks(StackName=stack_name) - stack_status = response['Stacks'][0]['StackStatus'] - else: - raise Exception(f"Stack {stack_name} is in an unexpected state: {stack_status}") - print(f"Stack {stack_name} already exists with status {stack_status}") + stack_status = response["Stacks"][0]["StackStatus"] + + if stack_status in ["CREATE_COMPLETE", "UPDATE_COMPLETE"]: + print(f"Stack {stack_name} already exists. 
Proceeding with update.") + with open(template_path, "r") as template_file: + template_body = template_file.read() + + response = client.update_stack( + StackName=stack_name, + TemplateBody=template_body, + Capabilities=["CAPABILITY_NAMED_IAM"], + Parameters=parameters + ) + + print(f"Started update of stack {stack_name}") + wait_for_stack_completion(client, stack_name) + except client.exceptions.ClientError as e: - if 'does not exist' in str(e): + if "does not exist" in str(e): print(f"Stack {stack_name} does not exist. Proceeding with creation.") - with open(template_path, 'r') as template_file: + with open(template_path, "r") as template_file: template_body = template_file.read() response = client.create_stack( StackName=stack_name, TemplateBody=template_body, - Capabilities=['CAPABILITY_NAMED_IAM'], - Parameters=parameters + Capabilities=["CAPABILITY_NAMED_IAM"], + Parameters=parameters, + EnableTerminationProtection=True, ) - stack_id = response['StackId'] + stack_id = response["StackId"] print(f"Started deployment of stack {stack_name} with ID {stack_id}") - wait_for_stack_completion(client, stack_id, stack_name) + wait_for_stack_completion(client, stack_name) else: - raise + raise Exception( + f"Post build stage failed. The stack {stack_name} is in an unexpected status: {stack_status}. Please visit the AWS CloudFormation Console to delete the stack." 
+ ) + def update_parameters_file(parameters_path, updates): - with open(parameters_path, 'r') as file: + with open(parameters_path, "r") as file: data = json.load(file) - data['Parameters'].update(updates) + data["Parameters"].update(updates) - with open(parameters_path, 'w') as file: + with open(parameters_path, "w") as file: json.dump(data, file, indent=4) + def deploy_vpc_template(region): - client = boto3.client('cloudformation', region_name=region) - stack_name = 'EMD-VPC' - template_path = f'{CFN_ROOT_PATH}/vpc/template.yaml' + client = boto3.client("cloudformation", region_name=region) + stack_name = "EMD-VPC" + template_path = f"{CFN_ROOT_PATH}/vpc/template.yaml" create_or_update_stack(client, stack_name, template_path) outputs = get_stack_outputs(client, stack_name) vpc_id = None subnets = None for output in outputs: - if output['OutputKey'] == 'VPCID': - vpc_id = output['OutputValue'] - elif output['OutputKey'] == 'Subnets': - subnets = output['OutputValue'] - update_parameters_file('parameters.json', {'VPCID': vpc_id, 'Subnets': subnets}) + if output["OutputKey"] == "VPCID": + vpc_id = output["OutputValue"] + elif output["OutputKey"] == "Subnets": + subnets = output["OutputValue"] + update_parameters_file("parameters.json", {"VPCID": vpc_id, "Subnets": subnets}) return vpc_id, subnets def deploy_ecs_cluster_template(region, vpc_id, subnets): - client = boto3.client('cloudformation', region_name=region) - stack_name = 'EMD-ECS-Cluster' - template_path = f'{CFN_ROOT_PATH}/ecs/cluster.yaml' - create_or_update_stack(client, stack_name, template_path, [ - { - 'ParameterKey': 'VPCID', - 'ParameterValue': vpc_id, - }, - { - 'ParameterKey': 'Subnets', - 'ParameterValue': subnets, - }, - ]) + client = boto3.client("cloudformation", region_name=region) + stack_name = "EMD-ECS-Cluster" + template_path = f"{CFN_ROOT_PATH}/ecs/cluster.yaml" + create_or_update_stack( + client, + stack_name, + template_path, + [ + { + "ParameterKey": "VPCID", + "ParameterValue": 
vpc_id, + }, + { + "ParameterKey": "Subnets", + "ParameterValue": subnets, + }, + ], + ) outputs = get_stack_outputs(client, stack_name) for output in outputs: - update_parameters_file('parameters.json', {output['OutputKey']: output['OutputValue']}) + update_parameters_file( + "parameters.json", {output["OutputKey"]: output["OutputValue"]} + ) def post_build(): parser = argparse.ArgumentParser() - parser.add_argument('--region', type=str, required=False) - parser.add_argument('--model_id', type=str, required=False) - parser.add_argument('--model_tag', type=str, required=False) - parser.add_argument('--framework_type', type=str, required=False) - parser.add_argument('--service_type', type=str, required=False) - parser.add_argument('--backend_type', type=str, required=False) - parser.add_argument('--model_s3_bucket', type=str, required=False) - parser.add_argument('--instance_type', type=str, required=False) - parser.add_argument('--extra_params', type=load_extra_params, required=False, default=os.environ.get("extra_params","{}")) + parser.add_argument("--region", type=str, required=False) + parser.add_argument("--model_id", type=str, required=False) + parser.add_argument("--model_tag", type=str, required=False) + parser.add_argument("--framework_type", type=str, required=False) + parser.add_argument("--service_type", type=str, required=False) + parser.add_argument("--backend_type", type=str, required=False) + parser.add_argument("--model_s3_bucket", type=str, required=False) + parser.add_argument("--instance_type", type=str, required=False) + parser.add_argument( + "--extra_params", + type=load_extra_params, + required=False, + default=os.environ.get("extra_params", "{}"), + ) args = parser.parse_args() - service_params = args.extra_params.get('service_params',{}) + service_params = args.extra_params.get("service_params", {}) - if 'vpc_id' not in service_params: + if "vpc_id" not in service_params: vpc_id, subnets = deploy_vpc_template(args.region) else: - vpc_id = 
service_params.get('vpc_id') - subnets = service_params.get('subnet_ids') - update_parameters_file('parameters.json', {'VPCID': vpc_id, 'Subnets': subnets}) + vpc_id = service_params.get("vpc_id") + subnets = service_params.get("subnet_ids") + update_parameters_file("parameters.json", {"VPCID": vpc_id, "Subnets": subnets}) deploy_ecs_cluster_template(args.region, vpc_id, subnets) + if __name__ == "__main__": post_build() diff --git a/src/emd/commands/deploy.py b/src/emd/commands/deploy.py index 71095b80..14fbb734 100644 --- a/src/emd/commands/deploy.py +++ b/src/emd/commands/deploy.py @@ -521,19 +521,19 @@ def deploy( raise typer.Exit(0) # log the deployment parameters - engine_info = model.find_current_engine(engine_type) - framework_info = model.find_current_framework(framework_type) - - engine_info_str = json.dumps(engine_info,indent=2,ensure_ascii=False) - framework_info_str = json.dumps(framework_info, indent=2, ensure_ascii=False) - extra_params_info = json.dumps(extra_params, indent=2, ensure_ascii=False) - console.print(f"[bold blue]Deployment parameters:[/bold blue]") - console.print(f"[bold blue]model_id: {model_id},model_tag: {model_tag}[/bold blue]") - console.print(f"[bold blue]instance_type: {instance_type}[/bold blue]") - console.print(f"[bold blue]service_type: {service_type}[/bold blue]") - console.print(f"[bold blue]engine info:\n {engine_info_str}[/bold blue]") - console.print(f"[bold blue]framework info:\n {framework_info_str}[/bold blue]") - console.print(f"[bold blue]extra_params:\n {extra_params_info}[/bold blue]") + # engine_info = model.find_current_engine(engine_type) + # framework_info = model.find_current_framework(framework_type) + + # engine_info_str = json.dumps(engine_info,indent=2,ensure_ascii=False) + # framework_info_str = json.dumps(framework_info, indent=2, ensure_ascii=False) + # extra_params_info = json.dumps(extra_params, indent=2, ensure_ascii=False) + # console.print(f"[bold blue]Deployment parameters:[/bold blue]") + # 
console.print(f"[bold blue]model_id: {model_id},model_tag: {model_tag}[/bold blue]") + # console.print(f"[bold blue]instance_type: {instance_type}[/bold blue]") + # console.print(f"[bold blue]service_type: {service_type}[/bold blue]") + # console.print(f"[bold blue]engine info:\n {engine_info_str}[/bold blue]") + # console.print(f"[bold blue]framework info:\n {framework_info_str}[/bold blue]") + # console.print(f"[bold blue]extra_params:\n {extra_params_info}[/bold blue]") # Start pipeline execution if service_type != ServiceType.LOCAL: response = sdk_deploy( diff --git a/src/emd/commands/destroy.py b/src/emd/commands/destroy.py index 7ef47793..2e7fa040 100644 --- a/src/emd/commands/destroy.py +++ b/src/emd/commands/destroy.py @@ -2,7 +2,7 @@ from rich.console import Console from rich.panel import Panel -from emd.constants import MODEL_DEFAULT_TAG, VERSION_MODIFY +from emd.constants import MODEL_DEFAULT_TAG from typing_extensions import Annotated from emd.sdk.destroy import destroy as sdk_destroy from emd.utils.decorators import catch_aws_credential_errors,check_emd_env_exist,load_aws_profile @@ -25,11 +25,7 @@ def destroy( ], model_tag: Annotated[ str, typer.Argument(help="Model tag") - ] = MODEL_DEFAULT_TAG, - model_deploy_version: Annotated[ - str, typer.Option("-v", "--deploy-version", help="The version of the model deployment to destroy"), - ] = VERSION_MODIFY + ] = MODEL_DEFAULT_TAG ): - model_deploy_version = convert_version_name_to_stack_name(model_deploy_version) # console.print("[bold blue]Checking AWS environment...[/bold blue]") - sdk_destroy(model_id,model_tag=model_tag,waiting_until_complete=True, model_deploy_version=model_deploy_version) + sdk_destroy(model_id,model_tag=model_tag,waiting_until_complete=True) diff --git a/src/emd/commands/status.py b/src/emd/commands/status.py index 5a96a898..522b8e2f 100644 --- a/src/emd/commands/status.py +++ b/src/emd/commands/status.py @@ -43,14 +43,10 @@ def status( "model_id":d['model_id'], 
"model_tag":d['model_tag'], "status": f"{d['status']} ({d['stage_name']})", - "region":d['region'], "service_type":d['service_type'], "instance_type":d['instance_type'], - "engine_type":d['engine_type'], - "framework_type":d['framework_type'], "create_time":d['create_time'], "outputs":d['outputs'], - "deploy_version":d['deploy_version'] }) for d in completed: @@ -58,28 +54,19 @@ def status( "model_id":d['model_id'], "model_tag":d['model_tag'], "status": d['stack_status'], - "region":d['region'], "service_type":d['service_type'], "instance_type":d['instance_type'], - "engine_type":d['engine_type'], - "framework_type":d['framework_type'], "create_time":d['create_time'], "outputs":d['outputs'], - "deploy_version":d['deploy_version'] }) account_id = get_account_id() table = Table(show_lines=True, expand=True) table.add_column("ModelId", justify="left",overflow='fold') table.add_column("ModelTag", justify="left",overflow='fold') - table.add_column("DeployVersion", justify="left",overflow='fold') table.add_column("Status", justify="left",overflow='fold') - table.add_column("Region", justify="left",overflow='fold') - table.add_column("Account", justify="left",overflow='fold') - table.add_column("Service", justify="left",overflow='fold') + table.add_column("Service", justify="left",overflow='fold',max_width=19) table.add_column("Instance", justify="left",overflow='fold') - table.add_column("Engine", justify="left",overflow='fold') - table.add_column("Framework", justify="left",overflow='fold') table.add_column("CreateTime", justify="left",overflow='fold') table.add_column("Outputs", justify="left",overflow='fold') @@ -87,16 +74,11 @@ def status( for d in data: table.add_row( d['model_id'], - "" if d['model_tag'] == MODEL_DEFAULT_TAG else d['model_tag'], - d['deploy_version'], + d['model_tag'], d['status'], - d['region'], - str(account_id), d['service_type'], d['instance_type'], - d['engine_type'], - d['framework_type'], - d['create_time'], + d['create_time'].replace(" ", 
"\n"), d['outputs'] ) # table.add_row([d['model_id'], d['status']]) diff --git a/src/emd/constants.py b/src/emd/constants.py index 1051f33c..a3f949b1 100644 --- a/src/emd/constants.py +++ b/src/emd/constants.py @@ -1,6 +1,5 @@ from .revision import VERSION, convert_version_name_to_stack_name -VERSION_MODIFY = convert_version_name_to_stack_name(VERSION) -ENV_STACK_NAME = f'EMD-Env-{VERSION_MODIFY}' +ENV_STACK_NAME = f'EMD-Env' MODEL_STACK_NAME_PREFIX = f"EMD-Model" ENV_BUCKET_NAME_PREFIX = "emd-env-artifactbucket" CODEPIPELINE_NAME = f"{ENV_STACK_NAME}-Pipeline" diff --git a/src/emd/models/model.py b/src/emd/models/model.py index 15b1b27e..21fa23e6 100644 --- a/src/emd/models/model.py +++ b/src/emd/models/model.py @@ -17,8 +17,7 @@ from .utils.text_utilities import normalize from emd.constants import ( MODEL_STACK_NAME_PREFIX, - MODEL_DEFAULT_TAG, - VERSION_MODIFY + MODEL_DEFAULT_TAG ) from emd.revision import convert_stack_name_to_version_name @@ -355,11 +354,11 @@ def normalize_model_id(cls,model_id): return normalize(model_id).lower() @classmethod - def get_model_stack_name_prefix(cls,model_id,model_tag=MODEL_DEFAULT_TAG, model_deploy_version=VERSION_MODIFY): + def get_model_stack_name_prefix(cls,model_id,model_tag=MODEL_DEFAULT_TAG): model_id_with_tag = model_id if model_tag and model_tag != MODEL_DEFAULT_TAG: model_id_with_tag = f"{model_id_with_tag}-{model_tag}" - return f"{MODEL_STACK_NAME_PREFIX}-{model_deploy_version}-{cls.normalize_model_id(model_id_with_tag)}" + return f"{MODEL_STACK_NAME_PREFIX}-{cls.normalize_model_id(model_id_with_tag)}" @classmethod def get_deploy_version_from_stack_name(cls,stack_name): diff --git a/src/emd/sdk/bootstrap.py b/src/emd/sdk/bootstrap.py index 08a7bba4..849ba4af 100644 --- a/src/emd/sdk/bootstrap.py +++ b/src/emd/sdk/bootstrap.py @@ -107,7 +107,7 @@ def create_env_stack( {'ParameterKey': 'CodePipelineRoleName', 'ParameterValue': CODEPIPELINE_ROLE_NAME_TEMPLATE.format(region=region)}, {'ParameterKey': 
'CloudFormationRoleName', 'ParameterValue': CLOUDFORMATION_ROLE_NAME_TEMPLATE.format(region=region)} ] - logger.info(f"boostrap stack params: {json.dumps(stack_params,ensure_ascii=False,indent=2)}") + # logger.info(f"boostrap stack params: {json.dumps(stack_params,ensure_ascii=False,indent=2)}") def create_stack(): response = cloudformation.create_stack( StackName=stack_name, diff --git a/src/emd/sdk/deploy.py b/src/emd/sdk/deploy.py index 55277712..d1af66ac 100644 --- a/src/emd/sdk/deploy.py +++ b/src/emd/sdk/deploy.py @@ -64,7 +64,7 @@ def prepare_deploy( # check if model_id is inprogress in pipeline execution if check_stack_exists(model_stack_name): raise RuntimeError( - f"model: {model_id}, tag: {model_tag} is exists, please delete model first" + f"A model with the ID: {model_id} and tag: {model_tag} already exists. Kindly use a different tag to proceed." ) client = boto3.client("codepipeline") @@ -195,9 +195,9 @@ def deploy( {"name": "FrameworkType", "value": framework_type}, {"name": "Region", "value": region}, ] - logger.info( - f"start pipeline execution.\nvariables:\n{json.dumps(variables,ensure_ascii=False,indent=2)}" - ) + # logger.info( + # f"start pipeline execution.\nvariables:\n{json.dumps(variables,ensure_ascii=False,indent=2)}" + # ) start_deploy_time = time.time() diff --git a/src/emd/sdk/destroy.py b/src/emd/sdk/destroy.py index 52505703..54d07de4 100644 --- a/src/emd/sdk/destroy.py +++ b/src/emd/sdk/destroy.py @@ -16,7 +16,6 @@ get_stack_info ) from emd.models.utils.constants import ServiceType -from emd.constants import VERSION_MODIFY from emd.models import Model logger = get_logger(__name__) @@ -64,9 +63,9 @@ def destroy_ecs(model_id,model_tag,stack_name): cf_client.delete_stack(StackName=stack_name) logger.warning(f"model: {model_id}, model_tag: {model_tag} is a ECS service, if the destruction fails, please destroy it manually using the guide at https://amzn-chn.feishu.cn/docx/YjTadv82Po7IBXxmS1RcmMGHndg") -def 
destroy(model_id:str,model_tag=MODEL_DEFAULT_TAG,waiting_until_complete=True, model_deploy_version=VERSION_MODIFY): +def destroy(model_id:str,model_tag=MODEL_DEFAULT_TAG,waiting_until_complete=True): check_env_stack_exist_and_complete() - stack_name = Model.get_model_stack_name_prefix(model_id,model_tag=model_tag,model_deploy_version=model_deploy_version) + stack_name = Model.get_model_stack_name_prefix(model_id,model_tag=model_tag) if not check_stack_exists(stack_name): stop_pipeline_execution(model_id,model_tag,waiting_until_complete=waiting_until_complete) return diff --git a/src/emd/utils/aws_service_utils.py b/src/emd/utils/aws_service_utils.py index 66d1ea16..7e41dfdc 100644 --- a/src/emd/utils/aws_service_utils.py +++ b/src/emd/utils/aws_service_utils.py @@ -306,8 +306,6 @@ def get_model_stacks(): for output in status_info.get("Outputs", []) } outputs_d["outputs"] = str(stack_output_d) - outputs_d["deploy_version"] = outputs_d.get("DeployVersion", "") or \ - Model.get_deploy_version_from_stack_name(stack_name) if outputs_d["stack_status"] == "ROLLBACK_COMPLETE": # find failed event