Skip to content

Commit b663877

Browse files
committed
Merge branch 'feature/restore-pipeline-infra' into 'develop'
feat: restore pipeline infrastructure with genaiic-sdlc naming See merge request genaiic-reusable-assets/engagement-artifacts/genaiic-idp-accelerator!418
2 parents 38a9461 + cfa1fc6 commit b663877

File tree

9 files changed

+153
-153
lines changed

9 files changed

+153
-153
lines changed

.gitlab-ci.yml

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,7 @@ integration_tests:
125125
- unzip awscliv2.zip
126126
- ./aws/install
127127
# Install boto3 for Python script
128-
- pip install boto3
128+
- pip install boto3 rich
129129

130130
script:
131131
- aws --version
@@ -134,7 +134,7 @@ integration_tests:
134134
# Set environment variables for Python script
135135
- export IDP_ACCOUNT_ID=${IDP_ACCOUNT_ID:-020432867916}
136136
- export AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION:-us-east-1}
137-
- export IDP_PIPELINE_NAME=idp-sdlc-deploy-pipeline
137+
- export IDP_PIPELINE_NAME=genaiic-sdlc-deploy-pipeline
138138

139139
# Run integration test deployment
140140
- python3 scripts/integration_test_deployment.py
@@ -147,7 +147,7 @@ integration_tests:
147147
echo "Pipeline Execution: $EXECUTION_ID"
148148
149149
# Get CodeBuild ID from the pipeline execution
150-
BUILD_ID=$(aws codepipeline list-action-executions --pipeline-name ${IDP_PIPELINE_NAME:-idp-sdlc-deploy-pipeline} --filter pipelineExecutionId=$EXECUTION_ID --query 'actionExecutionDetails[?actionName==`BuildAction`].output.executionResult.externalExecutionId' --output text 2>/dev/null || echo "")
150+
BUILD_ID=$(aws codepipeline list-action-executions --pipeline-name ${IDP_PIPELINE_NAME:-genaiic-sdlc-deploy-pipeline} --filter pipelineExecutionId=$EXECUTION_ID --query 'actionExecutionDetails[?actionName==`BuildAction`].output.executionResult.externalExecutionId' --output text 2>/dev/null || echo "")
151151
152152
if [ "$BUILD_ID" != "" ] && [ "$BUILD_ID" != "None" ]; then
153153
echo "CodeBuild ID: $BUILD_ID"
@@ -175,7 +175,6 @@ integration_tests:
175175
else
176176
echo "⚠️ No deployment summary found - check codebuild_logs.txt for details"
177177
fi
178-
fi
179178
else
180179
echo "Could not find CodeBuild execution"
181180
fi

scripts/cleanup-logs.py

Lines changed: 0 additions & 53 deletions
This file was deleted.

scripts/codebuild_deployment.py

Lines changed: 4 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ def publish_templates():
7979
region = get_env_var("AWS_DEFAULT_REGION", "us-east-1")
8080

8181
# Generate bucket name and prefix
82-
bucket_basename = f"idp-sdlc-sourcecode-{account_id}-{region}"
82+
bucket_basename = f"genaiic-sdlc-sourcecode-{account_id}-{region}"
8383
prefix = f"codebuild-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
8484

8585
# Run publish.sh
@@ -450,37 +450,16 @@ def cleanup_stack(stack_name, pattern_name):
450450

451451
# Delete the stack and wait for completion
452452
print(f"[{pattern_name}] Attempting stack deletion...")
453-
run_command(f"idp-cli delete --stack-name {stack_name} --force --empty-buckets --wait", check=False)
453+
run_command(f"idp-cli delete --stack-name {stack_name} --force --empty-buckets --force-delete-all --wait", check=False)
454454

455-
# Always clean up orphaned resources after deletion attempt
456-
print(f"[{pattern_name}] Cleaning up orphaned resources...")
455+
# Clean up additional log groups that might not be caught by idp-cli
456+
print(f"[{pattern_name}] Cleaning up additional log groups...")
457457

458458
# Set AWS retry configuration to handle throttling
459459
os.environ['AWS_MAX_ATTEMPTS'] = '10'
460460
os.environ['AWS_RETRY_MODE'] = 'adaptive'
461-
462-
# ECR repositories
463-
print(f"[{pattern_name}] Cleaning up ECR repositories...")
464-
result = run_command(f"aws ecr describe-repositories --query 'repositories[?contains(repositoryName, `{stack_name}`)].repositoryName' --output text", check=False)
465-
if result.stdout.strip():
466-
repo_names = [name for name in result.stdout.strip().split('\t') if name]
467-
for repo_name in repo_names:
468-
print(f"[{pattern_name}] Deleting ECR repository: {repo_name}")
469-
run_command(f"aws ecr delete-repository --repository-name '{repo_name}' --force", check=False)
470-
471-
# S3 buckets (empty and delete orphaned buckets)
472-
print(f"[{pattern_name}] Cleaning up S3 buckets...")
473-
result = run_command(f"aws s3api list-buckets --query 'Buckets[?contains(Name, `{stack_name}`)].Name' --output text", check=False)
474-
if result.stdout.strip():
475-
bucket_names = [name for name in result.stdout.strip().split('\t') if name]
476-
for bucket_name in bucket_names:
477-
print(f"[{pattern_name}] Deleting bucket: {bucket_name}")
478-
# Try versioned bucket deletion first, fallback to regular deletion
479-
if not delete_versioned_bucket(bucket_name):
480-
run_command(f"aws s3 rb s3://{bucket_name} --force", check=False)
481461

482462
# CloudWatch log groups
483-
print(f"[{pattern_name}] Cleaning up CloudWatch log groups...")
484463
result = run_command(f"aws logs describe-log-groups --query 'logGroups[?contains(logGroupName, `{stack_name}`)].logGroupName' --output json", check=False)
485464
if result.stdout.strip():
486465
try:
@@ -494,7 +473,6 @@ def cleanup_stack(stack_name, pattern_name):
494473
print(f"[{pattern_name}] Failed to parse log group names")
495474

496475
# AppSync logs
497-
print(f"[{pattern_name}] Cleaning up AppSync logs...")
498476
result = run_command(f"aws appsync list-graphql-apis --query 'graphqlApis[?contains(name, `{stack_name}`)].apiId' --output json", check=False)
499477
if result.stdout.strip():
500478
try:
@@ -508,7 +486,6 @@ def cleanup_stack(stack_name, pattern_name):
508486
print(f"[{pattern_name}] Failed to parse AppSync API IDs")
509487

510488
# Clean up CloudWatch Logs Resource Policy only if stack-specific
511-
print(f"[{pattern_name}] Checking CloudWatch resource policies...")
512489
result = run_command(f"aws logs describe-resource-policies --query 'resourcePolicies[?contains(policyName, `{stack_name}`)].policyName' --output text", check=False)
513490
if result.stdout.strip():
514491
policy_names = [name for name in result.stdout.strip().split('\t') if name]

scripts/integration_test_deployment.py

Lines changed: 90 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,8 @@
1111
import time
1212

1313
import boto3
14+
from rich.console import Console
15+
from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
1416

1517

1618
def run_command(cmd, check=True):
@@ -104,76 +106,108 @@ def upload_to_s3(bucket_name):
104106

105107
def find_pipeline_execution_by_version(pipeline_name, version_id, max_wait=300):
106108
"""Find pipeline execution that corresponds to specific S3 version ID"""
107-
print(f"Finding pipeline execution for version: {version_id}")
109+
console = Console()
110+
console.print(f"[cyan]Finding pipeline execution for version:[/cyan] {version_id}")
108111

109112
codepipeline = boto3.client("codepipeline")
110113
start_time = time.time()
111114

112-
while time.time() - start_time < max_wait:
113-
try:
114-
response = codepipeline.list_pipeline_executions(
115-
pipelineName=pipeline_name, maxResults=10
116-
)
117-
118-
for execution in response["pipelineExecutionSummaries"]:
119-
execution_id = execution["pipelineExecutionId"]
120-
121-
# Get execution details to check source version
122-
details = codepipeline.get_pipeline_execution(
123-
pipelineName=pipeline_name,
124-
pipelineExecutionId=execution_id
115+
with Progress(
116+
SpinnerColumn(),
117+
TextColumn("[progress.description]{task.description}"),
118+
TimeElapsedColumn(),
119+
console=console,
120+
transient=False,
121+
) as progress:
122+
123+
task = progress.add_task("[yellow]Searching for pipeline execution...", total=None)
124+
125+
while time.time() - start_time < max_wait:
126+
try:
127+
response = codepipeline.list_pipeline_executions(
128+
pipelineName=pipeline_name, maxResults=10
125129
)
126130

127-
# Check if this execution matches our version ID
128-
for artifact in details["pipelineExecution"].get("artifactRevisions", []):
129-
if artifact.get("revisionId") == version_id:
130-
print(f"✅ Found matching execution: {execution_id}")
131-
return execution_id
131+
for execution in response["pipelineExecutionSummaries"]:
132+
execution_id = execution["pipelineExecutionId"]
133+
134+
# Get execution details to check source version
135+
details = codepipeline.get_pipeline_execution(
136+
pipelineName=pipeline_name,
137+
pipelineExecutionId=execution_id
138+
)
139+
140+
# Check if this execution matches our version ID
141+
for artifact in details["pipelineExecution"].get("artifactRevisions", []):
142+
if artifact.get("revisionId") == version_id:
143+
progress.update(task, description="[green]✅ Found matching execution!")
144+
console.print(f"[green]✅ Found matching execution:[/green] {execution_id}")
145+
return execution_id
146+
147+
elapsed = int(time.time() - start_time)
148+
progress.update(task, description=f"[yellow]Waiting for pipeline trigger ({elapsed}s)...")
132149

133-
except Exception as e:
134-
print(f"Error finding execution: {e}")
135-
136-
time.sleep(10)
137-
138-
print(f"❌ Could not find pipeline execution for version {version_id}")
150+
except Exception as e:
151+
progress.update(task, description=f"[red]Error: {str(e)[:50]}...")
152+
console.print(f"[red]Error finding execution: {e}[/red]")
153+
154+
time.sleep(10)
155+
156+
progress.update(task, description="[red]❌ No matching execution found")
157+
console.print(f"[red]❌ Could not find pipeline execution for version {version_id}[/red]")
139158
return None
140159

141160

142161
def monitor_pipeline_execution(pipeline_name, execution_id, max_wait=7200):
143-
"""Monitor specific pipeline execution until completion"""
144-
print(f"Monitoring execution: {execution_id}")
162+
"""Monitor specific pipeline execution until completion with live progress"""
163+
console = Console()
164+
console.print(f"[cyan]Monitoring pipeline execution:[/cyan] {execution_id}")
145165

146166
codepipeline = boto3.client("codepipeline")
147-
wait_time = 0
148167
poll_interval = 30
149168

150-
while wait_time < max_wait:
151-
try:
152-
response = codepipeline.get_pipeline_execution(
153-
pipelineName=pipeline_name,
154-
pipelineExecutionId=execution_id
155-
)
156-
157-
status = response["pipelineExecution"]["status"]
158-
print(f"Pipeline execution {execution_id}: {status}")
159-
160-
if status == "Succeeded":
161-
print("✅ Pipeline completed successfully!")
162-
return True
163-
elif status in ["Failed", "Cancelled", "Superseded"]:
164-
print(f"❌ Pipeline failed with status: {status}")
165-
return False
166-
elif status == "InProgress":
167-
print(f"⏳ Pipeline still running... ({wait_time}s elapsed)")
169+
with Progress(
170+
SpinnerColumn(),
171+
TextColumn("[progress.description]{task.description}"),
172+
TimeElapsedColumn(),
173+
console=console,
174+
transient=False,
175+
) as progress:
176+
177+
task = progress.add_task("[yellow]Pipeline executing...", total=None)
178+
179+
wait_time = 0
180+
while wait_time < max_wait:
181+
try:
182+
response = codepipeline.get_pipeline_execution(
183+
pipelineName=pipeline_name,
184+
pipelineExecutionId=execution_id
185+
)
168186

169-
except Exception as e:
170-
print(f"Error checking pipeline status: {e}")
171-
172-
time.sleep(poll_interval)
173-
wait_time += poll_interval
174-
175-
print(f"❌ Pipeline monitoring timed out after {max_wait} seconds")
176-
return False
187+
status = response["pipelineExecution"]["status"]
188+
elapsed_mins = wait_time // 60
189+
190+
if status == "Succeeded":
191+
progress.update(task, description="[green]✅ Pipeline completed successfully!")
192+
console.print("[green]✅ Pipeline completed successfully![/green]")
193+
return True
194+
elif status in ["Failed", "Cancelled", "Superseded"]:
195+
progress.update(task, description=f"[red]❌ Pipeline failed: {status}")
196+
console.print(f"[red]❌ Pipeline failed with status: {status}[/red]")
197+
return False
198+
elif status == "InProgress":
199+
progress.update(task, description=f"[yellow]⏳ Pipeline running ({elapsed_mins}m elapsed)...")
200+
201+
except Exception as e:
202+
progress.update(task, description=f"[red]Error: {str(e)[:50]}...")
203+
console.print(f"[red]Error checking pipeline status: {e}[/red]")
204+
205+
time.sleep(poll_interval)
206+
wait_time += poll_interval
207+
208+
progress.update(task, description=f"[red]❌ Timeout after {max_wait//60} minutes")
209+
console.print(f"[red]❌ Pipeline monitoring timed out after {max_wait} seconds[/red]")
210+
return False
177211

178212

179213
def monitor_pipeline(pipeline_name, version_id, max_wait=7200):
@@ -200,8 +234,8 @@ def main():
200234
# Get configuration from environment
201235
account_id = get_env_var("IDP_ACCOUNT_ID", "020432867916")
202236
region = get_env_var("AWS_DEFAULT_REGION", "us-east-1")
203-
bucket_name = f"idp-sdlc-sourcecode-{account_id}-{region}"
204-
pipeline_name = get_env_var("IDP_PIPELINE_NAME", "idp-sdlc-deploy-pipeline")
237+
bucket_name = f"genaiic-sdlc-sourcecode-{account_id}-{region}"
238+
pipeline_name = get_env_var("IDP_PIPELINE_NAME", "genaiic-sdlc-deploy-pipeline")
205239

206240
print(f"Account ID: {account_id}")
207241
print(f"Region: {region}")

scripts/sdlc/cfn/README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,11 @@ SPDX-License-Identifier: MIT-0
1212
* Install the `credential-vendor.yml` cloudformation template.
1313
* Enter the gitlab group name (e.g. `genaiic-reusable-assets/engagement-artifacts`)
1414
* Enter the gitlab project name (e.g. `genaiic-idp-accelerator`)
15-
* Enter the bucket name created in the last step (e.g. `idp-sdlc-source-code-YOUR_AWS_ACCOUNT-YOUR_REGION`)
15+
* Enter the bucket name created in the last step (e.g. `genaiic-sdlc-source-code-YOUR_AWS_ACCOUNT-YOUR_REGION`)
1616
* Customize the environment variables in your CodePipeline/CodeBuild configuration.
1717
* The deployment will use the new `scripts/codebuild_deployment.py` script automatically.
1818
* This will ensure that an archive is there to install, when
1919
* Optional: Install the `sdlc-iam-role.yml` for least privilege sdlc operation (coming soon!)
2020
* Install the `codepipeline-s3.yml` cloudformation template.
21-
* Optional: add the iam role created in the last step (e.g. `arn:aws:iam::YOUR_AWS_ACCOUNT:role/idp-sdlc-role`)
22-
* Be sure to replace the `idp-sdlc-source-code-YOUR_AWS_ACCOUNT-YOUR_REGION` with the name of the sourcecode bucket you created.
21+
* Optional: add the iam role created in the last step (e.g. `arn:aws:iam::YOUR_AWS_ACCOUNT:role/genaiic-sdlc-role`)
22+
* Be sure to replace the `genaiic-sdlc-source-code-YOUR_AWS_ACCOUNT-YOUR_REGION` with the name of the sourcecode bucket you created.

0 commit comments

Comments
 (0)