Skip to content

Commit 93e619e

Browse files
Author: Taniya Mathur (committed)
Add CloudWatch Events trigger and fix GitLab log timing
- Replace S3 polling with CloudWatch Events for immediate pipeline triggers
- Add 30-second delay in GitLab CI for complete log retrieval
- Fix CodeBuild log stream parsing in Bedrock summary generation
1 parent db40c76 commit 93e619e

File tree

3 files changed

+89
-10
lines changed

3 files changed

+89
-10
lines changed

.gitlab-ci.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,9 @@ integration_tests:
156156
echo "Log Stream: $LOG_STREAM_NAME"
157157
echo ""
158158
echo "=== CODEBUILD LOGS ==="
159+
# Wait for CloudWatch Logs to flush all events
160+
echo "Waiting 30 seconds for logs to flush..."
161+
sleep 30
159162
aws logs get-log-events --log-group-name "/aws/codebuild/app-sdlc" --log-stream-name "$LOG_STREAM_NAME" --start-from-head --query 'events[].message' --output text 2>/dev/null || echo "Could not retrieve CodeBuild logs"
160163
else
161164
echo "Could not find CodeBuild execution"

scripts/codebuild_deployment.py

Lines changed: 37 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -245,7 +245,7 @@ def get_codebuild_logs():
245245

246246
# Extract log group and stream from build ID
247247
log_group = f"/aws/codebuild/{build_id.split(':')[0]}"
248-
log_stream = build_id.split('/')[-1]
248+
log_stream = build_id.split(':')[-1]
249249

250250
# Get logs from CloudWatch
251251
logs_client = boto3.client('logs')
@@ -282,6 +282,10 @@ def generate_deployment_summary(deployment_results, stack_prefix, template_url):
282282
# Get CodeBuild logs
283283
deployment_logs = get_codebuild_logs()
284284

285+
# Check if log retrieval failed
286+
if deployment_logs.startswith("Failed to retrieve CodeBuild logs"):
287+
raise Exception("CodeBuild logs unavailable")
288+
285289
# Initialize Bedrock client
286290
bedrock = boto3.client('bedrock-runtime')
287291

@@ -372,6 +376,31 @@ def generate_deployment_summary(deployment_results, stack_prefix, template_url):
372376
return manual_summary
373377

374378

379+
def delete_versioned_bucket(bucket_name):
380+
"""Delete all versions and delete markers from a versioned S3 bucket, then delete the bucket."""
381+
import boto3
382+
try:
383+
s3 = boto3.client('s3')
384+
paginator = s3.get_paginator('list_object_versions')
385+
386+
for page in paginator.paginate(Bucket=bucket_name):
387+
# Delete object versions
388+
if 'Versions' in page:
389+
for version in page['Versions']:
390+
s3.delete_object(Bucket=bucket_name, Key=version['Key'], VersionId=version['VersionId'])
391+
392+
# Delete delete markers
393+
if 'DeleteMarkers' in page:
394+
for marker in page['DeleteMarkers']:
395+
s3.delete_object(Bucket=bucket_name, Key=marker['Key'], VersionId=marker['VersionId'])
396+
397+
# Delete the bucket
398+
s3.delete_bucket(Bucket=bucket_name)
399+
return True
400+
except Exception:
401+
return False
402+
403+
375404
def cleanup_stack(stack_name, pattern_name):
376405
print(f"[{pattern_name}] Cleaning up: {stack_name}")
377406
try:
@@ -394,13 +423,12 @@ def cleanup_stack(stack_name, pattern_name):
394423

395424
# ECR repositories
396425
print(f"[{pattern_name}] Cleaning up ECR repositories...")
397-
stack_name_lower = stack_name.lower()
398-
result = run_command(f"aws ecr describe-repositories --query 'repositories[?contains(repositoryName, `{stack_name_lower}`)].repositoryName' --output text", check=False)
426+
result = run_command(f"aws ecr describe-repositories --query 'repositories[?contains(repositoryName, `{stack_name}`)].repositoryName' --output text", check=False)
399427
if result.stdout.strip():
400428
repo_names = [name for name in result.stdout.strip().split('\t') if name]
401429
for repo_name in repo_names:
402430
print(f"[{pattern_name}] Deleting ECR repository: {repo_name}")
403-
run_command(f"aws ecr delete-repository --repository-name {repo_name} --force", check=False)
431+
run_command(f"aws ecr delete-repository --repository-name '{repo_name}' --force", check=False)
404432

405433
# S3 buckets (empty and delete orphaned buckets)
406434
print(f"[{pattern_name}] Cleaning up S3 buckets...")
@@ -409,8 +437,9 @@ def cleanup_stack(stack_name, pattern_name):
409437
bucket_names = [name for name in result.stdout.strip().split('\t') if name]
410438
for bucket_name in bucket_names:
411439
print(f"[{pattern_name}] Deleting bucket: {bucket_name}")
412-
run_command(f"aws s3 rm s3://{bucket_name} --recursive", check=False)
413-
run_command(f"aws s3api delete-bucket --bucket {bucket_name}", check=False)
440+
# Try versioned bucket deletion first, fallback to regular deletion
441+
if not delete_versioned_bucket(bucket_name):
442+
run_command(f"aws s3 rb s3://{bucket_name} --force", check=False)
414443

415444
# CloudWatch log groups
416445
print(f"[{pattern_name}] Cleaning up CloudWatch log groups...")
@@ -419,7 +448,7 @@ def cleanup_stack(stack_name, pattern_name):
419448
log_group_names = [name for name in result.stdout.strip().split('\t') if name]
420449
for log_group_name in log_group_names:
421450
print(f"[{pattern_name}] Deleting log group: {log_group_name}")
422-
run_command(f"aws logs delete-log-group --log-group-name {log_group_name}", check=False)
451+
run_command(f"aws logs delete-log-group --log-group-name '{log_group_name}'", check=False)
423452

424453
# AppSync logs
425454
print(f"[{pattern_name}] Cleaning up AppSync logs...")
@@ -437,7 +466,7 @@ def cleanup_stack(stack_name, pattern_name):
437466
policy_names = [name for name in result.stdout.strip().split('\t') if name]
438467
for policy_name in policy_names:
439468
print(f"[{pattern_name}] Deleting resource policy: {policy_name}")
440-
run_command(f"aws logs delete-resource-policy --policy-name {policy_name}", check=False)
469+
run_command(f"aws logs delete-resource-policy --policy-name '{policy_name}'", check=False)
441470

442471
print(f"[{pattern_name}] ✅ Cleanup completed")
443472
except Exception as e:

scripts/sdlc/cfn/codepipeline-s3.yml

Lines changed: 49 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -173,6 +173,7 @@ Resources:
173173
Name: !Ref PipelineName
174174
RoleArn: !GetAtt CodePipelineRole.Arn
175175
ExecutionMode: PARALLEL
176+
PipelineType: V2
176177
ArtifactStore:
177178
Type: S3
178179
Location: !Ref ArtifactBucket
@@ -191,7 +192,7 @@ Resources:
191192
Configuration:
192193
S3Bucket: !Ref BucketName
193194
S3ObjectKey: !Ref FileKey
194-
PollForSourceChanges: true
195+
PollForSourceChanges: false
195196
OutputArtifacts:
196197
- Name: SourceOutput
197198
RunOrder: 1
@@ -273,4 +274,50 @@ Resources:
273274
Action:
274275
- 'bedrock:InvokeModel'
275276
Resource:
276-
- !Sub 'arn:aws:bedrock:*::foundation-model/anthropic.claude-3-5-sonnet-20240620-v1:0'
277+
- !Sub 'arn:aws:bedrock:*::foundation-model/anthropic.claude-3-5-sonnet-20240620-v1:0'
278+
279+
# CloudWatch Events Rule to trigger pipeline on S3 changes
280+
PipelineTriggerRule:
281+
Type: AWS::Events::Rule
282+
Properties:
283+
Name: idp-pipeline-trigger
284+
Description: Trigger CodePipeline on S3 object changes
285+
EventPattern:
286+
source:
287+
- aws.s3
288+
detail-type:
289+
- Object Created
290+
detail:
291+
bucket:
292+
name:
293+
- !Ref BucketName
294+
object:
295+
key:
296+
- !Ref FileKey
297+
State: ENABLED
298+
Targets:
299+
- Arn: !Sub 'arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${PipelineName}'
300+
Id: PipelineTrigger
301+
RoleArn: !GetAtt PipelineTriggerRole.Arn
302+
303+
# IAM Role for CloudWatch Events to trigger CodePipeline
304+
PipelineTriggerRole:
305+
Type: AWS::IAM::Role
306+
Properties:
307+
RoleName: idp-pipeline-trigger-role
308+
AssumeRolePolicyDocument:
309+
Version: '2012-10-17'
310+
Statement:
311+
- Effect: Allow
312+
Principal:
313+
Service: events.amazonaws.com
314+
Action: sts:AssumeRole
315+
Policies:
316+
- PolicyName: TriggerPipelinePolicy
317+
PolicyDocument:
318+
Version: '2012-10-17'
319+
Statement:
320+
- Effect: Allow
321+
Action:
322+
- codepipeline:StartPipelineExecution
323+
Resource: !Sub 'arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${PipelineName}'

0 commit comments

Comments (0)