2 changes: 1 addition & 1 deletion azure/utils-pr-pipeline.yml
@@ -30,7 +30,7 @@ jobs:
       clean: all
     steps:
       - task: UsePythonVersion@0
-        displayName: 'Use Python 3.8'
+        displayName: 'Use Python 3.9'
         inputs:
           versionSpec: 3.9

86 changes: 61 additions & 25 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -15,7 +15,7 @@ flake8 = "^3.7.9"
 mypy = "^0.770"
 pytest = "^5.4.2"
 pytest-cov = "^2.8.1"
-ansible = "^2.10.7"
+ansible = "^8.5.0"
 pyyaml = "^5.4.1"
 jmespath = "^0.10.0"
 awscli = "^1.18.80"
105 changes: 50 additions & 55 deletions scripts/test_pull_request_deployments.py
@@ -1,55 +1,50 @@
-import os
-import sys
-from multiprocessing import Process
-from trigger_pipelines import AzureDevOps
-
-
-PULL_REQUEST_PIPELINES = {
-    "canary-api": {
-        "build": 222,
-        "pr": 223,
-        "branch": "refs/heads/main"
-    }
-}
-
-
-def trigger_pipelines(pipeline_ids: dict, service: str):
-    azure_dev_ops = AzureDevOps()
-    build_status = azure_dev_ops.run_pipeline(
-        service=service,
-        pipeline_type="build",
-        pipeline_id=pipeline_ids["build"],
-        pipeline_branch=pipeline_ids["branch"]
-    )
-    if build_status != "succeeded":
-        sys.exit(1)
-    return
-    # azure_dev_ops.run_pipeline(
-    #     service=service,
-    #     pipeline_type="pr",
-    #     pipeline_id=pipeline_ids["pr"],
-    #     pipeline_branch=pipeline_ids["branch"]
-    # )
-
-
-def main():
-    jobs = []
-    for service, pipeline_ids in PULL_REQUEST_PIPELINES.items():
-        process = Process(
-            target=trigger_pipelines,
-            args=(pipeline_ids, service,)
-        )
-        process.start()
-        jobs.append(process)
-    for process in jobs:
-        process.join()
-    # check return code of jobs and fail if there is a problem
-    for process in jobs:
-        if process.exitcode != 0:
-            print("A job failed")
-            sys.exit(1)
-    sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
+# import os
+# import sys
+# from multiprocessing import Process
+# from trigger_pipelines import AzureDevOps
+
+
+# PULL_REQUEST_PIPELINES = {
+#     "canary-api": {
+#         "build": 222,
+#         "pr": 223,
+#         "branch": "refs/heads/main"
+#     }
+# }
+
+
+# def trigger_pipelines(pipeline_ids: dict, service: str):
+#     azure_dev_ops = AzureDevOps()
+#     build_status = azure_dev_ops.run_pipeline(
+#         service=service,
+#         pipeline_type="build",
+#         pipeline_id=pipeline_ids["build"],
+#         pipeline_branch=pipeline_ids["branch"]
+#     )
+#     if build_status != "succeeded":
+#         sys.exit(1)
+#     print(f"Build pipeline for {service} failed with status: {build_status}")
+#     return
+
+
+# def main():
+#     jobs = []
+#     for service, pipeline_ids in PULL_REQUEST_PIPELINES.items():
+#         process = Process(
+#             target=trigger_pipelines,
+#             args=(pipeline_ids, service,)
+#         )
+#         process.start()
+#         jobs.append(process)
+#     for process in jobs:
+#         process.join()
+#     # check return code of jobs and fail if there is a problem
+#     for process in jobs:
+#         if process.exitcode != 0:
+#             print("A job failed")
+#             sys.exit(1)
+#     sys.exit(0)
+
+
+# if __name__ == "__main__":
+#     main()
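
For context on the script being disabled above: sys.exit(1) inside a multiprocessing.Process target makes the child exit with a non-zero status, which is exactly what the exitcode loop in main() relies on. A minimal standalone sketch of that pattern (illustrative names only, not part of this PR):

import sys
from multiprocessing import Process


def worker(should_fail: bool):
    # Simulate a pipeline run; exit non-zero on failure so the parent can detect it.
    if should_fail:
        sys.exit(1)


def run_all():
    jobs = [Process(target=worker, args=(flag,)) for flag in (False, True)]
    for job in jobs:
        job.start()
    for job in jobs:
        job.join()
    # exitcode is 0 on success and 1 where the child called sys.exit(1)
    if any(job.exitcode != 0 for job in jobs):
        print("A job failed")
        sys.exit(1)
    sys.exit(0)


if __name__ == "__main__":
    run_all()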
2 changes: 2 additions & 0 deletions scripts/trigger_pipelines.py
@@ -14,6 +14,8 @@ def __init__(self):
         self.client_id = os.environ["AZ_CLIENT_ID"]
         self.client_secret = os.environ["AZ_CLIENT_SECRET"]
         self.client_tenant = os.environ["AZ_CLIENT_TENANT"]
+        if not all([self.client_id, self.client_secret, self.client_tenant]):
+            raise ValueError("Client ID, Secret, or Tenant is not set in the environment variables.")
         self.access_token = self._get_access_token()
         self.notify_commit_sha = os.environ["NOTIFY_COMMIT_SHA"]
         self.utils_pr_number = os.environ["UTILS_PR_NUMBER"]
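
Worth noting about the added guard: os.environ["AZ_CLIENT_ID"] and the other lookups already raise KeyError when a variable is missing entirely, so the new all(...) check mainly catches variables that are set but empty. A hedged alternative sketch that reports exactly which variables are missing or empty (hypothetical helper, not part of this change):

import os


def require_env(*names: str) -> dict:
    # Collect the named variables, treating missing and empty values the same way.
    values = {name: os.environ.get(name, "") for name in names}
    blank = [name for name, value in values.items() if not value]
    if blank:
        raise ValueError(f"Missing or empty environment variables: {', '.join(blank)}")
    return values


# Usage, assuming the same variable names as trigger_pipelines.py:
# creds = require_env("AZ_CLIENT_ID", "AZ_CLIENT_SECRET", "AZ_CLIENT_TENANT")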