|
| 1 | +pipeline: |
| 2 | + name: WIP REUSABLE PIPELINE to build images |
| 3 | + identifier: WIP_build_images |
| 4 | + projectIdentifier: datarobotusermodels |
| 5 | + orgIdentifier: Custom_Models |
| 6 | + tags: {} |
| 7 | + properties: |
| 8 | + ci: |
| 9 | + codebase: |
| 10 | + connectorRef: account.svc_harness_git1 |
| 11 | + repoName: <+pipeline.variables.target_repo> |
| 12 | + build: |
| 13 | + type: branch |
| 14 | + spec: |
| 15 | + branch: <+pipeline.variables.target_branch> |
| 16 | + stages: |
| 17 | + - stage: |
| 18 | + name: Detect changes and output images build matrix |
| 19 | + identifier: Detect_changes_and_output_images_build_matrix |
| 20 | + description: "" |
| 21 | + type: CI |
| 22 | + spec: |
| 23 | + cloneCodebase: true |
| 24 | + caching: |
| 25 | + enabled: true |
| 26 | + override: false |
| 27 | + buildIntelligence: |
| 28 | + enabled: true |
| 29 | + platform: |
| 30 | + os: Linux |
| 31 | + arch: Amd64 |
| 32 | + runtime: |
| 33 | + type: Cloud |
| 34 | + spec: {} |
| 35 | + execution: |
| 36 | + steps: |
| 37 | + - step: |
| 38 | + type: Run |
| 39 | + name: Build params matrix |
| 40 | + identifier: Build_params_matrix |
| 41 | + spec: |
| 42 | + shell: Bash |
| 43 | + command: |- |
| 44 | + echo "Hello" |
| 45 | + pwd |
| 46 | + ls -la |
| 47 | + IFS=',' read -ra DIR_ARRAY <<< "<+pipeline.variables.envs_folders>" |
| 48 | +
|
| 49 | + json_array="[]" |
| 50 | +
|
| 51 | + for DIR in "${DIR_ARRAY[@]}"; do |
| 52 | + cd "${DIR}" || exit 1 |
| 53 | + echo "Processing dir: ${DIR}" |
| 54 | +
|
| 55 | + # Read fields from env_info.json |
| 56 | + REPO_NAME=$(jq -r '.imageRepository' ./env_info.json) |
| 57 | + ENV_VERSION_ID=$(jq -r '.environmentVersionId' ./env_info.json) |
| 58 | +
|
| 59 | + # Create JSON object for this env |
| 60 | + env_json=$(jq -n \ |
| 61 | + --arg path "${DIR}" \ |
| 62 | + --arg dockerfile "Dockerfile" \ |
| 63 | + --arg repo "${REPO_NAME}" \ |
| 64 | + --arg tag "${ENV_VERSION_ID}" \ |
| 65 | + '{path: $path, repository: $repo, tag: $tag, dockerfile: $dockerfile}') |
| 66 | +
|
| 67 | + # Append to the JSON array |
| 68 | + json_array=$(echo "${json_array}" | jq --argjson obj "${env_json}" '. + [$obj]') |
| 69 | +
|
| 70 | +                          # If Dockerfile.local exists, add a second matrix entry that uses it, with a ".local" suffix appended to the tag
| 71 | + if [ -f "./Dockerfile.local" ]; then |
| 72 | + env_json=$(jq -n \ |
| 73 | + --arg path "${DIR}" \ |
| 74 | + --arg dockerfile "Dockerfile.local" \ |
| 75 | + --arg repo "${REPO_NAME}" \ |
| 76 | + --arg tag "${ENV_VERSION_ID}.local" \ |
| 77 | + '{path: $path, repository: $repo, tag: $tag, dockerfile: $dockerfile}') |
| 78 | +
|
| 79 | + # Append to the JSON array |
| 80 | + json_array=$(echo "${json_array}" | jq --argjson obj "${env_json}" '. + [$obj]') |
| 81 | + fi |
| 82 | +
|
| 83 | +
|
| 84 | + cd - || exit 1 |
| 85 | + done |
| 86 | +
|
| 87 | + matrix_json=$(jq -n --argjson arr "${json_array}" '{images: $arr}' | jq -c .) |
| 88 | +
|
| 89 | + # Print final JSON array |
| 90 | + echo "${matrix_json}" |
| 91 | + export matrix_json |
| 92 | + outputVariables: |
| 93 | + - name: matrix_json |
| 94 | + type: String |
| 95 | + value: matrix_json |
| 96 | + when: |
| 97 | + pipelineStatus: Success |
| 98 | + condition: <+pipeline.variables.envs_folders>!="" |
| 99 | + - stage: |
| 100 | + name: build images |
| 101 | + identifier: build_images |
| 102 | + description: "" |
| 103 | + type: CI |
| 104 | + spec: |
| 105 | + cloneCodebase: true |
| 106 | + caching: |
| 107 | + enabled: true |
| 108 | + override: false |
| 109 | + buildIntelligence: |
| 110 | + enabled: true |
| 111 | + platform: |
| 112 | + os: Linux |
| 113 | + arch: Amd64 |
| 114 | + runtime: |
| 115 | + type: Cloud |
| 116 | + spec: {} |
| 117 | + execution: |
| 118 | + steps: |
| 119 | + - step: |
| 120 | + type: BuildAndPushDockerRegistry |
| 121 | + name: Build and push to dockerhub |
| 122 | + identifier: Build_and_push_to_dockerhub |
| 123 | + spec: |
| 124 | + connectorRef: datarobot_user_models_read_write |
| 125 | + repo: datarobotdev/<+matrix.image.repository> |
| 126 | + tags: |
| 127 | + - <+matrix.image.tag> |
| 128 | + caching: true |
| 129 | + dockerfile: <+matrix.image.path>/<+matrix.image.dockerfile> |
| 130 | + context: <+matrix.image.path> |
| 131 | + when: |
| 132 | + pipelineStatus: Success |
| 133 | + condition: <+pipeline.variables.envs_folders>!="" |
| 134 | + strategy: |
| 135 | + matrix: |
| 136 | + image: <+json.list("images", <+pipeline.stages.Detect_changes_and_output_images_build_matrix.spec.execution.steps.Build_params_matrix.output.outputVariables.matrix_json>)> |
| 137 | + nodeName: <+matrix.image.repository>:<+matrix.image.tag> |
| 138 | + description: |- |
| 139 | +    This pipeline can be used in other repositories to:
| 140 | +    * detect which environments have changed (an environment is a folder containing an env_info.json file)
| 141 | +    * build images using the repository name and environment version ID taken from env_info.json
| 142 | +    It expects the following inputs:
| 143 | +    * the target repository
| 144 | +    * the target branch
| 145 | +    * a comma-separated list of paths to the environments to build
| 146 | + variables: |
| 147 | + - name: target_repo |
| 148 | + type: String |
| 149 | + description: "Target repo: e.g. datarobot-user-models" |
| 150 | + required: false |
| 151 | + value: <+input> |
| 152 | + - name: target_branch |
| 153 | + type: String |
| 154 | +        description: "Target branch: e.g. master"
| 155 | + required: false |
| 156 | + value: <+input> |
| 157 | + - name: envs_folders |
| 158 | + type: String |
| 159 | +        description: Comma-separated paths to environment folders to build images from (usually the environments changed in a previous step)
| 160 | + required: false |
| 161 | + value: <+input> |
0 commit comments