diff --git a/.adr-dir b/.adr-dir new file mode 100644 index 0000000000..da5cac6b77 --- /dev/null +++ b/.adr-dir @@ -0,0 +1 @@ +docs/architecture/decisions diff --git a/.tekton/multiarch-push-pipeline.yaml b/.tekton/multiarch-push-pipeline.yaml index 765029f0d3..d905697786 100644 --- a/.tekton/multiarch-push-pipeline.yaml +++ b/.tekton/multiarch-push-pipeline.yaml @@ -39,7 +39,7 @@ spec: - name: name value: slack-webhook-notification - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-slack-webhook-notification:0.1@sha256:4e68fe2225debc256d403b828ed358345bb56d03327b46d55cb6c42911375750 + value: quay.io/konflux-ci/tekton-catalog/task-slack-webhook-notification:0.1@sha256:69945a30c11387a766e3d0ae33991b68e865a290c09da1fea44f193d358926ba - name: kind value: task resolver: bundles @@ -183,7 +183,7 @@ spec: - name: name value: init - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:bbf313b09740fb39b3343bc69ee94b2a2c21d16a9304f9b7c111c305558fc346 + value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 - name: kind value: task resolver: bundles @@ -206,7 +206,7 @@ spec: - name: name value: git-clone-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3a920a83fc0135aaae2730fe9d446eb2da2ffc9d63a34bceea04afd24653bdee + value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3dc39eae48745a96097c07c577b944d6203a91c35d3f71d9ed5feab41d327a6a - name: kind value: task resolver: bundles @@ -235,7 +235,7 @@ spec: - name: name value: prefetch-dependencies-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:970285e3b0495961199523b566e0dd92ec2e29bedbcf61d8fc67106b06d0f923 + value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:5946ca57aa29f162e11b74984ec58960f55f9fb6a0e97c6c9215c4161f768726 - name: kind value: task resolver: 
bundles @@ -287,7 +287,7 @@ spec: - name: name value: buildah-remote-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-remote-oci-ta:0.6@sha256:ac05dabe8b6b446f974cf2b6ef1079cfaa9443d7078c2ebe3ec79aa650e1b5b2 + value: quay.io/konflux-ci/tekton-catalog/task-buildah-remote-oci-ta:0.6@sha256:17b267b5ae3deca5905d930e54337b89df45d3579f33b7fab4df74ee644cded4 - name: kind value: task resolver: bundles @@ -318,7 +318,7 @@ spec: - name: name value: build-image-index - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:d94cad7f41be61074dd21c7dff26dab9217c3435a16f62813c1cb8382dd9aae6 + value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:31197f4ee71be47c6f491e888ff266cbbb8ad5ed1c7c4141cc14f002d1802a50 - name: kind value: task resolver: bundles @@ -344,7 +344,7 @@ spec: - name: name value: source-build-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:2a290f91fdccf4c9ef726a1605163bc14904e1dbf9837ac6d2621caddd10f98e + value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:282cb5a9119a87e88559444feff67d76d6f356d03654b4845632c049b2314735 - name: kind value: task resolver: bundles @@ -370,7 +370,7 @@ spec: - name: name value: deprecated-image-check - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 + value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:462baed733dfc38aca5395499e92f19b6f13a74c2e88fe5d86c3cffa2f899b57 - name: kind value: task resolver: bundles @@ -397,7 +397,7 @@ spec: - name: name value: clair-scan - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:a7cc183967f89c4ac100d04ab8f81e54733beee60a0528208107c9a22d3c43af + value: 
quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e - name: kind value: task resolver: bundles @@ -443,7 +443,7 @@ spec: - name: name value: sast-snyk-check-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:181d63c126e3119a9d57b8feed4eb66a875b5208c3e90724c22758e65dca8733 + value: quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:8ad28b7783837a24acbc9a8494c935e796e591ce476085ad5899bebd7e53f077 - name: kind value: task resolver: bundles @@ -465,7 +465,7 @@ spec: - name: name value: clamav-scan - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:b0bd59748cda4a7abf311e4f448e6c1d00c6b6d8c0ecc1c2eb33e08dc0e0b802 + value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 - name: kind value: task resolver: bundles @@ -510,7 +510,7 @@ spec: - name: name value: sast-coverity-check-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:cdbe1a968676e4f5519b082bf1e27a4cdcf66dd60af66dbc26b3e604f957f7e9 + value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:78f5244a8cfd28c890ed62db7e4ff1fc97ff39876d37fb19f1b0c2c286a4002c - name: kind value: task resolver: bundles @@ -531,7 +531,7 @@ spec: - name: name value: coverity-availability-check - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:db2b267dc15e4ed17f704ee91b8e9b38068e1a35b1018a328fdca621819d74c6 + value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36400873d3031df128c55aa71ee11d322c3e55fd8f13dc5779098fbc117c0aa3 - name: kind value: task resolver: bundles @@ -557,7 +557,7 @@ spec: - name: name value: sast-shell-check-oci-ta - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:f950c3cefcff91de751f4d24a297c3312a2b265dd54478193ba1330791976c35 + value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:d44336d7bcbd1f7cedee639357a493bd1f661e2859e49e11a34644bdf6819c4e - name: kind value: task resolver: bundles @@ -583,7 +583,7 @@ spec: - name: name value: sast-unicode-check-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:1cf8f6f6e991e1820cbebddaf9f38442a2ea5669956c9ffc2b7e4698ae7e99be + value: quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:e5a8d3e8e7be7246a1460385b95c084ea6e8fe7520d40fe4389deb90f1bf5176 - name: kind value: task resolver: bundles @@ -608,7 +608,7 @@ spec: - name: name value: apply-tags - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 + value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:4c2b0a2d2904108f8d19edfa878df6cd49ed19aab73ab6fc6a435fba0265f771 - name: kind value: task resolver: bundles @@ -631,7 +631,7 @@ spec: - name: name value: push-dockerfile-oci-ta - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:14fba04580b236e4206a904b86ee2fd8eeaa4163f7619a9c2602d361e4f74c51 + value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:13633d5ba8445c0f732a0a5d1b33ffbb708398e45ef1647542b0ab22fee25a6a - name: kind value: task resolver: bundles diff --git a/.tekton/odh-base-image-cpu-py312-c9s-pull-request.yaml b/.tekton/odh-base-image-cpu-py312-c9s-pull-request.yaml index f29ca4edd0..72a622c2d7 100644 --- a/.tekton/odh-base-image-cpu-py312-c9s-pull-request.yaml +++ b/.tekton/odh-base-image-cpu-py312-c9s-pull-request.yaml @@ -8,8 +8,9 @@ metadata: build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/cancel-in-progress: "true" 
pipelinesascode.tekton.dev/max-keep-runs: "3" - pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch - == "main" + pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-base\-image\-cpu\-py312\-c9s|base\-images/cpu/c9s\-python\-3\.12) + pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch == "main" && ( "base-images/cpu/c9s-python-3.12/**".pathChanged() || ".tekton/odh-base-image-cpu-py312-c9s-pull-request.yaml".pathChanged() ) + && body.repository.full_name == "opendatahub-io/notebooks" creationTimestamp: null labels: appstudio.openshift.io/application: opendatahub-release @@ -18,6 +19,8 @@ metadata: name: odh-base-image-cpu-py312-c9s-on-pull-request namespace: open-data-hub-tenant spec: + timeouts: + pipeline: 3h params: - name: git-url value: '{{source_url}}' @@ -31,571 +34,12 @@ spec: value: base-images/cpu/c9s-python-3.12/Dockerfile.cpu - name: path-context value: . - pipelineSpec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. - - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://conforma.dev/docs/policy/packages/release_trusted_task.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. 
- This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . - description: Path to the source code of an application's component from where - to build image. - name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter - path-context - name: dockerfile - type: string - - default: "false" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like - 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - type: string - - default: "false" - description: Build a source image. - name: build-source-image - type: string - - default: "false" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: docker - description: The format for the resulting image's mediaType. Valid values are - oci or docker. 
- name: buildah-format - type: string - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote - VMs - name: privileged-nested - type: string - results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 - - name: kind - value: task - resolver: bundles - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:bb2f8f1edec47faa08c1929f2ffc6748f3a96af9644e6c40000081c6ff3ec894 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - 
name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:0503f9313dfe70e4defda88a7226ec91a74af42198dccfa3280397d965aa16d6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - name: build-container - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_URL - value: $(tasks.clone-repository.results.url) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.6@sha256:b990178b6bf21c353e1567fe1a66d1472f7f4a862f003cf8d5b31d1caa3c43d6 - - name: kind - value: task - resolver: bundles - when: - - input: 
$(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-container - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:d94cad7f41be61074dd21c7dff26dab9217c3435a16f62813c1cb8382dd9aae6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: BINARY_IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:5f9069a07a6dc16aae7a05adf49d2b6792815f3fabd116377578860743f4e0ec - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - 
build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:9568c51a5158d534248908b9b561cf67d2826ed4ea164ffd95628bb42380e6ec - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:60f2dac41844d222086ff7f477e51f3563716b183d87db89f603d6f604c21760 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:ae62d14c999fd93246fef4e57d28570fa5200c3266b9a3263a39965e5a5b02d7 - - 
name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36bcf1531b85c2c7d7b4382bc0a9c61b0501e2e54e84991b11b225bdec0e5928 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:1f0fcba24ebc447d9f8a2ea2e8f262fa435d6c523ca6b0346cd67261551fc9ed - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:1833c618170ab9deb8455667f220df8e88d16ccd630a2361366f594e2bdcb712 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:738e6e2108bee5b50309a37b54bc1adf8433ac63598dbb6830d6cb4ac65d9de6 - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: quay.io/konflux-ci/konflux-vanguard/task-rpms-signature-scan:0.2@sha256:3d9fbf2c0a732f736b050c293380b63c8c72ab38d0ef79fcf9d1b7d8fcd25efb - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - 
values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true + - name: build-platforms + value: + - linux/x86_64 + - linux/arm64 + pipelineRef: + name: multiarch-pull-request-pipeline taskRunTemplate: serviceAccountName: build-pipeline-odh-base-image-cpu-py312-c9s workspaces: diff --git a/.tekton/odh-base-image-cpu-py312-c9s-push.yaml b/.tekton/odh-base-image-cpu-py312-c9s-push.yaml index 309c548499..f7efaece84 100644 --- a/.tekton/odh-base-image-cpu-py312-c9s-push.yaml +++ b/.tekton/odh-base-image-cpu-py312-c9s-push.yaml @@ -8,7 +8,7 @@ metadata: pipelinesascode.tekton.dev/cancel-in-progress: "false" pipelinesascode.tekton.dev/max-keep-runs: "3" pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch - == "main" + == "main" && ( "base-images/cpu/c9s-python-3.12/**".pathChanged() || ".tekton/odh-base-image-cpu-py312-c9s-push.yaml".pathChanged() ) creationTimestamp: null labels: appstudio.openshift.io/application: opendatahub-release @@ -28,571 +28,16 @@ spec: value: base-images/cpu/c9s-python-3.12/Dockerfile.cpu - name: path-context value: . - pipelineSpec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. - - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://conforma.dev/docs/policy/packages/release_trusted_task.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. 
- This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . - description: Path to the source code of an application's component from where - to build image. - name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter - path-context - name: dockerfile - type: string - - default: "false" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like - 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - type: string - - default: "false" - description: Build a source image. - name: build-source-image - type: string - - default: "false" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: docker - description: The format for the resulting image's mediaType. Valid values are - oci or docker. 
- name: buildah-format - type: string - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote - VMs - name: privileged-nested - type: string - results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 - - name: kind - value: task - resolver: bundles - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:bb2f8f1edec47faa08c1929f2ffc6748f3a96af9644e6c40000081c6ff3ec894 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - 
name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:0503f9313dfe70e4defda88a7226ec91a74af42198dccfa3280397d965aa16d6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - name: build-container - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_URL - value: $(tasks.clone-repository.results.url) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.6@sha256:b990178b6bf21c353e1567fe1a66d1472f7f4a862f003cf8d5b31d1caa3c43d6 - - name: kind - value: task - resolver: bundles - when: - - input: 
$(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-container - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:d94cad7f41be61074dd21c7dff26dab9217c3435a16f62813c1cb8382dd9aae6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: BINARY_IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:5f9069a07a6dc16aae7a05adf49d2b6792815f3fabd116377578860743f4e0ec - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - 
build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:9568c51a5158d534248908b9b561cf67d2826ed4ea164ffd95628bb42380e6ec - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:60f2dac41844d222086ff7f477e51f3563716b183d87db89f603d6f604c21760 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:ae62d14c999fd93246fef4e57d28570fa5200c3266b9a3263a39965e5a5b02d7 - - 
name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36bcf1531b85c2c7d7b4382bc0a9c61b0501e2e54e84991b11b225bdec0e5928 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:1f0fcba24ebc447d9f8a2ea2e8f262fa435d6c523ca6b0346cd67261551fc9ed - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:1833c618170ab9deb8455667f220df8e88d16ccd630a2361366f594e2bdcb712 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:738e6e2108bee5b50309a37b54bc1adf8433ac63598dbb6830d6cb4ac65d9de6 - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: quay.io/konflux-ci/konflux-vanguard/task-rpms-signature-scan:0.2@sha256:3d9fbf2c0a732f736b050c293380b63c8c72ab38d0ef79fcf9d1b7d8fcd25efb - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - 
values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - latest + - name: build-platforms + value: + - linux/x86_64 + - linux/arm64 + pipelineRef: + name: multiarch-push-pipeline taskRunTemplate: serviceAccountName: build-pipeline-odh-base-image-cpu-py312-c9s workspaces: diff --git a/.tekton/odh-base-image-cpu-py312-ubi9-pull-request.yaml b/.tekton/odh-base-image-cpu-py312-ubi9-pull-request.yaml index b034e58f7a..866d260a21 100644 --- a/.tekton/odh-base-image-cpu-py312-ubi9-pull-request.yaml +++ b/.tekton/odh-base-image-cpu-py312-ubi9-pull-request.yaml @@ -8,8 +8,9 @@ metadata: build.appstudio.redhat.com/target_branch: '{{target_branch}}' pipelinesascode.tekton.dev/cancel-in-progress: "true" pipelinesascode.tekton.dev/max-keep-runs: "3" - pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch - == "main" + pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-base\-image\-cpu\-py312\-ubi9|base\-images/cpu/ubi9\-python\-3\.12) + pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch == "main" && ( "base-images/cpu/ubi9-python-3.12/**".pathChanged() || ".tekton/odh-base-image-cpu-py312-ubi9-pull-request.yaml".pathChanged() ) + && body.repository.full_name == "opendatahub-io/notebooks" creationTimestamp: null labels: appstudio.openshift.io/application: opendatahub-release @@ -18,6 +19,8 @@ metadata: name: odh-base-image-cpu-py312-ubi9-on-pull-request namespace: open-data-hub-tenant spec: + timeouts: + pipeline: 3h params: - name: git-url value: '{{source_url}}' @@ -31,571 +34,12 @@ spec: value: base-images/cpu/ubi9-python-3.12/Dockerfile.cpu - name: path-context value: . - pipelineSpec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. 
- - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://conforma.dev/docs/policy/packages/release_trusted_task.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. - This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . - description: Path to the source code of an application's component from where - to build image. - name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter - path-context - name: dockerfile - type: string - - default: "false" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like - 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - type: string - - default: "false" - description: Build a source image. 
- name: build-source-image - type: string - - default: "false" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: docker - description: The format for the resulting image's mediaType. Valid values are - oci or docker. - name: buildah-format - type: string - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote - VMs - name: privileged-nested - type: string - results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 - - name: kind - value: task - resolver: bundles - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:bb2f8f1edec47faa08c1929f2ffc6748f3a96af9644e6c40000081c6ff3ec894 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:0503f9313dfe70e4defda88a7226ec91a74af42198dccfa3280397d965aa16d6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - name: build-container - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_URL - value: $(tasks.clone-repository.results.url) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - 
prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.6@sha256:b990178b6bf21c353e1567fe1a66d1472f7f4a862f003cf8d5b31d1caa3c43d6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-container - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:d94cad7f41be61074dd21c7dff26dab9217c3435a16f62813c1cb8382dd9aae6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: BINARY_IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:5f9069a07a6dc16aae7a05adf49d2b6792815f3fabd116377578860743f4e0ec - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - 
"true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:9568c51a5158d534248908b9b561cf67d2826ed4ea164ffd95628bb42380e6ec - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: 
$(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:60f2dac41844d222086ff7f477e51f3563716b183d87db89f603d6f604c21760 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - 
coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:ae62d14c999fd93246fef4e57d28570fa5200c3266b9a3263a39965e5a5b02d7 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36bcf1531b85c2c7d7b4382bc0a9c61b0501e2e54e84991b11b225bdec0e5928 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:1f0fcba24ebc447d9f8a2ea2e8f262fa435d6c523ca6b0346cd67261551fc9ed - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: 
CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:1833c618170ab9deb8455667f220df8e88d16ccd630a2361366f594e2bdcb712 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:738e6e2108bee5b50309a37b54bc1adf8433ac63598dbb6830d6cb4ac65d9de6 - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: 
quay.io/konflux-ci/konflux-vanguard/task-rpms-signature-scan:0.2@sha256:3d9fbf2c0a732f736b050c293380b63c8c72ab38d0ef79fcf9d1b7d8fcd25efb - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true + - name: build-platforms + value: + - linux/x86_64 + - linux/arm64 + pipelineRef: + name: multiarch-pull-request-pipeline taskRunTemplate: serviceAccountName: build-pipeline-odh-base-image-cpu-py312-ubi9 workspaces: diff --git a/.tekton/odh-base-image-cpu-py312-ubi9-push.yaml b/.tekton/odh-base-image-cpu-py312-ubi9-push.yaml index 76b3860cc7..999638eb5b 100644 --- a/.tekton/odh-base-image-cpu-py312-ubi9-push.yaml +++ b/.tekton/odh-base-image-cpu-py312-ubi9-push.yaml @@ -8,7 +8,7 @@ metadata: pipelinesascode.tekton.dev/cancel-in-progress: "false" pipelinesascode.tekton.dev/max-keep-runs: "3" pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch - == "main" + == "main" && ( "base-images/cpu/ubi9-python-3.12/**".pathChanged() || ".tekton/odh-base-image-cpu-py312-ubi9-push.yaml".pathChanged() ) creationTimestamp: null labels: appstudio.openshift.io/application: opendatahub-release @@ -28,571 +28,12 @@ spec: value: base-images/cpu/ubi9-python-3.12/Dockerfile.cpu - name: path-context value: . - pipelineSpec: - description: | - This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. - - _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. 
EC will pass the [`trusted_task.trusted`](https://conforma.dev/docs/policy/packages/release_trusted_task.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. - This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ - params: - - description: Source Repository URL - name: git-url - type: string - - default: "" - description: Revision of the Source Repository - name: revision - type: string - - description: Fully Qualified Output Image - name: output-image - type: string - - default: . - description: Path to the source code of an application's component from where - to build image. - name: path-context - type: string - - default: Dockerfile - description: Path to the Dockerfile inside the context specified by parameter - path-context - name: dockerfile - type: string - - default: "false" - description: Force rebuild image - name: rebuild - type: string - - default: "false" - description: Skip checks against built image - name: skip-checks - type: string - - default: "false" - description: Execute the build with network isolation - name: hermetic - type: string - - default: "" - description: Build dependencies to be prefetched - name: prefetch-input - type: string - - default: "" - description: Image tag expiration time, time values could be something like - 1h, 2d, 3w for hours, days, and weeks, respectively. - name: image-expires-after - type: string - - default: "false" - description: Build a source image. - name: build-source-image - type: string - - default: "false" - description: Add built image into an OCI image index - name: build-image-index - type: string - - default: docker - description: The format for the resulting image's mediaType. Valid values are - oci or docker. 
- name: buildah-format - type: string - - default: [] - description: Array of --build-arg values ("arg=value" strings) for buildah - name: build-args - type: array - - default: "" - description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file - name: build-args-file - type: string - - default: "false" - description: Whether to enable privileged mode, should be used only with remote - VMs - name: privileged-nested - type: string - results: - - description: "" - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - description: "" - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - description: "" - name: CHAINS-GIT_URL - value: $(tasks.clone-repository.results.url) - - description: "" - name: CHAINS-GIT_COMMIT - value: $(tasks.clone-repository.results.commit) - tasks: - - name: init - params: - - name: image-url - value: $(params.output-image) - - name: rebuild - value: $(params.rebuild) - - name: skip-checks - value: $(params.skip-checks) - taskRef: - params: - - name: name - value: init - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 - - name: kind - value: task - resolver: bundles - - name: clone-repository - params: - - name: url - value: $(params.git-url) - - name: revision - value: $(params.revision) - - name: ociStorage - value: $(params.output-image).git - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - init - taskRef: - params: - - name: name - value: git-clone-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:bb2f8f1edec47faa08c1929f2ffc6748f3a96af9644e6c40000081c6ff3ec894 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - workspaces: - - name: basic-auth - workspace: git-auth - - 
name: prefetch-dependencies - params: - - name: input - value: $(params.prefetch-input) - - name: SOURCE_ARTIFACT - value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) - - name: ociStorage - value: $(params.output-image).prefetch - - name: ociArtifactExpiresAfter - value: $(params.image-expires-after) - runAfter: - - clone-repository - taskRef: - params: - - name: name - value: prefetch-dependencies-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:0503f9313dfe70e4defda88a7226ec91a74af42198dccfa3280397d965aa16d6 - - name: kind - value: task - resolver: bundles - workspaces: - - name: git-basic-auth - workspace: git-auth - - name: netrc - workspace: netrc - - name: build-container - params: - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: PRIVILEGED_NESTED - value: $(params.privileged-nested) - - name: SOURCE_URL - value: $(tasks.clone-repository.results.url) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - prefetch-dependencies - taskRef: - params: - - name: name - value: buildah-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.6@sha256:b990178b6bf21c353e1567fe1a66d1472f7f4a862f003cf8d5b31d1caa3c43d6 - - name: kind - value: task - resolver: bundles - when: - - input: 
$(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-image-index - params: - - name: IMAGE - value: $(params.output-image) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: ALWAYS_BUILD_INDEX - value: $(params.build-image-index) - - name: IMAGES - value: - - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) - - name: BUILDAH_FORMAT - value: $(params.buildah-format) - runAfter: - - build-container - taskRef: - params: - - name: name - value: build-image-index - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:d94cad7f41be61074dd21c7dff26dab9217c3435a16f62813c1cb8382dd9aae6 - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - name: build-source-image - params: - - name: BINARY_IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: BINARY_IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: source-build-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:5f9069a07a6dc16aae7a05adf49d2b6792815f3fabd116377578860743f4e0ec - - name: kind - value: task - resolver: bundles - when: - - input: $(tasks.init.results.build) - operator: in - values: - - "true" - - input: $(params.build-source-image) - operator: in - values: - - "true" - - name: deprecated-base-image-check - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - 
build-image-index - taskRef: - params: - - name: name - value: deprecated-image-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:f59175d9a0a60411738228dfe568af4684af4aa5e7e05c832927cb917801d489 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clair-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clair-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: ecosystem-cert-preflight-checks - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: ecosystem-cert-preflight-checks - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:9568c51a5158d534248908b9b561cf67d2826ed4ea164ffd95628bb42380e6ec - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-snyk-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-snyk-check-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:60f2dac41844d222086ff7f477e51f3563716b183d87db89f603d6f604c21760 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: clamav-scan - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: clamav-scan - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-coverity-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE - value: $(params.output-image) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: HERMETIC - value: $(params.hermetic) - - name: PREFETCH_INPUT - value: $(params.prefetch-input) - - name: IMAGE_EXPIRES_AFTER - value: $(params.image-expires-after) - - name: COMMIT_SHA - value: $(tasks.clone-repository.results.commit) - - name: BUILD_ARGS - value: - - $(params.build-args[*]) - - name: BUILD_ARGS_FILE - value: $(params.build-args-file) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - coverity-availability-check - taskRef: - params: - - name: name - value: sast-coverity-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:ae62d14c999fd93246fef4e57d28570fa5200c3266b9a3263a39965e5a5b02d7 - - 
name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - input: $(tasks.coverity-availability-check.results.STATUS) - operator: in - values: - - success - - name: coverity-availability-check - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: coverity-availability-check - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36bcf1531b85c2c7d7b4382bc0a9c61b0501e2e54e84991b11b225bdec0e5928 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-shell-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-shell-check-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:1f0fcba24ebc447d9f8a2ea2e8f262fa435d6c523ca6b0346cd67261551fc9ed - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: sast-unicode-check - params: - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - - name: CACHI2_ARTIFACT - value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: sast-unicode-check-oci-ta - - name: bundle - value: 
quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:1833c618170ab9deb8455667f220df8e88d16ccd630a2361366f594e2bdcb712 - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - values: - - "false" - - name: apply-tags - params: - - name: IMAGE_URL - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: apply-tags - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:f44be1bf0262471f2f503f5e19da5f0628dcaf968c86272a2ad6b4871e708448 - - name: kind - value: task - resolver: bundles - - name: push-dockerfile - params: - - name: IMAGE - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: IMAGE_DIGEST - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - - name: DOCKERFILE - value: $(params.dockerfile) - - name: CONTEXT - value: $(params.path-context) - - name: SOURCE_ARTIFACT - value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: push-dockerfile-oci-ta - - name: bundle - value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:738e6e2108bee5b50309a37b54bc1adf8433ac63598dbb6830d6cb4ac65d9de6 - - name: kind - value: task - resolver: bundles - - name: rpms-signature-scan - params: - - name: image-url - value: $(tasks.build-image-index.results.IMAGE_URL) - - name: image-digest - value: $(tasks.build-image-index.results.IMAGE_DIGEST) - runAfter: - - build-image-index - taskRef: - params: - - name: name - value: rpms-signature-scan - - name: bundle - value: quay.io/konflux-ci/konflux-vanguard/task-rpms-signature-scan:0.2@sha256:3d9fbf2c0a732f736b050c293380b63c8c72ab38d0ef79fcf9d1b7d8fcd25efb - - name: kind - value: task - resolver: bundles - when: - - input: $(params.skip-checks) - operator: in - 
values: - - "false" - workspaces: - - name: git-auth - optional: true - - name: netrc - optional: true + - name: build-platforms + value: + - linux/x86_64 + - linux/arm64 + pipelineRef: + name: multiarch-push-pipeline taskRunTemplate: serviceAccountName: build-pipeline-odh-base-image-cpu-py312-ubi9 workspaces: diff --git a/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml new file mode 100644 index 0000000000..9e4e9e18df --- /dev/null +++ b/.tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml @@ -0,0 +1,45 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( "runtimes/datascience/ubi9-python-3.12/**".pathChanged() || "runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf".pathChanged() || ".tekton/odh-pipeline-runtime-datascience-cpu-py312-ubi9-push.yaml".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-pipeline-runtime-datascience-cpu-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-datascience-cpu-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-pipeline-runtime-datascience-cpu-py312-ubi9:{{revision}} 
+ - name: dockerfile + value: runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu + - name: build-args-file + value: runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-datascience-cpu-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml index 8bb52d4798..60176e242f 100644 --- a/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml +++ b/.tekton/odh-pipeline-runtime-minimal-cpu-py312-ubi9-push.yaml @@ -33,7 +33,7 @@ spec: - name: additional-tags value: - '{{target_branch}}-{{revision}}' - - 2025b-v1.38 + - 2025b-v1.39 pipelineRef: name: singlearch-push-pipeline taskRunTemplate: diff --git a/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml new file mode 100644 index 0000000000..8428f80f03 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml @@ -0,0 +1,58 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( 
".tekton/odh-pipeline-runtime-pytorch-cuda-py312-ubi9-push.yaml".pathChanged() || "runtimes/pytorch/ubi9-python-3.12/**".pathChanged() || "runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-cuda-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-pytorch-cuda-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-pipeline-runtime-pytorch-cuda-py312-ubi9:{{revision}} + - name: dockerfile + value: runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda + - name: build-args-file + value: runtimes/pytorch/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: build-container + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-cuda-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml index 78d5c1ffba..9de6da3254 100644 --- a/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml +++ b/.tekton/odh-pipeline-runtime-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml @@ -35,7 +35,7 @@ spec: - name: additional-tags value: - '{{target_branch}}-{{revision}}' - - 2025b-v1.38 + - 2025b-v1.39 taskRunSpecs: - 
pipelineTaskName: build-container stepSpecs: diff --git a/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml new file mode 100644 index 0000000000..756ef4951b --- /dev/null +++ b/.tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml @@ -0,0 +1,47 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && (".tekton/odh-pipeline-runtime-pytorch-rocm-py312-ubi9-push.yaml".pathChanged() || "runtimes/rocm-pytorch/ubi9-python-3.12/**".pathChanged() || "runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-pipeline-runtime-pytorch-rocm-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-pytorch-rocm-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-pipeline-runtime-pytorch-rocm-py312-ubi9:{{revision}} + - name: dockerfile + value: runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm + - name: build-args-file + value: runtimes/rocm-pytorch/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . 
+ - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-pytorch-rocm-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml new file mode 100644 index 0000000000..defc669d18 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml @@ -0,0 +1,64 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && (".tekton/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-push.yaml".pathChanged() || "runtimes/tensorflow/ubi9-python-3.12/**".pathChanged() || "runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-tensorflow-cuda-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: 
quay.io/opendatahub/odh-pipeline-runtime-tensorflow-cuda-py312-ubi9:{{revision}} + - name: dockerfile + value: runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda + - name: build-args-file + value: runtimes/tensorflow/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: ecosystem-cert-preflight-checks + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-cuda-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml b/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml new file mode 100644 index 0000000000..abc46c79c5 --- /dev/null +++ b/.tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml @@ -0,0 +1,78 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-push.yaml".pathChanged() || 
"runtimes/rocm-tensorflow/ubi9-python-3.12/**".pathChanged() || "runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-pipeline-runtime-tensorflow-rocm-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-pipeline-runtime-tensorflow-rocm-py312-ubi9:{{revision}} + - name: dockerfile + value: runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm + - name: build-args-file + value: runtimes/rocm-tensorflow/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: build-container + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: ecosystem-cert-preflight-checks + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + - pipelineTaskName: clair-scan + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-pipeline-runtime-tensorflow-rocm-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml new file mode 100644 index 0000000000..51f55ca500 --- /dev/null +++ 
b/.tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml @@ -0,0 +1,49 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-codeserver-datascience-cpu-py312-ubi9-push.yaml".pathChanged() || "codeserver/ubi9-python-3.12/**".pathChanged() || "codeserver/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-codeserver-datascience-cpu-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-codeserver-datascience-cpu-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-codeserver-datascience-cpu-py312-ubi9:{{revision}} + - name: dockerfile + value: codeserver/ubi9-python-3.12/Dockerfile.cpu + - name: build-args-file + value: codeserver/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . 
+ - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + - name: build-platforms + value: + - linux-extra-fast/amd64 + - linux-m2xlarge/arm64 + pipelineRef: + name: multiarch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-codeserver-datascience-cpu-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml new file mode 100644 index 0000000000..64715ca372 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml @@ -0,0 +1,56 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-datascience-cpu-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-datascience-cpu-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-datascience-cpu-py312-ubi9-on-push + namespace: 
open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-datascience-cpu-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu + - name: build-args-file + value: jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: build-container + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-datascience-cpu-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml new file mode 100644 index 0000000000..e4a29bfd29 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml @@ -0,0 +1,45 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-cpu-py312-ubi9-push.yaml".pathChanged() || 
"jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cpu-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-cpu-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-minimal-cpu-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cpu-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml new file mode 100644 index 0000000000..6de814a01d --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml @@ -0,0 +1,45 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + 
pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-minimal-cuda-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-cuda-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-minimal-cuda-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-cuda-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml new file mode 100644 index 0000000000..d56074efdb --- /dev/null +++ b/.tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml @@ -0,0 +1,59 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +# +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-minimal-rocm-py312-ubi9-push.yaml".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-minimal-rocm-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-minimal-rocm-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: 
quay.io/opendatahub/odh-workbench-jupyter-minimal-rocm-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm + - name: build-args-file + value: jupyter/minimal/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: build-container + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-minimal-rocm-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml new file mode 100644 index 0000000000..31df3c2070 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml @@ -0,0 +1,56 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-pytorch-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/pytorch/ubi9-python-3.12/**".pathChanged() || "cuda/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || 
"jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-cuda-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-pytorch-cuda-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-cuda-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda + - name: build-args-file + value: jupyter/pytorch/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . 
+ - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: build-container + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-cuda-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml index ad253670b8..3cb20dbcfe 100644 --- a/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml +++ b/.tekton/odh-workbench-jupyter-pytorch-llmcompressor-cuda-py312-ubi9-push.yaml @@ -33,7 +33,7 @@ spec: - name: additional-tags value: - '{{target_branch}}-{{revision}}' - - 2025b-v1.38 + - 2025b-v1.39 taskRunSpecs: - pipelineTaskName: build-container stepSpecs: diff --git a/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml new file mode 100644 index 0000000000..3165d85c1b --- /dev/null +++ b/.tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml @@ -0,0 +1,56 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( 
".tekton/odh-workbench-jupyter-pytorch-rocm-py312-ubi9-push.yaml".pathChanged() || "jupyter/rocm/pytorch/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-pytorch-rocm-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-pytorch-rocm-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-pytorch-rocm-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm + - name: build-args-file + value: jupyter/rocm/pytorch/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . 
+ - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + taskRunSpecs: + - pipelineTaskName: build-container + stepSpecs: + - name: build + computeResources: + requests: + cpu: '8' + memory: 16Gi + limits: + cpu: '16' + memory: 32Gi + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-pytorch-rocm-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml new file mode 100644 index 0000000000..9415db9eaf --- /dev/null +++ b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml @@ -0,0 +1,45 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-push.yaml".pathChanged() || "jupyter/tensorflow/ubi9-python-3.12/**".pathChanged() || "jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || 
"jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-tensorflow-cuda-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-tensorflow-cuda-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda + - name: build-args-file + value: jupyter/tensorflow/ubi9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-cuda-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v3-0-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v3-0-push.yaml index afb0ea56a8..55e6c26379 100644 --- a/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v3-0-push.yaml +++ b/.tekton/odh-workbench-jupyter-tensorflow-cuda-py312-v3-0-push.yaml @@ -1,6 +1,5 @@ apiVersion: tekton.dev/v1 kind: PipelineRun -# retrigger Konflux builds to fix https://issues.redhat.com/browse/RHOAIENG-31914 metadata: annotations: build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} diff --git a/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml new file mode 100644 index 0000000000..7aad1973cd --- /dev/null +++ 
b/.tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml @@ -0,0 +1,47 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-push.yaml".pathChanged() || "jupyter/rocm/tensorflow/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-tensorflow-rocm-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-tensorflow-rocm-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm + - name: build-args-file + value: 
jupyter/rocm/tensorflow/ubi9-python-3.12/build-args/rocm.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-tensorflow-rocm-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml new file mode 100644 index 0000000000..602b9bd633 --- /dev/null +++ b/.tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml @@ -0,0 +1,47 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && !("manifests/base/params-latest.env".pathChanged()) && ( ".tekton/odh-workbench-jupyter-trustyai-cpu-py312-ubi9-push.yaml".pathChanged() || "jupyter/trustyai/ubi9-python-3.12/**".pathChanged() || "jupyter/utils/**".pathChanged() || "jupyter/minimal/ubi9-python-3.12/start-notebook.sh".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mongodb-org-6.0.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/mssql-2022.repo-x86_64/**".pathChanged() || "jupyter/datascience/ubi9-python-3.12/setup-elyra.sh".pathChanged() || "jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: 
opendatahub-release + appstudio.openshift.io/component: odh-workbench-jupyter-trustyai-cpu-py312-ubi9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-jupyter-trustyai-cpu-py312-ubi9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9:{{revision}} + - name: dockerfile + value: jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu + - name: build-args-file + value: jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-jupyter-trustyai-cpu-py312-ubi9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cpu-py312-c9s-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-c9s-push.yaml index a62e212c1b..0822062b0a 100644 --- a/.tekton/odh-workbench-rstudio-minimal-cpu-py312-c9s-push.yaml +++ b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-c9s-push.yaml @@ -36,7 +36,7 @@ spec: - name: additional-tags value: - '{{target_branch}}-{{revision}}' - - 2025b-v1.38 + - 2025b-v1.39 pipelineRef: name: singlearch-push-pipeline taskRunTemplate: diff --git a/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc-push.yaml new file mode 100644 index 0000000000..864dcf86f1 --- /dev/null +++ b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc-push.yaml @@ -0,0 +1,610 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/red-hat-data-services/notebooks?rev={{revision}} + 
build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch + == "R-studio-konflux-poc" + creationTimestamp: null + labels: + appstudio.openshift.io/application: rstudio-poc + appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/redhat-user-workloads/rhoai-tenant/odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc:{{revision}} + - name: dockerfile + value: rstudio/rhel9-python-3.12/Dockerfile.cpu.konflux + - name: path-context + value: . + - name: build-args-file + value: rstudio/rhel9-python-3.12/build-args/cpu.conf + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - name: hermetic + value: false + pipelineSpec: + description: | + This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. + + _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://conforma.dev/docs/policy/packages/release_trusted_task.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. 
+ This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ + params: + - description: Source Repository URL + name: git-url + type: string + - default: "" + description: Revision of the Source Repository + name: revision + type: string + - description: Fully Qualified Output Image + name: output-image + type: string + - default: . + description: Path to the source code of an application's component from where + to build image. + name: path-context + type: string + - default: Dockerfile + description: Path to the Dockerfile inside the context specified by parameter + path-context + name: dockerfile + type: string + - default: "false" + description: Force rebuild image + name: rebuild + type: string + - default: "false" + description: Skip checks against built image + name: skip-checks + type: string + - default: "false" + description: Execute the build with network isolation + name: hermetic + type: string + - default: "" + description: Build dependencies to be prefetched + name: prefetch-input + type: string + - default: "" + description: Image tag expiration time, time values could be something like + 1h, 2d, 3w for hours, days, and weeks, respectively. + name: image-expires-after + type: string + - default: "false" + description: Build a source image. + name: build-source-image + type: string + - default: "false" + description: Add built image into an OCI image index + name: build-image-index + type: string + - default: docker + description: The format for the resulting image's mediaType. Valid values are + oci or docker. 
+ name: buildah-format + type: string + - default: [] + description: Array of --build-arg values ("arg=value" strings) for buildah + name: build-args + type: array + - default: "" + description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file + name: build-args-file + type: string + - default: "false" + description: Whether to enable privileged mode, should be used only with remote + VMs + name: privileged-nested + type: string + results: + - description: "" + name: IMAGE_URL + value: $(tasks.build-image-index.results.IMAGE_URL) + - description: "" + name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - description: "" + name: CHAINS-GIT_URL + value: $(tasks.clone-repository.results.url) + - description: "" + name: CHAINS-GIT_COMMIT + value: $(tasks.clone-repository.results.commit) + tasks: + - name: init + params: + - name: image-url + value: $(params.output-image) + - name: rebuild + value: $(params.rebuild) + - name: skip-checks + value: $(params.skip-checks) + taskRef: + params: + - name: name + value: init + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 + - name: kind + value: task + resolver: bundles + - name: clone-repository + params: + - name: url + value: $(params.git-url) + - name: revision + value: $(params.revision) + - name: ociStorage + value: $(params.output-image).git + - name: ociArtifactExpiresAfter + value: $(params.image-expires-after) + runAfter: + - init + taskRef: + params: + - name: name + value: git-clone-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3dc39eae48745a96097c07c577b944d6203a91c35d3f71d9ed5feab41d327a6a + - name: kind + value: task + resolver: bundles + when: + - input: $(tasks.init.results.build) + operator: in + values: + - "true" + workspaces: + - name: basic-auth + workspace: git-auth + - 
name: prefetch-dependencies + params: + - name: input + value: $(params.prefetch-input) + - name: SOURCE_ARTIFACT + value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) + - name: ociStorage + value: $(params.output-image).prefetch + - name: ociArtifactExpiresAfter + value: $(params.image-expires-after) + runAfter: + - clone-repository + taskRef: + params: + - name: name + value: prefetch-dependencies-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:5946ca57aa29f162e11b74984ec58960f55f9fb6a0e97c6c9215c4161f768726 + - name: kind + value: task + resolver: bundles + workspaces: + - name: git-basic-auth + workspace: git-auth + - name: netrc + workspace: netrc + - name: build-container + params: + - name: IMAGE + value: $(params.output-image) + - name: DOCKERFILE + value: $(params.dockerfile) + - name: CONTEXT + value: $(params.path-context) + - name: HERMETIC + value: $(params.hermetic) + - name: PREFETCH_INPUT + value: $(params.prefetch-input) + - name: IMAGE_EXPIRES_AFTER + value: $(params.image-expires-after) + - name: COMMIT_SHA + value: $(tasks.clone-repository.results.commit) + - name: BUILD_ARGS + value: + - $(params.build-args[*]) + - name: BUILD_ARGS_FILE + value: $(params.build-args-file) + - name: PRIVILEGED_NESTED + value: $(params.privileged-nested) + - name: SOURCE_URL + value: $(tasks.clone-repository.results.url) + - name: BUILDAH_FORMAT + value: $(params.buildah-format) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - prefetch-dependencies + taskRef: + params: + - name: name + value: buildah-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.6@sha256:27b04ea788cf64fd38c7d151feb5e1ca408804fa5a70cf704ae746a086ee92b8 + - name: kind + value: task + resolver: bundles + when: + - input: 
$(tasks.init.results.build) + operator: in + values: + - "true" + - name: build-image-index + params: + - name: IMAGE + value: $(params.output-image) + - name: COMMIT_SHA + value: $(tasks.clone-repository.results.commit) + - name: IMAGE_EXPIRES_AFTER + value: $(params.image-expires-after) + - name: ALWAYS_BUILD_INDEX + value: $(params.build-image-index) + - name: IMAGES + value: + - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) + - name: BUILDAH_FORMAT + value: $(params.buildah-format) + runAfter: + - build-container + taskRef: + params: + - name: name + value: build-image-index + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:31197f4ee71be47c6f491e888ff266cbbb8ad5ed1c7c4141cc14f002d1802a50 + - name: kind + value: task + resolver: bundles + when: + - input: $(tasks.init.results.build) + operator: in + values: + - "true" + - name: build-source-image + params: + - name: BINARY_IMAGE + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: BINARY_IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: source-build-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:282cb5a9119a87e88559444feff67d76d6f356d03654b4845632c049b2314735 + - name: kind + value: task + resolver: bundles + when: + - input: $(tasks.init.results.build) + operator: in + values: + - "true" + - input: $(params.build-source-image) + operator: in + values: + - "true" + - name: deprecated-base-image-check + params: + - name: IMAGE_URL + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + runAfter: + - 
build-image-index + taskRef: + params: + - name: name + value: deprecated-image-check + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:462baed733dfc38aca5395499e92f19b6f13a74c2e88fe5d86c3cffa2f899b57 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: clair-scan + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: clair-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: ecosystem-cert-preflight-checks + params: + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: ecosystem-cert-preflight-checks + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:9568c51a5158d534248908b9b561cf67d2826ed4ea164ffd95628bb42380e6ec + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-snyk-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: sast-snyk-check-oci-ta + - name: bundle + value: 
quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:8ad28b7783837a24acbc9a8494c935e796e591ce476085ad5899bebd7e53f077 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: clamav-scan + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: clamav-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-coverity-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE + value: $(params.output-image) + - name: DOCKERFILE + value: $(params.dockerfile) + - name: CONTEXT + value: $(params.path-context) + - name: HERMETIC + value: $(params.hermetic) + - name: PREFETCH_INPUT + value: $(params.prefetch-input) + - name: IMAGE_EXPIRES_AFTER + value: $(params.image-expires-after) + - name: COMMIT_SHA + value: $(tasks.clone-repository.results.commit) + - name: BUILD_ARGS + value: + - $(params.build-args[*]) + - name: BUILD_ARGS_FILE + value: $(params.build-args-file) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - coverity-availability-check + taskRef: + params: + - name: name + value: sast-coverity-check-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:78f5244a8cfd28c890ed62db7e4ff1fc97ff39876d37fb19f1b0c2c286a4002c + - 
name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - input: $(tasks.coverity-availability-check.results.STATUS) + operator: in + values: + - success + - name: coverity-availability-check + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: coverity-availability-check + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36400873d3031df128c55aa71ee11d322c3e55fd8f13dc5779098fbc117c0aa3 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-shell-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: sast-shell-check-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:d44336d7bcbd1f7cedee639357a493bd1f661e2859e49e11a34644bdf6819c4e + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-unicode-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: sast-unicode-check-oci-ta + - name: bundle + value: 
quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:e5a8d3e8e7be7246a1460385b95c084ea6e8fe7520d40fe4389deb90f1bf5176 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: apply-tags + params: + - name: IMAGE_URL + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: apply-tags + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:4c2b0a2d2904108f8d19edfa878df6cd49ed19aab73ab6fc6a435fba0265f771 + - name: kind + value: task + resolver: bundles + - name: push-dockerfile + params: + - name: IMAGE + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: DOCKERFILE + value: $(params.dockerfile) + - name: CONTEXT + value: $(params.path-context) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: push-dockerfile-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:13633d5ba8445c0f732a0a5d1b33ffbb708398e45ef1647542b0ab22fee25a6a + - name: kind + value: task + resolver: bundles + - name: rpms-signature-scan + params: + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: rpms-signature-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:c02429b45145fc7f2b1fa94ec2f48c0d8ac203fd4385796b4c93e5d0dec967f8 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + 
values: + - "false" + workspaces: + - name: git-auth + optional: true + - name: netrc + optional: true + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cpu-py312-rhel9-poc + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-pull-request.yaml b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-pull-request.yaml new file mode 100644 index 0000000000..7bb14caecd --- /dev/null +++ b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-pull-request.yaml @@ -0,0 +1,55 @@ +# yamllint disable-file +# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py +--- +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: 'true' + pipelinesascode.tekton.dev/max-keep-runs: '3' + pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-rstudio\-minimal\-cpu\-py312\-rhel9|rstudio/rhel9\-python\-3\.12) + pipelinesascode.tekton.dev/on-cel-expression: | + event == "pull_request" && target_branch == "main" && + !("manifests/base/params-latest.env".pathChanged()) && + ( ".tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-pull-request.yaml".pathChanged() || "rstudio/rhel9-python-3.12/**".pathChanged() || "rstudio/rhel9-python-3.12/build-args/cpu.conf".pathChanged() ) + && body.repository.full_name == "opendatahub-io/notebooks" + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cpu-py312-rhel9 + pipelines.appstudio.openshift.io/type: build + name: 
odh-workbench-rstudio-minimal-cpu-py312-rhel9-on-pull-request + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 3h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cpu-py312-rhel9:on-pr-{{revision}} + - name: image-expires-after + value: 5d + - name: build-platforms + value: + - linux/x86_64 + - name: dockerfile + value: rstudio/rhel9-python-3.12/Dockerfile.cpu + - name: path-context + value: . + - name: build-args-file + value: rstudio/rhel9-python-3.12/build-args/cpu.conf + pipelineRef: + name: multiarch-pull-request-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cpu-py312-rhel9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-push.yaml new file mode 100644 index 0000000000..9d0b518a09 --- /dev/null +++ b/.tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-push.yaml @@ -0,0 +1,49 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && + !("manifests/base/params-latest.env".pathChanged()) && + ( ".tekton/odh-workbench-rstudio-minimal-cpu-py312-rhel9-push.yaml".pathChanged() || "rstudio/rhel9-python-3.12/**".pathChanged() || "rstudio/rhel9-python-3.12/build-args/cpu.conf".pathChanged() ) + 
creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cpu-py312-rhel9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-rstudio-minimal-cpu-py312-rhel9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cpu-py312-rhel9:{{revision}} + - name: dockerfile + value: rstudio/rhel9-python-3.12/Dockerfile.cpu + - name: build-args-file + value: rstudio/rhel9-python-3.12/build-args/cpu.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cpu-py312-rhel9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cuda-py312-c9s-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-c9s-push.yaml index 53cfd3ba9b..309f155cb5 100644 --- a/.tekton/odh-workbench-rstudio-minimal-cuda-py312-c9s-push.yaml +++ b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-c9s-push.yaml @@ -35,7 +35,7 @@ spec: - name: additional-tags value: - '{{target_branch}}-{{revision}}' - - 2025b-v1.38 + - 2025b-v1.39 pipelineRef: name: singlearch-push-pipeline taskRunTemplate: diff --git a/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc-push.yaml new file mode 100644 index 0000000000..ca7e7e6014 --- /dev/null +++ b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc-push.yaml @@ -0,0 +1,610 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: 
https://github.com/red-hat-data-services/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch + == "R-studio-konflux-poc" + creationTimestamp: null + labels: + appstudio.openshift.io/application: rstudio-poc + appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc-on-push + namespace: rhoai-tenant +spec: + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/redhat-user-workloads/rhoai-tenant/odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc:{{revision}} + - name: dockerfile + value: rstudio/rhel9-python-3.12/Dockerfile.cuda.konflux + - name: path-context + value: . + - name: build-args-file + value: rstudio/rhel9-python-3.12/build-args/cuda.conf + - name: build-platforms + value: + - linux/x86_64 + - linux-m2xlarge/arm64 + - name: hermetic + value: false + pipelineSpec: + description: | + This pipeline is ideal for building container images from a Containerfile while maintaining trust after pipeline customization. + + _Uses `buildah` to create a container image leveraging [trusted artifacts](https://konflux-ci.dev/architecture/ADR/0036-trusted-artifacts.html). It also optionally creates a source image and runs some build-time tests. Information is shared between tasks using OCI artifacts instead of PVCs. EC will pass the [`trusted_task.trusted`](https://conforma.dev/docs/policy/packages/release_trusted_task.html#trusted_task__trusted) policy as long as all data used to build the artifact is generated from trusted tasks. 
+ This pipeline is pushed as a Tekton bundle to [quay.io](https://quay.io/repository/konflux-ci/tekton-catalog/pipeline-docker-build-oci-ta?tab=tags)_ + params: + - description: Source Repository URL + name: git-url + type: string + - default: "" + description: Revision of the Source Repository + name: revision + type: string + - description: Fully Qualified Output Image + name: output-image + type: string + - default: . + description: Path to the source code of an application's component from where + to build image. + name: path-context + type: string + - default: Dockerfile + description: Path to the Dockerfile inside the context specified by parameter + path-context + name: dockerfile + type: string + - default: "false" + description: Force rebuild image + name: rebuild + type: string + - default: "false" + description: Skip checks against built image + name: skip-checks + type: string + - default: "false" + description: Execute the build with network isolation + name: hermetic + type: string + - default: "" + description: Build dependencies to be prefetched + name: prefetch-input + type: string + - default: "" + description: Image tag expiration time, time values could be something like + 1h, 2d, 3w for hours, days, and weeks, respectively. + name: image-expires-after + type: string + - default: "false" + description: Build a source image. + name: build-source-image + type: string + - default: "false" + description: Add built image into an OCI image index + name: build-image-index + type: string + - default: docker + description: The format for the resulting image's mediaType. Valid values are + oci or docker. 
+ name: buildah-format + type: string + - default: [] + description: Array of --build-arg values ("arg=value" strings) for buildah + name: build-args + type: array + - default: "" + description: Path to a file with build arguments for buildah, see https://www.mankier.com/1/buildah-build#--build-arg-file + name: build-args-file + type: string + - default: "false" + description: Whether to enable privileged mode, should be used only with remote + VMs + name: privileged-nested + type: string + results: + - description: "" + name: IMAGE_URL + value: $(tasks.build-image-index.results.IMAGE_URL) + - description: "" + name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - description: "" + name: CHAINS-GIT_URL + value: $(tasks.clone-repository.results.url) + - description: "" + name: CHAINS-GIT_COMMIT + value: $(tasks.clone-repository.results.commit) + tasks: + - name: init + params: + - name: image-url + value: $(params.output-image) + - name: rebuild + value: $(params.rebuild) + - name: skip-checks + value: $(params.skip-checks) + taskRef: + params: + - name: name + value: init + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-init:0.2@sha256:3ca52e1d8885fc229bd9067275f44d5b21a9a609981d0324b525ddeca909bf10 + - name: kind + value: task + resolver: bundles + - name: clone-repository + params: + - name: url + value: $(params.git-url) + - name: revision + value: $(params.revision) + - name: ociStorage + value: $(params.output-image).git + - name: ociArtifactExpiresAfter + value: $(params.image-expires-after) + runAfter: + - init + taskRef: + params: + - name: name + value: git-clone-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-git-clone-oci-ta:0.1@sha256:3dc39eae48745a96097c07c577b944d6203a91c35d3f71d9ed5feab41d327a6a + - name: kind + value: task + resolver: bundles + when: + - input: $(tasks.init.results.build) + operator: in + values: + - "true" + workspaces: + - name: basic-auth + workspace: git-auth + - 
name: prefetch-dependencies + params: + - name: input + value: $(params.prefetch-input) + - name: SOURCE_ARTIFACT + value: $(tasks.clone-repository.results.SOURCE_ARTIFACT) + - name: ociStorage + value: $(params.output-image).prefetch + - name: ociArtifactExpiresAfter + value: $(params.image-expires-after) + runAfter: + - clone-repository + taskRef: + params: + - name: name + value: prefetch-dependencies-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-prefetch-dependencies-oci-ta:0.2@sha256:5946ca57aa29f162e11b74984ec58960f55f9fb6a0e97c6c9215c4161f768726 + - name: kind + value: task + resolver: bundles + workspaces: + - name: git-basic-auth + workspace: git-auth + - name: netrc + workspace: netrc + - name: build-container + params: + - name: IMAGE + value: $(params.output-image) + - name: DOCKERFILE + value: $(params.dockerfile) + - name: CONTEXT + value: $(params.path-context) + - name: HERMETIC + value: $(params.hermetic) + - name: PREFETCH_INPUT + value: $(params.prefetch-input) + - name: IMAGE_EXPIRES_AFTER + value: $(params.image-expires-after) + - name: COMMIT_SHA + value: $(tasks.clone-repository.results.commit) + - name: BUILD_ARGS + value: + - $(params.build-args[*]) + - name: BUILD_ARGS_FILE + value: $(params.build-args-file) + - name: PRIVILEGED_NESTED + value: $(params.privileged-nested) + - name: SOURCE_URL + value: $(tasks.clone-repository.results.url) + - name: BUILDAH_FORMAT + value: $(params.buildah-format) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - prefetch-dependencies + taskRef: + params: + - name: name + value: buildah-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.6@sha256:27b04ea788cf64fd38c7d151feb5e1ca408804fa5a70cf704ae746a086ee92b8 + - name: kind + value: task + resolver: bundles + when: + - input: 
$(tasks.init.results.build) + operator: in + values: + - "true" + - name: build-image-index + params: + - name: IMAGE + value: $(params.output-image) + - name: COMMIT_SHA + value: $(tasks.clone-repository.results.commit) + - name: IMAGE_EXPIRES_AFTER + value: $(params.image-expires-after) + - name: ALWAYS_BUILD_INDEX + value: $(params.build-image-index) + - name: IMAGES + value: + - $(tasks.build-container.results.IMAGE_URL)@$(tasks.build-container.results.IMAGE_DIGEST) + - name: BUILDAH_FORMAT + value: $(params.buildah-format) + runAfter: + - build-container + taskRef: + params: + - name: name + value: build-image-index + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-build-image-index:0.1@sha256:31197f4ee71be47c6f491e888ff266cbbb8ad5ed1c7c4141cc14f002d1802a50 + - name: kind + value: task + resolver: bundles + when: + - input: $(tasks.init.results.build) + operator: in + values: + - "true" + - name: build-source-image + params: + - name: BINARY_IMAGE + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: BINARY_IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: source-build-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-source-build-oci-ta:0.3@sha256:282cb5a9119a87e88559444feff67d76d6f356d03654b4845632c049b2314735 + - name: kind + value: task + resolver: bundles + when: + - input: $(tasks.init.results.build) + operator: in + values: + - "true" + - input: $(params.build-source-image) + operator: in + values: + - "true" + - name: deprecated-base-image-check + params: + - name: IMAGE_URL + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + runAfter: + - 
build-image-index + taskRef: + params: + - name: name + value: deprecated-image-check + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-deprecated-image-check:0.5@sha256:462baed733dfc38aca5395499e92f19b6f13a74c2e88fe5d86c3cffa2f899b57 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: clair-scan + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: clair-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-clair-scan:0.3@sha256:8ec7d7b9438ace5ef3fb03a533d9440d0fd81e51c73b0dc1eb51602fb7cd044e + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: ecosystem-cert-preflight-checks + params: + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: ecosystem-cert-preflight-checks + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-ecosystem-cert-preflight-checks:0.2@sha256:9568c51a5158d534248908b9b561cf67d2826ed4ea164ffd95628bb42380e6ec + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-snyk-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: sast-snyk-check-oci-ta + - name: bundle + value: 
quay.io/konflux-ci/tekton-catalog/task-sast-snyk-check-oci-ta:0.4@sha256:8ad28b7783837a24acbc9a8494c935e796e591ce476085ad5899bebd7e53f077 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: clamav-scan + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: clamav-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-clamav-scan:0.3@sha256:f3d2d179cddcc07d0228d9f52959a233037a3afa2619d0a8b2effbb467db80c3 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-coverity-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE + value: $(params.output-image) + - name: DOCKERFILE + value: $(params.dockerfile) + - name: CONTEXT + value: $(params.path-context) + - name: HERMETIC + value: $(params.hermetic) + - name: PREFETCH_INPUT + value: $(params.prefetch-input) + - name: IMAGE_EXPIRES_AFTER + value: $(params.image-expires-after) + - name: COMMIT_SHA + value: $(tasks.clone-repository.results.commit) + - name: BUILD_ARGS + value: + - $(params.build-args[*]) + - name: BUILD_ARGS_FILE + value: $(params.build-args-file) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - coverity-availability-check + taskRef: + params: + - name: name + value: sast-coverity-check-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-sast-coverity-check-oci-ta:0.3@sha256:78f5244a8cfd28c890ed62db7e4ff1fc97ff39876d37fb19f1b0c2c286a4002c + - 
name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - input: $(tasks.coverity-availability-check.results.STATUS) + operator: in + values: + - success + - name: coverity-availability-check + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: coverity-availability-check + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-coverity-availability-check:0.2@sha256:36400873d3031df128c55aa71ee11d322c3e55fd8f13dc5779098fbc117c0aa3 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-shell-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: sast-shell-check-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-sast-shell-check-oci-ta:0.1@sha256:d44336d7bcbd1f7cedee639357a493bd1f661e2859e49e11a34644bdf6819c4e + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: sast-unicode-check + params: + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + - name: CACHI2_ARTIFACT + value: $(tasks.prefetch-dependencies.results.CACHI2_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: sast-unicode-check-oci-ta + - name: bundle + value: 
quay.io/konflux-ci/tekton-catalog/task-sast-unicode-check-oci-ta:0.3@sha256:e5a8d3e8e7be7246a1460385b95c084ea6e8fe7520d40fe4389deb90f1bf5176 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + values: + - "false" + - name: apply-tags + params: + - name: IMAGE_URL + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: apply-tags + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-apply-tags:0.2@sha256:4c2b0a2d2904108f8d19edfa878df6cd49ed19aab73ab6fc6a435fba0265f771 + - name: kind + value: task + resolver: bundles + - name: push-dockerfile + params: + - name: IMAGE + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: IMAGE_DIGEST + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + - name: DOCKERFILE + value: $(params.dockerfile) + - name: CONTEXT + value: $(params.path-context) + - name: SOURCE_ARTIFACT + value: $(tasks.prefetch-dependencies.results.SOURCE_ARTIFACT) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: push-dockerfile-oci-ta + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-push-dockerfile-oci-ta:0.1@sha256:13633d5ba8445c0f732a0a5d1b33ffbb708398e45ef1647542b0ab22fee25a6a + - name: kind + value: task + resolver: bundles + - name: rpms-signature-scan + params: + - name: image-url + value: $(tasks.build-image-index.results.IMAGE_URL) + - name: image-digest + value: $(tasks.build-image-index.results.IMAGE_DIGEST) + runAfter: + - build-image-index + taskRef: + params: + - name: name + value: rpms-signature-scan + - name: bundle + value: quay.io/konflux-ci/tekton-catalog/task-rpms-signature-scan:0.2@sha256:c02429b45145fc7f2b1fa94ec2f48c0d8ac203fd4385796b4c93e5d0dec967f8 + - name: kind + value: task + resolver: bundles + when: + - input: $(params.skip-checks) + operator: in + 
values: + - "false" + workspaces: + - name: git-auth + optional: true + - name: netrc + optional: true + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cuda-py312-rhel9-poc + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-pull-request.yaml b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-pull-request.yaml new file mode 100644 index 0000000000..08ccd499a9 --- /dev/null +++ b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-pull-request.yaml @@ -0,0 +1,55 @@ +# yamllint disable-file +# This pipeline is autogenerated by scripts/generate_pull_request_pipelineruns.py +--- +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: 'true' + pipelinesascode.tekton.dev/max-keep-runs: '3' + pipelinesascode.tekton.dev/on-comment: ^/kfbuild\s+(all|odh\-workbench\-rstudio\-minimal\-cuda\-py312\-rhel9|rstudio/rhel9\-python\-3\.12) + pipelinesascode.tekton.dev/on-cel-expression: | + event == "pull_request" && target_branch == "main" && + !("manifests/base/params-latest.env".pathChanged()) && + ( ".tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-pull-request.yaml".pathChanged() || "rstudio/rhel9-python-3.12/**".pathChanged() || "rstudio/rhel9-python-3.12/build-args/cuda.conf".pathChanged() ) + && body.repository.full_name == "opendatahub-io/notebooks" + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cuda-py312-rhel9 + pipelines.appstudio.openshift.io/type: build + name: 
odh-workbench-rstudio-minimal-cuda-py312-rhel9-on-pull-request + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 3h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cuda-py312-rhel9:on-pr-{{revision}} + - name: image-expires-after + value: 5d + - name: build-platforms + value: + - linux/x86_64 + - name: dockerfile + value: rstudio/rhel9-python-3.12/Dockerfile.cuda + - name: path-context + value: . + - name: build-args-file + value: rstudio/rhel9-python-3.12/build-args/cuda.conf + pipelineRef: + name: multiarch-pull-request-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cuda-py312-rhel9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-push.yaml b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-push.yaml new file mode 100644 index 0000000000..9534cbecf8 --- /dev/null +++ b/.tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-push.yaml @@ -0,0 +1,49 @@ +apiVersion: tekton.dev/v1 +kind: PipelineRun +metadata: + annotations: + build.appstudio.openshift.io/repo: https://github.com/opendatahub-io/notebooks?rev={{revision}} + build.appstudio.redhat.com/commit_sha: '{{revision}}' + build.appstudio.redhat.com/target_branch: '{{target_branch}}' + pipelinesascode.tekton.dev/cancel-in-progress: "false" + pipelinesascode.tekton.dev/max-keep-runs: "3" + build.appstudio.openshift.io/build-nudge-files: "manifests/base/params-latest.env" + pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch == "main" && + !("manifests/base/params-latest.env".pathChanged()) && + ( ".tekton/odh-workbench-rstudio-minimal-cuda-py312-rhel9-push.yaml".pathChanged() || "rstudio/rhel9-python-3.12/**".pathChanged() || 
"rstudio/rhel9-python-3.12/build-args/cuda.conf".pathChanged() ) + creationTimestamp: + labels: + appstudio.openshift.io/application: opendatahub-release + appstudio.openshift.io/component: odh-workbench-rstudio-minimal-cuda-py312-rhel9 + pipelines.appstudio.openshift.io/type: build + name: odh-workbench-rstudio-minimal-cuda-py312-rhel9-on-push + namespace: open-data-hub-tenant +spec: + timeouts: + pipeline: 8h + params: + - name: git-url + value: '{{source_url}}' + - name: revision + value: '{{revision}}' + - name: output-image + value: quay.io/opendatahub/odh-workbench-rstudio-minimal-cuda-py312-rhel9:{{revision}} + - name: dockerfile + value: rstudio/rhel9-python-3.12/Dockerfile.cuda + - name: build-args-file + value: rstudio/rhel9-python-3.12/build-args/cuda.conf + - name: path-context + value: . + - name: additional-tags + value: + - '{{target_branch}}-{{revision}}' + - 2025b-v1.39 + pipelineRef: + name: singlearch-push-pipeline + taskRunTemplate: + serviceAccountName: build-pipeline-odh-workbench-rstudio-minimal-cuda-py312-rhel9 + workspaces: + - name: git-auth + secret: + secretName: '{{ git_auth_secret }}' +status: {} diff --git a/README.md b/README.md index 69fe9a44c9..6d89f0f9d3 100644 --- a/README.md +++ b/README.md @@ -187,7 +187,7 @@ This table provides a concise overview of the support status for various contain |------------|----------|--------|------|---------|----------|----------|--------|-------------| | CPU Minimal | - | 3.12 | UBI9 | ✅ | ✅ | ✅ | ✅ | [quay.io/repository/opendatahub/odh-workbench-jupyter-minimal-cpu-py312-ubi9](https://quay.io/repository/opendatahub/odh-workbench-jupyter-minimal-cpu-py312-ubi9?tab=tags) | | CPU DataScience | - | 3.12 | UBI9 | ✅ | ✅ | ✅ | ✅ | [quay.io/repository/opendatahub/odh-workbench-jupyter-datascience-cpu-py312-ubi9](https://quay.io/repository/opendatahub/odh-workbench-jupyter-datascience-cpu-py312-ubi9?tab=tags) | -| CPU TrustyAI | - | 3.12 | UBI9 | ✅ | ✅ | ✅ | - | 
[quay.io/repository/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9](https://quay.io/repository/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9?tab=tags) | +| CPU TrustyAI | - | 3.12 | UBI9 | ✅ | ✅ | ✅ | ✅ | [quay.io/repository/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9](https://quay.io/repository/opendatahub/odh-workbench-jupyter-trustyai-cpu-py312-ubi9?tab=tags) | | CPU CodeServer | - | 3.12 | UBI9 | ✅ | ✅ | ✅ | ✅ | [quay.io/repository/opendatahub/odh-workbench-codeserver-datascience-cpu-py312-ubi9](https://quay.io/repository/opendatahub/odh-workbench-codeserver-datascience-cpu-py312-ubi9?tab=tags) | | CPU RStudio | - | 3.12 | C9S | ✅ | ✅ | - | - | [quay.io/repository/opendatahub/odh-workbench-rstudio-minimal-cpu-py312-c9s](https://quay.io/repository/opendatahub/odh-workbench-rstudio-minimal-cpu-py312-c9s?tab=tags) | | CUDA Minimal | 12.8 | 3.12 | UBI9/RHEL9.6 | ✅ | ✅ | ❌ | ❌ | [quay.io/repository/opendatahub/odh-workbench-jupyter-minimal-cuda-py312-ubi9](https://quay.io/repository/opendatahub/odh-workbench-jupyter-minimal-cuda-py312-ubi9?tab=tags) | diff --git a/codeserver/ubi9-python-3.12/Dockerfile.cpu b/codeserver/ubi9-python-3.12/Dockerfile.cpu index b8b6d00d3e..6c5528993c 100644 --- a/codeserver/ubi9-python-3.12/Dockerfile.cpu +++ b/codeserver/ubi9-python-3.12/Dockerfile.cpu @@ -55,13 +55,15 @@ COPY ${CODESERVER_SOURCE_CODE}/devel_env_setup.sh ./ # Important: Since HOME & USER for the python-312 has been changed, # we need to ensure the same cache directory is mounted in # the final stage with the necessary permissions to consume from cache -RUN --mount=type=cache,target=/root/.cache/uv \ - pip install --no-cache-dir uv && \ - # the devel script is ppc64le and s390x specific - sets up build-time dependencies - source ./devel_env_setup.sh && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and 
`--require-hashes` would therefore fail on non amd64, where building is common. - UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +pip install --no-cache-dir uv +# the devel script is ppc64le and s390x specific - sets up build-time dependencies +source ./devel_env_setup.sh +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +EOF # dummy file to make image build wait for this stage RUN touch /tmp/control @@ -85,32 +87,46 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y tar perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/dnf +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y tar perl mesa-libGL skopeo 
+dnf clean all +rm -rf /var/cache/dnf +EOF # (ARCH-ppc64le): since wheels are compiled from source, we need shared libs available at runtime -RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS,target=/OpenBlas,rw \ - bash -c ' \ - if [[ $(uname -m) == "ppc64le" ]]; then \ - PREFIX=/usr/ make install -C /OpenBlas; \ - fi ' +RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS,target=/OpenBlas,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +if [[ $(uname -m) == "ppc64le" ]]; then + PREFIX=/usr/ make install -C /OpenBlas +fi +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -157,21 +173,28 @@ COPY --from=rpm-base /tmp/control /dev/null # Install code-server # Note: Use cache mounts, bind mounts fail on konflux # https://redhat-internal.slack.com/archives/C04PZ7H0VA8/p1755628065772589?thread_ts=1755597929.335999&cid=C04PZ7H0VA8 -RUN --mount=type=cache,from=rpm-base,source=/tmp/,target=/code-server-rpm/,rw \ - # EXPLANATION: dnf installation produces an "unsigned rpm" error 
from Konflux (Conforma) - # since we're building rpm from source, we will simply unpack it over / - # dnf install -y "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" - # dnf -y clean all --enablerepo='*' - dnf install -y cpio && dnf -y clean all && \ - cd / && rpm2cpio "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" | cpio -idmv +RUN --mount=type=cache,from=rpm-base,source=/tmp/,target=/code-server-rpm/,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +# EXPLANATION: dnf installation produces an "unsigned rpm" error from Konflux (Conforma) +# since we're building rpm from source, we will simply unpack it over / +# dnf install -y "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" +# dnf -y clean all --enablerepo='*' +dnf install -y cpio +dnf -y clean all +cd / +rpm2cpio "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" | cpio -idmv +EOF COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/utils utils/ # Create and intall the extensions though build-time on a temporary directory. Later this directory will copied on the `/opt/app-root/src/.local/share/code-server/extensions` via run-code-server.sh file when it starts up. 
# https://coder.com/docs/code-server/FAQ#how-do-i-install-an-extension -RUN mkdir -p /opt/app-root/extensions-temp && \ - code-server --install-extension /opt/app-root/bin/utils/ms-python.python-2025.14.0.vsix --extensions-dir /opt/app-root/extensions-temp && \ - code-server --install-extension /opt/app-root/bin/utils/ms-toolsai.jupyter-2025.8.0.vsix --extensions-dir /opt/app-root/extensions-temp +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +mkdir -p /opt/app-root/extensions-temp +code-server --install-extension /opt/app-root/bin/utils/ms-python.python-2025.14.0.vsix --extensions-dir /opt/app-root/extensions-temp +code-server --install-extension /opt/app-root/bin/utils/ms-toolsai.jupyter-2025.8.0.vsix --extensions-dir /opt/app-root/extensions-temp +EOF # Install NGINX to proxy code-server and pass probes check ENV APP_ROOT=/opt/app-root @@ -188,10 +211,13 @@ ENV NGINX_CONFIGURATION_PATH=${APP_ROOT}/etc/nginx.d \ NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl # Modules does not exist -RUN INSTALL_PKGS="bind-utils nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - rpm -V $INSTALL_PKGS && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +INSTALL_PKGS="bind-utils nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +dnf -y clean all --enablerepo='*' +EOF # Configure httpd for CGI processing COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf @@ -216,34 +242,37 @@ COPY ${CODESERVER_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ # UID=1001 && GID=0 # UID=&& GID=0 # UID=1001 && GID= -RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/api/ && \ - mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - mkdir -p 
${NGINX_LOG_PATH} && \ - mkdir -p ${NGINX_PERL_MODULE_PATH} && \ - # Create httpd directories and set permissions - mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chown -R 1001:0 ${NGINX_CONF_PATH} && \ - chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ - chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ - chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chmod ug+rw ${NGINX_CONF_PATH} && \ - chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ - chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ - chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - # Make CGI script executable - chmod +x /opt/app-root/api/kernels/access.cgi && \ - rpm-file-permissions && \ - # Ensure the temporary directory and target directory have the correct permissions - mkdir -p /opt/app-root/src/.local/share/code-server/extensions && \ - mkdir -p /opt/app-root/src/.local/share/code-server/coder-logs && \ - chown -R 1001:0 /opt/app-root/src/.local/share/code-server && \ - chown -R 1001:0 /opt/app-root/extensions-temp && \ - chown -R 1001:0 /opt/app-root/src/.config/code-server +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX 
${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI script executable +chmod +x /opt/app-root/api/kernels/access.cgi +rpm-file-permissions +# Ensure the temporary directory and target directory have the correct permissions +mkdir -p /opt/app-root/src/.local/share/code-server/extensions +mkdir -p /opt/app-root/src/.local/share/code-server/coder-logs +chown -R 1001:0 /opt/app-root/src/.local/share/code-server +chown -R 1001:0 /opt/app-root/extensions-temp +chown -R 1001:0 /opt/app-root/src/.config/code-server +EOF # Launcher COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/run-code-server.sh ${CODESERVER_SOURCE_CODE}/run-nginx.sh ./ @@ -261,28 +290,35 @@ COPY --from=whl-cache /tmp/control /dev/null # Install packages and cleanup # (ARCH-ppc64le): install packages (eg. pyarrow) that need to be built from source repository on ppc64le RUN --mount=type=cache,target=/root/.cache/uv \ - --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw \ - bash -c ' \ - if [[ $(uname -m) == "ppc64le" ]] || [[ $(uname -m) == "s390x" ]]; then \ - uv pip install /wheelsdir/*.whl; \ - fi ' + --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +if [[ $(uname -m) == "ppc64le" ]] || [[ $(uname -m) == "s390x" ]]; then + uv pip install /wheelsdir/*.whl +fi +EOF + # install packages as USER 0 (this will allow us to consume uv cache) -RUN --mount=type=cache,target=/root/.cache/uv \ - echo "Installing softwares and packages" && \ - # we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag - # TODO(jdanek): seen some builds fail on GitHub Actions with --offline and see no need to limit ourselves to the cache, will remove this - UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv 
--requirements=./pylock.toml && \ - # Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files - # Build debugpy from source instead - UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') && \ - # change ownership to default user (all packages were installed as root and has root:root ownership \ - chown -R 1001:0 /opt/app-root +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag +# TODO(jdanek): seen some builds fail on GitHub Actions with --offline and see no need to limit ourselves to the cache, will remove this +UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml +# Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files +# Build debugpy from source instead +UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') +# change ownership to default user (all packages were installed as root and has root:root ownership +chown -R 1001:0 /opt/app-root +EOF USER 1001 # Fix permissions to support pip in Openshift environments -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu b/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu index 989651d242..d619c50144 100644 --- a/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu +++ b/codeserver/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -55,13 +55,15 @@ COPY 
${CODESERVER_SOURCE_CODE}/devel_env_setup.sh ./ # Important: Since HOME & USER for the python-312 has been changed, # we need to ensure the same cache directory is mounted in # the final stage with the necessary permissions to consume from cache -RUN --mount=type=cache,target=/root/.cache/uv \ - pip install --no-cache-dir uv && \ - # the devel script is ppc64le and s390x specific - sets up build-time dependencies - source ./devel_env_setup.sh && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +pip install --no-cache-dir uv +# the devel script is ppc64le and s390x specific - sets up build-time dependencies +source ./devel_env_setup.sh +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +EOF # dummy file to make image build wait for this stage RUN touch /tmp/control @@ -70,6 +72,8 @@ RUN touch /tmp/control # cpu-base # #################### FROM ${BASE_IMAGE} AS cpu-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -85,32 +89,46 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y tar perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/dnf +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y tar perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/dnf +EOF # (ARCH-ppc64le): since wheels are compiled from source, we need shared libs available at runtime -RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS,target=/OpenBlas,rw \ - bash -c ' \ - if [[ $(uname -m) == "ppc64le" ]]; then \ - PREFIX=/usr/ make install -C /OpenBlas; \ - fi ' +RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS,target=/OpenBlas,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +if [[ $(uname -m) == "ppc64le" ]]; then + PREFIX=/usr/ make install -C /OpenBlas +fi +EOF # Other apps and tools 
installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -155,21 +173,28 @@ COPY --from=rpm-base /tmp/control /dev/null # Install code-server # Note: Use cache mounts, bind mounts fail on konflux # https://redhat-internal.slack.com/archives/C04PZ7H0VA8/p1755628065772589?thread_ts=1755597929.335999&cid=C04PZ7H0VA8 -RUN --mount=type=cache,from=rpm-base,source=/tmp/,target=/code-server-rpm/,rw \ - # EXPLANATION: dnf installation produces an "unsigned rpm" error from Konflux (Conforma) - # since we're building rpm from source, we will simply unpack it over / - # dnf install -y "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" - # dnf -y clean all --enablerepo='*' - dnf install -y cpio && dnf -y clean all && \ - cd / && rpm2cpio "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" | cpio -idmv +RUN --mount=type=cache,from=rpm-base,source=/tmp/,target=/code-server-rpm/,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +# EXPLANATION: dnf installation produces an "unsigned rpm" error 
from Konflux (Conforma) +# since we're building rpm from source, we will simply unpack it over / +# dnf install -y "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" +# dnf -y clean all --enablerepo='*' +dnf install -y cpio +dnf -y clean all +cd / +rpm2cpio "/code-server-rpm/code-server-${CODESERVER_VERSION/v/}-${TARGETARCH}.rpm" | cpio -idmv +EOF COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/utils utils/ # Create and intall the extensions though build-time on a temporary directory. Later this directory will copied on the `/opt/app-root/src/.local/share/code-server/extensions` via run-code-server.sh file when it starts up. # https://coder.com/docs/code-server/FAQ#how-do-i-install-an-extension -RUN mkdir -p /opt/app-root/extensions-temp && \ - code-server --install-extension /opt/app-root/bin/utils/ms-python.python-2025.14.0.vsix --extensions-dir /opt/app-root/extensions-temp && \ - code-server --install-extension /opt/app-root/bin/utils/ms-toolsai.jupyter-2025.8.0.vsix --extensions-dir /opt/app-root/extensions-temp +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +mkdir -p /opt/app-root/extensions-temp +code-server --install-extension /opt/app-root/bin/utils/ms-python.python-2025.14.0.vsix --extensions-dir /opt/app-root/extensions-temp +code-server --install-extension /opt/app-root/bin/utils/ms-toolsai.jupyter-2025.8.0.vsix --extensions-dir /opt/app-root/extensions-temp +EOF # Install NGINX to proxy code-server and pass probes check ENV APP_ROOT=/opt/app-root @@ -186,10 +211,13 @@ ENV NGINX_CONFIGURATION_PATH=${APP_ROOT}/etc/nginx.d \ NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl # Modules does not exist -RUN INSTALL_PKGS="bind-utils nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - rpm -V $INSTALL_PKGS && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +INSTALL_PKGS="bind-utils nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y 
--setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +dnf -y clean all --enablerepo='*' +EOF # Configure httpd for CGI processing COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf @@ -214,34 +242,37 @@ COPY ${CODESERVER_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ # UID=1001 && GID=0 # UID=&& GID=0 # UID=1001 && GID= -RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/api/ && \ - mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - mkdir -p ${NGINX_LOG_PATH} && \ - mkdir -p ${NGINX_PERL_MODULE_PATH} && \ - # Create httpd directories and set permissions - mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chown -R 1001:0 ${NGINX_CONF_PATH} && \ - chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ - chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ - chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chmod ug+rw ${NGINX_CONF_PATH} && \ - chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ - chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ - chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - # Make CGI script executable - chmod +x /opt/app-root/api/kernels/access.cgi && \ - rpm-file-permissions && \ - # Ensure the temporary directory and target directory have the correct permissions - mkdir -p /opt/app-root/src/.local/share/code-server/extensions && \ - mkdir -p /opt/app-root/src/.local/share/code-server/coder-logs && \ - chown -R 1001:0 /opt/app-root/src/.local/share/code-server && \ - chown -R 1001:0 /opt/app-root/extensions-temp && \ - chown -R 1001:0 /opt/app-root/src/.config/code-server +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} 
+mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI script executable +chmod +x /opt/app-root/api/kernels/access.cgi +rpm-file-permissions +# Ensure the temporary directory and target directory have the correct permissions +mkdir -p /opt/app-root/src/.local/share/code-server/extensions +mkdir -p /opt/app-root/src/.local/share/code-server/coder-logs +chown -R 1001:0 /opt/app-root/src/.local/share/code-server +chown -R 1001:0 /opt/app-root/extensions-temp +chown -R 1001:0 /opt/app-root/src/.config/code-server +EOF # Launcher COPY --chown=1001:0 ${CODESERVER_SOURCE_CODE}/run-code-server.sh ${CODESERVER_SOURCE_CODE}/run-nginx.sh ./ @@ -259,28 +290,35 @@ COPY --from=whl-cache /tmp/control /dev/null # Install packages and cleanup # (ARCH-ppc64le): install packages (eg. 
pyarrow) that need to be built from source repository on ppc64le RUN --mount=type=cache,target=/root/.cache/uv \ - --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw \ - bash -c ' \ - if [[ $(uname -m) == "ppc64le" ]] || [[ $(uname -m) == "s390x" ]]; then \ - uv pip install /wheelsdir/*.whl; \ - fi ' + --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +if [[ $(uname -m) == "ppc64le" ]] || [[ $(uname -m) == "s390x" ]]; then + uv pip install /wheelsdir/*.whl +fi +EOF + # install packages as USER 0 (this will allow us to consume uv cache) -RUN --mount=type=cache,target=/root/.cache/uv \ - echo "Installing softwares and packages" && \ - # we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag - # TODO(jdanek): seen some builds fail on GitHub Actions with --offline and see no need to limit ourselves to the cache, will remove this - UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml && \ - # Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files - # Build debugpy from source instead - UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') && \ - # change ownership to default user (all packages were installed as root and has root:root ownership \ - chown -R 1001:0 /opt/app-root +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag +# TODO(jdanek): seen some builds fail on GitHub Actions with --offline and see no need to limit ourselves to the cache, will remove this +UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv 
--requirements=./pylock.toml +# Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files +# Build debugpy from source instead +UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') +# change ownership to default user (all packages were installed as root and has root:root ownership +chown -R 1001:0 /opt/app-root +EOF USER 1001 # Fix permissions to support pip in Openshift environments -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/codeserver/ubi9-python-3.12/build-args/cpu.conf b/codeserver/ubi9-python-3.12/build-args/cpu.conf index 4583ee67cb..8fe9d4edc9 100644 --- a/codeserver/ubi9-python-3.12/build-args/cpu.conf +++ b/codeserver/ubi9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : RHEL 9.6 with Python 3.12 # Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x -BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest +BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1761580156 diff --git a/codeserver/ubi9-python-3.12/pylock.toml b/codeserver/ubi9-python-3.12/pylock.toml index 923bd6f13b..27902cafb0 100644 --- a/codeserver/ubi9-python-3.12/pylock.toml +++ b/codeserver/ubi9-python-3.12/pylock.toml @@ -441,6 +441,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = 
"https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = 
"https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -450,6 +452,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = 
"https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -459,6 +463,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = 
"https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -468,6 +474,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = 
"https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = 
"https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -475,6 +483,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { 
url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -484,6 +494,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = 
"https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/docs/architecture/decisions/0001-record-architecture-decisions.md b/docs/architecture/decisions/0001-record-architecture-decisions.md new file mode 100644 index 0000000000..b9b5a925f6 --- /dev/null +++ b/docs/architecture/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,24 @@ +# 1. Record architecture decisions + +Date: 2025-01-18 + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). 
+ +We will presently ignore the [opendatahub-io/architecture-decision-records](https://github.com/opendatahub-io/architecture-decision-records) +repository, since our ADRs will (at least initially) be internal to the notebooks project itself and also because +previous experience ([PR#91](https://github.com/opendatahub-io/architecture-decision-records/pull/91)) +shows that it is challenging to get merge approval for a PR there. + +## Consequences + +See Michael Nygard's article, linked above. For a lightweight ADR toolset, see Nat Pryce's [adr-tools](https://github.com/npryce/adr-tools). diff --git a/docs/architecture/decisions/0002-onboard-onto-all-possible-test-execution-platforms-available.md b/docs/architecture/decisions/0002-onboard-onto-all-possible-test-execution-platforms-available.md new file mode 100644 index 0000000000..7519a4a4e3 --- /dev/null +++ b/docs/architecture/decisions/0002-onboard-onto-all-possible-test-execution-platforms-available.md @@ -0,0 +1,37 @@ +# 2. Onboard onto all possible test execution platforms available + +Date: 2025-10-29 + +## Status + +Accepted + +## Context + +We as the Notebooks team need to be responsible for the test execution on our code. +To do that, we need to explore options available and decide which ones to use. + +## Decision + +We will onboard and create some quick-start-style tests on all platforms available to us. +This will provide us with sufficient knowledge to talk to Red Hat DevTestOps team on equal footing about what we actually need.
+ +### Execution platforms to consider for onboarding + +* OpenShift CI (already onboarded) +* GitHub Actions (already onboarded) +* Konflux [E2E test execution](https://developers.redhat.com/articles/2024/10/28/ephemeral-openshift-clusters-konflux-ci-using-cluster-service-operator) +* Travis CI Partner Queue +* Packit.dev +* Testing-Farm.io nested virtualization +* +* Hydra trigger such as /test-e2e that will run something on Red Hat internal CI +* Machine under @jiridanek's desk in Brno +* Virtual machines under our management on ITUP.scale platform in Red Hat + +## Consequences + +This is related to [E2E Testing Platform Evaluation - Periodic Review #1389](https://github.com/opendatahub-io/notebooks/issues/1389). + +We'll need to maintain the test execution platforms we choose to use. +We'll need to eventually offboard from the platforms we decide not to use, to avoid confusion. diff --git a/docs/architecture/decisions/README.md b/docs/architecture/decisions/README.md new file mode 100644 index 0000000000..9b70a482cc --- /dev/null +++ b/docs/architecture/decisions/README.md @@ -0,0 +1,27 @@ +## Getting started + +* , installation: +[Linux](https://github.com/npryce/adr-tools/blob/master/INSTALL.md#from-a-release-package-linux-macos-x), +[Homebrew](https://formulae.brew.sh/formula/adr-tools) + +```commandline +brew install adr-tools +``` + +## Start new ADR + + + +```commandline +adr new Implement as Unix shell scripts +``` + +## Initialize new ADR directory + +```commandline +adr init docs/architecture/decisions +``` + +## OpenDataHub ADRs + + diff --git a/docs/gateway-api-migration-guide.md b/docs/gateway-api-migration-guide.md new file mode 100644 index 0000000000..4144ec0024 --- /dev/null +++ b/docs/gateway-api-migration-guide.md @@ -0,0 +1,389 @@ +# Migrating Workbench Images to Kubernetes Gateway API + +## Overview + +When migrating from OpenShift Route + oauth-proxy to Kubernetes Gateway API + kube-rbac-proxy, workbench images require nginx configuration 
updates to properly handle path-based routing. + +## The Core Requirement + +**Your workbench image must serve all content from the base path `${NB_PREFIX}`.** + +Any call from a browser to a different path, for example `/index.html`, `/api/my-endpoint`, or simply `/`, won't be routed to the workbench container. This is because the routing, handled by the Gateway API, is path-based, using the same value as the environment variable `NB_PREFIX` that is injected into the workbench at runtime. + +Example `NB_PREFIX`: `/notebook//` + +## Key Architectural Difference + +### OpenShift Route (Old) +``` +External: /notebook/user/workbench/app/ + ↓ +Route strips prefix + ↓ +Container receives: /app/ +``` + +**Important**: The prefix stripping isn't automatic - it requires implementation: +- **nginx** strips the prefix via rewrite rules +- **Catch-all redirects** like `location / { return 302 /app; }` + +Both approaches work because the Route **forwards all traffic** to the pod regardless of path. + +### Gateway API (New) +``` +External: /notebook/user/workbench/app/ + ↓ +Gateway preserves full path (path-based routing) + ↓ +Container receives: /notebook/user/workbench/app/ +``` + +**Critical Difference**: Gateway API uses **path-based routing**. Only requests matching the configured path prefix are forwarded to the pod. + +### Why Old Approaches Fail with Gateway API + +``` +App redirects: /notebook/user/workbench/app → /app + ↓ +Browser follows redirect to: /app + ↓ +Gateway routing rule: /notebook/user/workbench/** (doesn't match /app!) + ↓ +Pod receives NO traffic → 404 or routing failure +``` + +**The Problem**: If your application redirects to paths outside `${NB_PREFIX}`, the Gateway cannot route those requests back to your pod. The path-based matching at the Gateway level requires all traffic to stay within the configured prefix. 
+ +**Critical Change**: Your application (or reverse proxy) must handle the **full path** including the prefix and never redirect outside of it. + +--- + +## Part 1: For All Workbenches - General Requirements + +These requirements apply **regardless of whether you use nginx or application-level path handling**. + +### 1. Health Check Endpoints + +Your workbench **must** respond to health checks at: + +``` +GET /{NB_PREFIX}/api +``` + +This endpoint must return an HTTP 200 status for probes to succeed. + +**Example for Python Flask**: +```python +from flask import Flask +import os + +app = Flask(__name__) +nb_prefix = os.getenv('NB_PREFIX', '') + +@app.route(f'{nb_prefix}/api') +def health_check(): + return {'status': 'healthy'}, 200 + +@app.route(f'{nb_prefix}/api/kernels') +def kernels(): + # Handle culler endpoint + return {'kernels': []}, 200 + +@app.route(f'{nb_prefix}/api/terminals') +def terminals(): + # Handle culler endpoint + return {'terminals': []}, 200 +``` + +**Example for Node.js Express**: +```javascript +const express = require('express'); +const app = express(); +const nbPrefix = process.env.NB_PREFIX || ''; + +app.get(`${nbPrefix}/api`, (req, res) => { + res.json({ status: 'healthy' }); +}); + +app.get(`${nbPrefix}/api/kernels`, (req, res) => { + res.json({ kernels: [] }); +}); + +app.get(`${nbPrefix}/api/terminals`, (req, res) => { + res.json({ terminals: [] }); +}); +``` + +### 2. Culler Endpoints + +If your workbench supports culling idle workbenches, you must handle: + +``` +GET /{NB_PREFIX}/api/kernels +GET /{NB_PREFIX}/api/terminals +``` + +These should return information about active kernels/terminals, or empty arrays if none exist. + +### 3. Use Relative URLs in Your Application + +**Critical**: Your application must generate relative URLs, not absolute ones. + +```html + +Menu 1 + + + + +Menu 1 + + + + +Menu 1 +``` + +**Why**: Hardcoded absolute paths like `/menu1` will not include the `{NB_PREFIX}`, causing 404 errors. 
Relative URLs or framework-generated URLs will correctly resolve to `/{NB_PREFIX}/menu1`. + +### 4. Configure Your Application's Base Path + +If your framework supports it, configure the base path using the `NB_PREFIX` environment variable: + +**FastAPI**: +```python +from fastapi import FastAPI +import os + +app = FastAPI(root_path=os.getenv('NB_PREFIX', '')) +``` + +**Flask**: +```python +from flask import Flask +import os + +app = Flask(__name__) +app.config['APPLICATION_ROOT'] = os.getenv('NB_PREFIX', '') +``` + +**Express.js**: +```javascript +const express = require('express'); +const app = express(); +const nbPrefix = process.env.NB_PREFIX || ''; + +// Mount all routes under the prefix +const router = express.Router(); +// ... define routes on router ... +app.use(nbPrefix, router); +``` + +**Streamlit**: +```toml +# .streamlit/config.toml +[server] +baseUrlPath = "/notebook/namespace/workbench" # Set via NB_PREFIX +``` + +### 5. Limitations: Applications with Hardcoded Absolute Paths + +**If your application has hardcoded absolute paths that cannot be changed**, migration becomes very difficult: + +```javascript +// ❌ This cannot work with Gateway API unless rewritten +const menuUrl = "/menu1"; // Hardcoded absolute path +fetch(menuUrl).then(...); +``` + +**Solutions**: +1. **Modify the application** - Change to relative URLs or configurable base path (preferred) +2. **Use nginx with URL rewriting** - nginx can intercept and rewrite some URLs, but this is limited +3. **HTML/JS post-processing** - Intercept responses and rewrite URLs (complex, not recommended) + +**Warning**: nginx can rewrite URLs in redirects and some headers, but it **cannot** rewrite URLs embedded in HTML/JavaScript content without complex content manipulation, which is error-prone and slow. 
+ +--- + +## Part 2: For nginx-based Workbenches - Reverse Proxy Configuration + +**Use this section if** your application does not support base path configuration and you need nginx to handle the path translation. + +### Required nginx Changes + +### 1. Remove Problematic Location Blocks + +**REMOVE** any overly broad location blocks that cause infinite redirects: + +```nginx +# ❌ REMOVE THIS - Too broad, causes infinite loops +location ${NB_PREFIX}/ { + return 302 $custom_scheme://$http_host/app/; +} +``` + +**Why**: This matches ALL paths under the prefix, including your application endpoint itself (e.g., `/notebook/user/workbench/app/`), creating redirect loops. + +### 2. Update Redirects to Preserve NB_PREFIX + +**All redirects must include `${NB_PREFIX}`** to keep requests within the Gateway route: + +```nginx +# ❌ BAD - Strips prefix +location = ${NB_PREFIX} { + return 302 $custom_scheme://$http_host/myapp/; +} + +# ✅ GOOD - Preserves prefix +location ${NB_PREFIX} { + return 302 $custom_scheme://$http_host${NB_PREFIX}/myapp/; +} +``` + +**Note**: Use `location ${NB_PREFIX}` (without `=`) to handle both with and without trailing slash. + +### 3. Add Prefix-Aware Proxy Location + +**Add a location block** that matches the full prefixed path and strips the prefix before proxying: + +```nginx +location ${NB_PREFIX}/myapp/ { + # Strip the prefix before proxying to backend + rewrite ^${NB_PREFIX}/myapp/(.*)$ /$1 break; + + # Proxy to your application + proxy_pass http://localhost:8080/; + proxy_http_version 1.1; + + # Essential for WebSocket support + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + + # Long timeout for interactive sessions + proxy_read_timeout 20d; + + # Pass through important headers + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $custom_scheme; +} +``` + +### 4. 
Update Health Check Endpoints + +Health checks must also preserve the prefix: + +```nginx +# Health check endpoint +location = ${NB_PREFIX}/api { + return 302 ${NB_PREFIX}/myapp/healthz/; + access_log off; +} +``` + +### 5. Add Wildcard server_name Fallback + +Gateway API uses different hostnames than OpenShift Routes. Add fallback logic: + +```bash +# In run-nginx.sh or startup script +export BASE_URL=$(extract_base_url_from_notebook_args) + +# If BASE_URL is empty or invalid, use wildcard server_name +if [ -z "$BASE_URL" ] || [ "$BASE_URL" = "$(echo $NB_PREFIX | awk -F/ '{ print $4"-"$3 }')" ]; then + export BASE_URL="_" +fi +``` + +This sets `server_name _;` which accepts requests from any hostname. + +### 6. Update kube-rbac-proxy Configuration + +Remove trailing slashes from upstream URLs in pod/statefulset specs: + +```yaml +# ❌ BAD +args: + - '--upstream=http://127.0.0.1:8888/' + +# ✅ GOOD +args: + - '--upstream=http://127.0.0.1:8888' +``` + +## HTTPRoute Configuration + +Ensure your HTTPRoute matches the full prefix path: + +```yaml +apiVersion: gateway.networking.k8s.io/v1 +kind: HTTPRoute +metadata: + name: my-workbench + namespace: +spec: + parentRefs: + - group: gateway.networking.k8s.io + kind: Gateway + name: data-science-gateway + namespace: openshift-ingress + rules: + - backendRefs: + - kind: Service + name: my-workbench-rbac + port: 8443 + weight: 1 + matches: + - path: + type: PathPrefix + value: /notebook// +``` + +**Important**: The `value` must match the `NB_PREFIX` environment variable set in the pod. 
+ +## Reference Implementation + +See these files for complete examples: + +### Code-Server +- **nginx config**: `codeserver/ubi9-python-3.12/nginx/serverconf/proxy.conf.template_nbprefix` +- **startup script**: `codeserver/ubi9-python-3.12/run-nginx.sh` + +### RStudio +- **nginx config**: `rstudio/c9s-python-3.11/nginx/serverconf/proxy.conf.template_nbprefix` +- **startup script**: `rstudio/c9s-python-3.11/run-nginx.sh` + +## Understanding nginx Location Matching + +nginx location blocks have different matching priorities: + +```nginx +# 1. Exact match (highest priority) +location = /exact/path { + # Only matches /exact/path (no trailing slash) +} + +# 2. Prefix match (evaluated in order of length) +location /prefix { + # Matches /prefix, /prefix/, /prefix/anything +} + +# 3. Regex match (not covered here) +``` + +For Gateway API, you need: + +```nginx +# Redirect root to app +location ${NB_PREFIX} { + return 302 $custom_scheme://$http_host${NB_PREFIX}/myapp/; +} + +# Proxy app traffic (longer prefix wins) +location ${NB_PREFIX}/myapp/ { + proxy_pass http://localhost:8080/; +} +``` + +Request `/notebook/ns/wb` → matches first location → redirects +Request `/notebook/ns/wb/myapp/` → matches second location (longer) → proxies diff --git a/jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu b/jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu index f527392d80..6228007343 100644 --- a/jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu +++ b/jupyter/datascience/ubi9-python-3.12/Dockerfile.cpu @@ -19,19 +19,22 @@ ARG TARGETARCH # Keep s390x special-case from original (create dummy binary) but # include explicit curl/unzip steps from the delta for non-s390x. 
-RUN arch="${TARGETARCH:-$(uname -m)}" && \ - arch=$(echo "$arch" | cut -d- -f1) && \ - if [ "$arch" = "s390x" ]; then \ - echo "Skipping mongocli build for ${arch}, creating dummy binary"; \ - mkdir -p /tmp && printf '#!/bin/sh\necho "mongocli not supported on s390x"\n' > /tmp/mongocli && \ - chmod +x /tmp/mongocli; \ - else \ - echo "Building mongocli for ${arch}"; \ - curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip && \ - unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip && \ - cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux GOARCH=${arch} GO111MODULE=on go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +arch="${TARGETARCH:-$(uname -m)}" +arch=$(echo "$arch" | cut -d- -f1) +if [ "$arch" = "s390x" ]; then + echo "Skipping mongocli build for ${arch}, creating dummy binary" + mkdir -p /tmp && printf '#!/bin/sh\necho "mongocli not supported on s390x"\n' > /tmp/mongocli + chmod +x /tmp/mongocli +else + echo "Building mongocli for ${arch}" + curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip + unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip + cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ + CGO_ENABLED=1 GOOS=linux GOARCH=${arch} GO111MODULE=on go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +fi +EOF #################### # cpu-base # @@ -53,21 +56,28 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh 
--nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN --mount=type=cache,target=/var/cache/dnf \ - echo "Building for architecture: ${TARGETARCH}" && \ - if [ "$TARGETARCH" = "s390x" ]; then \ - PACKAGES="perl mesa-libGL skopeo gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel"; \ - else \ - PACKAGES="perl mesa-libGL skopeo"; \ - fi && \ - echo "Installing: $PACKAGES" && \ - dnf install -y $PACKAGES && \ - dnf clean all && rm -rf /var/cache/yum +RUN --mount=type=cache,target=/var/cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Building for architecture: ${TARGETARCH}" +if [ "$TARGETARCH" = "s390x" ]; then + PACKAGES="perl mesa-libGL skopeo gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel" +else + PACKAGES="perl mesa-libGL skopeo" +fi +echo "Installing: $PACKAGES" +dnf install -y $PACKAGES +dnf clean all +rm -rf /var/cache/yum +EOF RUN /bin/bash <<'EOF' set -Eeuxo pipefail @@ -103,14 +113,18 @@ EOF USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz 
oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ############################## @@ -126,55 +140,57 @@ WORKDIR /tmp/build-wheels # Build pyarrow on ppc64le and s390x RUN --mount=type=cache,target=/root/.cache/pip \ - --mount=type=cache,target=/root/.cache/dnf \ - if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ - # Install build dependencies (shared for pyarrow and onnx) - dnf install -y cmake make gcc-c++ pybind11-devel wget && \ - dnf clean all && \ - # Build and collect pyarrow wheel - git clone --depth 1 --branch "apache-arrow-17.0.0" https://github.com/apache/arrow.git && \ - cd arrow/cpp && \ - mkdir release && cd release && \ - ARROW_S3_FLAG="" && \ - if [ "$TARGETARCH" != "s390x" ]; then ARROW_S3_FLAG="-DARROW_S3=ON"; fi && \ - cmake -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DARROW_PYTHON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_ORC=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_JSON=ON \ - -DARROW_CSV=ON \ - -DARROW_DATASET=ON \ - -DARROW_DEPENDENCY_SOURCE=BUNDLED \ - -DARROW_WITH_LZ4=OFF \ - -DARROW_WITH_ZSTD=OFF \ - -DARROW_WITH_SNAPPY=OFF \ - ${ARROW_S3_FLAG} \ - -DARROW_SUBSTRAIT=ON \ - -DARROW_BUILD_TESTS=OFF \ - -DARROW_BUILD_BENCHMARKS=OFF \ - .. 
&& \ - make -j$(nproc) VERBOSE=1 && \ - make install -j$(nproc) && \ - cd ../../python && \ - pip install --no-cache-dir -r requirements-build.txt && \ - PYARROW_WITH_PARQUET=1 \ - PYARROW_WITH_DATASET=1 \ - PYARROW_WITH_FILESYSTEM=1 \ - PYARROW_WITH_JSON=1 \ - PYARROW_WITH_CSV=1 \ - PYARROW_PARALLEL=$(nproc) \ - python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel && \ - mkdir -p /tmp/wheels && \ - cp dist/pyarrow-*.whl /tmp/wheels/ && \ - chmod -R 777 /tmp/wheels && \ - # Ensure wheels directory exists and has content - ls -la /tmp/wheels/; \ - else \ - # Create empty wheels directory for non-s390x - mkdir -p /tmp/wheels; \ - fi + --mount=type=cache,target=/root/.cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then + # Install build dependencies (shared for pyarrow and onnx) + dnf install -y cmake make gcc-c++ pybind11-devel wget + dnf clean all + # Build and collect pyarrow wheel + git clone --depth 1 --branch "apache-arrow-17.0.0" https://github.com/apache/arrow.git + cd arrow/cpp + mkdir release && cd release + ARROW_S3_FLAG="" + if [ "$TARGETARCH" != "s390x" ]; then ARROW_S3_FLAG="-DARROW_S3=ON"; fi + cmake -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_ORC=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_DATASET=ON \ + -DARROW_DEPENDENCY_SOURCE=BUNDLED \ + -DARROW_WITH_LZ4=OFF \ + -DARROW_WITH_ZSTD=OFF \ + -DARROW_WITH_SNAPPY=OFF \ + ${ARROW_S3_FLAG} \ + -DARROW_SUBSTRAIT=ON \ + -DARROW_BUILD_TESTS=OFF \ + -DARROW_BUILD_BENCHMARKS=OFF \ + .. 
+ make -j$(nproc) VERBOSE=1 + make install -j$(nproc) + cd ../../python + pip install --no-cache-dir -r requirements-build.txt + PYARROW_WITH_PARQUET=1 \ + PYARROW_WITH_DATASET=1 \ + PYARROW_WITH_FILESYSTEM=1 \ + PYARROW_WITH_JSON=1 \ + PYARROW_WITH_CSV=1 \ + PYARROW_PARALLEL=$(nproc) \ + python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel + mkdir -p /tmp/wheels + cp dist/pyarrow-*.whl /tmp/wheels/ + chmod -R 777 /tmp/wheels + # Ensure wheels directory exists and has content + ls -la /tmp/wheels/ +else + # Create empty wheels directory for non-s390x + mkdir -p /tmp/wheels +fi +EOF ####################################################### # common-builder (for Power-only) @@ -256,6 +272,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -291,8 +308,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat && \ - dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -304,11 +325,14 @@ ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/ # Copy wheels from build stage (ppc64le and s390x only) COPY --from=pyarrow-builder /tmp/wheels /tmp/wheels -RUN if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ - pip install --no-cache-dir /tmp/wheels/*.whl; \ -else \ - echo "Skipping wheel install for $TARGETARCH"; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then + pip install --no-cache-dir /tmp/wheels/*.whl +else + echo "Skipping wheel 
install for $TARGETARCH" fi +EOF # Copy OpenBLAS,ONNX wheels for Power COPY --from=openblas-builder /root/OpenBLAS-${OPENBLAS_VERSION} /openblas @@ -351,41 +375,43 @@ COPY ${DATASCIENCE_SOURCE_CODE}/pylock.toml ./ # Copy Elyra setup to utils so that it's sourced at startup COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ -RUN --mount=type=cache,target=/root/.cache/pip \ - echo "Installing software and packages" && \ +RUN --mount=type=cache,target=/root/.cache/pip /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing software and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then + # We need special flags and environment variables when building packages + GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ + CFLAGS="-O3" CXXFLAGS="-O3" \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress \ + --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ + --requirements=./pylock.toml +else # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ - # We need special flags and environment variables when building packages - GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ - CFLAGS="-O3" CXXFLAGS="-O3" \ - uv pip install --strict --no-deps --no-cache --no-config --no-progress \ - --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ - --requirements=./pylock.toml; \ - else \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress \ - --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ - --requirements=./pylock.toml; \ - fi && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - mkdir /opt/app-root/pipeline-runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" \ - /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - install -D -m 0644 /opt/app-root/bin/utils/jupyter_server_config.py \ - /opt/app-root/etc/jupyter/jupyter_server_config.py && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ uv pip install --strict --no-deps --no-cache --no-config --no-progress \ + --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ + --requirements=./pylock.toml +fi +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +mkdir /opt/app-root/pipeline-runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" \ + /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +install -D -m 0644 /opt/app-root/bin/utils/jupyter_server_config.py \ + /opt/app-root/etc/jupyter/jupyter_server_config.py +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu b/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu index 5ac02beacb..8e5018b307 100644 --- a/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu +++ b/jupyter/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -19,24 +19,29 @@ ARG TARGETARCH # Keep s390x special-case from original (create dummy binary) but # include explicit curl/unzip steps from the delta for non-s390x. 
-RUN arch="${TARGETARCH:-$(uname -m)}" && \ - arch=$(echo "$arch" | cut -d- -f1) && \ - if [ "$arch" = "s390x" ]; then \ - echo "Skipping mongocli build for ${arch}, creating dummy binary"; \ - mkdir -p /tmp && printf '#!/bin/sh\necho "mongocli not supported on s390x"\n' > /tmp/mongocli && \ - chmod +x /tmp/mongocli; \ - else \ - echo "Building mongocli for ${arch}"; \ - curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip && \ - unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip && \ - cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux GOARCH=${arch} GO111MODULE=on go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +arch="${TARGETARCH:-$(uname -m)}" +arch=$(echo "$arch" | cut -d- -f1) +if [ "$arch" = "s390x" ]; then + echo "Skipping mongocli build for ${arch}, creating dummy binary" + mkdir -p /tmp && printf '#!/bin/sh\necho "mongocli not supported on s390x"\n' > /tmp/mongocli + chmod +x /tmp/mongocli +else + echo "Building mongocli for ${arch}" + curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip + unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip + cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ + CGO_ENABLED=1 GOOS=linux GOARCH=${arch} GO111MODULE=on go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +fi +EOF #################### # cpu-base # #################### FROM ${BASE_IMAGE} AS cpu-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -53,21 +58,28 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip 
uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN --mount=type=cache,target=/var/cache/dnf \ - echo "Building for architecture: ${TARGETARCH}" && \ - if [ "$TARGETARCH" = "s390x" ]; then \ - PACKAGES="perl mesa-libGL skopeo gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel"; \ - else \ - PACKAGES="perl mesa-libGL skopeo"; \ - fi && \ - echo "Installing: $PACKAGES" && \ - dnf install -y $PACKAGES && \ - dnf clean all && rm -rf /var/cache/yum +RUN --mount=type=cache,target=/var/cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Building for architecture: ${TARGETARCH}" +if [ "$TARGETARCH" = "s390x" ]; then + PACKAGES="perl mesa-libGL skopeo gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel" +else + PACKAGES="perl mesa-libGL skopeo" +fi +echo "Installing: $PACKAGES" +dnf install -y $PACKAGES +dnf clean all +rm -rf /var/cache/yum +EOF RUN /bin/bash <<'EOF' set -Eeuxo pipefail @@ -103,14 +115,18 @@ EOF USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname 
-m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ############################## @@ -126,55 +142,57 @@ WORKDIR /tmp/build-wheels # Build pyarrow on ppc64le and s390x RUN --mount=type=cache,target=/root/.cache/pip \ - --mount=type=cache,target=/root/.cache/dnf \ - if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ - # Install build dependencies (shared for pyarrow and onnx) - dnf install -y cmake make gcc-c++ pybind11-devel wget && \ - dnf clean all && \ - # Build and collect pyarrow wheel - git clone --depth 1 --branch "apache-arrow-17.0.0" https://github.com/apache/arrow.git && \ - cd arrow/cpp && \ - mkdir release && cd release && \ - ARROW_S3_FLAG="" && \ - if [ "$TARGETARCH" != "s390x" ]; then ARROW_S3_FLAG="-DARROW_S3=ON"; fi && \ - cmake -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DARROW_PYTHON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_ORC=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_JSON=ON \ - -DARROW_CSV=ON \ - -DARROW_DATASET=ON \ - -DARROW_DEPENDENCY_SOURCE=BUNDLED \ - -DARROW_WITH_LZ4=OFF \ - -DARROW_WITH_ZSTD=OFF \ - -DARROW_WITH_SNAPPY=OFF \ - ${ARROW_S3_FLAG} \ - -DARROW_SUBSTRAIT=ON \ - -DARROW_BUILD_TESTS=OFF \ - -DARROW_BUILD_BENCHMARKS=OFF \ - .. 
&& \ - make -j$(nproc) VERBOSE=1 && \ - make install -j$(nproc) && \ - cd ../../python && \ - pip install --no-cache-dir -r requirements-build.txt && \ - PYARROW_WITH_PARQUET=1 \ - PYARROW_WITH_DATASET=1 \ - PYARROW_WITH_FILESYSTEM=1 \ - PYARROW_WITH_JSON=1 \ - PYARROW_WITH_CSV=1 \ - PYARROW_PARALLEL=$(nproc) \ - python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel && \ - mkdir -p /tmp/wheels && \ - cp dist/pyarrow-*.whl /tmp/wheels/ && \ - chmod -R 777 /tmp/wheels && \ - # Ensure wheels directory exists and has content - ls -la /tmp/wheels/; \ - else \ - # Create empty wheels directory for non-s390x - mkdir -p /tmp/wheels; \ - fi + --mount=type=cache,target=/root/.cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then + # Install build dependencies (shared for pyarrow and onnx) + dnf install -y cmake make gcc-c++ pybind11-devel wget + dnf clean all + # Build and collect pyarrow wheel + git clone --depth 1 --branch "apache-arrow-17.0.0" https://github.com/apache/arrow.git + cd arrow/cpp + mkdir release && cd release + ARROW_S3_FLAG="" + if [ "$TARGETARCH" != "s390x" ]; then ARROW_S3_FLAG="-DARROW_S3=ON"; fi + cmake -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_ORC=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_DATASET=ON \ + -DARROW_DEPENDENCY_SOURCE=BUNDLED \ + -DARROW_WITH_LZ4=OFF \ + -DARROW_WITH_ZSTD=OFF \ + -DARROW_WITH_SNAPPY=OFF \ + ${ARROW_S3_FLAG} \ + -DARROW_SUBSTRAIT=ON \ + -DARROW_BUILD_TESTS=OFF \ + -DARROW_BUILD_BENCHMARKS=OFF \ + .. 
+ make -j$(nproc) VERBOSE=1 + make install -j$(nproc) + cd ../../python + pip install --no-cache-dir -r requirements-build.txt + PYARROW_WITH_PARQUET=1 \ + PYARROW_WITH_DATASET=1 \ + PYARROW_WITH_FILESYSTEM=1 \ + PYARROW_WITH_JSON=1 \ + PYARROW_WITH_CSV=1 \ + PYARROW_PARALLEL=$(nproc) \ + python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel + mkdir -p /tmp/wheels + cp dist/pyarrow-*.whl /tmp/wheels/ + chmod -R 777 /tmp/wheels + # Ensure wheels directory exists and has content + ls -la /tmp/wheels/ +else + # Create empty wheels directory for non-s390x + mkdir -p /tmp/wheels +fi +EOF ####################################################### # common-builder (for Power-only) @@ -256,6 +274,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -289,8 +308,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat && \ - dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -302,11 +325,14 @@ ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/ # Copy wheels from build stage (ppc64le and s390x only) COPY --from=pyarrow-builder /tmp/wheels /tmp/wheels -RUN if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ - pip install --no-cache-dir /tmp/wheels/*.whl; \ -else \ - echo "Skipping wheel install for $TARGETARCH"; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then + pip install --no-cache-dir /tmp/wheels/*.whl +else + echo "Skipping wheel 
install for $TARGETARCH" fi +EOF # Copy OpenBLAS,ONNX wheels for Power COPY --from=openblas-builder /root/OpenBLAS-${OPENBLAS_VERSION} /openblas @@ -349,41 +375,43 @@ COPY ${DATASCIENCE_SOURCE_CODE}/pylock.toml ./ # Copy Elyra setup to utils so that it's sourced at startup COPY ${DATASCIENCE_SOURCE_CODE}/setup-elyra.sh ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ -RUN --mount=type=cache,target=/root/.cache/pip \ - echo "Installing software and packages" && \ +RUN --mount=type=cache,target=/root/.cache/pip /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing software and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then + # We need special flags and environment variables when building packages + GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ + CFLAGS="-O3" CXXFLAGS="-O3" \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress \ + --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ + --requirements=./pylock.toml +else # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- if [ "$TARGETARCH" = "ppc64le" ] || [ "$TARGETARCH" = "s390x" ]; then \ - # We need special flags and environment variables when building packages - GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ - CFLAGS="-O3" CXXFLAGS="-O3" \ - uv pip install --strict --no-deps --no-cache --no-config --no-progress \ - --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ - --requirements=./pylock.toml; \ - else \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress \ - --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ - --requirements=./pylock.toml; \ - fi && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - mkdir /opt/app-root/pipeline-runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" \ - /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - install -D -m 0644 /opt/app-root/bin/utils/jupyter_server_config.py \ - /opt/app-root/etc/jupyter/jupyter_server_config.py && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+ uv pip install --strict --no-deps --no-cache --no-config --no-progress \ + --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match \ + --requirements=./pylock.toml +fi +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +mkdir /opt/app-root/pipeline-runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" \ + /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +install -D -m 0644 /opt/app-root/bin/utils/jupyter_server_config.py \ + /opt/app-root/etc/jupyter/jupyter_server_config.py +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf b/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf index 4583ee67cb..8fe9d4edc9 100644 --- a/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf +++ b/jupyter/datascience/ubi9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : RHEL 9.6 with Python 3.12 # Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x -BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest +BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1761580156 diff --git a/jupyter/datascience/ubi9-python-3.12/pylock.toml b/jupyter/datascience/ubi9-python-3.12/pylock.toml index 192c1e4b86..d9d6fba97c 100644 --- a/jupyter/datascience/ubi9-python-3.12/pylock.toml +++ b/jupyter/datascience/ubi9-python-3.12/pylock.toml @@ -548,10 +548,10 @@ wheels = [{ url = 
"https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" +version = "0.32.1" marker = "platform_machine != 'ppc64le' and platform_machine != 's390x'" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -1152,6 +1152,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", 
upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = 
"1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -1161,6 +1163,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" 
} }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -1170,6 +1174,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = 
"https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -1179,6 +1185,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = 
"https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -1186,6 +1194,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = 
"https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -1195,6 +1205,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = 
"https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/jupyter/datascience/ubi9-python-3.12/pyproject.toml b/jupyter/datascience/ubi9-python-3.12/pyproject.toml index cf69761b37..f960974b2a 100644 --- a/jupyter/datascience/ubi9-python-3.12/pyproject.toml +++ b/jupyter/datascience/ubi9-python-3.12/pyproject.toml @@ -17,7 +17,7 @@ dependencies = [ "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf "kubeflow-training==1.9.3", - "codeflare-sdk~=0.32.0; platform_machine != 'ppc64le' and platform_machine != 's390x'", + "codeflare-sdk~=0.32.1; platform_machine != 'ppc64le' and platform_machine != 's390x'", "feast~=0.55.0", # DB connectors diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu b/jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu index 8cf803cce0..bc6ea645ea 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.cpu @@ -43,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following 
protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -103,21 +116,24 @@ USER 1001 COPY ${MINIMAL_SOURCE_CODE}/pylock.toml 
${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ # Install Python dependencies from requirements.txt file -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda b/jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda index aed3dd4822..700b22fc55 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.cuda @@ -27,25 +27,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum 
+RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -75,6 +88,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -82,21 +96,24 @@ USER 1001 COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ # Install Python dependencies from requirements.txt file -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu index eece2861a0..9c3871fa36 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -28,6 +28,8 @@ RUN ./install_pandoc.sh # cpu-base # #################### FROM ${BASE_IMAGE} AS cpu-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -43,25 +45,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to 
deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -93,21 +108,24 @@ USER 1001 COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ # Install Python dependencies from requirements.txt file -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda index 3c741d9715..945ba0492c 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -12,6 +12,8 @@ FROM registry.access.redhat.com/ubi9/ubi AS ubi-repos # cuda-base # #################### FROM ${BASE_IMAGE} AS cuda-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -27,25 +29,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 
# Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -75,6 +90,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -82,21 +98,24 @@ USER 1001 COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ # Install Python dependencies from requirements.txt file -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm index 0dfa9b3db8..8ff3a46338 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -10,6 +10,8 @@ FROM registry.access.redhat.com/ubi9/ubi AS ubi-repos # rocm-base # #################### FROM ${BASE_IMAGE} AS rocm-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -25,25 +27,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 
# Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -63,32 +78,43 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ -# Install Python dependencies from Pipfile.lock file -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh - -# Fix permissions to support pip in Openshift environments \ USER 0 -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P -USER 1001 +# Install Python dependencies from Pipfile.lock file +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +EOF + +# Fix permissions to support pip in Openshift environments +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +USER 1001 WORKDIR /opt/app-root/src ENTRYPOINT ["start-notebook.sh"] diff --git a/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm b/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm index 6c4b30251c..a05d1f1c35 100644 --- a/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm +++ b/jupyter/minimal/ubi9-python-3.12/Dockerfile.rocm @@ -25,25 +25,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user 
USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -73,32 +86,43 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./ -# Install Python dependencies from Pipfile.lock file -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh - -# Fix permissions to support pip in Openshift environments \ USER 0 -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P -USER 1001 +# Install Python dependencies from Pipfile.lock file +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +EOF + +# Fix permissions to support pip in Openshift environments +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +USER 1001 WORKDIR /opt/app-root/src ENTRYPOINT ["start-notebook.sh"] diff --git a/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf b/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf index 4583ee67cb..8fe9d4edc9 100644 --- a/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf +++ b/jupyter/minimal/ubi9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : RHEL 9.6 with Python 3.12 # Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x -BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest +BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1761580156 diff --git a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda index cebc26885d..9d39f617ce 100644 --- a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda +++ b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda @@ -16,10 +16,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags 
strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # cuda-base # @@ -40,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname 
-m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -80,6 +96,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -101,7 +118,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -137,24 +159,27 @@ LABEL name="odh-notebook-jupyter-cuda-pytorch-llmcompressor-ubi9-python-3.12" \ # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda index 0a087f3ceb..3b105b0336 100644 --- a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -16,10 +16,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # cuda-base # @@ -40,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected 
packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -80,6 +96,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV 
PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -101,7 +118,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -127,25 +149,28 @@ WORKDIR /opt/app-root/bin # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml index f5506bb570..022a1c3511 100644 --- a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml +++ b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml @@ -1117,6 +1117,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = 
"https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -1126,6 +1128,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = 
"https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = 
"https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -1135,6 +1139,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = 
"https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -1144,6 +1150,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = 
"https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -1151,6 +1159,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = 
"https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = 
"https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -1160,6 +1170,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = 
"https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda b/jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda index 6beb3cc6d0..d299e411b7 100644 --- a/jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda +++ b/jupyter/pytorch/ubi9-python-3.12/Dockerfile.cuda @@ -16,10 +16,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # cuda-base # @@ -40,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to 
command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -80,6 +96,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV 
PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -101,7 +118,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -137,24 +159,27 @@ LABEL name="odh-notebook-jupyter-cuda-pytorch-ubi9-python-3.12" \ # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda index 9067136801..c7ef81df1e 100644 --- a/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/jupyter/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -16,10 +16,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # cuda-base # @@ -40,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command 
line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -80,6 +96,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" 
+ # Dependencies for PDF export end USER 1001 @@ -101,7 +118,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -127,25 +149,28 @@ WORKDIR /opt/app-root/bin # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/jupyter/pytorch/ubi9-python-3.12/pylock.toml b/jupyter/pytorch/ubi9-python-3.12/pylock.toml index 01384b5907..2097fab142 100644 --- a/jupyter/pytorch/ubi9-python-3.12/pylock.toml +++ b/jupyter/pytorch/ubi9-python-3.12/pylock.toml @@ -552,9 +552,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = 
"https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -1152,6 +1152,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = 
"https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -1161,6 +1163,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = 
"https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -1170,6 +1174,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = 
"https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = 
"https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -1179,6 +1185,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = 
"https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -1186,6 +1194,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = 
"https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -1195,6 +1205,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = 
"https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/jupyter/pytorch/ubi9-python-3.12/pyproject.toml b/jupyter/pytorch/ubi9-python-3.12/pyproject.toml index 9feed37e7a..d0a96f8ad6 100644 --- 
a/jupyter/pytorch/ubi9-python-3.12/pyproject.toml +++ b/jupyter/pytorch/ubi9-python-3.12/pyproject.toml @@ -21,7 +21,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", "kubeflow-training==1.9.3", "feast~=0.55.0", diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm index 69696aa3fe..c67cb79a6d 100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -14,10 +14,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # rocm-base # @@ -38,25 +41,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so 
use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -78,6 +94,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -99,7 +116,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf 
install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -124,31 +146,35 @@ WORKDIR /opt/app-root/bin COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh - USER 0 -# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments -RUN ./de-vendor-torch.sh && \ - rm ./de-vendor-torch.sh && \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ - fix-permissions /opt/app-root -P -USER 1001 +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# De-vendor the ROCm libs that are embedded in Pytorch +./de-vendor-torch.sh +rm ./de-vendor-torch.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +USER 1001 WORKDIR /opt/app-root/src LABEL name="rhoai/odh-workbench-jupyter-pytorch-rocm-py312-rhel9" \ diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm index e122bcda8c..80b2bafbd1 100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/Dockerfile.rocm @@ -14,10 +14,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # rocm-base # @@ -38,25 +41,38 @@ COPY --from=ubi-repos 
/etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end 
######################## @@ -78,6 +94,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -99,7 +116,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -134,29 +156,33 @@ LABEL name="odh-notebook-jupyter-rocm-pytorch-ubi9-python-3.12" \ COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh - USER 0 -# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments -RUN ./de-vendor-torch.sh && \ - rm ./de-vendor-torch.sh && \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ - fix-permissions /opt/app-root -P -USER 1001 +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# De-vendor the ROCm libs that are embedded in Pytorch +./de-vendor-torch.sh +rm ./de-vendor-torch.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +USER 1001 WORKDIR /opt/app-root/src diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/pylock.toml b/jupyter/rocm/pytorch/ubi9-python-3.12/pylock.toml index 636526170b..c20b71736c 100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/pylock.toml +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/pylock.toml @@ -552,9 +552,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = 
"870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -1152,6 +1152,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = 
"af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -1161,6 +1163,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = 
"55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -1170,6 +1174,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = 
"3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = 
"https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -1179,6 +1185,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = 
"https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -1186,6 +1194,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = 
"https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -1195,6 +1205,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = 
"https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml b/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml index b1fd5445a1..00279e7325 
100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", "kubeflow-training==1.9.3", "feast~=0.55.0", diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm index c125fcd479..9785459caa 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -14,10 +14,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # rocm-base # @@ -38,25 +41,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best 
--skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -76,6 +92,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -99,7 +116,12 @@ WORKDIR /opt/app-root/bin USER root # 
Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -125,37 +147,38 @@ WORKDIR /opt/app-root/bin COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - # Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh - -# Fix permissions to support pip in Openshift environments \ USER 0 -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P -USER 1001 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +# Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ -USER 0 COPY ${TENSORFLOW_SOURCE_CODE}/utils/link-solibs.sh /tmp/link-solibs.sh RUN /tmp/link-solibs.sh && rm /tmp/link-solibs.sh -USER 1001 +USER 1001 WORKDIR /opt/app-root/src LABEL name="rhoai/odh-workbench-jupyter-tensorflow-rocm-py312-rhel9" \ diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm index 4497796925..b4f8e6f497 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/Dockerfile.rocm @@ -14,10 +14,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd 
./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # rocm-base # @@ -38,25 +41,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname 
-m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -76,6 +92,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -99,7 +116,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -135,35 +157,36 @@ LABEL name="odh-notebook-jupyter-rocm-tensorflow-ubi9-python-3.12" \ COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- # Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh - -# Fix permissions to support pip in Openshift environments \ USER 0 -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P -USER 1001 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+# Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ -USER 0 COPY ${TENSORFLOW_SOURCE_CODE}/utils/link-solibs.sh /tmp/link-solibs.sh RUN /tmp/link-solibs.sh && rm /tmp/link-solibs.sh -USER 1001 +USER 1001 WORKDIR /opt/app-root/src diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/pylock.toml b/jupyter/rocm/tensorflow/ubi9-python-3.12/pylock.toml index c3906f416b..ac3965ae6c 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/pylock.toml +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/pylock.toml @@ -546,9 +546,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/93/27/bf74dc1494625c3 [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = 
"https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml b/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml index fed9d67688..e15e0632bc 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml @@ -25,7 +25,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", "kubeflow-training==1.9.3", # DB connectors diff --git a/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda b/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda index e8d67e0ff8..4bf1308cfd 100644 --- 
a/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda +++ b/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.cuda @@ -16,10 +16,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # cuda-base # @@ -40,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl 
mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -80,6 +96,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -101,7 +118,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -138,25 +160,28 @@ LABEL name="odh-notebook-cuda-jupyter-tensorflow-ubi9-python-3.12" \ # Install Python packages and Jupyterlab extensions from requirements.txt COPY 
${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ diff --git a/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda b/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda index e5c665cfe7..15ca7dae25 100644 --- a/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/jupyter/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -16,10 +16,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo 
mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # cuda-base # @@ -40,25 +43,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar 
-xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -80,6 +96,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -101,7 +118,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -128,25 +150,28 @@ WORKDIR /opt/app-root/bin # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # setup path for runtime configuration - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# setup path for runtime configuration +mkdir /opt/app-root/runtimes +# Remove default Elyra runtime-images +rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +# copy jupyter configuration +cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +# Disable announcement plugin of jupyterlab +jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +# Apply JupyterLab addons +/opt/app-root/bin/utils/addons/apply.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ diff --git a/jupyter/tensorflow/ubi9-python-3.12/pylock.toml b/jupyter/tensorflow/ubi9-python-3.12/pylock.toml index 4a47877eaa..c85113b991 100644 --- a/jupyter/tensorflow/ubi9-python-3.12/pylock.toml +++ b/jupyter/tensorflow/ubi9-python-3.12/pylock.toml @@ -558,9 +558,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = 
"870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -1179,6 +1179,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = 
"af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -1188,6 +1190,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = 
"55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -1197,6 +1201,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = 
"3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = 
"https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -1206,6 +1212,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = 
"https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -1213,6 +1221,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = 
"https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -1222,6 +1232,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = 
"https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml b/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml index 35a4fdd2cc..e74c614210 100644 
--- a/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml +++ b/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", "feast~=0.55.0", # DB connectors diff --git a/jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu b/jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu index 734d5274d3..8380b83685 100644 --- a/jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu +++ b/jupyter/trustyai/ubi9-python-3.12/Dockerfile.cpu @@ -14,10 +14,13 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # wheel-cache-base # @@ -34,13 +37,15 @@ ARG TRUSTYAI_SOURCE_CODE=jupyter/trustyai/ubi9-python-3.12 COPY ${TRUSTYAI_SOURCE_CODE}/pylock.toml . COPY ${TRUSTYAI_SOURCE_CODE}/devel_env_setup.sh . 
-RUN --mount=type=cache,target=/root/.cache/uv \ - pip install --no-cache-dir uv && \ - # the devel script is ppc64le and s390x specific - sets up build-time dependencies - source ./devel_env_setup.sh && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +pip install --no-cache-dir uv +# the devel script is ppc64le and s390x specific - sets up build-time dependencies +source ./devel_env_setup.sh +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +EOF #################### # cpu-base # @@ -61,25 +66,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L 
https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -100,6 +118,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -122,7 +141,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -159,64 +183,77 @@ USER 0 # Install jre that is needed to run the trustyai library # Also install runtime libraries for s390x/ppc64le -RUN INSTALL_PKGS="java-17-openjdk" && \ - ARCH=$(uname -m) && \ - if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ - # Add runtime libraries needed for s390x/ppc64le (OpenBLAS for PyTorch/NumPy) - INSTALL_PKGS="$INSTALL_PKGS openblas openblas-threads"; \ - fi && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - dnf -y clean all --enablerepo='*' && \ - # Create symlink for compatibility (openblas package provides libopenblasp.so.0 but PyTorch looks for libopenblas.so.0) - if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ - ln -sf /usr/lib64/libopenblasp.so.0 /usr/lib64/libopenblas.so.0 && \ - ldconfig; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +INSTALL_PKGS="java-17-openjdk" +ARCH=$(uname -m) +if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then + # Add runtime libraries 
needed for s390x/ppc64le (OpenBLAS for PyTorch/NumPy) + INSTALL_PKGS="$INSTALL_PKGS openblas openblas-threads" +fi +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +dnf -y clean all --enablerepo='*' +# Create symlink for compatibility (openblas package provides libopenblasp.so.0 but PyTorch looks for libopenblas.so.0) +if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then + ln -sf /usr/lib64/libopenblasp.so.0 /usr/lib64/libopenblas.so.0 + ldconfig +fi +EOF # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${TRUSTYAI_SOURCE_CODE}/pylock.toml ./ # install openblas for ppc64le -RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS/,target=/OpenBlas/,rw \ - bash -c 'ARCH=$(uname -m); \ - if [ "$ARCH" = "ppc64le" ]; then \ - PREFIX=/usr/ make install -C /OpenBlas; \ - fi' +RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS/,target=/OpenBlas/,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +ARCH=$(uname -m) +if [ "$ARCH" = "ppc64le" ]; then + PREFIX=/usr/ make install -C /OpenBlas +fi +EOF # Install packages and cleanup # install packages as USER 0 (this will allow us to consume uv cache) RUN --mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw \ - --mount=type=cache,target=/root/.cache/uv \ - bash -c 'ARCH=$(uname -m); \ - if [ "$ARCH" = "ppc64le" ] || [ "$ARCH" = "s390x" ]; then \ - UV_LINK_MODE=copy uv pip install /wheelsdir/*.whl accelerate --cache-dir /root/.cache/uv; \ - fi' - -RUN --mount=type=cache,target=/root/.cache/uv \ - echo "Installing softwares and packages" && \ - # we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag - UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml && \ - # Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files - # Build debugpy from source instead - UV_LINK_MODE=copy uv pip install --no-cache 
git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') && \ - # change ownership to default user (all packages were installed as root and has root:root ownership \ - chown -R 1001:0 /opt/app-root/ && \ - chmod -R g=u /opt/app-root + --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +ARCH=$(uname -m) +if [ "$ARCH" = "ppc64le" ] || [ "$ARCH" = "s390x" ]; then + UV_LINK_MODE=copy uv pip install /wheelsdir/*.whl accelerate --cache-dir /root/.cache/uv +fi +EOF + +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag +UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml +# Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files +# Build debugpy from source instead +UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') +# change ownership to default user (all packages were installed as root and has root:root ownership +chown -R 1001:0 /opt/app-root/ +chmod -R g=u /opt/app-root +EOF USER 1001 -RUN # setup path for runtime configuration \ - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +# TODO: https://issues.redhat.com/browse/RHAIENG-1503 uncomment this +## setup path for runtime configuration +#mkdir /opt/app-root/runtimes +## Remove default Elyra runtime-images +#rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +## Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +#sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +## copy jupyter configuration +#cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +## Disable announcement plugin of jupyterlab +#jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +## Apply JupyterLab addons +#/opt/app-root/bin/utils/addons/apply.sh +## Fix permissions to support pip in Openshift environments +#chmod -R g+w /opt/app-root/lib/python3.12/site-packages +#fix-permissions /opt/app-root -P +EOF diff --git a/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu b/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu index 985431abd9..3bb1248442 100644 --- a/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu +++ b/jupyter/trustyai/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -14,15 +14,20 @@ FROM registry.access.redhat.com/ubi9/go-toolset:latest AS mongocli-builder ARG MONGOCLI_VERSION=2.0.4 WORKDIR /tmp/ -RUN curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip 
https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip -RUN unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip -RUN cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ && \ - CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -Lo mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip https://github.com/mongodb/mongodb-cli/archive/refs/tags/mongocli/v${MONGOCLI_VERSION}.zip +unzip ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}.zip +cd ./mongodb-cli-mongocli-v${MONGOCLI_VERSION}/ +CGO_ENABLED=1 GOOS=linux go build -a -tags strictfipsruntime -o /tmp/mongocli ./cmd/mongocli/ +EOF #################### # wheel-cache-base # #################### FROM ${BASE_IMAGE} AS whl-cache +USER 0 +RUN subscription-manager refresh # hadolint ignore=DL3002 USER root @@ -34,18 +39,22 @@ ARG TRUSTYAI_SOURCE_CODE=jupyter/trustyai/ubi9-python-3.12 COPY ${TRUSTYAI_SOURCE_CODE}/pylock.toml . COPY ${TRUSTYAI_SOURCE_CODE}/devel_env_setup.sh . -RUN --mount=type=cache,target=/root/.cache/uv \ - pip install --no-cache-dir uv && \ - # the devel script is ppc64le and s390x specific - sets up build-time dependencies - source ./devel_env_setup.sh && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +pip install --no-cache-dir uv +# the devel script is ppc64le and s390x specific - sets up build-time dependencies +source ./devel_env_setup.sh +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +UV_LINK_MODE=copy uv pip install --strict --no-deps --refresh --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +EOF #################### # cpu-base # #################### FROM ${BASE_IMAGE} AS cpu-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -61,25 +70,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo +dnf 
clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -100,6 +122,7 @@ USER 0 # Dependencies for PDF export begin RUN ./utils/install_pdf_deps.sh ENV PATH="/usr/local/texlive/bin/linux:/usr/local/pandoc/bin:$PATH" + # Dependencies for PDF export end USER 1001 @@ -122,7 +145,12 @@ WORKDIR /opt/app-root/bin USER root # Install useful OS packages -RUN dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y jq unixODBC unixODBC-devel postgresql git-lfs libsndfile libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Copy dynamically-linked mongocli built in earlier build stage COPY --from=mongocli-builder /tmp/mongocli /opt/app-root/bin/ @@ -157,64 +185,77 @@ USER 0 # Install jre that is needed to run the trustyai library # Also install runtime libraries for s390x/ppc64le -RUN INSTALL_PKGS="java-17-openjdk" && \ - 
ARCH=$(uname -m) && \ - if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ - # Add runtime libraries needed for s390x/ppc64le (OpenBLAS for PyTorch/NumPy) - INSTALL_PKGS="$INSTALL_PKGS openblas openblas-threads"; \ - fi && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - dnf -y clean all --enablerepo='*' && \ - # Create symlink for compatibility (openblas package provides libopenblasp.so.0 but PyTorch looks for libopenblas.so.0) - if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ - ln -sf /usr/lib64/libopenblasp.so.0 /usr/lib64/libopenblas.so.0 && \ - ldconfig; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +INSTALL_PKGS="java-17-openjdk" +ARCH=$(uname -m) +if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then + # Add runtime libraries needed for s390x/ppc64le (OpenBLAS for PyTorch/NumPy) + INSTALL_PKGS="$INSTALL_PKGS openblas openblas-threads" +fi +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +dnf -y clean all --enablerepo='*' +# Create symlink for compatibility (openblas package provides libopenblasp.so.0 but PyTorch looks for libopenblas.so.0) +if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then + ln -sf /usr/lib64/libopenblasp.so.0 /usr/lib64/libopenblas.so.0 + ldconfig +fi +EOF # Install Python packages and Jupyterlab extensions from requirements.txt COPY ${TRUSTYAI_SOURCE_CODE}/pylock.toml ./ # install openblas for ppc64le -RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS/,target=/OpenBlas/,rw \ - bash -c 'ARCH=$(uname -m); \ - if [ "$ARCH" = "ppc64le" ]; then \ - PREFIX=/usr/ make install -C /OpenBlas; \ - fi' +RUN --mount=type=cache,from=whl-cache,source=/root/OpenBLAS/,target=/OpenBlas/,rw /bin/bash <<'EOF' +set -Eeuxo pipefail +ARCH=$(uname -m) +if [ "$ARCH" = "ppc64le" ]; then + PREFIX=/usr/ make install -C /OpenBlas +fi +EOF # Install packages and cleanup # install packages as USER 0 (this will allow us to consume uv cache) RUN 
--mount=type=cache,from=whl-cache,source=/wheelsdir/,target=/wheelsdir/,rw \ - --mount=type=cache,target=/root/.cache/uv \ - bash -c 'ARCH=$(uname -m); \ - if [ "$ARCH" = "ppc64le" ] || [ "$ARCH" = "s390x" ]; then \ - UV_LINK_MODE=copy uv pip install /wheelsdir/*.whl accelerate --cache-dir /root/.cache/uv; \ - fi' - -RUN --mount=type=cache,target=/root/.cache/uv \ - echo "Installing softwares and packages" && \ - # we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag - UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml && \ - # Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files - # Build debugpy from source instead - UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') && \ - # change ownership to default user (all packages were installed as root and has root:root ownership \ - chown -R 1001:0 /opt/app-root/ && \ - chmod -R g=u /opt/app-root + --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +ARCH=$(uname -m) +if [ "$ARCH" = "ppc64le" ] || [ "$ARCH" = "s390x" ]; then + UV_LINK_MODE=copy uv pip install /wheelsdir/*.whl accelerate --cache-dir /root/.cache/uv +fi +EOF + +RUN --mount=type=cache,target=/root/.cache/uv /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# we can ensure wheels are consumed from the cache only by restricting internet access for uv install with '--offline' flag +UV_LINK_MODE=copy uv pip install --cache-dir /root/.cache/uv --requirements=./pylock.toml +# Note: debugpy wheel availabe on pypi (in uv cache) is none-any but bundles amd64.so files +# Build debugpy from source instead +UV_LINK_MODE=copy uv pip install --no-cache git+https://github.com/microsoft/debugpy.git@v$(grep -A1 '\"debugpy\"' ./pylock.toml | grep -Eo '\b[0-9\.]+\b') +# 
change ownership to default user (all packages were installed as root and have root:root ownership) +chown -R 1001:0 /opt/app-root/ +chmod -R g=u /opt/app-root +EOF USER 1001 -RUN # setup path for runtime configuration \ - mkdir /opt/app-root/runtimes && \ - # Remove default Elyra runtime-images \ - rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json && \ - # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \ - sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \ - # copy jupyter configuration - cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter && \ - # Disable announcement plugin of jupyterlab \ - jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \ - # Apply JupyterLab addons \ - /opt/app-root/bin/utils/addons/apply.sh && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +# TODO: https://issues.redhat.com/browse/RHAIENG-1503 uncomment this +## setup path for runtime configuration +#mkdir /opt/app-root/runtimes +## Remove default Elyra runtime-images +#rm /opt/app-root/share/jupyter/metadata/runtime-images/*.json +## Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y +#sed -i -e "s/Python.*/$(python --version | cut -d '.' 
-f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json +## copy jupyter configuration +#cp /opt/app-root/bin/utils/jupyter_server_config.py /opt/app-root/etc/jupyter +## Disable announcement plugin of jupyterlab +#jupyter labextension disable "@jupyterlab/apputils-extension:announcements" +## Apply JupyterLab addons +#/opt/app-root/bin/utils/addons/apply.sh +## Fix permissions to support pip in Openshift environments +#chmod -R g+w /opt/app-root/lib/python3.12/site-packages +#fix-permissions /opt/app-root -P +EOF diff --git a/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf b/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf index 4583ee67cb..8fe9d4edc9 100644 --- a/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf +++ b/jupyter/trustyai/ubi9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : RHEL 9.6 with Python 3.12 # Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x -BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest +BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1761580156 diff --git a/jupyter/trustyai/ubi9-python-3.12/pylock.toml b/jupyter/trustyai/ubi9-python-3.12/pylock.toml index a0b6bc7a4c..b9897a87c1 100644 --- a/jupyter/trustyai/ubi9-python-3.12/pylock.toml +++ b/jupyter/trustyai/ubi9-python-3.12/pylock.toml @@ -545,10 +545,10 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/93/27/bf74dc1494625c3 [[packages]] name = "codeflare-sdk" -version = "0.32.0" +version = "0.32.1" marker = "platform_machine != 'ppc64le' and platform_machine != 's390x'" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = 
"https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" diff --git a/jupyter/trustyai/ubi9-python-3.12/pyproject.toml b/jupyter/trustyai/ubi9-python-3.12/pyproject.toml index 463ac623d2..b8beec7a2a 100644 --- a/jupyter/trustyai/ubi9-python-3.12/pyproject.toml +++ b/jupyter/trustyai/ubi9-python-3.12/pyproject.toml @@ -40,7 +40,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0; platform_machine != 'ppc64le' and platform_machine != 's390x'", + "codeflare-sdk~=0.32.1; platform_machine != 'ppc64le' and platform_machine != 's390x'", "kubeflow-training==1.9.3", # DB connectors diff --git a/rstudio/c9s-python-3.12/Dockerfile.cpu b/rstudio/c9s-python-3.12/Dockerfile.cpu index eb86a071c5..d5e8d84bee 100644 --- a/rstudio/c9s-python-3.12/Dockerfile.cpu +++ b/rstudio/c9s-python-3.12/Dockerfile.cpu @@ -13,7 +13,7 @@ FROM ${BASE_IMAGE} AS cpu-base WORKDIR /opt/app-root/bin # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U 
"micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # OS Packages needs to be installed as root @@ -23,13 +23,22 @@ USER root # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages # remove skopeo, CVE-2025-4674 -RUN dnf install -y mesa-libGL && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y mesa-libGL +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 @@ -61,51 +70,63 @@ USER 0 ENV R_VERSION=4.5.1 # Install R -RUN dnf install -y 'dnf-command(config-manager)' && \ - dnf config-manager --set-enabled crb && \ - dnf install -y https://download.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm && \ - INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ - R-littler R-littler-examples openssl-libs compat-openssl11" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site && \ - (umask 002;touch /usr/lib64/R/etc/Renviron.site) && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set 
-Eeuxo pipefail +dnf install -y 'dnf-command(config-manager)' +dnf config-manager --set-enabled crb +dnf install -y https://download.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ +R-littler R-littler-examples openssl-libs compat-openssl11" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site +(umask 002;touch /usr/lib64/R/etc/Renviron.site) +dnf -y clean all --enablerepo='*' +EOF # set R library to default (used in install.r from littler) ENV LIBLOC=/usr/lib64/R/library ENV R_LIBS_USER=/opt/app-root/bin/Rpackages/4.5 -RUN chmod -R a+w ${LIBLOC} && \ - # create User R Library path - mkdir -p ${R_LIBS_USER} && \ - chmod -R a+w ${R_LIBS_USER} +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R a+w ${LIBLOC} +# create User R Library path +mkdir -p ${R_LIBS_USER} +chmod -R a+w ${R_LIBS_USER} +EOF WORKDIR /tmp/ COPY /rstudio/utils /tmp/utils # Install RStudio ARG RSTUDIO_RPM=rstudio-server-rhel-2025.09.0-387-x86_64.rpm -RUN wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} && \ - dnf install -y ${RSTUDIO_RPM} && \ - rm ${RSTUDIO_RPM} && \ - dnf -y clean all --enablerepo='*' && \ - # Specific RStudio config and fixes - chmod 1777 /var/run/rstudio-server && \ - mkdir -p /usr/share/doc/R && \ - # package installation - # install necessary texlive-framed package to make Knit R markup to PDF rendering possible - dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed && \ - dnf clean all && \ - rm -rf /var/cache/yum && \ - (cd /tmp/utils && ./cve_remediation.sh) +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} +dnf install -y ${RSTUDIO_RPM} +rm ${RSTUDIO_RPM} +dnf -y clean 
all --enablerepo='*' +# Specific RStudio config and fixes +chmod 1777 /var/run/rstudio-server +mkdir -p /usr/share/doc/R +# package installation +# install necessary texlive-framed package to make Knit R markup to PDF rendering possible +dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed +dnf clean all +rm -rf /var/cache/yum +(cd /tmp/utils && ./cve_remediation.sh) +EOF COPY ${RSTUDIO_SOURCE_CODE}/rsession.conf /etc/rstudio/rsession.conf # Install R packages # https://cran.r-project.org/web/packages COPY ${RSTUDIO_SOURCE_CODE}/install_packages.R ./ -RUN R -f ./install_packages.R && \ - rm ./install_packages.R +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +R -f ./install_packages.R +rm ./install_packages.R +EOF ENV APP_ROOT=/opt/app-root @@ -121,12 +142,15 @@ ENV NGINX_VERSION=1.24 \ NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl # Modules does not exist -RUN dnf -y module enable nginx:$NGINX_VERSION && \ - INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - rpm -V $INSTALL_PKGS && \ - nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." && echo "Found VERSION $NGINX_VERSION" && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y module enable nginx:$NGINX_VERSION +INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." 
&& echo "Found VERSION $NGINX_VERSION" +dnf -y clean all --enablerepo='*' +EOF # Configure httpd for CGI processing COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf @@ -151,30 +175,33 @@ COPY ${RSTUDIO_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ # UID=1001 && GID=0 # UID=&& GID=0 # UID=1001 && GID= -RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/api/ && \ - mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - mkdir -p ${NGINX_LOG_PATH} && \ - mkdir -p ${NGINX_PERL_MODULE_PATH} && \ - # Create httpd directories and set permissions - mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chown -R 1001:0 ${NGINX_CONF_PATH} && \ - chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ - chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ - chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chmod ug+rw ${NGINX_CONF_PATH} && \ - chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ - chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ - chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - # Make CGI scripts executable and set proper ownership - chmod +x /opt/app-root/api/kernels/access.cgi && \ - chmod +x /opt/app-root/api/probe.cgi && \ - chown -R 1001:0 /opt/app-root/api && \ - rpm-file-permissions +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd 
/var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI scripts executable and set proper ownership +chmod +x /opt/app-root/api/kernels/access.cgi +chmod +x /opt/app-root/api/probe.cgi +chown -R 1001:0 /opt/app-root/api +rpm-file-permissions +EOF # Launcher WORKDIR /opt/app-root/bin @@ -186,13 +213,16 @@ USER 1001 COPY ${RSTUDIO_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/rstudio/c9s-python-3.12/Dockerfile.cuda b/rstudio/c9s-python-3.12/Dockerfile.cuda index 94c66e8344..d4a45d0500 100644 --- a/rstudio/c9s-python-3.12/Dockerfile.cuda +++ b/rstudio/c9s-python-3.12/Dockerfile.cuda @@ -19,8 +19,12 @@ USER root # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages @@ -31,7 +35,7 @@ RUN dnf install -y mesa-libGL && dnf clean all && rm -rf /var/cache/yum USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end WORKDIR /opt/app-root/src @@ -61,51 +65,63 @@ USER 0 ENV R_VERSION=4.5.1 # Install R -RUN dnf install -y 'dnf-command(config-manager)' && \ - dnf config-manager --set-enabled crb && \ - dnf install -y 
https://download.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm && \ - INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ - R-littler R-littler-examples openssl-libs compat-openssl11" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site && \ - (umask 002;touch /usr/lib64/R/etc/Renviron.site) && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y 'dnf-command(config-manager)' +dnf config-manager --set-enabled crb +dnf install -y https://download.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ +R-littler R-littler-examples openssl-libs compat-openssl11" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site +(umask 002;touch /usr/lib64/R/etc/Renviron.site) +dnf -y clean all --enablerepo='*' +EOF # set R library to default (used in install.r from littler) ENV LIBLOC=/usr/lib64/R/library ENV R_LIBS_USER=/opt/app-root/bin/Rpackages/4.5 -RUN chmod -R a+w ${LIBLOC} && \ - # create User R Library path - mkdir -p ${R_LIBS_USER} && \ - chmod -R a+w ${R_LIBS_USER} +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R a+w ${LIBLOC} +# create User R Library path +mkdir -p ${R_LIBS_USER} +chmod -R a+w ${R_LIBS_USER} +EOF WORKDIR /tmp/ COPY /rstudio/utils /tmp/utils # Install RStudio ARG RSTUDIO_RPM=rstudio-server-rhel-2025.09.0-387-x86_64.rpm -RUN wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} && \ - dnf install -y ${RSTUDIO_RPM} && \ - rm ${RSTUDIO_RPM} && \ - dnf -y clean all --enablerepo='*' && \ - # Specific RStudio config and fixes - chmod 1777 /var/run/rstudio-server && \ - mkdir -p /usr/share/doc/R && \ - # package 
installation - # install necessary texlive-framed package to make Knit R markup to PDF rendering possible - dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed && \ - dnf clean all && \ - rm -rf /var/cache/yum && \ - (cd /tmp/utils && ./cve_remediation.sh) +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} +dnf install -y ${RSTUDIO_RPM} +rm ${RSTUDIO_RPM} +dnf -y clean all --enablerepo='*' +# Specific RStudio config and fixes +chmod 1777 /var/run/rstudio-server +mkdir -p /usr/share/doc/R +# package installation +# install necessary texlive-framed package to make Knit R markup to PDF rendering possible +dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed +dnf clean all +rm -rf /var/cache/yum +(cd /tmp/utils && ./cve_remediation.sh) +EOF COPY ${RSTUDIO_SOURCE_CODE}/rsession.conf /etc/rstudio/rsession.conf # Install R packages # https://cran.r-project.org/web/packages COPY ${RSTUDIO_SOURCE_CODE}/install_packages.R ./ -RUN R -f ./install_packages.R && \ - rm ./install_packages.R +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +R -f ./install_packages.R +rm ./install_packages.R +EOF ENV APP_ROOT=/opt/app-root @@ -121,12 +137,16 @@ ENV NGINX_VERSION=1.24 \ NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl # Modules does not exist -RUN dnf -y module enable nginx:$NGINX_VERSION && \ - INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - rpm -V $INSTALL_PKGS && \ - nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." 
&& echo "Found VERSION $NGINX_VERSION" && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y module enable nginx:$NGINX_VERSION +INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." +echo "Found VERSION $NGINX_VERSION" +dnf -y clean all --enablerepo='*' +EOF # Configure httpd for CGI processing COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf @@ -151,30 +171,33 @@ COPY ${RSTUDIO_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ # UID=1001 && GID=0 # UID=&& GID=0 # UID=1001 && GID= -RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/api/ && \ - mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - mkdir -p ${NGINX_LOG_PATH} && \ - mkdir -p ${NGINX_PERL_MODULE_PATH} && \ - # Create httpd directories and set permissions - mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chown -R 1001:0 ${NGINX_CONF_PATH} && \ - chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ - chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ - chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chmod ug+rw ${NGINX_CONF_PATH} && \ - chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ - chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ - chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - # Make CGI scripts executable and set proper ownership - chmod +x /opt/app-root/api/kernels/access.cgi && \ - chmod +x /opt/app-root/api/probe.cgi && \ - chown -R 1001:0 /opt/app-root/api && \ - rpm-file-permissions +RUN /bin/bash <<'EOF' +set 
-Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI scripts executable and set proper ownership +chmod +x /opt/app-root/api/kernels/access.cgi +chmod +x /opt/app-root/api/probe.cgi +chown -R 1001:0 /opt/app-root/api +rpm-file-permissions +EOF # Launcher WORKDIR /opt/app-root/bin @@ -186,13 +209,16 @@ USER 1001 COPY ${RSTUDIO_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/rstudio/rhel9-python-3.12/Dockerfile.cpu b/rstudio/rhel9-python-3.12/Dockerfile.cpu index efea6b3c44..48a3c33742 100644 --- a/rstudio/rhel9-python-3.12/Dockerfile.cpu +++ b/rstudio/rhel9-python-3.12/Dockerfile.cpu @@ -22,13 +22,18 @@ USER root # The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. # By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo +COPY --from=ubi-repos /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release # upgrade first to avoid fixable vulnerabilities begin # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages @@ -39,7 +44,7 @@ RUN dnf install -y perl mesa-libGL && dnf clean all && rm -rf /var/cache/yum USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end WORKDIR /opt/app-root/src @@ -78,69 +83,89 @@ USER 0 #RUN sed -i 's/\(def in_container():\)/\1\n return False/g' /usr/lib64/python*/*-packages/rhsm/config.py # If necessary, run the subscription manager command using the provided credentials. 
Only include --serverurl and --baseurl if they are provided -RUN if [ -d "${SECRET_DIR}" ]; then \ - SERVERURL=$(cat ${SECRET_DIR}/SERVERURL 2>/dev/null || echo ${SERVERURL_DEFAULT}) && \ - BASEURL=$(cat ${SECRET_DIR}/BASEURL 2>/dev/null || echo ${BASEURL_DEFAULT}) && \ - USERNAME=$(cat ${SECRET_DIR}/USERNAME) && \ - PASSWORD=$(cat ${SECRET_DIR}/PASSWORD) && \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + SERVERURL=$(cat ${SECRET_DIR}/SERVERURL 2>/dev/null || echo ${SERVERURL_DEFAULT}) + BASEURL=$(cat ${SECRET_DIR}/BASEURL 2>/dev/null || echo ${BASEURL_DEFAULT}) + USERNAME=$(cat ${SECRET_DIR}/USERNAME) + PASSWORD=$(cat ${SECRET_DIR}/PASSWORD) subscription-manager register \ ${SERVERURL:+--serverurl=$SERVERURL} \ ${BASEURL:+--baseurl=$BASEURL} \ --username=$USERNAME \ --password=$PASSWORD \ --force \ - --auto-attach; \ - fi + --auto-attach +fi +EOF + # TILL HERE ENV R_VERSION=4.5.1 # Install R -RUN dnf install -y dnf-plugins-core && \ - subscription-manager repos --enable codeready-builder-for-rhel-9-x86_64-rpms && \ - dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm && \ - INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ - R-littler R-littler-examples openssl-libs compat-openssl11" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site && \ - (umask 002;touch /usr/lib64/R/etc/Renviron.site) && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y dnf-plugins-core +if command -v subscription-manager &> /dev/null; then + subscription-manager repos --enable codeready-builder-for-rhel-9-x86_64-rpms +else + dnf config-manager --set-enabled crb +fi +dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ +R-littler 
R-littler-examples openssl-libs compat-openssl11" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site +(umask 002;touch /usr/lib64/R/etc/Renviron.site) +dnf -y clean all --enablerepo='*' +EOF # set R library to default (used in install.r from littler) ENV LIBLOC=/usr/lib64/R/library ENV R_LIBS_USER=/opt/app-root/bin/Rpackages/4.5 -RUN chmod -R a+w ${LIBLOC} && \ - # create User R Library path - mkdir -p ${R_LIBS_USER} && \ - chmod -R a+w ${R_LIBS_USER} +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R a+w ${LIBLOC} +# create User R Library path +mkdir -p ${R_LIBS_USER} +chmod -R a+w ${R_LIBS_USER} +EOF WORKDIR /tmp/ COPY /rstudio/utils /tmp/utils # Install RStudio ARG RSTUDIO_RPM=rstudio-server-rhel-2025.09.0-387-x86_64.rpm -RUN wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} && \ - dnf install -y ${RSTUDIO_RPM} && \ - rm ${RSTUDIO_RPM} && \ - dnf -y clean all --enablerepo='*' && \ - # Specific RStudio config and fixes - chmod 1777 /var/run/rstudio-server && \ - mkdir -p /usr/share/doc/R && \ - # package installation - # install necessary texlive-framed package to make Knit R markup to PDF rendering possible - dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed && \ - dnf clean all && \ - rm -rf /var/cache/yum && \ - (cd /tmp/utils && ./cve_remediation.sh) +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} +dnf install -y ${RSTUDIO_RPM} +rm ${RSTUDIO_RPM} +dnf -y clean all --enablerepo='*' +# Specific RStudio config and fixes +chmod 1777 /var/run/rstudio-server +mkdir -p /usr/share/doc/R +# package installation +# install necessary texlive-framed package to make Knit R markup to PDF rendering possible +dnf install -y 
libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed +dnf clean all +rm -rf /var/cache/yum +(cd /tmp/utils && ./cve_remediation.sh) +EOF COPY ${RSTUDIO_SOURCE_CODE}/rsession.conf /etc/rstudio/rsession.conf # # Install R packages # # https://cran.r-project.org/web/packages # COPY ${RSTUDIO_SOURCE_CODE}/install_packages.R ./ -# RUN R -f ./install_packages.R && \ -# rm ./install_packages.R +# RUN /bin/bash <<'EOF' +# set -Eeuxo pipefail +# R -f ./install_packages.R +# rm ./install_packages.R +# EOF ENV APP_ROOT=/opt/app-root @@ -156,12 +181,15 @@ ENV NGINX_VERSION=1.24 \ NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl # Modules does not exist -RUN dnf -y module enable nginx:$NGINX_VERSION && \ - INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - rpm -V $INSTALL_PKGS && \ - nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." && echo "Found VERSION $NGINX_VERSION" && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y module enable nginx:$NGINX_VERSION +INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." 
&& echo "Found VERSION $NGINX_VERSION" +dnf -y clean all --enablerepo='*' +EOF # Configure httpd for CGI processing COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf @@ -186,30 +214,33 @@ COPY ${RSTUDIO_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ # UID=1001 && GID=0 # UID=&& GID=0 # UID=1001 && GID= -RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/api/ && \ - mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - mkdir -p ${NGINX_LOG_PATH} && \ - mkdir -p ${NGINX_PERL_MODULE_PATH} && \ - # Create httpd directories and set permissions - mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chown -R 1001:0 ${NGINX_CONF_PATH} && \ - chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ - chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ - chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chmod ug+rw ${NGINX_CONF_PATH} && \ - chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ - chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ - chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - # Make CGI scripts executable and set proper ownership - chmod +x /opt/app-root/api/kernels/access.cgi && \ - chmod +x /opt/app-root/api/probe.cgi && \ - chown -R 1001:0 /opt/app-root/api && \ - rpm-file-permissions +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd 
/var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI scripts executable and set proper ownership +chmod +x /opt/app-root/api/kernels/access.cgi +chmod +x /opt/app-root/api/probe.cgi +chown -R 1001:0 /opt/app-root/api +rpm-file-permissions +EOF # Launcher WORKDIR /opt/app-root/bin @@ -219,22 +250,29 @@ COPY ${RSTUDIO_SOURCE_CODE}/run-rstudio.sh ${RSTUDIO_SOURCE_CODE}/setup_rstudio. # TODO THIS SHOULD BE REMOVED in favor of: https://issues.redhat.com/browse/RHOAIENG-32541 # Unregister the system -RUN if [ -d "${SECRET_DIR}" ]; then \ - subscription-manager remove --all && subscription-manager unregister && subscription-manager clean; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + subscription-manager remove --all && subscription-manager unregister && subscription-manager clean +fi +EOF + # TILL HERE USER 1001 COPY ${RSTUDIO_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/rstudio/rhel9-python-3.12/Dockerfile.cpu.konflux b/rstudio/rhel9-python-3.12/Dockerfile.cpu.konflux new file mode 100644 index 0000000000..484e184c65 --- /dev/null +++ b/rstudio/rhel9-python-3.12/Dockerfile.cpu.konflux @@ -0,0 +1,279 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +# External image alias for UBI repository configuration +FROM registry.access.redhat.com/ubi9/ubi@sha256:dcd8128d7620b06e4bce291f30db6ffcfa339b04e6d66081e36eb8e2b1b700f8 AS ubi-repos + +#################### +# cpu-base # +#################### +FROM ${BASE_IMAGE} AS cpu-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Inject the official UBI 9 repository configuration into the AIPCC base image. 
+# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo +COPY --from=ubi-repos /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +# remove skopeo, CVE-2025-4674 +RUN dnf install -y perl mesa-libGL && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +WORKDIR /opt/app-root/src + +##################### +# cpu-rstudio # +##################### +FROM cpu-base AS cpu-rstudio + +ARG RSTUDIO_SOURCE_CODE=rstudio/rhel9-python-3.12 +ARG TARGETARCH + +WORKDIR /opt/app-root/bin + +# TODO THIS SHOULD BE REMOVED +# Access the client's secret for the subscription manager from the environment variable +ARG SECRET_DIR=/opt/app-root/src/.sec +ARG SERVERURL_DEFAULT="" +ARG BASEURL_DEFAULT="" +# TILL HERE + +LABEL 
name="odh-notebook-rstudio-server-rhel9-python-3.12" \ + summary="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + description="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + io.k8s.display-name="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + io.k8s.description="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + authoritative-source-url="https://github.com/opendatahub-io/notebooks" \ + io.openshift.build.commit.ref="main" \ + io.openshift.build.source-location="https://github.com/opendatahub-io/notebooks/tree/main/rstudio/rhel9-python-3.12" \ + io.openshift.build.image="quay.io/opendatahub/workbench-images:rstudio-rhel9-python-3.12" + +USER 0 + +# TODO THIS SHOULD BE REMOVED in favor of: https://issues.redhat.com/browse/RHOAIENG-32541 +# uncomment the below line if you fall on this error: subscription-manager is disabled when running inside a container. Please refer to your host system for subscription management. +#RUN sed -i 's/\(def in_container():\)/\1\n return False/g' /usr/lib64/python*/*-packages/rhsm/config.py + +# If necessary, run the subscription manager command using the provided credentials. 
Only include --serverurl and --baseurl if they are provided +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + SERVERURL=$(cat ${SECRET_DIR}/SERVERURL 2>/dev/null || echo ${SERVERURL_DEFAULT}) + BASEURL=$(cat ${SECRET_DIR}/BASEURL 2>/dev/null || echo ${BASEURL_DEFAULT}) + USERNAME=$(cat ${SECRET_DIR}/USERNAME) + PASSWORD=$(cat ${SECRET_DIR}/PASSWORD) + subscription-manager register \ + ${SERVERURL:+--serverurl=$SERVERURL} \ + ${BASEURL:+--baseurl=$BASEURL} \ + --username=$USERNAME \ + --password=$PASSWORD \ + --force \ + --auto-attach +fi +EOF + +# TILL HERE + +ENV R_VERSION=4.5.1 + +# Install R +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y dnf-plugins-core +if command -v subscription-manager &> /dev/null; then + subscription-manager repos --enable codeready-builder-for-rhel-9-x86_64-rpms +else + dnf config-manager --set-enabled crb +fi +dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ +R-littler R-littler-examples openssl-libs compat-openssl11" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site +(umask 002;touch /usr/lib64/R/etc/Renviron.site) +dnf -y clean all --enablerepo='*' +EOF + +# set R library to default (used in install.r from littler) +ENV LIBLOC=/usr/lib64/R/library +ENV R_LIBS_USER=/opt/app-root/bin/Rpackages/4.5 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R a+w ${LIBLOC} +# create User R Library path +mkdir -p ${R_LIBS_USER} +chmod -R a+w ${R_LIBS_USER} +EOF + +WORKDIR /tmp/ +COPY /rstudio/utils /tmp/utils + +# Install RStudio +ARG RSTUDIO_RPM=rstudio-server-rhel-2025.09.0-387-x86_64.rpm +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} +dnf install -y ${RSTUDIO_RPM} +rm ${RSTUDIO_RPM} 
+dnf -y clean all --enablerepo='*' +# Specific RStudio config and fixes +chmod 1777 /var/run/rstudio-server +mkdir -p /usr/share/doc/R +# package installation +# install necessary texlive-framed package to make Knit R markup to PDF rendering possible +dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed +dnf clean all +rm -rf /var/cache/yum +(cd /tmp/utils && ./cve_remediation.sh) +EOF + +COPY ${RSTUDIO_SOURCE_CODE}/rsession.conf /etc/rstudio/rsession.conf + +# # Install R packages +# # https://cran.r-project.org/web/packages +# COPY ${RSTUDIO_SOURCE_CODE}/install_packages.R ./ +# RUN /bin/bash <<'EOF' +# set -Eeuxo pipefail +# R -f ./install_packages.R +# rm ./install_packages.R +# EOF + +ENV APP_ROOT=/opt/app-root + +# Install NGINX to proxy RStudio and pass probes check +ENV NGINX_VERSION=1.24 \ + NGINX_SHORT_VER=124 \ + NGINX_CONFIGURATION_PATH=${APP_ROOT}/etc/nginx.d \ + NGINX_CONF_PATH=/etc/nginx/nginx.conf \ + NGINX_DEFAULT_CONF_PATH=${APP_ROOT}/etc/nginx.default.d \ + NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx \ + NGINX_APP_ROOT=${APP_ROOT} \ + NGINX_LOG_PATH=/var/log/nginx \ + NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl + +# Modules does not exist +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y module enable nginx:$NGINX_VERSION +INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." && echo "Found VERSION $NGINX_VERSION" +dnf -y clean all --enablerepo='*' +EOF + +# Configure httpd for CGI processing +COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf +COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/rstudio-cgi.conf /etc/httpd/conf.d/rstudio-cgi.conf + +# Copy extra files to the image. 
+COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/nginx/root/ / + +# Configure nginx +COPY ${RSTUDIO_SOURCE_CODE}/nginx/serverconf/ /opt/app-root/etc/nginx.default.d/ +COPY ${RSTUDIO_SOURCE_CODE}/nginx/httpconf/ /opt/app-root/etc/nginx.d/ +COPY ${RSTUDIO_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ + +# Changing ownership and user rights to support following use-cases: +# 1) running container on OpenShift, whose default security model +# is to run the container under random UID, but GID=0 +# 2) for working root-less container with UID=1001, which does not have +# to have GID=0 +# 3) for default use-case, that is running container directly on operating system, +# with default UID and GID (1001:0) +# Supported combinations of UID:GID are thus following: +# UID=1001 && GID=0 +# UID=&& GID=0 +# UID=1001 && GID= +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI scripts executable and set proper ownership +chmod +x /opt/app-root/api/kernels/access.cgi +chmod +x /opt/app-root/api/probe.cgi +chown -R 1001:0 /opt/app-root/api +rpm-file-permissions +EOF + +# Launcher +WORKDIR 
/opt/app-root/bin + +COPY ${RSTUDIO_SOURCE_CODE}/utils utils/ +COPY ${RSTUDIO_SOURCE_CODE}/run-rstudio.sh ${RSTUDIO_SOURCE_CODE}/setup_rstudio.py ${RSTUDIO_SOURCE_CODE}/rsession.sh ${RSTUDIO_SOURCE_CODE}/run-nginx.sh ./ + +# TODO THIS SHOULD BE REMOVED in favor of: https://issues.redhat.com/browse/RHOAIENG-32541 +# Unregister the system +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + subscription-manager remove --all && subscription-manager unregister && subscription-manager clean +fi +EOF + +# TILL HERE + +USER 1001 + +COPY ${RSTUDIO_SOURCE_CODE}/pylock.toml ./ + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +WORKDIR /opt/app-root/src + +CMD ["/opt/app-root/bin/run-rstudio.sh"] diff --git a/rstudio/rhel9-python-3.12/Dockerfile.cuda b/rstudio/rhel9-python-3.12/Dockerfile.cuda index a471506314..fcd672931e 100644 --- a/rstudio/rhel9-python-3.12/Dockerfile.cuda +++ b/rstudio/rhel9-python-3.12/Dockerfile.cuda @@ -22,13 +22,18 @@ USER root # The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. # By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. 
COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo +COPY --from=ubi-repos /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release # upgrade first to avoid fixable vulnerabilities begin # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages @@ -39,7 +44,7 @@ RUN dnf install -y perl mesa-libGL && dnf clean all && rm -rf /var/cache/yum USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end WORKDIR /opt/app-root/src @@ -78,8 +83,11 @@ USER 0 ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda # Install CUDA toolkit 12.8 -RUN dnf -y install cuda-toolkit-12-8 && \ - dnf -y clean all --enablerepo="*" +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y install cuda-toolkit-12-8 +dnf -y clean all --enablerepo="*" +EOF WORKDIR /opt/app-root/src # TILL HERE @@ -89,69 +97,89 @@ WORKDIR /opt/app-root/src #RUN sed -i 's/\(def in_container():\)/\1\n return False/g' /usr/lib64/python*/*-packages/rhsm/config.py # If necessary, run the subscription manager command using the provided 
credentials. Only include --serverurl and --baseurl if they are provided -RUN if [ -d "${SECRET_DIR}" ]; then \ - SERVERURL=$(cat ${SECRET_DIR}/SERVERURL 2>/dev/null || echo ${SERVERURL_DEFAULT}) && \ - BASEURL=$(cat ${SECRET_DIR}/BASEURL 2>/dev/null || echo ${BASEURL_DEFAULT}) && \ - USERNAME=$(cat ${SECRET_DIR}/USERNAME) && \ - PASSWORD=$(cat ${SECRET_DIR}/PASSWORD) && \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + SERVERURL=$(cat ${SECRET_DIR}/SERVERURL 2>/dev/null || echo ${SERVERURL_DEFAULT}) + BASEURL=$(cat ${SECRET_DIR}/BASEURL 2>/dev/null || echo ${BASEURL_DEFAULT}) + USERNAME=$(cat ${SECRET_DIR}/USERNAME) + PASSWORD=$(cat ${SECRET_DIR}/PASSWORD) subscription-manager register \ ${SERVERURL:+--serverurl=$SERVERURL} \ ${BASEURL:+--baseurl=$BASEURL} \ --username=$USERNAME \ --password=$PASSWORD \ --force \ - --auto-attach; \ - fi + --auto-attach +fi +EOF + # TILL HERE ENV R_VERSION=4.5.1 # Install R -RUN dnf install -y dnf-plugins-core && \ - subscription-manager repos --enable codeready-builder-for-rhel-9-x86_64-rpms && \ - dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm && \ - INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ - R-littler R-littler-examples openssl-libs compat-openssl11" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site && \ - (umask 002;touch /usr/lib64/R/etc/Renviron.site) && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y dnf-plugins-core +if command -v subscription-manager &> /dev/null; then + subscription-manager repos --enable codeready-builder-for-rhel-9-x86_64-rpms +else + dnf config-manager --set-enabled crb +fi +dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight 
\ +R-littler R-littler-examples openssl-libs compat-openssl11" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site +(umask 002;touch /usr/lib64/R/etc/Renviron.site) +dnf -y clean all --enablerepo='*' +EOF # set R library to default (used in install.r from littler) ENV LIBLOC=/usr/lib64/R/library ENV R_LIBS_USER=/opt/app-root/bin/Rpackages/4.5 -RUN chmod -R a+w ${LIBLOC} && \ - # create User R Library path - mkdir -p ${R_LIBS_USER} && \ - chmod -R a+w ${R_LIBS_USER} +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R a+w ${LIBLOC} +# create User R Library path +mkdir -p ${R_LIBS_USER} +chmod -R a+w ${R_LIBS_USER} +EOF WORKDIR /tmp/ COPY /rstudio/utils /tmp/utils # Install RStudio ARG RSTUDIO_RPM=rstudio-server-rhel-2025.09.0-387-x86_64.rpm -RUN wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} && \ - dnf install -y ${RSTUDIO_RPM} && \ - rm ${RSTUDIO_RPM} && \ - dnf -y clean all --enablerepo='*' && \ - # Specific RStudio config and fixes - chmod 1777 /var/run/rstudio-server && \ - mkdir -p /usr/share/doc/R && \ - # package installation - # install necessary texlive-framed package to make Knit R markup to PDF rendering possible - dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed && \ - dnf clean all && \ - rm -rf /var/cache/yum && \ - (cd /tmp/utils && ./cve_remediation.sh) +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} +dnf install -y ${RSTUDIO_RPM} +rm ${RSTUDIO_RPM} +dnf -y clean all --enablerepo='*' +# Specific RStudio config and fixes +chmod 1777 /var/run/rstudio-server +mkdir -p /usr/share/doc/R +# package installation +# install necessary texlive-framed package to make Knit R markup to PDF rendering possible +dnf install -y 
libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed +dnf clean all +rm -rf /var/cache/yum +(cd /tmp/utils && ./cve_remediation.sh) +EOF COPY ${RSTUDIO_SOURCE_CODE}/rsession.conf /etc/rstudio/rsession.conf # # Install R packages # # https://cran.r-project.org/web/packages # COPY ${RSTUDIO_SOURCE_CODE}/install_packages.R ./ -# RUN R -f ./install_packages.R && \ -# rm ./install_packages.R +# RUN /bin/bash <<'EOF' +# set -Eeuxo pipefail +# R -f ./install_packages.R +# rm ./install_packages.R +# EOF ENV APP_ROOT=/opt/app-root @@ -168,12 +196,15 @@ ENV NGINX_VERSION=1.24 \ NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl # Modules does not exist -RUN dnf -y module enable nginx:$NGINX_VERSION && \ - INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" && \ - dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS && \ - rpm -V $INSTALL_PKGS && \ - nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." && echo "Found VERSION $NGINX_VERSION" && \ - dnf -y clean all --enablerepo='*' +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y module enable nginx:$NGINX_VERSION +INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." 
&& echo "Found VERSION $NGINX_VERSION" +dnf -y clean all --enablerepo='*' +EOF # Configure httpd for CGI processing COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf @@ -198,30 +229,33 @@ COPY ${RSTUDIO_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ # UID=1001 && GID=0 # UID=&& GID=0 # UID=1001 && GID= -RUN sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ && \ - mkdir -p ${NGINX_APP_ROOT}/api/ && \ - mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - mkdir -p ${NGINX_LOG_PATH} && \ - mkdir -p ${NGINX_PERL_MODULE_PATH} && \ - # Create httpd directories and set permissions - mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chown -R 1001:0 ${NGINX_CONF_PATH} && \ - chown -R 1001:0 ${NGINX_APP_ROOT}/etc && \ - chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chown -R 1001:0 /var/lib/nginx /var/log/nginx /run && \ - chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - chmod ug+rw ${NGINX_CONF_PATH} && \ - chmod -R ug+rwX ${NGINX_APP_ROOT}/etc && \ - chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start && \ - chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run && \ - chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs && \ - # Make CGI scripts executable and set proper ownership - chmod +x /opt/app-root/api/kernels/access.cgi && \ - chmod +x /opt/app-root/api/probe.cgi && \ - chown -R 1001:0 /opt/app-root/api && \ - rpm-file-permissions +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd 
/var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI scripts executable and set proper ownership +chmod +x /opt/app-root/api/kernels/access.cgi +chmod +x /opt/app-root/api/probe.cgi +chown -R 1001:0 /opt/app-root/api +rpm-file-permissions +EOF # Launcher WORKDIR /opt/app-root/bin @@ -231,22 +265,29 @@ COPY ${RSTUDIO_SOURCE_CODE}/run-rstudio.sh ${RSTUDIO_SOURCE_CODE}/setup_rstudio. # TODO THIS SHOULD BE REMOVED in favor of: https://issues.redhat.com/browse/RHOAIENG-32541 # Unregister the system -RUN if [ -d "${SECRET_DIR}" ]; then \ - subscription-manager remove --all && subscription-manager unregister && subscription-manager clean; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + subscription-manager remove --all && subscription-manager unregister && subscription-manager clean +fi +EOF + # TILL HERE USER 1001 COPY ${RSTUDIO_SOURCE_CODE}/pylock.toml ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/rstudio/rhel9-python-3.12/Dockerfile.cuda.konflux b/rstudio/rhel9-python-3.12/Dockerfile.cuda.konflux new file mode 100644 index 0000000000..654fc70401 --- /dev/null +++ b/rstudio/rhel9-python-3.12/Dockerfile.cuda.konflux @@ -0,0 +1,294 @@ +ARG TARGETARCH + +######################### +# configuration args # +######################### +ARG BASE_IMAGE + +# External image alias for UBI repository configuration +FROM registry.access.redhat.com/ubi9/ubi@sha256:dcd8128d7620b06e4bce291f30db6ffcfa339b04e6d66081e36eb8e2b1b700f8 AS ubi-repos + +#################### +# cuda-base # +#################### +FROM ${BASE_IMAGE} AS cuda-base + +WORKDIR /opt/app-root/bin + +# OS Packages needs to be installed as root +USER root + +# Inject the official UBI 9 repository configuration into the AIPCC base image. 
+# The Quay-based AIPCC image is "repo-less" by default (https://gitlab.com/redhat/rhel-ai/core/base-images/app#repositories), so dnf cannot upgrade or install packages. +# By copying ubi.repo from the public UBI 9 image, we enable package management for upgrades and installations. +COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo +COPY --from=ubi-repos /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release + +# upgrade first to avoid fixable vulnerabilities begin +# Problem: The operation would result in removing the following protected packages: systemd +# (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) +# Solution: --best --skip-broken does not work either, so use --nobest +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + +# upgrade first to avoid fixable vulnerabilities end + +# Install useful OS packages +# remove skopeo, CVE-2025-4674 +RUN dnf install -y perl mesa-libGL && dnf clean all && rm -rf /var/cache/yum + +# Other apps and tools installed as default user +USER 1001 + +# Install micropipenv and uv to deploy packages from requirements.txt begin +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +# Install micropipenv and uv to deploy packages from requirements.txt end + +WORKDIR /opt/app-root/src + +################ +# cuda-rstudio # +################ +FROM cuda-base AS cuda-rstudio + +ARG RSTUDIO_SOURCE_CODE=rstudio/rhel9-python-3.12 +ARG TARGETARCH + +WORKDIR /opt/app-root/bin + +# TODO THIS SHOULD BE REMOVED +# Access the client's secret for the subscription manager from the environment variable +ARG SECRET_DIR=/opt/app-root/src/.sec +ARG SERVERURL_DEFAULT="" +ARG BASEURL_DEFAULT="" +# TILL HERE + +LABEL 
name="odh-notebook-rstudio-server-rhel9-python-3.12" \ + summary="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + description="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + io.k8s.display-name="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + io.k8s.description="RStudio Server image with python 3.12 based on Red Hat Enterprise Linux 9" \ + authoritative-source-url="https://github.com/opendatahub-io/notebooks" \ + io.openshift.build.commit.ref="main" \ + io.openshift.build.source-location="https://github.com/opendatahub-io/notebooks/tree/main/rstudio/rhel9-python-3.12" \ + io.openshift.build.image="quay.io/opendatahub/workbench-images:rstudio-rhel9-python-3.12" + +USER 0 + +# TEST THIS PART OF CODE IF INSTALED WELL +# Set this flag so that libraries can find the location of CUDA +ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda + +# Install CUDA toolkit 12.8 +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y install cuda-toolkit-12-8 +dnf -y clean all --enablerepo="*" +EOF + +WORKDIR /opt/app-root/src +# TILL HERE + +# TODO THIS SHOULD BE REMOVED in favor of: https://issues.redhat.com/browse/RHOAIENG-32541 +# uncomment the below line if you fall on this error: subscription-manager is disabled when running inside a container. Please refer to your host system for subscription management. +#RUN sed -i 's/\(def in_container():\)/\1\n return False/g' /usr/lib64/python*/*-packages/rhsm/config.py + +# If necessary, run the subscription manager command using the provided credentials. 
Only include --serverurl and --baseurl if they are provided +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + SERVERURL=$(cat ${SECRET_DIR}/SERVERURL 2>/dev/null || echo ${SERVERURL_DEFAULT}) + BASEURL=$(cat ${SECRET_DIR}/BASEURL 2>/dev/null || echo ${BASEURL_DEFAULT}) + USERNAME=$(cat ${SECRET_DIR}/USERNAME) + PASSWORD=$(cat ${SECRET_DIR}/PASSWORD) + subscription-manager register \ + ${SERVERURL:+--serverurl=$SERVERURL} \ + ${BASEURL:+--baseurl=$BASEURL} \ + --username=$USERNAME \ + --password=$PASSWORD \ + --force \ + --auto-attach +fi +EOF + +# TILL HERE + +ENV R_VERSION=4.5.1 + +# Install R +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y dnf-plugins-core +if command -v subscription-manager &> /dev/null; then + subscription-manager repos --enable codeready-builder-for-rhel-9-x86_64-rpms +else + dnf config-manager --set-enabled crb +fi +dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm +INSTALL_PKGS="R-core R-core-devel R-java R-Rcpp R-highlight \ +R-littler R-littler-examples openssl-libs compat-openssl11" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +echo 'options(repos = c(CRAN = "https://cran.rstudio.com/"), download.file.method = "libcurl")' >> /usr/lib64/R/etc/Rprofile.site +(umask 002;touch /usr/lib64/R/etc/Renviron.site) +dnf -y clean all --enablerepo='*' +EOF + +# set R library to default (used in install.r from littler) +ENV LIBLOC=/usr/lib64/R/library +ENV R_LIBS_USER=/opt/app-root/bin/Rpackages/4.5 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +chmod -R a+w ${LIBLOC} +# create User R Library path +mkdir -p ${R_LIBS_USER} +chmod -R a+w ${R_LIBS_USER} +EOF + +WORKDIR /tmp/ +COPY /rstudio/utils /tmp/utils + +# Install RStudio +ARG RSTUDIO_RPM=rstudio-server-rhel-2025.09.0-387-x86_64.rpm +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +wget --progress=dot:giga https://download2.rstudio.org/server/rhel9/x86_64/${RSTUDIO_RPM} +dnf install -y ${RSTUDIO_RPM} +rm ${RSTUDIO_RPM} 
+dnf -y clean all --enablerepo='*' +# Specific RStudio config and fixes +chmod 1777 /var/run/rstudio-server +mkdir -p /usr/share/doc/R +# package installation +# install necessary texlive-framed package to make Knit R markup to PDF rendering possible +dnf install -y libgit2-devel.x86_64 libcurl-devel harfbuzz-devel.x86_64 fribidi-devel.x86_64 cmake "flexiblas-*" texlive-framed +dnf clean all +rm -rf /var/cache/yum +(cd /tmp/utils && ./cve_remediation.sh) +EOF + +COPY ${RSTUDIO_SOURCE_CODE}/rsession.conf /etc/rstudio/rsession.conf + +# # Install R packages +# # https://cran.r-project.org/web/packages +# COPY ${RSTUDIO_SOURCE_CODE}/install_packages.R ./ +# RUN /bin/bash <<'EOF' +# set -Eeuxo pipefail +# R -f ./install_packages.R +# rm ./install_packages.R +# EOF + +ENV APP_ROOT=/opt/app-root + +# Install NGINX to proxy RStudio and pass probes check +ENV NGINX_VERSION=1.24 \ + NGINX_VERSION=1.24 \ + NGINX_SHORT_VER=124 \ + NGINX_CONFIGURATION_PATH=${APP_ROOT}/etc/nginx.d \ + NGINX_CONF_PATH=/etc/nginx/nginx.conf \ + NGINX_DEFAULT_CONF_PATH=${APP_ROOT}/etc/nginx.default.d \ + NGINX_CONTAINER_SCRIPTS_PATH=/usr/share/container-scripts/nginx \ + NGINX_APP_ROOT=${APP_ROOT} \ + NGINX_LOG_PATH=/var/log/nginx \ + NGINX_PERL_MODULE_PATH=${APP_ROOT}/etc/perl + +# Modules does not exist +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y module enable nginx:$NGINX_VERSION +INSTALL_PKGS="nss_wrapper bind-utils gettext hostname nginx nginx-mod-stream nginx-mod-http-perl httpd" +dnf install -y --setopt=tsflags=nodocs $INSTALL_PKGS +rpm -V $INSTALL_PKGS +nginx -v 2>&1 | grep -qe "nginx/$NGINX_VERSION\." && echo "Found VERSION $NGINX_VERSION" +dnf -y clean all --enablerepo='*' +EOF + +# Configure httpd for CGI processing +COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/httpd.conf /etc/httpd/conf/httpd.conf +COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/httpd/rstudio-cgi.conf /etc/httpd/conf.d/rstudio-cgi.conf + +# Copy extra files to the image. 
+COPY --chown=1001:0 ${RSTUDIO_SOURCE_CODE}/nginx/root/ / + +# Configure nginx +COPY ${RSTUDIO_SOURCE_CODE}/nginx/serverconf/ /opt/app-root/etc/nginx.default.d/ +COPY ${RSTUDIO_SOURCE_CODE}/nginx/httpconf/ /opt/app-root/etc/nginx.d/ +COPY ${RSTUDIO_SOURCE_CODE}/nginx/api/ /opt/app-root/api/ + +# Changing ownership and user rights to support following use-cases: +# 1) running container on OpenShift, whose default security model +# is to run the container under random UID, but GID=0 +# 2) for working root-less container with UID=1001, which does not have +# to have GID=0 +# 3) for default use-case, that is running container directly on operating system, +# with default UID and GID (1001:0) +# Supported combinations of UID:GID are thus following: +# UID=1001 && GID=0 +# UID=&& GID=0 +# UID=1001 && GID= +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +sed -i -f ${NGINX_APP_ROOT}/nginxconf.sed ${NGINX_CONF_PATH} +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.d/ +mkdir -p ${NGINX_APP_ROOT}/etc/nginx.default.d/ +mkdir -p ${NGINX_APP_ROOT}/api/ +mkdir -p ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +mkdir -p ${NGINX_LOG_PATH} +mkdir -p ${NGINX_PERL_MODULE_PATH} +# Create httpd directories and set permissions +mkdir -p /var/log/httpd /var/run/httpd /etc/httpd/logs +chown -R 1001:0 ${NGINX_CONF_PATH} +chown -R 1001:0 ${NGINX_APP_ROOT}/etc +chown -R 1001:0 ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chown -R 1001:0 /var/lib/nginx /var/log/nginx /run +chown -R 1001:0 /var/log/httpd /var/run/httpd /etc/httpd/logs +chmod ug+rw ${NGINX_CONF_PATH} +chmod -R ug+rwX ${NGINX_APP_ROOT}/etc +chmod -R ug+rwX ${NGINX_CONTAINER_SCRIPTS_PATH}/nginx-start +chmod -R ug+rwX /var/lib/nginx /var/log/nginx /run +chmod -R ug+rwX /var/log/httpd /var/run/httpd /etc/httpd/logs +# Make CGI scripts executable and set proper ownership +chmod +x /opt/app-root/api/kernels/access.cgi +chmod +x /opt/app-root/api/probe.cgi +chown -R 1001:0 /opt/app-root/api +rpm-file-permissions +EOF + +# Launcher +WORKDIR 
/opt/app-root/bin + +COPY ${RSTUDIO_SOURCE_CODE}/utils utils/ +COPY ${RSTUDIO_SOURCE_CODE}/run-rstudio.sh ${RSTUDIO_SOURCE_CODE}/setup_rstudio.py ${RSTUDIO_SOURCE_CODE}/rsession.sh ${RSTUDIO_SOURCE_CODE}/run-nginx.sh ./ + +# TODO THIS SHOULD BE REMOVED in favor of: https://issues.redhat.com/browse/RHOAIENG-32541 +# Unregister the system +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ -d "${SECRET_DIR}" ]; then + subscription-manager remove --all && subscription-manager unregister && subscription-manager clean +fi +EOF + +# TILL HERE + +USER 1001 + +COPY ${RSTUDIO_SOURCE_CODE}/pylock.toml ./ + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +WORKDIR /opt/app-root/src + +CMD ["/opt/app-root/bin/run-rstudio.sh"] diff --git a/rstudio/rhel9-python-3.12/build-args/cpu.conf b/rstudio/rhel9-python-3.12/build-args/cpu.conf index a876f4d37b..2dec27f449 100644 --- a/rstudio/rhel9-python-3.12/build-args/cpu.conf +++ b/rstudio/rhel9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : CentOS Stream 9 with Python 3.12 # Architectures: linux/arm64, linux/x86_64 -BASE_IMAGE=registry.redhat.io/rhel9/python-312:latest +BASE_IMAGE=quay.io/opendatahub/odh-base-image-cpu-py312-c9s:latest diff --git a/rstudio/rhel9-python-3.12/build-args/cuda.conf b/rstudio/rhel9-python-3.12/build-args/cuda.conf index 57885d5e19..d6c3287261 100644 
--- a/rstudio/rhel9-python-3.12/build-args/cuda.conf +++ b/rstudio/rhel9-python-3.12/build-args/cuda.conf @@ -1,4 +1,4 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : CentOS Stream 9 with Python 3.12 # CUDA Version : 12.8.1 # Architectures: linux/arm64, linux/x86_64 -BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-ubi9:v12.8 +BASE_IMAGE=quay.io/opendatahub/odh-base-image-cuda-py312-c9s:v12.8 diff --git a/runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu b/runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu index d651d151b3..b5ab75da1b 100644 --- a/runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu +++ b/runtimes/datascience/ubi9-python-3.12/Dockerfile.cpu @@ -29,32 +29,39 @@ ARG TARGETARCH # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN --mount=type=cache,target=/var/cache/dnf \ - echo "Building for architecture: ${TARGETARCH}" && \ - PACKAGES="perl mesa-libGL skopeo libxcrypt-compat" && \ - # Additional dev tools only for s390x - if [ "$TARGETARCH" = "s390x" ]; then \ - PACKAGES="$PACKAGES gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel openssl zlib-devel"; \ - fi && \ - if [ "$TARGETARCH" = "ppc64le" ]; then \ - PACKAGES="$PACKAGES git gcc-toolset-13 make wget unzip rust cargo unixODBC-devel cmake 
ninja-build"; \ - fi && \ - if [ -n "$PACKAGES" ]; then \ - echo "Installing: $PACKAGES" && \ - dnf install -y $PACKAGES && \ - dnf clean all && rm -rf /var/cache/yum; \ - fi +RUN --mount=type=cache,target=/var/cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Building for architecture: ${TARGETARCH}" +PACKAGES="perl mesa-libGL skopeo libxcrypt-compat" +# Additional dev tools only for s390x +if [ "$TARGETARCH" = "s390x" ]; then + PACKAGES="$PACKAGES gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel openssl zlib-devel" +fi +if [ "$TARGETARCH" = "ppc64le" ]; then + PACKAGES="$PACKAGES git gcc-toolset-13 make wget unzip rust cargo unixODBC-devel cmake ninja-build" +fi +if [ -n "$PACKAGES" ]; then + echo "Installing: $PACKAGES" + dnf install -y $PACKAGES + dnf clean all + rm -rf /var/cache/yum +fi +EOF RUN /bin/bash <<'EOF' set -Eeuxo pipefail if [ "$TARGETARCH" = "ppc64le" ]; then cat > /etc/profile.d/ppc64le.sh <<'PROFILE_EOF' export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/ -export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib:$LD_LIBRARY_PATH +export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} export OPENBLAS_VERSION=0.3.30 export ONNX_VERSION=1.19.0 export PYARROW_VERSION=17.0.0 @@ -99,14 +106,18 @@ EOF USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f 
/tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ############################## @@ -121,74 +132,79 @@ USER 0 WORKDIR /tmp/build-wheels # Set pyarrow version for s390x -RUN if [ "$TARGETARCH" = "s390x" ]; then \ - echo 'export PYARROW_VERSION=17.0.0' >> /etc/profile.d/s390x.sh; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "s390x" ]; then + echo 'export PYARROW_VERSION=17.0.0' >> /etc/profile.d/s390x.sh fi +EOF # Build pyarrow optimized for s390x RUN --mount=type=cache,target=/root/.cache/pip \ - --mount=type=cache,target=/root/.cache/dnf \ - if [ "$TARGETARCH" = "s390x" ]; then \ - # Install build dependencies - dnf install -y cmake make gcc-c++ pybind11-devel wget git \ - openssl-devel zlib-devel bzip2-devel lz4-devel \ - ninja-build && \ - dnf clean all && \ - # Source the environment variables - source /etc/profile.d/s390x.sh && \ - # Clone specific version of arrow - git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git && \ - cd arrow && \ - # Set environment variables for build - export ARROW_HOME=/usr/local && \ - export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:$LD_LIBRARY_PATH && \ - export PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig:/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH && \ - # Build C++ library first - cd cpp && \ - mkdir build && cd build && \ - cmake -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ - -DARROW_PYTHON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_ORC=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_JSON=ON \ - -DARROW_CSV=ON \ - -DARROW_DATASET=ON \ - -DARROW_WITH_LZ4=ON \ - -DARROW_WITH_ZSTD=ON \ - -DARROW_WITH_SNAPPY=OFF \ - -DARROW_WITH_BZ2=ON \ - 
-DARROW_WITH_ZLIB=ON \ - -DARROW_BUILD_TESTS=OFF \ - -DARROW_BUILD_BENCHMARKS=OFF \ - -DARROW_USE_CCACHE=OFF \ - -GNinja \ - .. && \ - ninja install && \ - cd ../../python && \ - # Install Python build requirements - pip install --no-cache-dir -r requirements-build.txt && \ - # Build Python package - PYARROW_WITH_PARQUET=1 \ - PYARROW_WITH_DATASET=1 \ - PYARROW_WITH_FILESYSTEM=1 \ - PYARROW_WITH_JSON=1 \ - PYARROW_WITH_CSV=1 \ - PYARROW_WITH_LZ4=1 \ - PYARROW_WITH_ZSTD=1 \ - PYARROW_WITH_BZ2=1 \ - PYARROW_BUNDLE_ARROW_CPP=1 \ - PYARROW_PARALLEL=$(nproc) \ - python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel && \ - mkdir -p /tmp/wheels && \ - cp dist/pyarrow-*.whl /tmp/wheels/ && \ - # Ensure wheels directory exists and has content - ls -la /tmp/wheels/; \ - else \ - # Create empty wheels directory for non-s390x - mkdir -p /tmp/wheels; \ - fi + --mount=type=cache,target=/root/.cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "s390x" ]; then + # Install build dependencies + dnf install -y cmake make gcc-c++ pybind11-devel wget git \ + openssl-devel zlib-devel bzip2-devel lz4-devel \ + ninja-build + dnf clean all + # Source the environment variables + source /etc/profile.d/s390x.sh + # Clone specific version of arrow + git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git + cd arrow + # Set environment variables for build + export ARROW_HOME=/usr/local + export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + export PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig:/usr/local/lib/pkgconfig:${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH} + # Build C++ library first + cd cpp + mkdir build && cd build + cmake -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_ORC=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_DATASET=ON \ + -DARROW_WITH_LZ4=ON \ + -DARROW_WITH_ZSTD=ON \ + 
-DARROW_WITH_SNAPPY=OFF \ + -DARROW_WITH_BZ2=ON \ + -DARROW_WITH_ZLIB=ON \ + -DARROW_BUILD_TESTS=OFF \ + -DARROW_BUILD_BENCHMARKS=OFF \ + -DARROW_USE_CCACHE=OFF \ + -GNinja \ + .. + ninja install + cd ../../python + # Install Python build requirements + pip install --no-cache-dir -r requirements-build.txt + # Build Python package + PYARROW_WITH_PARQUET=1 \ + PYARROW_WITH_DATASET=1 \ + PYARROW_WITH_FILESYSTEM=1 \ + PYARROW_WITH_JSON=1 \ + PYARROW_WITH_CSV=1 \ + PYARROW_WITH_LZ4=1 \ + PYARROW_WITH_ZSTD=1 \ + PYARROW_WITH_BZ2=1 \ + PYARROW_BUNDLE_ARROW_CPP=1 \ + PYARROW_PARALLEL=$(nproc) \ + python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel + mkdir -p /tmp/wheels + cp dist/pyarrow-*.whl /tmp/wheels/ + # Ensure wheels directory exists and has content + ls -la /tmp/wheels/ +else + # Create empty wheels directory for non-s390x + mkdir -p /tmp/wheels +fi +EOF ################################### # openblas builder stage for ppc64le @@ -206,14 +222,18 @@ ENV OPENBLAS_VERSION=0.3.30 RUN echo "openblas-builder stage TARGETARCH: ${TARGETARCH}" # Download and build OpenBLAS -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - source /opt/rh/gcc-toolset-13/enable && \ - wget --progress=dot:giga https://github.com/OpenMathLib/OpenBLAS/releases/download/v${OPENBLAS_VERSION}/OpenBLAS-${OPENBLAS_VERSION}.zip && \ - unzip OpenBLAS-${OPENBLAS_VERSION}.zip && cd OpenBLAS-${OPENBLAS_VERSION} && \ - make -j$(nproc) TARGET=POWER9 BINARY=64 USE_OPENMP=1 USE_THREAD=1 NUM_THREADS=120 DYNAMIC_ARCH=1 INTERFACE64=0; \ - else \ - echo "Not ppc64le, skipping OpenBLAS build" && mkdir -p /root/OpenBLAS-dummy; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + source /opt/rh/gcc-toolset-13/enable + wget --progress=dot:giga https://github.com/OpenMathLib/OpenBLAS/releases/download/v${OPENBLAS_VERSION}/OpenBLAS-${OPENBLAS_VERSION}.zip + unzip OpenBLAS-${OPENBLAS_VERSION}.zip && cd OpenBLAS-${OPENBLAS_VERSION} + make -j$(nproc) 
TARGET=POWER9 BINARY=64 USE_OPENMP=1 USE_THREAD=1 NUM_THREADS=120 DYNAMIC_ARCH=1 INTERFACE64=0 +else + echo "Not ppc64le, skipping OpenBLAS build" + mkdir -p /root/OpenBLAS-dummy +fi +EOF ################################### # onnx builder stage for ppc64le @@ -229,18 +249,22 @@ ENV ONNX_VERSION=1.19.0 RUN echo "onnx-builder stage TARGETARCH: ${TARGETARCH}" -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - source /opt/rh/gcc-toolset-13/enable && \ - git clone --recursive https://github.com/onnx/onnx.git && \ - cd onnx && git checkout v${ONNX_VERSION} && \ - git submodule update --init --recursive && \ - pip install --no-cache-dir -r requirements.txt && \ - CMAKE_ARGS="-DPython3_EXECUTABLE=$(which python3.12)" && \ - export CMAKE_ARGS && \ - pip wheel . -w /onnx_wheels; \ - else \ - echo "Not ppc64le, skipping ONNX build" && mkdir -p /onnx_wheels; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + source /opt/rh/gcc-toolset-13/enable + git clone --recursive https://github.com/onnx/onnx.git + cd onnx && git checkout v${ONNX_VERSION} + git submodule update --init --recursive + pip install --no-cache-dir -r requirements.txt + CMAKE_ARGS="-DPython3_EXECUTABLE=$(which python3.12)" + export CMAKE_ARGS + pip wheel . 
-w /onnx_wheels +else + echo "Not ppc64le, skipping ONNX build" + mkdir -p /onnx_wheels +fi +EOF ################################### # pyarrow builder stage for ppc64le @@ -256,50 +280,54 @@ ENV PYARROW_VERSION=17.0.0 RUN echo "arrow-builder stage TARGETARCH: ${TARGETARCH}" -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git --recursive && \ - cd arrow && rm -rf .git && mkdir dist && \ - pip3 install --no-cache-dir -r python/requirements-build.txt && \ - ARROW_HOME=$(pwd)/dist && \ - export ARROW_HOME && \ - LD_LIBRARY_PATH=$(pwd)/dist/lib:$LD_LIBRARY_PATH && \ - export LD_LIBRARY_PATH && \ - export CMAKE_PREFIX_PATH=$ARROW_HOME:$CMAKE_PREFIX_PATH && \ - export PARQUET_TEST_DATA="${PWD}/cpp/submodules/parquet-testing/data" && \ - export ARROW_TEST_DATA="${PWD}/testing/data" && \ - cmake -S cpp -B cpp/build \ - -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ - -DCMAKE_BUILD_TYPE=release \ - -DARROW_WITH_BZ2=ON \ - -DARROW_WITH_ZLIB=ON \ - -DARROW_WITH_ZSTD=ON \ - -DARROW_WITH_LZ4=ON \ - -DARROW_WITH_SNAPPY=ON \ - -DARROW_WITH_BROTLI=ON \ - -DARROW_DATASET=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_COMPUTE=ON \ - -DARROW_JSON=ON \ - -DARROW_CSV=ON \ - -DARROW_PYTHON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_BUILD_SHARED=ON \ - -DARROW_BUILD_TESTS=OFF && \ - cd cpp/build && \ - make -j20 install && \ - export PYARROW_PARALLEL=20 && \ - export PYARROW_WITH_PARQUET=1 && \ - export PYARROW_WITH_DATASET=1 && \ - export PYARROW_BUNDLE_ARROW_CPP=1 && \ - pip3 install --no-cache-dir wheel && \ - cd ../../python && \ - python setup.py build_ext \ - --build-type=release \ - --bundle-arrow-cpp \ - bdist_wheel --dist-dir /arrowwheels; \ - else \ - echo "Not ppc64le, skipping pyarrow build" && mkdir -p /arrowwheels; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git --recursive + cd arrow && rm -rf 
.git && mkdir dist + pip3 install --no-cache-dir -r python/requirements-build.txt + ARROW_HOME=$(pwd)/dist + export ARROW_HOME + LD_LIBRARY_PATH=$(pwd)/dist/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + export LD_LIBRARY_PATH + export CMAKE_PREFIX_PATH=$ARROW_HOME:$CMAKE_PREFIX_PATH + export PARQUET_TEST_DATA="${PWD}/cpp/submodules/parquet-testing/data" + export ARROW_TEST_DATA="${PWD}/testing/data" + cmake -S cpp -B cpp/build \ + -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ + -DCMAKE_BUILD_TYPE=release \ + -DARROW_WITH_BZ2=ON \ + -DARROW_WITH_ZLIB=ON \ + -DARROW_WITH_ZSTD=ON \ + -DARROW_WITH_LZ4=ON \ + -DARROW_WITH_SNAPPY=ON \ + -DARROW_WITH_BROTLI=ON \ + -DARROW_DATASET=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_COMPUTE=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_BUILD_SHARED=ON \ + -DARROW_BUILD_TESTS=OFF + cd cpp/build + make -j20 install + export PYARROW_PARALLEL=20 + export PYARROW_WITH_PARQUET=1 + export PYARROW_WITH_DATASET=1 + export PYARROW_BUNDLE_ARROW_CPP=1 + pip3 install --no-cache-dir wheel + cd ../../python + python setup.py build_ext \ + --build-type=release \ + --bundle-arrow-cpp \ + bdist_wheel --dist-dir /arrowwheels +else + echo "Not ppc64le, skipping pyarrow build" + mkdir -p /arrowwheels +fi +EOF ####################### # runtime-datascience # @@ -327,25 +355,33 @@ COPY --from=openblas-builder /root/OpenBLAS-* /openblas COPY --from=onnx-builder /onnx_wheels /tmp/onnx_wheels COPY --from=arrow-builder /arrowwheels /tmp/arrowwheels -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - echo "Installing ppc64le ONNX, pyarrow wheels and OpenBLAS..." 
&& \ - HOME=/root pip install --no-cache-dir /tmp/onnx_wheels/*.whl /tmp/arrowwheels/*.whl && \ - if [ -d "/openblas" ] && [ "$(ls -A /openblas 2>/dev/null)" ]; then \ - PREFIX=/usr/local make -C /openblas install; \ - fi && rm -rf /openblas /tmp/onnx_wheels /tmp/arrowwheels; \ - else \ - echo "Skipping architecture-specific wheel installs for (${TARGETARCH})" && \ - rm -rf /tmp/wheels /openblas /tmp/onnx_wheels /tmp/arrowwheels; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + echo "Installing ppc64le ONNX, pyarrow wheels and OpenBLAS..." + HOME=/root pip install --no-cache-dir /tmp/onnx_wheels/*.whl /tmp/arrowwheels/*.whl + if [ -d "/openblas" ] && [ "$(ls -A /openblas 2>/dev/null)" ]; then + PREFIX=/usr/local make -C /openblas install fi + rm -rf /openblas /tmp/onnx_wheels /tmp/arrowwheels +else + echo "Skipping architecture-specific wheel installs for (${TARGETARCH})" + rm -rf /tmp/wheels /openblas /tmp/onnx_wheels /tmp/arrowwheels +fi +EOF USER 0 # Copy wheels from build stage (s390x only) COPY --from=s390x-builder /tmp/wheels /tmp/wheels -RUN if [ "$TARGETARCH" = "s390x" ]; then \ - pip install --no-cache-dir /tmp/wheels/*.whl && rm -rf /tmp/wheels; \ -else \ - echo "Skipping wheel install for $TARGETARCH"; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "s390x" ]; then + pip install --no-cache-dir /tmp/wheels/*.whl + rm -rf /tmp/wheels +else + echo "Skipping wheel install for $TARGETARCH" fi +EOF # Install Python packages from pylock.toml @@ -353,28 +389,30 @@ COPY ${DATASCIENCE_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ -RUN --mount=type=cache,target=/root/.cache/pip \ - echo "Installing softwares and packages" && \ - if [ "$TARGETARCH" = "ppc64le" ]; then \ - export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig; \ - export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib:$LD_LIBRARY_PATH && \ - uv pip 
install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ - elif [ "$TARGETARCH" = "s390x" ]; then \ - # For s390x, we need special flags and environment variables for building packages - GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ - CFLAGS="-O3" CXXFLAGS="-O3" \ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ - else \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ - fi && \ - # change ownership to default user (all packages were installed as root and has root:root ownership - chown -R 1001:0 /opt/app-root/ && \ - chmod -R g=u /opt/app-root && \ - # Fix permissions to support pip in Openshift environments - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN --mount=type=cache,target=/root/.cache/pip /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +if [ "$TARGETARCH" = "ppc64le" ]; then + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig + export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +elif [ "$TARGETARCH" = "s390x" ]; then + # For s390x, we need special flags and environment variables for building packages + GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ + 
CFLAGS="-O3" CXXFLAGS="-O3" \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +else + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +fi +# change ownership to default user (all packages were installed as root and has root:root ownership +chown -R 1001:0 /opt/app-root/ +chmod -R g=u /opt/app-root +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF USER 1001 diff --git a/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu b/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu index ca0ceb13b6..a11d08f9ed 100644 --- a/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu +++ b/runtimes/datascience/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -10,6 +10,8 @@ FROM registry.access.redhat.com/ubi9/ubi AS ubi-repos # cpu-base # #################### FROM ${BASE_IMAGE} AS cpu-base +USER 0 +RUN subscription-manager refresh ARG TARGETARCH @@ -29,32 +31,39 @@ ARG TARGETARCH # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade 
--refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN --mount=type=cache,target=/var/cache/dnf \ - echo "Building for architecture: ${TARGETARCH}" && \ - PACKAGES="perl mesa-libGL skopeo libxcrypt-compat" && \ - # Additional dev tools only for s390x - if [ "$TARGETARCH" = "s390x" ]; then \ - PACKAGES="$PACKAGES gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel openssl zlib-devel"; \ - fi && \ - if [ "$TARGETARCH" = "ppc64le" ]; then \ - PACKAGES="$PACKAGES git gcc-toolset-13 make wget unzip rust cargo unixODBC-devel cmake ninja-build"; \ - fi && \ - if [ -n "$PACKAGES" ]; then \ - echo "Installing: $PACKAGES" && \ - dnf install -y $PACKAGES && \ - dnf clean all && rm -rf /var/cache/yum; \ - fi +RUN --mount=type=cache,target=/var/cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Building for architecture: ${TARGETARCH}" +PACKAGES="perl mesa-libGL skopeo libxcrypt-compat" +# Additional dev tools only for s390x +if [ "$TARGETARCH" = "s390x" ]; then + PACKAGES="$PACKAGES gcc gcc-c++ make openssl-devel autoconf automake libtool cmake python3-devel pybind11-devel openblas-devel unixODBC-devel openssl zlib-devel" +fi +if [ "$TARGETARCH" = "ppc64le" ]; then + PACKAGES="$PACKAGES git gcc-toolset-13 make wget unzip rust cargo unixODBC-devel cmake ninja-build" +fi +if [ -n "$PACKAGES" ]; then + echo "Installing: $PACKAGES" + dnf install -y $PACKAGES + dnf clean all + rm -rf /var/cache/yum +fi +EOF RUN /bin/bash <<'EOF' set -Eeuxo pipefail if [ "$TARGETARCH" = "ppc64le" ]; then cat > /etc/profile.d/ppc64le.sh <<'PROFILE_EOF' export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/ -export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib:$LD_LIBRARY_PATH +export 
LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} export OPENBLAS_VERSION=0.3.30 export ONNX_VERSION=1.19.0 export PYARROW_VERSION=17.0.0 @@ -99,14 +108,18 @@ EOF USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ############################## @@ -121,74 +134,79 @@ USER 0 WORKDIR /tmp/build-wheels # Set pyarrow version for s390x -RUN if [ "$TARGETARCH" = "s390x" ]; then \ - echo 'export PYARROW_VERSION=17.0.0' >> /etc/profile.d/s390x.sh; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "s390x" ]; then + echo 'export PYARROW_VERSION=17.0.0' >> /etc/profile.d/s390x.sh fi +EOF # Build pyarrow optimized for s390x RUN --mount=type=cache,target=/root/.cache/pip \ - --mount=type=cache,target=/root/.cache/dnf \ - if [ "$TARGETARCH" = "s390x" ]; then \ - # Install build dependencies - dnf install -y cmake make gcc-c++ pybind11-devel wget git \ - openssl-devel zlib-devel bzip2-devel lz4-devel \ - ninja-build && \ - dnf clean all && \ - # Source the environment variables - source 
/etc/profile.d/s390x.sh && \ - # Clone specific version of arrow - git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git && \ - cd arrow && \ - # Set environment variables for build - export ARROW_HOME=/usr/local && \ - export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:$LD_LIBRARY_PATH && \ - export PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig:/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH && \ - # Build C++ library first - cd cpp && \ - mkdir build && cd build && \ - cmake -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ - -DARROW_PYTHON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_ORC=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_JSON=ON \ - -DARROW_CSV=ON \ - -DARROW_DATASET=ON \ - -DARROW_WITH_LZ4=ON \ - -DARROW_WITH_ZSTD=ON \ - -DARROW_WITH_SNAPPY=OFF \ - -DARROW_WITH_BZ2=ON \ - -DARROW_WITH_ZLIB=ON \ - -DARROW_BUILD_TESTS=OFF \ - -DARROW_BUILD_BENCHMARKS=OFF \ - -DARROW_USE_CCACHE=OFF \ - -GNinja \ - .. && \ - ninja install && \ - cd ../../python && \ - # Install Python build requirements - pip install --no-cache-dir -r requirements-build.txt && \ - # Build Python package - PYARROW_WITH_PARQUET=1 \ - PYARROW_WITH_DATASET=1 \ - PYARROW_WITH_FILESYSTEM=1 \ - PYARROW_WITH_JSON=1 \ - PYARROW_WITH_CSV=1 \ - PYARROW_WITH_LZ4=1 \ - PYARROW_WITH_ZSTD=1 \ - PYARROW_WITH_BZ2=1 \ - PYARROW_BUNDLE_ARROW_CPP=1 \ - PYARROW_PARALLEL=$(nproc) \ - python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel && \ - mkdir -p /tmp/wheels && \ - cp dist/pyarrow-*.whl /tmp/wheels/ && \ - # Ensure wheels directory exists and has content - ls -la /tmp/wheels/; \ - else \ - # Create empty wheels directory for non-s390x - mkdir -p /tmp/wheels; \ - fi + --mount=type=cache,target=/root/.cache/dnf /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "s390x" ]; then + # Install build dependencies + dnf install -y cmake make gcc-c++ pybind11-devel wget git \ + openssl-devel zlib-devel bzip2-devel lz4-devel \ + ninja-build + dnf clean 
all + # Source the environment variables + source /etc/profile.d/s390x.sh + # Clone specific version of arrow + git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git + cd arrow + # Set environment variables for build + export ARROW_HOME=/usr/local + export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + export PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig:/usr/local/lib/pkgconfig:${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH} + # Build C++ library first + cd cpp + mkdir build && cd build + cmake -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_ORC=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_DATASET=ON \ + -DARROW_WITH_LZ4=ON \ + -DARROW_WITH_ZSTD=ON \ + -DARROW_WITH_SNAPPY=OFF \ + -DARROW_WITH_BZ2=ON \ + -DARROW_WITH_ZLIB=ON \ + -DARROW_BUILD_TESTS=OFF \ + -DARROW_BUILD_BENCHMARKS=OFF \ + -DARROW_USE_CCACHE=OFF \ + -GNinja \ + .. 
+ ninja install + cd ../../python + # Install Python build requirements + pip install --no-cache-dir -r requirements-build.txt + # Build Python package + PYARROW_WITH_PARQUET=1 \ + PYARROW_WITH_DATASET=1 \ + PYARROW_WITH_FILESYSTEM=1 \ + PYARROW_WITH_JSON=1 \ + PYARROW_WITH_CSV=1 \ + PYARROW_WITH_LZ4=1 \ + PYARROW_WITH_ZSTD=1 \ + PYARROW_WITH_BZ2=1 \ + PYARROW_BUNDLE_ARROW_CPP=1 \ + PYARROW_PARALLEL=$(nproc) \ + python setup.py build_ext --build-type=release --bundle-arrow-cpp bdist_wheel + mkdir -p /tmp/wheels + cp dist/pyarrow-*.whl /tmp/wheels/ + # Ensure wheels directory exists and has content + ls -la /tmp/wheels/ +else + # Create empty wheels directory for non-s390x + mkdir -p /tmp/wheels +fi +EOF ################################### # openblas builder stage for ppc64le @@ -206,14 +224,18 @@ ENV OPENBLAS_VERSION=0.3.30 RUN echo "openblas-builder stage TARGETARCH: ${TARGETARCH}" # Download and build OpenBLAS -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - source /opt/rh/gcc-toolset-13/enable && \ - wget --progress=dot:giga https://github.com/OpenMathLib/OpenBLAS/releases/download/v${OPENBLAS_VERSION}/OpenBLAS-${OPENBLAS_VERSION}.zip && \ - unzip OpenBLAS-${OPENBLAS_VERSION}.zip && cd OpenBLAS-${OPENBLAS_VERSION} && \ - make -j$(nproc) TARGET=POWER9 BINARY=64 USE_OPENMP=1 USE_THREAD=1 NUM_THREADS=120 DYNAMIC_ARCH=1 INTERFACE64=0; \ - else \ - echo "Not ppc64le, skipping OpenBLAS build" && mkdir -p /root/OpenBLAS-dummy; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + source /opt/rh/gcc-toolset-13/enable + wget --progress=dot:giga https://github.com/OpenMathLib/OpenBLAS/releases/download/v${OPENBLAS_VERSION}/OpenBLAS-${OPENBLAS_VERSION}.zip + unzip OpenBLAS-${OPENBLAS_VERSION}.zip && cd OpenBLAS-${OPENBLAS_VERSION} + make -j$(nproc) TARGET=POWER9 BINARY=64 USE_OPENMP=1 USE_THREAD=1 NUM_THREADS=120 DYNAMIC_ARCH=1 INTERFACE64=0 +else + echo "Not ppc64le, skipping OpenBLAS build" + mkdir -p /root/OpenBLAS-dummy +fi +EOF 
################################### # onnx builder stage for ppc64le @@ -229,18 +251,22 @@ ENV ONNX_VERSION=1.19.0 RUN echo "onnx-builder stage TARGETARCH: ${TARGETARCH}" -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - source /opt/rh/gcc-toolset-13/enable && \ - git clone --recursive https://github.com/onnx/onnx.git && \ - cd onnx && git checkout v${ONNX_VERSION} && \ - git submodule update --init --recursive && \ - pip install --no-cache-dir -r requirements.txt && \ - CMAKE_ARGS="-DPython3_EXECUTABLE=$(which python3.12)" && \ - export CMAKE_ARGS && \ - pip wheel . -w /onnx_wheels; \ - else \ - echo "Not ppc64le, skipping ONNX build" && mkdir -p /onnx_wheels; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + source /opt/rh/gcc-toolset-13/enable + git clone --recursive https://github.com/onnx/onnx.git + cd onnx && git checkout v${ONNX_VERSION} + git submodule update --init --recursive + pip install --no-cache-dir -r requirements.txt + CMAKE_ARGS="-DPython3_EXECUTABLE=$(which python3.12)" + export CMAKE_ARGS + pip wheel . 
-w /onnx_wheels +else + echo "Not ppc64le, skipping ONNX build" + mkdir -p /onnx_wheels +fi +EOF ################################### # pyarrow builder stage for ppc64le @@ -256,50 +282,54 @@ ENV PYARROW_VERSION=17.0.0 RUN echo "arrow-builder stage TARGETARCH: ${TARGETARCH}" -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git --recursive && \ - cd arrow && rm -rf .git && mkdir dist && \ - pip3 install --no-cache-dir -r python/requirements-build.txt && \ - ARROW_HOME=$(pwd)/dist && \ - export ARROW_HOME && \ - LD_LIBRARY_PATH=$(pwd)/dist/lib:$LD_LIBRARY_PATH && \ - export LD_LIBRARY_PATH && \ - export CMAKE_PREFIX_PATH=$ARROW_HOME:$CMAKE_PREFIX_PATH && \ - export PARQUET_TEST_DATA="${PWD}/cpp/submodules/parquet-testing/data" && \ - export ARROW_TEST_DATA="${PWD}/testing/data" && \ - cmake -S cpp -B cpp/build \ - -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ - -DCMAKE_BUILD_TYPE=release \ - -DARROW_WITH_BZ2=ON \ - -DARROW_WITH_ZLIB=ON \ - -DARROW_WITH_ZSTD=ON \ - -DARROW_WITH_LZ4=ON \ - -DARROW_WITH_SNAPPY=ON \ - -DARROW_WITH_BROTLI=ON \ - -DARROW_DATASET=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_COMPUTE=ON \ - -DARROW_JSON=ON \ - -DARROW_CSV=ON \ - -DARROW_PYTHON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_BUILD_SHARED=ON \ - -DARROW_BUILD_TESTS=OFF && \ - cd cpp/build && \ - make -j20 install && \ - export PYARROW_PARALLEL=20 && \ - export PYARROW_WITH_PARQUET=1 && \ - export PYARROW_WITH_DATASET=1 && \ - export PYARROW_BUNDLE_ARROW_CPP=1 && \ - pip3 install --no-cache-dir wheel && \ - cd ../../python && \ - python setup.py build_ext \ - --build-type=release \ - --bundle-arrow-cpp \ - bdist_wheel --dist-dir /arrowwheels; \ - else \ - echo "Not ppc64le, skipping pyarrow build" && mkdir -p /arrowwheels; \ - fi +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + git clone -b apache-arrow-${PYARROW_VERSION} https://github.com/apache/arrow.git --recursive + cd arrow && rm -rf 
.git && mkdir dist + pip3 install --no-cache-dir -r python/requirements-build.txt + ARROW_HOME=$(pwd)/dist + export ARROW_HOME + LD_LIBRARY_PATH=$(pwd)/dist/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + export LD_LIBRARY_PATH + export CMAKE_PREFIX_PATH=$ARROW_HOME:$CMAKE_PREFIX_PATH + export PARQUET_TEST_DATA="${PWD}/cpp/submodules/parquet-testing/data" + export ARROW_TEST_DATA="${PWD}/testing/data" + cmake -S cpp -B cpp/build \ + -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \ + -DCMAKE_BUILD_TYPE=release \ + -DARROW_WITH_BZ2=ON \ + -DARROW_WITH_ZLIB=ON \ + -DARROW_WITH_ZSTD=ON \ + -DARROW_WITH_LZ4=ON \ + -DARROW_WITH_SNAPPY=ON \ + -DARROW_WITH_BROTLI=ON \ + -DARROW_DATASET=ON \ + -DARROW_FILESYSTEM=ON \ + -DARROW_COMPUTE=ON \ + -DARROW_JSON=ON \ + -DARROW_CSV=ON \ + -DARROW_PYTHON=ON \ + -DARROW_PARQUET=ON \ + -DARROW_BUILD_SHARED=ON \ + -DARROW_BUILD_TESTS=OFF + cd cpp/build + make -j20 install + export PYARROW_PARALLEL=20 + export PYARROW_WITH_PARQUET=1 + export PYARROW_WITH_DATASET=1 + export PYARROW_BUNDLE_ARROW_CPP=1 + pip3 install --no-cache-dir wheel + cd ../../python + python setup.py build_ext \ + --build-type=release \ + --bundle-arrow-cpp \ + bdist_wheel --dist-dir /arrowwheels +else + echo "Not ppc64le, skipping pyarrow build" + mkdir -p /arrowwheels +fi +EOF ####################### # runtime-datascience # @@ -317,25 +347,33 @@ COPY --from=openblas-builder /root/OpenBLAS-* /openblas COPY --from=onnx-builder /onnx_wheels /tmp/onnx_wheels COPY --from=arrow-builder /arrowwheels /tmp/arrowwheels -RUN if [ "$TARGETARCH" = "ppc64le" ]; then \ - echo "Installing ppc64le ONNX, pyarrow wheels and OpenBLAS..." 
&& \ - HOME=/root pip install --no-cache-dir /tmp/onnx_wheels/*.whl /tmp/arrowwheels/*.whl && \ - if [ -d "/openblas" ] && [ "$(ls -A /openblas 2>/dev/null)" ]; then \ - PREFIX=/usr/local make -C /openblas install; \ - fi && rm -rf /openblas /tmp/onnx_wheels /tmp/arrowwheels; \ - else \ - echo "Skipping architecture-specific wheel installs for (${TARGETARCH})" && \ - rm -rf /tmp/wheels /openblas /tmp/onnx_wheels /tmp/arrowwheels; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "ppc64le" ]; then + echo "Installing ppc64le ONNX, pyarrow wheels and OpenBLAS..." + HOME=/root pip install --no-cache-dir /tmp/onnx_wheels/*.whl /tmp/arrowwheels/*.whl + if [ -d "/openblas" ] && [ "$(ls -A /openblas 2>/dev/null)" ]; then + PREFIX=/usr/local make -C /openblas install fi + rm -rf /openblas /tmp/onnx_wheels /tmp/arrowwheels +else + echo "Skipping architecture-specific wheel installs for (${TARGETARCH})" + rm -rf /tmp/wheels /openblas /tmp/onnx_wheels /tmp/arrowwheels +fi +EOF USER 0 # Copy wheels from build stage (s390x only) COPY --from=s390x-builder /tmp/wheels /tmp/wheels -RUN if [ "$TARGETARCH" = "s390x" ]; then \ - pip install --no-cache-dir /tmp/wheels/*.whl && rm -rf /tmp/wheels; \ -else \ - echo "Skipping wheel install for $TARGETARCH"; \ +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +if [ "$TARGETARCH" = "s390x" ]; then + pip install --no-cache-dir /tmp/wheels/*.whl + rm -rf /tmp/wheels +else + echo "Skipping wheel install for $TARGETARCH" fi +EOF # Install Python packages from pylock.toml @@ -343,28 +381,30 @@ COPY ${DATASCIENCE_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${DATASCIENCE_SOURCE_CODE}/utils ./utils/ -RUN --mount=type=cache,target=/root/.cache/pip \ - echo "Installing softwares and packages" && \ - if [ "$TARGETARCH" = "ppc64le" ]; then \ - export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig; \ - export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib:$LD_LIBRARY_PATH && \ - uv pip 
install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ - elif [ "$TARGETARCH" = "s390x" ]; then \ - # For s390x, we need special flags and environment variables for building packages - GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ - CFLAGS="-O3" CXXFLAGS="-O3" \ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ - else \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml; \ - fi && \ - # change ownership to default user (all packages were installed as root and has root:root ownership - chown -R 1001:0 /opt/app-root/ && \ - chmod -R g=u /opt/app-root && \ - # Fix permissions to support pip in Openshift environments - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN --mount=type=cache,target=/root/.cache/pip /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +if [ "$TARGETARCH" = "ppc64le" ]; then + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig + export LD_LIBRARY_PATH=/usr/local/lib64:/usr/local/lib:/usr/lib64:/usr/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +elif [ "$TARGETARCH" = "s390x" ]; then + # For s390x, we need special flags and environment variables for building packages + GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 \ + 
CFLAGS="-O3" CXXFLAGS="-O3" \ + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +else + # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, + # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. + uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +fi +# change ownership to default user (all packages were installed as root and has root:root ownership +chown -R 1001:0 /opt/app-root/ +chmod -R g=u /opt/app-root +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF USER 1001 diff --git a/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf b/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf index 4583ee67cb..8fe9d4edc9 100644 --- a/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf +++ b/runtimes/datascience/ubi9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : RHEL 9.6 with Python 3.12 # Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x -BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest +BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1761580156 diff --git a/runtimes/datascience/ubi9-python-3.12/pylock.toml b/runtimes/datascience/ubi9-python-3.12/pylock.toml index 404dd820c3..f043fffa33 100644 --- a/runtimes/datascience/ubi9-python-3.12/pylock.toml +++ b/runtimes/datascience/ubi9-python-3.12/pylock.toml @@ -477,10 +477,10 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" +version = "0.32.1" marker = 
"platform_machine != 'ppc64le' and platform_machine != 's390x'" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -948,6 +948,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = 
"https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -957,6 +959,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = 
"https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -966,6 +970,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = 
"https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -975,6 +981,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = 
"https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -982,6 +990,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = 
"https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -991,6 +1001,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = 
"https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/runtimes/datascience/ubi9-python-3.12/pyproject.toml b/runtimes/datascience/ubi9-python-3.12/pyproject.toml index ac8ea34b3e..1cb06ecc04 100644 --- a/runtimes/datascience/ubi9-python-3.12/pyproject.toml +++ b/runtimes/datascience/ubi9-python-3.12/pyproject.toml @@ -15,7 +15,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0; platform_machine != 's390x' and platform_machine != 'ppc64le'", + "codeflare-sdk~=0.32.1; platform_machine != 's390x' and platform_machine != 'ppc64le'", "feast~=0.55.0", # DB connectors diff --git a/runtimes/minimal/ubi9-python-3.12/Dockerfile.cpu b/runtimes/minimal/ubi9-python-3.12/Dockerfile.cpu index 05b9383fc8..44611fbaf2 100644 --- a/runtimes/minimal/ubi9-python-3.12/Dockerfile.cpu +++ b/runtimes/minimal/ubi9-python-3.12/Dockerfile.cpu @@ -25,32 +25,44 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected 
packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN ARCH=$(uname -m) && \ - echo "Detected architecture: $ARCH" && \ - PACKAGES="perl mesa-libGL skopeo" && \ - if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ - PACKAGES="$PACKAGES gcc g++ make openssl-devel autoconf automake libtool cmake"; \ - fi && \ - dnf install -y $PACKAGES && \ - dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +ARCH=$(uname -m) +echo "Detected architecture: $ARCH" +PACKAGES="perl mesa-libGL skopeo" +if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then + PACKAGES="$PACKAGES gcc g++ make openssl-devel autoconf automake libtool cmake" +fi +dnf install -y $PACKAGES +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f 
/tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -77,12 +89,15 @@ COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${MINIMAL_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu b/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu index 3913bec0c2..090114a3ba 100644 --- a/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu +++ b/runtimes/minimal/ubi9-python-3.12/Dockerfile.konflux.cpu @@ -10,6 +10,8 @@ FROM registry.access.redhat.com/ubi9/ubi AS ubi-repos # cpu-base # #################### FROM ${BASE_IMAGE} AS cpu-base +USER 0 +RUN subscription-manager refresh WORKDIR /opt/app-root/bin @@ -25,32 +27,44 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN ARCH=$(uname -m) && \ - echo "Detected architecture: $ARCH" && \ - PACKAGES="perl mesa-libGL skopeo" && \ - if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then \ - PACKAGES="$PACKAGES gcc g++ make openssl-devel autoconf automake libtool cmake"; \ - fi && \ - dnf install -y $PACKAGES && \ - dnf clean all && rm -rf /var/cache/yum 
+RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +ARCH=$(uname -m) +echo "Detected architecture: $ARCH" +PACKAGES="perl mesa-libGL skopeo" +if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then + PACKAGES="$PACKAGES gcc g++ make openssl-devel autoconf automake libtool cmake" +fi +dnf install -y $PACKAGES +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end #################### @@ -67,13 +81,16 @@ COPY ${MINIMAL_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${MINIMAL_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf b/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf index 4583ee67cb..8fe9d4edc9 100644 --- a/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf +++ b/runtimes/minimal/ubi9-python-3.12/build-args/cpu.conf @@ -1,3 +1,3 @@ -# Base Image : UBI 9 with Python 3.12 +# Base Image : RHEL 9.6 with Python 3.12 # Architectures: linux/arm64, linux/ppc64le, linux/x86_64, linux/s360x -BASE_IMAGE=registry.access.redhat.com/ubi9/python-312:latest +BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1761580156 diff --git a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda index f437345d2a..7220cce919 100644 --- a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda +++ b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.cuda @@ -27,25 +27,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo 
/etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc 
client end ######################### @@ -72,12 +85,15 @@ COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda index 9837967d08..ea54bc5db1 100644 --- a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -27,25 +27,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir 
--extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -62,13 +75,16 @@ COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml index 7057688f35..457c6f2504 100644 --- a/runtimes/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml +++ b/runtimes/pytorch+llmcompressor/ubi9-python-3.12/pylock.toml @@ -875,6 +875,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = 
"https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -884,6 +886,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = 
"https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -893,6 +897,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = 
"https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -902,6 +908,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = 
"https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -909,6 +917,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = 
"https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -918,6 +928,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = 
"https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda b/runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda index 457d168a1d..1f78041f64 100644 --- a/runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda +++ b/runtimes/pytorch/ubi9-python-3.12/Dockerfile.cuda @@ -27,25 +27,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from 
requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -72,12 +85,15 @@ COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda b/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda index 73169b4a63..ac153dcbd1 100644 --- a/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/runtimes/pytorch/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -27,25 +27,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U 
"micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################### @@ -62,13 +75,16 @@ COPY ${PYTORCH_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF WORKDIR /opt/app-root/src diff --git a/runtimes/pytorch/ubi9-python-3.12/pylock.toml b/runtimes/pytorch/ubi9-python-3.12/pylock.toml index 4d16ef9bc5..11d9c50252 100644 --- a/runtimes/pytorch/ubi9-python-3.12/pylock.toml +++ b/runtimes/pytorch/ubi9-python-3.12/pylock.toml @@ -476,9 +476,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] 
[[packages]] name = "colorama" @@ -939,6 +939,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = 
"https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -948,6 +950,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = 
"https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -957,6 +961,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = 
"https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -966,6 +972,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = 
"https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -973,6 +981,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { 
url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -982,6 +992,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = 
"https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = "https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/runtimes/pytorch/ubi9-python-3.12/pyproject.toml b/runtimes/pytorch/ubi9-python-3.12/pyproject.toml index 01b704c9e3..da70ffe523 100644 --- a/runtimes/pytorch/ubi9-python-3.12/pyproject.toml +++ b/runtimes/pytorch/ubi9-python-3.12/pyproject.toml @@ -20,7 +20,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0", + 
"codeflare-sdk~=0.32.1", "feast~=0.55.0", # DB connectors diff --git a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm index 6b7dac4bca..1313ea7b55 100644 --- a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -25,25 +25,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o 
/tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -62,19 +75,23 @@ COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ # Copy utility script COPY ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml - USER 0 -# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments -RUN ./de-vendor-torch.sh && \ - rm ./de-vendor-torch.sh && \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ - fix-permissions /opt/app-root -P -USER 1001 +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# De-vendor the ROCm libs that are embedded in Pytorch +./de-vendor-torch.sh +rm ./de-vendor-torch.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +USER 1001 WORKDIR /opt/app-root/src LABEL name="rhoai/odh-pipeline-runtime-pytorch-rocm-py312-rhel9" \ diff --git a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm index 2902502b0a..75140b1b52 100644 --- a/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/Dockerfile.rocm @@ -25,25 +25,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt 
begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ######################## @@ -72,17 +85,21 @@ COPY ${PYTORCH_SOURCE_CODE}/utils ./utils/ # Copy utility script COPY ${PYTORCH_SOURCE_CODE}/de-vendor-torch.sh ./ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml - USER 0 -# De-vendor the ROCm libs that are embedded in Pytorch and fix permissions to support pip in Openshift environments -RUN ./de-vendor-torch.sh && \ - rm ./de-vendor-torch.sh && \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages || true && \ - fix-permissions /opt/app-root -P -USER 1001 +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# De-vendor the ROCm libs that are embedded in Pytorch +./de-vendor-torch.sh +rm ./de-vendor-torch.sh +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF + +USER 1001 WORKDIR /opt/app-root/src diff --git a/runtimes/rocm-pytorch/ubi9-python-3.12/pylock.toml b/runtimes/rocm-pytorch/ubi9-python-3.12/pylock.toml index 9c54b97a55..2182347c71 100644 --- a/runtimes/rocm-pytorch/ubi9-python-3.12/pylock.toml +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/pylock.toml @@ -476,9 +476,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" 
} } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -939,6 +939,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = "8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = 
"https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -948,6 +950,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = "2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = 
"https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = 
"https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -957,6 +961,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = 
"https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -966,6 +972,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = 
"https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -973,6 +981,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = 
"https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = 
"https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -982,6 +992,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = 
"https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/runtimes/rocm-pytorch/ubi9-python-3.12/pyproject.toml b/runtimes/rocm-pytorch/ubi9-python-3.12/pyproject.toml index a6c8d851b5..fd64796253 100644 --- a/runtimes/rocm-pytorch/ubi9-python-3.12/pyproject.toml +++ b/runtimes/rocm-pytorch/ubi9-python-3.12/pyproject.toml @@ -21,7 +21,7 @@ dependencies = [ "scipy~=1.16.2", "skl2onnx~=1.19.1", "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", "feast~=0.55.0", # DB connectors diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm index 9e84179401..20997f4b7a 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.konflux.rocm @@ -25,25 +25,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y 
+RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ########################### @@ -61,20 +74,23 @@ COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${TENSORFLOW_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on 
non amd64, where building is common. - # Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml - -# Fix permissions to support pip in Openshift environments \ USER 0 -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P -USER 1001 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +# Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ +USER 1001 WORKDIR /opt/app-root/src LABEL name="rhoai/odh-pipeline-runtime-tensorflow-rocm-py312-rhel9" \ diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm index 83133e7bcb..1a9b6b499d 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/Dockerfile.rocm @@ -25,25 +25,38 @@ COPY 
--from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f 
/tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ########################### @@ -71,18 +84,21 @@ COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${TENSORFLOW_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. - # Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml - -# Fix permissions to support pip in Openshift environments \ USER 0 -RUN chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P -USER 1001 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
+# Not using --build-constraints=./requirements.txt because error: Unnamed requirements are not allowed as constraints (found: `https://repo.radeon.com/rocm/manylinux/rocm-rel-6.4/ +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ +USER 1001 WORKDIR /opt/app-root/src diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/pylock.toml b/runtimes/rocm-tensorflow/ubi9-python-3.12/pylock.toml index 23a0db98dc..5539ff06cf 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/pylock.toml +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/pylock.toml @@ -464,9 +464,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8ee [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = "8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = 
"870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorful" diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml b/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml index fe610eb08a..444846db98 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml @@ -25,7 +25,7 @@ dependencies = [ "skl2onnx~=1.19.1", # Required for skl2onnx, as upgraded version is not compatible with protobuf "onnxconverter-common~=1.13.0", - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", # DB connectors "pymongo~=4.15.3", diff --git a/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda b/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda index dbc463b84b..12e9db1ec1 100644 --- a/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda +++ b/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.cuda @@ -29,25 +29,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable 
vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ############################ @@ -75,14 +88,20 @@ COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${TENSORFLOW_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. 
- uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +USER 0 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ +USER 1001 WORKDIR /opt/app-root/src diff --git a/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda b/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda index 51a07c005d..ce56e0e866 100644 --- a/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda +++ b/runtimes/tensorflow/ubi9-python-3.12/Dockerfile.konflux.cuda @@ -29,25 +29,38 @@ COPY --from=ubi-repos /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest -RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 
--setopt=keepcache=0 \ - && dnf clean all -y +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 +dnf clean all -y +EOF + # upgrade first to avoid fixable vulnerabilities end # Install useful OS packages -RUN dnf install -y perl mesa-libGL skopeo libxcrypt-compat && dnf clean all && rm -rf /var/cache/yum +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +dnf install -y perl mesa-libGL skopeo libxcrypt-compat +dnf clean all +rm -rf /var/cache/yum +EOF # Other apps and tools installed as default user USER 1001 # Install micropipenv and uv to deploy packages from requirements.txt begin -RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12" +RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12" # Install micropipenv and uv to deploy packages from requirements.txt end # Install the oc client begin -RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz +tar -xzvf /tmp/openshift-client-linux.tar.gz oc +rm -f /tmp/openshift-client-linux.tar.gz +EOF + # Install the oc client end ############################ @@ -65,16 +78,22 @@ COPY ${TENSORFLOW_SOURCE_CODE}/pylock.toml ./ # Copy Elyra dependencies for air-gapped enviroment COPY ${TENSORFLOW_SOURCE_CODE}/utils ./utils/ -RUN echo "Installing softwares and packages" && \ - # This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, - # we often don't know the correct hashes 
and `--require-hashes` would therefore fail on non amd64, where building is common. - uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml && \ - # Fix permissions to support pip in Openshift environments \ - chmod -R g+w /opt/app-root/lib/python3.12/site-packages && \ - fix-permissions /opt/app-root -P +USER 0 + +RUN /bin/bash <<'EOF' +set -Eeuxo pipefail +echo "Installing softwares and packages" +# This may have to download and compile some dependencies, and as we don't lock requirements from `build-system.requires`, +# we often don't know the correct hashes and `--require-hashes` would therefore fail on non amd64, where building is common. +uv pip install --strict --no-deps --no-cache --no-config --no-progress --verify-hashes --compile-bytecode --index-strategy=unsafe-best-match --requirements=./pylock.toml +# Fix permissions to support pip in Openshift environments +chmod -R g+w /opt/app-root/lib/python3.12/site-packages +fix-permissions /opt/app-root -P +EOF COPY ${JUPYTER_REUSABLE_UTILS}/usercustomize.pth ${JUPYTER_REUSABLE_UTILS}/monkey_patch_protobuf_6x.py /opt/app-root/lib/python3.12/site-packages/ +USER 1001 WORKDIR /opt/app-root/src LABEL name="rhoai/odh-pipeline-runtime-tensorflow-cuda-py312-rhel9" \ diff --git a/runtimes/tensorflow/ubi9-python-3.12/pylock.toml b/runtimes/tensorflow/ubi9-python-3.12/pylock.toml index e73befe45a..4dea64155c 100644 --- a/runtimes/tensorflow/ubi9-python-3.12/pylock.toml +++ b/runtimes/tensorflow/ubi9-python-3.12/pylock.toml @@ -482,9 +482,9 @@ wheels = [{ url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a [[packages]] name = "codeflare-sdk" -version = "0.32.0" -sdist = { url = "https://files.pythonhosted.org/packages/75/84/fd7f089111ddae5896059f28f02997d9b7650ff97ccf8917e35964a12795/codeflare_sdk-0.32.0.tar.gz", upload-time = 2025-10-16T11:51:24Z, size = 150607, hashes = { sha256 = 
"8cc4bc9e471c8dd2ec5baacda94d5f17a0bbbf3d4a944a213307c37521e1a300" } } -wheels = [{ url = "https://files.pythonhosted.org/packages/9e/9f/5007a20bf72f86400cfd935e8ac53888db024fbbdf2f278d9d6fcadbb017/codeflare_sdk-0.32.0-py3-none-any.whl", upload-time = 2025-10-16T11:51:22Z, size = 219307, hashes = { sha256 = "583910545d4e97c8ca18692150d3a3bdc45ed37dfb3cfda2f891a191b584d3f8" } }] +version = "0.32.1" +sdist = { url = "https://files.pythonhosted.org/packages/47/b8/5b5942be8a430a1c67d90beca9b20f81fd7b54613e9758b091c3a5d8ff06/codeflare_sdk-0.32.1.tar.gz", upload-time = 2025-11-07T21:07:06Z, size = 151147, hashes = { sha256 = "870cb62610b3585014e62e1069051b3bbf02ab2a9e10d5e18e1f20866a3f7a44" } } +wheels = [{ url = "https://files.pythonhosted.org/packages/11/46/5223a5b7651d36251789e5426ce8caef579961a4d25a73db23ad4f4ebe22/codeflare_sdk-0.32.1-py3-none-any.whl", upload-time = 2025-11-07T21:07:04Z, size = 219859, hashes = { sha256 = "5f0d319d950f6ff9fa7e94a7ae4502c41d8c487a090cd7497ef95ff65c1b0951" } }] [[packages]] name = "colorama" @@ -966,6 +966,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:23Z, size = 584358, hashes = { sha256 = "c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" } }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:37Z, size = 1113550, hashes = { sha256 = "9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" } }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:20Z, size = 1137126, hashes = { sha256 = 
"8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" } }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:04Z, size = 1544904, hashes = { sha256 = "f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7" } }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:08Z, size = 1611228, hashes = { sha256 = "af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8" } }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", upload-time = 2025-08-07T13:50:00Z, size = 298654, hashes = { sha256 = "73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" } }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:41Z, size = 272305, hashes = { sha256 = "96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2" } }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:55Z, size = 632472, hashes = { sha256 = "1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246" } }, @@ -975,6 +977,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:25Z, size = 587684, hashes = { sha256 = 
"2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8" } }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:38Z, size = 1116647, hashes = { sha256 = "1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52" } }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:21Z, size = 1142073, hashes = { sha256 = "55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa" } }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:11Z, size = 1548385, hashes = { sha256 = "c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c" } }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:12Z, size = 1613329, hashes = { sha256 = "03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5" } }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", upload-time = 2025-08-07T13:44:12Z, size = 299100, hashes = { sha256 = "9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9" } }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:45Z, size = 274079, hashes = { sha256 = "3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd" } }, { url = 
"https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:56Z, size = 640997, hashes = { sha256 = "ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb" } }, @@ -984,6 +988,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:28Z, size = 607586, hashes = { sha256 = "3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0" } }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:39Z, size = 1123281, hashes = { sha256 = "abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0" } }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:22Z, size = 1151142, hashes = { sha256 = "20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f" } }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:15Z, size = 1564846, hashes = { sha256 = "ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0" } }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:17Z, size = 1633814, hashes = { sha256 = "326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d" } }, { url = 
"https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", upload-time = 2025-08-07T13:38:53Z, size = 299899, hashes = { sha256 = "a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02" } }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:15:50Z, size = 272814, hashes = { sha256 = "1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31" } }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:57Z, size = 641073, hashes = { sha256 = "cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945" } }, @@ -993,6 +999,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:31Z, size = 610497, hashes = { sha256 = "23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671" } }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:41Z, size = 1121662, hashes = { sha256 = "00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b" } }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:24Z, size = 1149210, hashes = { sha256 = "d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae" } }, + { url = 
"https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:19Z, size = 1564759, hashes = { sha256 = "6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b" } }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:21Z, size = 1634288, hashes = { sha256 = "ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929" } }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", upload-time = 2025-08-07T13:24:38Z, size = 299685, hashes = { sha256 = "554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b" } }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:08Z, size = 273586, hashes = { sha256 = "49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0" } }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:42:59Z, size = 686346, hashes = { sha256 = "299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f" } }, @@ -1000,6 +1008,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", upload-time = 2025-08-07T13:53:17Z, size = 694659, hashes = { sha256 = "b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1" } }, { url = 
"https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", upload-time = 2025-08-07T13:18:34Z, size = 695355, hashes = { sha256 = "061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735" } }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:33Z, size = 657512, hashes = { sha256 = "44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337" } }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:23Z, size = 1612508, hashes = { sha256 = "2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269" } }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:25Z, size = 1680760, hashes = { sha256 = "015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681" } }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", upload-time = 2025-08-07T13:32:27Z, size = 303425, hashes = { sha256 = "e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01" } }, { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", upload-time = 2025-08-07T13:16:16Z, size = 269859, hashes = { sha256 = "b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c" } }, { url = 
"https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", upload-time = 2025-08-07T13:43:01Z, size = 627610, hashes = { sha256 = "27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d" } }, @@ -1009,6 +1019,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", upload-time = 2025-08-07T13:18:35Z, size = 582817, hashes = { sha256 = "c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df" } }, { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", upload-time = 2025-08-07T13:42:42Z, size = 1111985, hashes = { sha256 = "b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594" } }, { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", upload-time = 2025-08-07T13:18:26Z, size = 1136137, hashes = { sha256 = "81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98" } }, + { url = "https://files.pythonhosted.org/packages/4b/bf/7bd33643e48ed45dcc0e22572f650767832bd4e1287f97434943cc402148/greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", upload-time = 2025-11-04T12:42:27Z, size = 1542941, hashes = { sha256 = "28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10" } }, + { url = "https://files.pythonhosted.org/packages/9b/74/4bc433f91d0d09a1c22954a371f9df928cb85e72640870158853a83415e5/greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", upload-time = 2025-11-04T12:42:29Z, size = 1609685, hashes = { sha256 = "52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be" } }, { url = 
"https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", upload-time = 2025-08-07T14:02:20Z, size = 281400, hashes = { sha256 = "65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b" } }, { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", upload-time = 2025-08-07T13:56:34Z, size = 298533, hashes = { sha256 = "d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb" } }, ] diff --git a/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml b/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml index b23d4a4904..e806c2634d 100644 --- a/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml +++ b/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "skl2onnx~=1.19.1", # Required for skl2onnx, as upgraded version is not compatible with protobuf "onnxconverter-common~=1.13.0", - "codeflare-sdk~=0.32.0", + "codeflare-sdk~=0.32.1", "feast~=0.55.0", # DB connectors diff --git a/scripts/check-payload/config.toml b/scripts/check-payload/config.toml index 89d6caf39c..057ad9b9c8 100644 --- a/scripts/check-payload/config.toml +++ b/scripts/check-payload/config.toml @@ -170,6 +170,36 @@ dirs = ["/assets/downloads/cli"] error = "ErrGoMissingTag" dirs = ["/assets/downloads/cli"] +# AI Pipelines override (RHOAIENG-24702 and RHOAIENG-38121) +# When FIPS is enabled, /usr/bin/argoexec-fips is used. /usr/bin/argoexec is +# only used for portability reasons in non-FIPS environments. 
+[[payload.odh-data-science-pipelines-argo-argoexec-container.ignore]] +error = "ErrGoNotCgoEnabled" +files = ["/usr/bin/argoexec"] + +[[payload.odh-data-science-pipelines-argo-argoexec-container.ignore]] +error = "ErrGoMissingSymbols" +files = ["/usr/bin/argoexec"] + +[[payload.odh-data-science-pipelines-argo-argoexec-container.ignore]] +error = "ErrNotDynLinked" +files = ["/usr/bin/argoexec"] + +# AI Pipelines override (RHOAIENG-24702 and RHOAIENG-38121) +# When FIPS is enabled, /usr/bin/launcher-v2-fips is used. /usr/bin/launcher-v2 is +# only used for portability reasons in non-FIPS environments. +[[payload.odh-ml-pipelines-launcher-container.ignore]] +error = "ErrGoNotCgoEnabled" +files = ["/usr/bin/launcher-v2"] + +[[payload.odh-ml-pipelines-launcher-container.ignore]] +error = "ErrGoMissingSymbols" +files = ["/usr/bin/launcher-v2"] + +[[payload.odh-ml-pipelines-launcher-container.ignore]] +error = "ErrNotDynLinked" +files = ["/usr/bin/launcher-v2"] + # Temporary supprsssions for workbenches # https://github.com/openshift/check-payload/blob/main/internal/types/errors.go diff --git a/scripts/dockerfile_fragments.py b/scripts/dockerfile_fragments.py index e2b39829bb..4fca5be4f5 100755 --- a/scripts/dockerfile_fragments.py +++ b/scripts/dockerfile_fragments.py @@ -27,15 +27,19 @@ def main(): # Problem: The operation would result in removing the following protected packages: systemd # (try to add '--allowerasing' to command line to replace conflicting packages or '--skip-broken' to skip uninstallable packages) # Solution: --best --skip-broken does not work either, so use --nobest - RUN dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 \ - && dnf clean all -y + RUN /bin/bash <<'EOF' + set -Eeuxo pipefail + dnf -y upgrade --refresh --nobest --skip-broken --nodocs --noplugins --setopt=install_weak_deps=0 --setopt=keepcache=0 + dnf clean all -y + EOF + """), prefix="upgrade first to avoid fixable 
vulnerabilities", ) blockinfile( dockerfile, - textwrap.dedent('''RUN pip install --no-cache-dir -U "micropipenv[toml]==1.9.0" "uv==0.8.12"'''), + textwrap.dedent('''RUN pip install --no-cache-dir --extra-index-url https://pypi.org/simple -U "micropipenv[toml]==1.9.0" "uv==0.8.12"'''), prefix="Install micropipenv and uv to deploy packages from requirements.txt", ) @@ -43,10 +47,14 @@ def main(): blockinfile( dockerfile, textwrap.dedent(r""" - RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ - -o /tmp/openshift-client-linux.tar.gz && \ - tar -xzvf /tmp/openshift-client-linux.tar.gz oc && \ - rm -f /tmp/openshift-client-linux.tar.gz + RUN /bin/bash <<'EOF' + set -Eeuxo pipefail + curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/ocp/stable/openshift-client-linux.tar.gz \ + -o /tmp/openshift-client-linux.tar.gz + tar -xzvf /tmp/openshift-client-linux.tar.gz oc + rm -f /tmp/openshift-client-linux.tar.gz + EOF + """), prefix="Install the oc client", ) @@ -91,7 +99,10 @@ def blockinfile(filename: str | os.PathLike, contents: str, prefix: str | None = lines = original_lines[:] # NOTE: textwrap.dedent() with raw strings leaves leading and trailing newline - new_contents = contents.strip("\n").splitlines(keepends=True) + # we want to preserve the trailing one because HEREDOC has to have an empty trailing line for hadolint + new_contents = contents.lstrip("\n").splitlines(keepends=True) + if new_contents and new_contents[-1] == "\n": + new_contents = new_contents[:-1] if begin == end == -1: # add at the end if no markers found lines.append(f"\n{begin_marker}\n") @@ -126,6 +137,13 @@ def test_adding_new_block(self, fs: FakeFilesystem): assert fs.get_object("/config.txt").contents == "hello\nworld\n# begin\nkey=value\n# end\n" + def test_lastnewline_removal(self, fs: FakeFilesystem): + fs.create_file("/config.txt", contents="hello\nworld") + + blockinfile("/config.txt", 
"key=value\n\n") + + assert fs.get_object("/config.txt").contents == "hello\nworld\n# begin\nkey=value\n\n# end\n" + def test_updating_value_in_block(self, fs: FakeFilesystem): fs.create_file("/config.txt", contents="hello\nworld\n# begin\nkey=value1\n# end\n")