From de49b2f33ef62ea646487e0e1322ca3fce903c14 Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Wed, 14 Jan 2026 13:12:26 +0100 Subject: [PATCH 1/9] Add Ruby 4.0 --- .evergreen/config.yml | 217 +++++++++++++++++------------ .evergreen/config/axes.yml.erb | 20 ++- .evergreen/config/standard.yml.erb | 45 ++++-- 3 files changed, 174 insertions(+), 108 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index d64b5ba485..2dd5b967a2 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -777,6 +777,14 @@ axes: - id: "mongodb-version" display_name: MongoDB Version values: + - id: "latest" + display_name: "latest" + variables: + MONGODB_VERSION: "latest" + - id: "8.2" + display_name: "8.2" + variables: + MONGODB_VERSION: "8.2" - id: "8.0" display_name: "8.0" variables: @@ -914,10 +922,10 @@ axes: display_name: ruby-dev variables: RVM_RUBY: "ruby-dev" - - id: "ruby-3.5" - display_name: ruby-3.5 + - id: "ruby-4.0" + display_name: ruby-4.0 variables: - RVM_RUBY: "ruby-3.5" + RVM_RUBY: "ruby-4.0" - id: "ruby-3.4" display_name: ruby-3.4 variables: @@ -946,10 +954,10 @@ axes: display_name: ruby-2.7 variables: RVM_RUBY: "ruby-2.7" - - id: "jruby-10" - display_name: jruby-10 + - id: "jruby-10.0" + display_name: jruby-10.0 variables: - RVM_RUBY: "jruby-10" + RVM_RUBY: "jruby-10.0" - id: "jruby-9.4" display_name: jruby-9.4 variables: @@ -1172,8 +1180,8 @@ axes: buildvariants: - matrix_name: DriverBench matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: standalone os: ubuntu2204 display_name: DriverBench @@ -1183,41 +1191,45 @@ buildvariants: - matrix_name: "auth/ssl" matrix_spec: auth-and-ssl: ["auth-and-ssl", "noauth-and-nossl"] - ruby: "ruby-3.3" - mongodb-version: ["8.0", "7.0"] + ruby: "ruby-4.0" + mongodb-version: ["8.2", "8.0", "7.0"] topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: ${auth-and-ssl} ${ruby} db-${mongodb-version} ${topology} + tags: ["pr"] tasks: - name: "run-main-test-suite" - matrix_name: "mongo-recent" matrix_spec: - ruby: ["ruby-3.3", "ruby-3.2", "jruby-9.4"] - mongodb-version: ["8.0", "7.0"] + ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "jruby-10.0"] + mongodb-version: ["8.2", "8.0", "7.0"] topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" - matrix_name: "mongo-8-arm" matrix_spec: - ruby: "ruby-3.3" + ruby: "ruby-4.0" mongodb-version: [ '8.0' ] topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2404-arm display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" - matrix_name: "mongo-5.x" matrix_spec: - ruby: ["ruby-3.3", "ruby-3.2"] + ruby: ["ruby-4.0", "ruby-3.4"] mongodb-version: ['5.0'] topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2004 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -1228,89 +1240,97 @@ buildvariants: topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu1804 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" - matrix_name: "single-lb" matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: load-balanced single-mongos: single-mongos os: ubuntu2204 
display_name: "${mongodb-version} ${topology} single-lb ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - matrix_name: "mongo-api-version" matrix_spec: - ruby: "ruby-3.3" + ruby: "ruby-4.0" mongodb-version: '7.0' topology: standalone api-version-required: yes os: ubuntu2204 display_name: "${mongodb-version} api-version-required ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - matrix_name: "single-mongos" matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: "sharded-cluster" single-mongos: single-mongos os: ubuntu2204 display_name: "${mongodb-version} ${topology} single-mongos ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - matrix_name: CSOT matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: replica-set-single-node os: ubuntu2204 display_name: "CSOT - ${mongodb-version}" + tags: ["pr"] tasks: - name: test-csot - matrix_name: OTel matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: replica-set-single-node os: ubuntu2204 display_name: "OTel - ${mongodb-version}" + tags: ["pr"] tasks: - name: test-otel - matrix_name: "no-retry-reads" matrix_spec: retry-reads: no-retry-reads - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${retry-reads} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - matrix_name: "no-retry-writes" matrix_spec: retry-writes: no-retry-writes - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: [replica-set, sharded-cluster] os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${retry-writes} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - matrix_name: "lint" matrix_spec: lint: on - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${lint} ${ruby}" @@ -1320,8 +1340,8 @@ buildvariants: - matrix_name: "fork" matrix_spec: fork: on - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: "${mongodb-version} ${topology} fork ${ruby}" @@ -1331,8 +1351,8 @@ buildvariants: - matrix_name: "solo" matrix_spec: solo: on - ruby: ["ruby-3.3", "ruby-3.2", "ruby-3.1"] - mongodb-version: "8.0" + ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] + mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: "${mongodb-version} ${topology} solo ${ruby}" @@ -1353,19 +1373,20 @@ buildvariants: - matrix_name: "stress" matrix_spec: stress: on - ruby: "ruby-3.3" - mongodb-version: ["8.0", "7.0"] + ruby: "ruby-4.0" + mongodb-version: ["8.2", "8.0", "7.0"] topology: replica-set os: ubuntu2204 display_name: "${mongodb-version} ${topology} stress ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" # - matrix_name: "x509-tests" # matrix_spec: # auth-and-ssl: "x509" -# ruby: "ruby-3.3" -# mongodb-version: "8.0" +# ruby: "ruby-4.0" +# mongodb-version: "8.2" # topology: standalone # os: ubuntu2204 # display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" @@ -1375,35 +1396,38 @@ 
buildvariants: - matrix_name: "jruby-auth" matrix_spec: auth-and-ssl: [ "auth-and-ssl", "noauth-and-nossl" ] - ruby: jruby-9.4 - mongodb-version: "8.0" + ruby: jruby-10.0 + mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" - - matrix_name: zlib-"ruby-3.3" + - matrix_name: zlib-"ruby-4.0" matrix_spec: auth-and-ssl: [ "auth-and-ssl", "noauth-and-nossl" ] - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: "replica-set" compressor: 'zlib' os: ubuntu2204 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" - - matrix_name: snappy-"ruby-3.3" + - matrix_name: snappy-"ruby-4.0" matrix_spec: auth-and-ssl: [ "auth-and-ssl", "noauth-and-nossl" ] - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: "replica-set" compressor: 'snappy' os: ubuntu2204 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -1411,37 +1435,40 @@ buildvariants: # apparently a zstd-jni gem for JRuby that we could investigate here; if # this test is ever supported to support jruby, the `sample_mri_rubies` # reference should be replaced with `sample_rubies`. - - matrix_name: zstd-auth-"ruby-3.3" + - matrix_name: zstd-auth-"ruby-4.0" matrix_spec: auth-and-ssl: [ "auth-and-ssl", "noauth-and-nossl" ] - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: "replica-set" compressor: 'zstd' os: ubuntu2204 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" - - matrix_name: activesupport-"ruby-3.3" + - matrix_name: activesupport-"ruby-4.0" matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: replica-set as: as os: ubuntu2204 display_name: "AS ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - - matrix_name: bson-"ruby-3.3" + - matrix_name: bson-"ruby-4.0" matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: replica-set bson: "*" os: ubuntu2204 display_name: "bson-${bson} ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - matrix_name: zlib-"ruby-2.7" @@ -1453,6 +1480,7 @@ buildvariants: compressor: 'zlib' os: ubuntu2004 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -1465,6 +1493,7 @@ buildvariants: compressor: 'snappy' os: ubuntu2004 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -1481,6 +1510,7 @@ buildvariants: compressor: 'zstd' os: ubuntu2004 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -1492,6 +1522,7 @@ buildvariants: as: as os: ubuntu2004 display_name: "AS ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -1503,24 +1534,26 @@ buildvariants: bson: "*" os: ubuntu2004 display_name: "bson-${bson} ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" - 
matrix_name: "fle above 4.4" matrix_spec: auth-and-ssl: "noauth-and-nossl" - ruby: ["ruby-3.3", "ruby-3.2", "ruby-3.1"] + ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] topology: [replica-set, sharded-cluster] mongodb-version: [ '6.0', '7.0', '8.0' ] os: ubuntu2204 fle: helper display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-fle" # kerberos integration tests are broken (RUBY-3266) # - matrix_name: "kerberos-integration" # matrix_spec: - # ruby: ["ruby-3.3", "ruby-2.7", "jruby-9.4"] + # ruby: ["ruby-4.0", "ruby-2.7", "jruby-10.0"] # os: rhel8 # display_name: "Kerberos integration ${os} ${ruby}" # tasks: @@ -1528,8 +1561,8 @@ buildvariants: - matrix_name: "kerberos-unit" matrix_spec: - ruby: "ruby-3.3" - mongodb-version: "8.0" + ruby: "ruby-4.0" + mongodb-version: "8.2" topology: standalone os: ubuntu2204 auth-and-ssl: kerberos @@ -1556,11 +1589,12 @@ buildvariants: # auth-and-ssl: [ aws-regular, aws-assume-role, aws-ecs, aws-web-identity ] # https://jira.mongodb.org/browse/RUBY-3659 auth-and-ssl: [ aws-regular, aws-assume-role, aws-web-identity ] - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "AWS ${auth-and-ssl} ${mongodb-version} ${ruby}" + tags: ["pr"] tasks: - name: "test-aws-auth" @@ -1568,9 +1602,9 @@ buildvariants: matrix_spec: ocsp-verifier: true # No JRuby due to https://github.com/jruby/jruby-openssl/issues/210 - ruby: ["ruby-3.3", "ruby-3.2", "ruby-3.1"] + ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP verifier: ${mongodb-version} ${ruby}" tasks: @@ -1581,9 +1615,9 @@ buildvariants: ocsp-algorithm: ecdsa ocsp-must-staple: on ocsp-delegate: on - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 auth-and-ssl: noauth-and-ssl display_name: "OCSP integration - must staple: ${mongodb-version} ${ruby}" @@ -1594,9 +1628,9 @@ buildvariants: matrix_spec: ocsp-algorithm: rsa ocsp-status: unknown - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 auth-and-ssl: noauth-and-ssl display_name: "OCSP integration - unknown: ${mongodb-version} ${ruby}" @@ -1610,9 +1644,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "none" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1624,9 +1658,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "none" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1638,9 +1672,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: fail extra-uri-options: "none" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1652,9 +1686,9 @@ buildvariants: 
ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "tlsInsecure=true" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1666,9 +1700,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "tlsInsecure=true" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1680,9 +1714,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "tlsInsecure=true" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1694,9 +1728,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "tlsAllowInvalidCertificates=true" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1708,9 +1742,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "tlsAllowInvalidCertificates=true" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1722,9 +1756,9 @@ buildvariants: ocsp-delegate: '*' ocsp-connectivity: pass extra-uri-options: "tlsAllowInvalidCertificates=true" - ruby: "ruby-3.3" + ruby: "ruby-4.0" topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: @@ -1742,9 +1776,9 @@ buildvariants: ocsp-status: [valid, unknown] ocsp-delegate: '*' ocsp-connectivity: pass - ruby: jruby-9.4 + ruby: jruby-10.0 topology: standalone - mongodb-version: "8.0" + mongodb-version: "8.2" os: ubuntu2204 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${mongodb-version} ${ruby}" tasks: @@ -1753,11 +1787,11 @@ buildvariants: # https://jira.mongodb.org/browse/RUBY-3540 #- matrix_name: testgcpkms-variant # matrix_spec: - # ruby: "ruby-3.3" + # ruby: "ruby-4.0" # fle: helper # topology: standalone # os: ubuntu2204 - # mongodb-version: "8.0" + # mongodb-version: "8.2" # display_name: "GCP KMS" # tasks: # - name: testgcpkms_task_group @@ -1778,17 +1812,18 @@ buildvariants: - matrix_name: atlas-full matrix_spec: - ruby: "ruby-3.3" - os: ubuntu2204 + ruby: "ruby-4.0" + os: ubuntu2404 display_name: "Atlas (Full)" tasks: - name: testatlas_full_task_group - matrix_name: "atlas" matrix_spec: - ruby: ["ruby-3.3", "ruby-3.2", "ruby-3.1"] + ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] os: ubuntu2204 display_name: "Atlas connectivity tests ${ruby}" + tags: ["pr"] tasks: - name: test-atlas diff --git a/.evergreen/config/axes.yml.erb 
b/.evergreen/config/axes.yml.erb index 3bd44c95ea..f1e247ed81 100644 --- a/.evergreen/config/axes.yml.erb +++ b/.evergreen/config/axes.yml.erb @@ -2,6 +2,14 @@ axes: - id: "mongodb-version" display_name: MongoDB Version values: + - id: "latest" + display_name: "latest" + variables: + MONGODB_VERSION: "latest" + - id: "8.2" + display_name: "8.2" + variables: + MONGODB_VERSION: "8.2" - id: "8.0" display_name: "8.0" variables: @@ -139,10 +147,10 @@ axes: display_name: ruby-dev variables: RVM_RUBY: "ruby-dev" - - id: "ruby-3.5" - display_name: ruby-3.5 + - id: "ruby-4.0" + display_name: ruby-4.0 variables: - RVM_RUBY: "ruby-3.5" + RVM_RUBY: "ruby-4.0" - id: "ruby-3.4" display_name: ruby-3.4 variables: @@ -171,10 +179,10 @@ axes: display_name: ruby-2.7 variables: RVM_RUBY: "ruby-2.7" - - id: "jruby-10" - display_name: jruby-10 + - id: "jruby-10.0" + display_name: jruby-10.0 variables: - RVM_RUBY: "jruby-10" + RVM_RUBY: "jruby-10.0" - id: "jruby-9.4" display_name: jruby-9.4 variables: diff --git a/.evergreen/config/standard.yml.erb b/.evergreen/config/standard.yml.erb index 6118d1bb99..49a9719cbf 100644 --- a/.evergreen/config/standard.yml.erb +++ b/.evergreen/config/standard.yml.erb @@ -3,28 +3,29 @@ # latest_ruby = the most recently released, stable version of Ruby # (make sure this version is being built by 10gen/mongo-ruby-toolchain) - latest_ruby = "ruby-3.3".inspect # so it gets quoted as a string + latest_ruby = "ruby-4.0".inspect # so it gets quoted as a string # these are used for testing against a few recent ruby versions - recent_rubies = %w( ruby-3.3 ruby-3.2 jruby-9.4 ) + recent_rubies = %w( ruby-4.0 ruby-3.4 ruby-3.3 jruby-10.0 ) - recent_mri_rubies = %w( ruby-3.3 ruby-3.2 ) + recent_mri_rubies = %w( ruby-4.0 ruby-3.4 ) # this is a list of the most recent 3.x and 2.x MRI ruby versions - sample_mri_rubies = %w( ruby-3.3 ruby-2.7 ) + sample_mri_rubies = %w( ruby-4.0 ruby-2.7 ) # as above, but including the most recent JRuby release - sample_rubies = sample_mri_rubies + %w( jruby-9.4 ) + sample_rubies = sample_mri_rubies + %w( jruby-10.0 ) # older Ruby versions provided by 10gen/mongo-ruby-toolchain older_rubies = %w( ruby-3.0 ruby-2.7 ) # all supported JRuby versions provided by 10gen/mongo-ruby-toolchain - jrubies = %w( jruby-9.4 jruby-9.3 ) + jrubies = %w( jruby-10.0 jruby-9.4 ) - supported_mri_rubies_3 = %w( ruby-3.3 ruby-3.2 ruby-3.1 ruby-3.0 ) + # We put 4.0 to rubies_3 so far + supported_mri_rubies_3 = %w( ruby-4.0 ruby-3.4 ruby-3.3 ruby-3.2 ruby-3.1 ruby-3.0 ) - supported_mri_rubies_3_ubuntu = %w( ruby-3.3 ruby-3.2 ruby-3.1 ) + supported_mri_rubies_3_ubuntu = %w( ruby-4.0 ruby-3.4 ruby-3.3 ruby-3.2 ruby-3.1 ) supported_mri_ruby_2 = "ruby-2.7".inspect @@ -33,10 +34,10 @@ jrubies # The latest stable version of MongoDB - latest_stable_mdb = "8.0".inspect # so it gets quoted as a string + latest_stable_mdb = "8.2".inspect # so it gets quoted as a string - recent_mdb = %w( 8.0 7.0 ) + recent_mdb = %w( 8.2 8.0 7.0 ) %> buildvariants: @@ -58,6 +59,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu2204 display_name: ${auth-and-ssl} ${ruby} db-${mongodb-version} ${topology} + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -68,6 +70,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu2204 display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -78,6 +81,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu2404-arm display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} 
${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -88,6 +92,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu2004 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -98,6 +103,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu1804 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -109,6 +115,7 @@ buildvariants: single-mongos: single-mongos os: ubuntu2204 display_name: "${mongodb-version} ${topology} single-lb ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -120,6 +127,7 @@ buildvariants: api-version-required: yes os: ubuntu2204 display_name: "${mongodb-version} api-version-required ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -131,6 +139,7 @@ buildvariants: single-mongos: single-mongos os: ubuntu2204 display_name: "${mongodb-version} ${topology} single-mongos ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -141,6 +150,7 @@ buildvariants: topology: replica-set-single-node os: ubuntu2204 display_name: "CSOT - ${mongodb-version}" + tags: ["pr"] tasks: - name: test-csot @@ -151,6 +161,7 @@ buildvariants: topology: replica-set-single-node os: ubuntu2204 display_name: "OTel - ${mongodb-version}" + tags: ["pr"] tasks: - name: test-otel @@ -162,6 +173,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${retry-reads} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -173,6 +185,7 @@ buildvariants: topology: [replica-set, sharded-cluster] os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${retry-writes} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -228,6 +241,7 @@ buildvariants: topology: replica-set os: ubuntu2204 display_name: "${mongodb-version} ${topology} stress ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -250,6 +264,7 @@ buildvariants: topology: <%= topologies %> os: ubuntu2204 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -267,6 +282,7 @@ buildvariants: compressor: 'zlib' os: <%= distro %> display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -279,6 +295,7 @@ buildvariants: compressor: 'snappy' os: <%= distro %> display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -295,6 +312,7 @@ buildvariants: compressor: 'zstd' os: <%= distro %> display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" + tags: ["pr"] tasks: - name: "run-main-test-suite" @@ -306,6 +324,7 @@ buildvariants: as: as os: <%= distro %> display_name: "AS ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" @@ -317,6 +336,7 @@ buildvariants: bson: "*" os: <%= distro %> display_name: "bson-${bson} ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-mlaunch" <% end %> @@ -330,6 +350,7 @@ buildvariants: os: ubuntu2204 fle: helper display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" + tags: ["pr"] tasks: - name: "test-fle" # kerberos integration tests are broken (RUBY-3266) @@ -376,6 +397,7 @@ buildvariants: mongodb-version: <%= latest_stable_mdb %> os: ubuntu2204 display_name: "AWS ${auth-and-ssl} ${mongodb-version} ${ruby}" + tags: 
["pr"] tasks: - name: "test-aws-auth" @@ -497,7 +519,7 @@ buildvariants: - matrix_name: atlas-full matrix_spec: ruby: <%= latest_ruby %> - os: ubuntu2204 + os: ubuntu2404 display_name: "Atlas (Full)" tasks: - name: testatlas_full_task_group @@ -507,6 +529,7 @@ buildvariants: ruby: <%= supported_mri_rubies_3_ubuntu %> os: ubuntu2204 display_name: "Atlas connectivity tests ${ruby}" + tags: ["pr"] tasks: - name: test-atlas From 310e73202f04566e45351477300c2f0b798aef8d Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Wed, 14 Jan 2026 14:14:16 +0100 Subject: [PATCH 2/9] Ubuntu 2404 to the rescue --- .evergreen/config.yml | 82 +++++++++++++++--------------- .evergreen/config/standard.yml.erb | 58 ++++++++++----------- gemfiles/standard.rb | 3 ++ 3 files changed, 73 insertions(+), 70 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 2dd5b967a2..f944557321 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1183,7 +1183,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: standalone - os: ubuntu2204 + os: ubuntu2404 display_name: DriverBench tasks: - name: "driver-bench" @@ -1194,7 +1194,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: ["8.2", "8.0", "7.0"] topology: ["standalone", "replica-set", "sharded-cluster"] - os: ubuntu2204 + os: ubuntu2404 display_name: ${auth-and-ssl} ${ruby} db-${mongodb-version} ${topology} tags: ["pr"] tasks: @@ -1205,7 +1205,7 @@ buildvariants: ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "jruby-10.0"] mongodb-version: ["8.2", "8.0", "7.0"] topology: ["standalone", "replica-set", "sharded-cluster"] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1250,7 +1250,7 @@ buildvariants: mongodb-version: "8.2" topology: load-balanced single-mongos: single-mongos - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} single-lb ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1262,7 +1262,7 @@ buildvariants: mongodb-version: '7.0' topology: standalone api-version-required: yes - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} api-version-required ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1274,7 +1274,7 @@ buildvariants: mongodb-version: "8.2" topology: "sharded-cluster" single-mongos: single-mongos - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} single-mongos ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1285,7 +1285,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: replica-set-single-node - os: ubuntu2204 + os: ubuntu2404 display_name: "CSOT - ${mongodb-version}" tags: ["pr"] tasks: @@ -1296,7 +1296,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: replica-set-single-node - os: ubuntu2204 + os: ubuntu2404 display_name: "OTel - ${mongodb-version}" tags: ["pr"] tasks: @@ -1308,7 +1308,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${retry-reads} ${ruby}" tags: ["pr"] tasks: @@ -1320,7 +1320,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: [replica-set, sharded-cluster] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${retry-writes} ${ruby}" tags: ["pr"] tasks: @@ -1332,7 +1332,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] 
- os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${lint} ${ruby}" tasks: - name: "test-mlaunch" @@ -1343,7 +1343,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} fork ${ruby}" tasks: - name: "test-mlaunch" @@ -1354,7 +1354,7 @@ buildvariants: ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} solo ${ruby}" tasks: - name: "test-mlaunch" @@ -1376,7 +1376,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: ["8.2", "8.0", "7.0"] topology: replica-set - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} stress ${ruby}" tags: ["pr"] tasks: @@ -1388,7 +1388,7 @@ buildvariants: # ruby: "ruby-4.0" # mongodb-version: "8.2" # topology: standalone -# os: ubuntu2204 +# os: ubuntu2404 # display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" # tasks: # - name: "test-mlaunch" @@ -1399,7 +1399,7 @@ buildvariants: ruby: jruby-10.0 mongodb-version: "8.2" topology: ["standalone", "replica-set", "sharded-cluster"] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1412,7 +1412,7 @@ buildvariants: mongodb-version: "8.2" topology: "replica-set" compressor: 'zlib' - os: ubuntu2204 + os: ubuntu2404 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1425,7 +1425,7 @@ buildvariants: mongodb-version: "8.2" topology: "replica-set" compressor: 'snappy' - os: ubuntu2204 + os: ubuntu2404 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1442,7 +1442,7 @@ buildvariants: mongodb-version: "8.2" topology: "replica-set" compressor: 'zstd' - os: ubuntu2204 + os: ubuntu2404 display_name: "${compressor} ${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -1454,7 +1454,7 @@ buildvariants: mongodb-version: "8.2" topology: replica-set as: as - os: ubuntu2204 + os: ubuntu2404 display_name: "AS ${mongodb-version} ${topology} ${ruby}" tags: ["pr"] tasks: @@ -1466,7 +1466,7 @@ buildvariants: mongodb-version: "8.2" topology: replica-set bson: "*" - os: ubuntu2204 + os: ubuntu2404 display_name: "bson-${bson} ${mongodb-version} ${topology} ${ruby}" tags: ["pr"] tasks: @@ -1544,7 +1544,7 @@ buildvariants: ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] topology: [replica-set, sharded-cluster] mongodb-version: [ '6.0', '7.0', '8.0' ] - os: ubuntu2204 + os: ubuntu2404 fle: helper display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" tags: ["pr"] @@ -1564,7 +1564,7 @@ buildvariants: ruby: "ruby-4.0" mongodb-version: "8.2" topology: standalone - os: ubuntu2204 + os: ubuntu2404 auth-and-ssl: kerberos display_name: "Kerberos Tests" tasks: @@ -1576,7 +1576,7 @@ buildvariants: # ruby: # topology: [replica-set, sharded-cluster] # mongodb-version: [ 'latest' ] -# os: ubuntu2204 +# os: ubuntu2404 # fle: helper # display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" # tasks: @@ -1592,7 +1592,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "AWS ${auth-and-ssl} ${mongodb-version} ${ruby}" tags: ["pr"] tasks: @@ -1605,7 +1605,7 @@ 
buildvariants: ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP verifier: ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1618,7 +1618,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 auth-and-ssl: noauth-and-ssl display_name: "OCSP integration - must staple: ${mongodb-version} ${ruby}" tasks: @@ -1631,7 +1631,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 auth-and-ssl: noauth-and-ssl display_name: "OCSP integration - unknown: ${mongodb-version} ${ruby}" tasks: @@ -1647,7 +1647,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1661,7 +1661,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1675,7 +1675,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1689,7 +1689,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1703,7 +1703,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1717,7 +1717,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1731,7 +1731,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1745,7 +1745,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1759,7 +1759,7 @@ buildvariants: ruby: "ruby-4.0" topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1779,7 +1779,7 @@ buildvariants: ruby: jruby-10.0 topology: standalone mongodb-version: "8.2" - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} 
${ocsp-status} ${ocsp-delegate} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -1790,7 +1790,7 @@ buildvariants: # ruby: "ruby-4.0" # fle: helper # topology: standalone - # os: ubuntu2204 + # os: ubuntu2404 # mongodb-version: "8.2" # display_name: "GCP KMS" # tasks: @@ -1821,7 +1821,7 @@ buildvariants: - matrix_name: "atlas" matrix_spec: ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] - os: ubuntu2204 + os: ubuntu2404 display_name: "Atlas connectivity tests ${ruby}" tags: ["pr"] tasks: @@ -1831,7 +1831,7 @@ buildvariants: # - matrix_name: "aws-lambda" # matrix_spec: # ruby: 'ruby-3.2' -# os: ubuntu2204 +# os: ubuntu2404 # display_name: "AWS Lambda" # tasks: # - name: test_aws_lambda_task_group diff --git a/.evergreen/config/standard.yml.erb b/.evergreen/config/standard.yml.erb index 49a9719cbf..6b4781cd9b 100644 --- a/.evergreen/config/standard.yml.erb +++ b/.evergreen/config/standard.yml.erb @@ -46,7 +46,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: standalone - os: ubuntu2204 + os: ubuntu2404 display_name: DriverBench tasks: - name: "driver-bench" @@ -57,7 +57,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= recent_mdb %> topology: <%= topologies %> - os: ubuntu2204 + os: ubuntu2404 display_name: ${auth-and-ssl} ${ruby} db-${mongodb-version} ${topology} tags: ["pr"] tasks: @@ -68,7 +68,7 @@ buildvariants: ruby: <%= recent_rubies %> mongodb-version: <%= recent_mdb %> topology: <%= topologies %> - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -113,7 +113,7 @@ buildvariants: mongodb-version: <%= latest_stable_mdb %> topology: load-balanced single-mongos: single-mongos - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} single-lb ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -125,7 +125,7 @@ buildvariants: mongodb-version: '7.0' topology: standalone api-version-required: yes - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} api-version-required ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -137,7 +137,7 @@ buildvariants: mongodb-version: <%= latest_stable_mdb %> topology: "sharded-cluster" single-mongos: single-mongos - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} single-mongos ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: @@ -148,7 +148,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: replica-set-single-node - os: ubuntu2204 + os: ubuntu2404 display_name: "CSOT - ${mongodb-version}" tags: ["pr"] tasks: @@ -159,7 +159,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: replica-set-single-node - os: ubuntu2204 + os: ubuntu2404 display_name: "OTel - ${mongodb-version}" tags: ["pr"] tasks: @@ -171,7 +171,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: <%= topologies %> - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${retry-reads} ${ruby}" tags: ["pr"] tasks: @@ -183,7 +183,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: [replica-set, sharded-cluster] - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${retry-writes} ${ruby}" tags: ["pr"] tasks: @@ -195,7 +195,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: <%= topologies %> 
- os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${lint} ${ruby}" tasks: - name: "test-mlaunch" @@ -206,7 +206,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: <%= topologies %> - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} fork ${ruby}" tasks: - name: "test-mlaunch" @@ -217,7 +217,7 @@ buildvariants: ruby: <%= supported_mri_rubies_3_ubuntu %> mongodb-version: <%= latest_stable_mdb %> topology: <%= topologies %> - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} solo ${ruby}" tasks: - name: "test-mlaunch" @@ -239,7 +239,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= recent_mdb %> topology: replica-set - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} stress ${ruby}" tags: ["pr"] tasks: @@ -251,7 +251,7 @@ buildvariants: # ruby: <%= latest_ruby %> # mongodb-version: <%= latest_stable_mdb %> # topology: standalone -# os: ubuntu2204 +# os: ubuntu2404 # display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" # tasks: # - name: "test-mlaunch" @@ -262,14 +262,14 @@ buildvariants: ruby: <%= jrubies.first %> mongodb-version: <%= latest_stable_mdb %> topology: <%= topologies %> - os: ubuntu2204 + os: ubuntu2404 display_name: "${mongodb-version} ${topology} ${auth-and-ssl} ${ruby}" tags: ["pr"] tasks: - name: "run-main-test-suite" <% [ - [latest_ruby, latest_stable_mdb, 'ubuntu2204'], + [latest_ruby, latest_stable_mdb, 'ubuntu2404'], [supported_mri_ruby_2, '"6.0"', 'ubuntu2004'] ].each do |rubies, mdb, distro| %> @@ -347,7 +347,7 @@ buildvariants: ruby: <%= supported_mri_rubies_3_ubuntu %> topology: [replica-set, sharded-cluster] mongodb-version: [ '6.0', '7.0', '8.0' ] - os: ubuntu2204 + os: ubuntu2404 fle: helper display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" tags: ["pr"] @@ -367,7 +367,7 @@ buildvariants: ruby: <%= latest_ruby %> mongodb-version: <%= latest_stable_mdb %> topology: standalone - os: ubuntu2204 + os: ubuntu2404 auth-and-ssl: kerberos display_name: "Kerberos Tests" tasks: @@ -379,7 +379,7 @@ buildvariants: # ruby: <%#= latest_ruby %> # topology: [replica-set, sharded-cluster] # mongodb-version: [ 'latest' ] -# os: ubuntu2204 +# os: ubuntu2404 # fle: helper # display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" # tasks: @@ -395,7 +395,7 @@ buildvariants: ruby: <%= latest_ruby %> topology: standalone mongodb-version: <%= latest_stable_mdb %> - os: ubuntu2204 + os: ubuntu2404 display_name: "AWS ${auth-and-ssl} ${mongodb-version} ${ruby}" tags: ["pr"] tasks: @@ -408,7 +408,7 @@ buildvariants: ruby: <%= supported_mri_rubies_3_ubuntu %> topology: standalone mongodb-version: <%= latest_stable_mdb %> - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP verifier: ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -421,7 +421,7 @@ buildvariants: ruby: <%= latest_ruby %> topology: standalone mongodb-version: <%= latest_stable_mdb %> - os: ubuntu2204 + os: ubuntu2404 auth-and-ssl: noauth-and-ssl display_name: "OCSP integration - must staple: ${mongodb-version} ${ruby}" tasks: @@ -434,7 +434,7 @@ buildvariants: ruby: <%= latest_ruby %> topology: standalone mongodb-version: <%= latest_stable_mdb %> - os: ubuntu2204 + os: ubuntu2404 auth-and-ssl: noauth-and-ssl display_name: "OCSP integration - unknown: ${mongodb-version} ${ruby}" tasks: @@ -464,7 +464,7 @@ buildvariants: ruby: <%= latest_ruby %> topology: standalone mongodb-version: <%= latest_stable_mdb %> - os: 
ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${extra-uri-options} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -485,7 +485,7 @@ buildvariants: ruby: <%= jrubies.first %> topology: standalone mongodb-version: <%= latest_stable_mdb %> - os: ubuntu2204 + os: ubuntu2404 display_name: "OCSP connectivity: ${ocsp-algorithm} ${ocsp-status} ${ocsp-delegate} ${mongodb-version} ${ruby}" tasks: - name: test-mlaunch @@ -496,7 +496,7 @@ buildvariants: # ruby: <%= latest_ruby %> # fle: helper # topology: standalone - # os: ubuntu2204 + # os: ubuntu2404 # mongodb-version: <%= latest_stable_mdb %> # display_name: "GCP KMS" # tasks: @@ -527,7 +527,7 @@ buildvariants: - matrix_name: "atlas" matrix_spec: ruby: <%= supported_mri_rubies_3_ubuntu %> - os: ubuntu2204 + os: ubuntu2404 display_name: "Atlas connectivity tests ${ruby}" tags: ["pr"] tasks: @@ -537,7 +537,7 @@ buildvariants: # - matrix_name: "aws-lambda" # matrix_spec: # ruby: 'ruby-3.2' -# os: ubuntu2204 +# os: ubuntu2404 # display_name: "AWS Lambda" # tasks: # - name: test_aws_lambda_task_group diff --git a/gemfiles/standard.rb b/gemfiles/standard.rb index d03b9ec7f3..5c45d27260 100644 --- a/gemfiles/standard.rb +++ b/gemfiles/standard.rb @@ -41,6 +41,9 @@ def standard_dependencies gem 'ruby-debug-ide' end end + if RUBY_VERSION >= '3.4' + gem 'ostruct' + end end group :testing do From 3beae6a1cf3ff3cbc3dad21fd783d524d9f00ef1 Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Wed, 14 Jan 2026 17:18:51 +0100 Subject: [PATCH 3/9] Update download script --- .evergreen/config/standard.yml.erb | 4 ++-- .evergreen/find-python3.sh | 1 + .evergreen/mongosh_dl.py | 1 + .mod/drivers-evergreen-tools | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) create mode 120000 .evergreen/find-python3.sh create mode 120000 .evergreen/mongosh_dl.py diff --git a/.evergreen/config/standard.yml.erb b/.evergreen/config/standard.yml.erb index 6b4781cd9b..7b7724da0b 100644 --- a/.evergreen/config/standard.yml.erb +++ b/.evergreen/config/standard.yml.erb @@ -37,7 +37,7 @@ latest_stable_mdb = "8.2".inspect # so it gets quoted as a string - recent_mdb = %w( 8.2 8.0 7.0 ) + recent_mdb = %w( 8.2 8.0 ) %> buildvariants: @@ -122,7 +122,7 @@ buildvariants: - matrix_name: "mongo-api-version" matrix_spec: ruby: <%= latest_ruby %> - mongodb-version: '7.0' + mongodb-version: '8.0' topology: standalone api-version-required: yes os: ubuntu2404 diff --git a/.evergreen/find-python3.sh b/.evergreen/find-python3.sh new file mode 120000 index 0000000000..65027fd574 --- /dev/null +++ b/.evergreen/find-python3.sh @@ -0,0 +1 @@ +../.mod/drivers-evergreen-tools/.evergreen/find-python3.sh \ No newline at end of file diff --git a/.evergreen/mongosh_dl.py b/.evergreen/mongosh_dl.py new file mode 120000 index 0000000000..9a234910ec --- /dev/null +++ b/.evergreen/mongosh_dl.py @@ -0,0 +1 @@ +../.mod/drivers-evergreen-tools/.evergreen/mongosh_dl.py \ No newline at end of file diff --git a/.mod/drivers-evergreen-tools b/.mod/drivers-evergreen-tools index 9142b7055e..5514d6a1c1 160000 --- a/.mod/drivers-evergreen-tools +++ b/.mod/drivers-evergreen-tools @@ -1 +1 @@ -Subproject commit 9142b7055ea5940e59ad41c4b069376f867031da +Subproject commit 5514d6a1c1887693c898073aa31f5be98cab53ea From 9c446c39c80ed4035fcf8524d1b11ee9d81d40b2 Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Thu, 15 Jan 2026 12:07:04 +0100 Subject: [PATCH 4/9] Fix failing specs --- .evergreen/config.yml | 12 ++++++------ 
.evergreen/config/standard.yml.erb | 4 ++-- spec/mongo/client_construction_spec.rb | 10 ++++++---- .../mongo/monitoring/event/cmap/pool_created_spec.rb | 2 +- 4 files changed, 15 insertions(+), 13 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index f944557321..4ae56097b0 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1192,7 +1192,7 @@ buildvariants: matrix_spec: auth-and-ssl: ["auth-and-ssl", "noauth-and-nossl"] ruby: "ruby-4.0" - mongodb-version: ["8.2", "8.0", "7.0"] + mongodb-version: ["8.2", "8.0"] topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2404 display_name: ${auth-and-ssl} ${ruby} db-${mongodb-version} ${topology} @@ -1203,7 +1203,7 @@ buildvariants: - matrix_name: "mongo-recent" matrix_spec: ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "jruby-10.0"] - mongodb-version: ["8.2", "8.0", "7.0"] + mongodb-version: ["8.2", "8.0"] topology: ["standalone", "replica-set", "sharded-cluster"] os: ubuntu2404 display_name: "${mongodb-version} ${os} ${topology} ${auth-and-ssl} ${ruby}" @@ -1259,7 +1259,7 @@ buildvariants: - matrix_name: "mongo-api-version" matrix_spec: ruby: "ruby-4.0" - mongodb-version: '7.0' + mongodb-version: '8.0' topology: standalone api-version-required: yes os: ubuntu2404 @@ -1374,7 +1374,7 @@ buildvariants: matrix_spec: stress: on ruby: "ruby-4.0" - mongodb-version: ["8.2", "8.0", "7.0"] + mongodb-version: ["8.2", "8.0"] topology: replica-set os: ubuntu2404 display_name: "${mongodb-version} ${topology} stress ${ruby}" @@ -1543,8 +1543,8 @@ buildvariants: auth-and-ssl: "noauth-and-nossl" ruby: ["ruby-4.0", "ruby-3.4", "ruby-3.3", "ruby-3.2", "ruby-3.1"] topology: [replica-set, sharded-cluster] - mongodb-version: [ '6.0', '7.0', '8.0' ] - os: ubuntu2404 + mongodb-version: [ '6.0', '7.0', '8.0', '8.2' ] + os: ubuntu2204 fle: helper display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" tags: ["pr"] diff --git a/.evergreen/config/standard.yml.erb b/.evergreen/config/standard.yml.erb index 7b7724da0b..600d321c33 100644 --- a/.evergreen/config/standard.yml.erb +++ b/.evergreen/config/standard.yml.erb @@ -346,8 +346,8 @@ buildvariants: auth-and-ssl: "noauth-and-nossl" ruby: <%= supported_mri_rubies_3_ubuntu %> topology: [replica-set, sharded-cluster] - mongodb-version: [ '6.0', '7.0', '8.0' ] - os: ubuntu2404 + mongodb-version: [ '6.0', '7.0', '8.0', '8.2' ] + os: ubuntu2204 fle: helper display_name: "FLE: ${mongodb-version} ${topology} ${ruby}" tags: ["pr"] diff --git a/spec/mongo/client_construction_spec.rb b/spec/mongo/client_construction_spec.rb index 48f4039b4a..e1331f1dea 100644 --- a/spec/mongo/client_construction_spec.rb +++ b/spec/mongo/client_construction_spec.rb @@ -1666,21 +1666,23 @@ require_no_linting it 'rejects bogus read preference as symbol' do + read = BSON::Document.new({ mode: :bogus }) expect do - new_local_client_nmio(SINGLE_CLIENT, read: { mode: :bogus }) + new_local_client_nmio(SINGLE_CLIENT, read: read) end.to raise_error( Mongo::Error::InvalidReadOption, - 'Invalid read preference value: {"mode"=>:bogus}: ' \ + "Invalid read preference value: #{read}: " \ 'mode bogus is not one of recognized modes' ) end it 'rejects bogus read preference as string' do + read = BSON::Document.new({ mode: 'bogus' }) expect do - new_local_client_nmio(SINGLE_CLIENT, read: { mode: 'bogus' }) + new_local_client_nmio(SINGLE_CLIENT, read: read) end.to raise_error( Mongo::Error::InvalidReadOption, - 'Invalid read preference value: {"mode"=>"bogus"}: mode bogus is not one of recognized modes' + "Invalid 
read preference value: #{read}: mode bogus is not one of recognized modes" ) end diff --git a/spec/mongo/monitoring/event/cmap/pool_created_spec.rb b/spec/mongo/monitoring/event/cmap/pool_created_spec.rb index 637c46ba18..cf9a1d980f 100644 --- a/spec/mongo/monitoring/event/cmap/pool_created_spec.rb +++ b/spec/mongo/monitoring/event/cmap/pool_created_spec.rb @@ -30,7 +30,7 @@ end it 'renders correctly' do - expect(event.summary).to eq("#3, :min_pool_size=>5} pool=0x#{pool.object_id}>") + expect(event.summary).to eq("#") end end end From 489638829b25d072be6414904351f1ad33faddc0 Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Thu, 15 Jan 2026 14:21:30 +0100 Subject: [PATCH 5/9] Copy scripts --- .evergreen/download-mongodb.sh | 233 +++++- .evergreen/find-python3.sh | 295 +++++++- .evergreen/mongodl.py | 1259 +++++++++++++++++++++++++++++++- .evergreen/mongosh_dl.py | 212 +++++- gemfiles/standard.rb | 4 +- 5 files changed, 1996 insertions(+), 7 deletions(-) mode change 120000 => 100755 .evergreen/download-mongodb.sh mode change 120000 => 100755 .evergreen/find-python3.sh mode change 120000 => 100755 .evergreen/mongodl.py mode change 120000 => 100755 .evergreen/mongosh_dl.py diff --git a/.evergreen/download-mongodb.sh b/.evergreen/download-mongodb.sh deleted file mode 120000 index d0b2306cc8..0000000000 --- a/.evergreen/download-mongodb.sh +++ /dev/null @@ -1 +0,0 @@ -../.mod/drivers-evergreen-tools/.evergreen/download-mongodb.sh \ No newline at end of file diff --git a/.evergreen/download-mongodb.sh b/.evergreen/download-mongodb.sh new file mode 100755 index 0000000000..f5bce8a686 --- /dev/null +++ b/.evergreen/download-mongodb.sh @@ -0,0 +1,232 @@ +#!/usr/bin/env bash +# shellcheck shell=sh + +# This file is no longer used directly by drivers-evergreen-tools. +# If using this file to download mongodb binaries, you should consider instead using `mongodl.py` and `mongosh-dl.py`. +# If using this file for get_distro, use `get-distro.sh`. +set -o errexit # Exit the script with error if any of the commands fail + +get_distro () +{ + # shellcheck disable=SC3028 + _script_dir="$(dirname ${BASH_SOURCE:-$0})" + . ${_script_dir}/get-distro.sh +} + +# get_mongodb_download_url_for "linux-distro-version-architecture" "latest|44|42|40|36|34|32|30|28|26|24" "true|false" +# Sets EXTRACT to appropriate extract command +# Sets MONGODB_DOWNLOAD_URL to the appropriate download url +# Sets MONGO_CRYPT_SHARED_DOWNLOAD_URL to the corresponding URL to a crypt_shared library archive +get_mongodb_download_url_for () +{ + _DISTRO=$1 + _VERSION=$2 + _DEBUG=$3 + + EXTRACT="tar zxf" + EXTRACT_MONGOSH=$EXTRACT + + case "$_DEBUG" in + true) + _component="archive-debug" + ;; + *) + _component="archive" + ;; + esac + + case "$_DISTRO" in + darwin-*) + EXTRACT_MONGOSH="unzip -q" + ;; + windows32* | cygwin*-i686) + EXTRACT="/cygdrive/c/Progra~1/7-Zip/7z.exe x" + EXTRACT_MONGOSH="/cygdrive/c/Progra~1/7-Zip/7z.exe x" + ;; + windows64* | cygwin*-x86_64) + EXTRACT="/cygdrive/c/Progra~2/7-Zip/7z.exe x" + EXTRACT_MONGOSH="/cygdrive/c/Progra~2/7-Zip/7z.exe x" + ;; + # Windows on GitHub Actions + mingw64_nt-*-x86_64) + EXTRACT="7z.exe x" + EXTRACT_MONGOSH="7z.exe x" + ;; + esac + + # Get the download url for the latest MongoSH. + # shellcheck disable=SC3028 + _script_dir="$(dirname ${BASH_SOURCE:-$0})" + _python3=$(bash -c ". 
$_script_dir/find-python3.sh && ensure_python3 2>/dev/null") + MONGOSH_DOWNLOAD_URL=$($_python3 "${_script_dir}/mongosh_dl.py" --no-download | tr -d '\r') + + # Get the download url for MongoDB for the given version. + MONGODB_DOWNLOAD_URL="$($_python3 "${_script_dir}/mongodl.py" --version $_VERSION --component $_component --no-download | tr -d '\r')" + + if [ -z "$MONGODB_DOWNLOAD_URL" ]; then + echo "Unknown version: $_VERSION for $_DISTRO" + exit 1 + fi + + MONGO_CRYPT_SHARED_DOWNLOAD_URL=$($_python3 "${_script_dir}/mongodl.py" --version $_VERSION --component crypt_shared --no-download | tr -d '\r') + + echo "$MONGODB_DOWNLOAD_URL" +} + +# curl_retry emulates running curl with `--retry 5` and `--retry-all-errors`. +curl_retry () +{ + for i in 1 2 4 8 16; do + { curl --fail -sS --max-time 300 "$@" && return 0; } || sleep $i + done + return 1 +} + +# download_and_extract_package downloads a MongoDB server package. +download_and_extract_package () +{ + MONGODB_DOWNLOAD_URL=$1 + EXTRACT=$2 + + if [ -n "${MONGODB_BINARIES:-}" ]; then + cd "$(dirname "$(dirname "${MONGODB_BINARIES:?}")")" + else + cd $DRIVERS_TOOLS + fi + + echo "Installing server binaries..." + curl_retry "$MONGODB_DOWNLOAD_URL" --output mongodb-binaries.tgz + + $EXTRACT mongodb-binaries.tgz + echo "Installing server binaries... done." + + rm -f mongodb-binaries.tgz + mv mongodb* mongodb + chmod -R +x mongodb + # Clear the environment to avoid "find: The environment is too large for exec()" + # error on Windows. + env -i PATH="$PATH" find . -name vcredist_x64.exe -exec {} /install /quiet \; + echo "MongoDB server version: $(./mongodb/bin/mongod --version)" + cd - +} + +download_and_extract_mongosh () +{ + MONGOSH_DOWNLOAD_URL=$1 + EXTRACT_MONGOSH=${2:-"tar zxf"} + + if [ -z "$MONGOSH_DOWNLOAD_URL" ]; then + get_mongodb_download_url_for "$(get_distro)" latest false + fi + + if [ -n "${MONGODB_BINARIES:-}" ]; then + cd "$(dirname "$(dirname "${MONGODB_BINARIES:?}")")" + else + cd $DRIVERS_TOOLS + fi + + echo "Installing MongoDB shell..." + curl_retry $MONGOSH_DOWNLOAD_URL --output mongosh.tgz + $EXTRACT_MONGOSH mongosh.tgz + + rm -f mongosh.tgz + mv mongosh-* mongosh + mkdir -p mongodb/bin + mv mongosh/bin/* mongodb/bin + rm -rf mongosh + chmod -R +x mongodb/bin + echo "Installing MongoDB shell... done." + echo "MongoDB shell version: $(./mongodb/bin/mongosh --version)" + cd - +} + +# download_and_extract downloads a requested MongoDB server package. +# If the legacy shell is not included in the download, the legacy shell is also downloaded from the 5.0 package. +download_and_extract () +{ + MONGODB_DOWNLOAD_URL=$1 + EXTRACT=$2 + MONGOSH_DOWNLOAD_URL=$3 + EXTRACT_MONGOSH=$4 + + download_and_extract_package "$MONGODB_DOWNLOAD_URL" "$EXTRACT" + + if [ "$MONGOSH_DOWNLOAD_URL" ]; then + download_and_extract_mongosh "$MONGOSH_DOWNLOAD_URL" "$EXTRACT_MONGOSH" + fi + + if [ ! -z "${INSTALL_LEGACY_SHELL:-}" ] && [ ! -e $DRIVERS_TOOLS/mongodb/bin/mongo ] && [ ! -e $DRIVERS_TOOLS/mongodb/bin/mongo.exe ]; then + # The legacy mongo shell is not included in server downloads of 6.0.0-rc6 or later. Refer: SERVER-64352. + # Some test scripts use the mongo shell for setup. + # Download 5.0 package to get the legacy mongo shell as a workaround until DRIVERS-2328 is addressed. + echo "Legacy 'mongo' shell not detected." + echo "Download legacy shell from 5.0 ... begin" + # Use a subshell to avoid overwriting MONGODB_DOWNLOAD_URL and MONGO_CRYPT_SHARED_DOWNLOAD_URL. 
+ MONGODB50_DOWNLOAD_URL=$( + get_mongodb_download_url_for "$DISTRO" "5.0" > /dev/null + echo "$MONGODB_DOWNLOAD_URL" + ) + + SAVED_DRIVERS_TOOLS=$DRIVERS_TOOLS + mkdir $DRIVERS_TOOLS/legacy-shell-download + DRIVERS_TOOLS=$DRIVERS_TOOLS/legacy-shell-download + download_and_extract_package "$MONGODB50_DOWNLOAD_URL" "$EXTRACT" + if [ -e $DRIVERS_TOOLS/mongodb/bin/mongo ]; then + cp $DRIVERS_TOOLS/mongodb/bin/mongo $SAVED_DRIVERS_TOOLS/mongodb/bin + elif [ -e $DRIVERS_TOOLS/mongodb/bin/mongo.exe ]; then + cp $DRIVERS_TOOLS/mongodb/bin/mongo.exe $SAVED_DRIVERS_TOOLS/mongodb/bin + fi + DRIVERS_TOOLS=$SAVED_DRIVERS_TOOLS + rm -rf $DRIVERS_TOOLS/legacy-shell-download + echo "Download legacy shell from 5.0 ... end" + fi + + # Define SKIP_CRYPT_SHARED=1 to skip downloading crypt_shared. This is useful for platforms that have a + # server release but don't ship a corresponding crypt_shared release, like Amazon 2018. + if [ -z "${SKIP_CRYPT_SHARED:-}" ]; then + if [ -z "$MONGO_CRYPT_SHARED_DOWNLOAD_URL" ]; then + echo "There is no crypt_shared library for distro='$DISTRO' and version='$MONGODB_VERSION'". + else + echo "Downloading crypt_shared package from $MONGO_CRYPT_SHARED_DOWNLOAD_URL" + download_and_extract_crypt_shared "$MONGO_CRYPT_SHARED_DOWNLOAD_URL" "$EXTRACT" CRYPT_SHARED_LIB_PATH + echo "CRYPT_SHARED_LIB_PATH:" $CRYPT_SHARED_LIB_PATH + if [ -z $CRYPT_SHARED_LIB_PATH ]; then + echo "CRYPT_SHARED_LIB_PATH must be assigned, but wasn't" 1>&2 # write to stderr" + exit 1 + fi + fi + fi +} + +# download_and_extract_crypt_shared downloads and extracts a crypt_shared package into the current directory. +# Use get_mongodb_download_url_for to get a MONGO_CRYPT_SHARED_DOWNLOAD_URL. +download_and_extract_crypt_shared () +{ + MONGO_CRYPT_SHARED_DOWNLOAD_URL=$1 + EXTRACT=$2 + __CRYPT_SHARED_LIB_PATH=${3:-CRYPT_SHARED_LIB_PATH} + rm -rf crypt_shared_download + mkdir crypt_shared_download + cd crypt_shared_download + + curl_retry $MONGO_CRYPT_SHARED_DOWNLOAD_URL --output crypt_shared-binaries.tgz + $EXTRACT crypt_shared-binaries.tgz + + LIBRARY_NAME="mongo_crypt_v1" + # Windows package includes .dll in 'bin' directory. + if [ -d ./bin ]; then + cp bin/$LIBRARY_NAME.* .. + else + cp lib/$LIBRARY_NAME.* .. + fi + cd .. + rm -rf crypt_shared_download + + RELATIVE_CRYPT_SHARED_LIB_PATH="$(find . -maxdepth 1 -type f \( -name "$LIBRARY_NAME.dll" -o -name "$LIBRARY_NAME.so" -o -name "$LIBRARY_NAME.dylib" \))" + ABSOLUTE_CRYPT_SHARED_LIB_PATH=$(pwd)/$(basename $RELATIVE_CRYPT_SHARED_LIB_PATH) + if [ "Windows_NT" = "$OS" ]; then + # If we're on Windows, convert the "cygdrive" path to Windows-style paths. + ABSOLUTE_CRYPT_SHARED_LIB_PATH=$(cygpath -m $ABSOLUTE_CRYPT_SHARED_LIB_PATH) + fi + eval $__CRYPT_SHARED_LIB_PATH=$ABSOLUTE_CRYPT_SHARED_LIB_PATH +} diff --git a/.evergreen/find-python3.sh b/.evergreen/find-python3.sh deleted file mode 120000 index 65027fd574..0000000000 --- a/.evergreen/find-python3.sh +++ /dev/null @@ -1 +0,0 @@ -../.mod/drivers-evergreen-tools/.evergreen/find-python3.sh \ No newline at end of file diff --git a/.evergreen/find-python3.sh b/.evergreen/find-python3.sh new file mode 100755 index 0000000000..fe8eba8702 --- /dev/null +++ b/.evergreen/find-python3.sh @@ -0,0 +1,294 @@ +#!/usr/bin/env bash +# +# find-python3.sh +# +# Usage: +# . /path/to/find-python3.sh +# +# This file defines the following utility functions: +# - is_python3 +# - is_venv_capable +# - is_virtualenv_capable +# - find_python3 +# These functions may be invoked from any working directory. 
+ +if [ -z "$BASH" ]; then + echo "find-python3.sh must be run in a Bash shell!" 1>&2 + return 1 +fi + +# is_python3 +# +# Usage: +# is_python3 python +# is_python3 /path/to/python +# +# Parameters: +# "$1": The name or path of the python binary to test. +# +# Return 0 (true) if the given argument "$1" is a viable Python 3 binary. +# Return a non-zero value (false) otherwise. +# +# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null +# to silence these messages. +is_python3() ( + set -o errexit + set -o pipefail + + HERE="$(dirname "${BASH_SOURCE[0]}")" + + # Binary to use, e.g. "python". + local -r bin="${1:?'is_python3 requires a name or path of a python binary to test'}" + + # Binary must be executable. + command -V "$bin" &>/dev/null || return + + echo "- ${bin}" + + "$bin" "${HERE:?}/is_python3.py" || return +) 1>&2 + +# is_venv_capable +# +# Usage: +# is_venv_capable python +# is_venv_capable /path/to/python +# +# Parameters: +# "$1": The name or path of the python binary to test. +# +# Return 0 (true) if the given argument "$1" can successfully evaluate the +# command: +# "$1" -m venv venv +# and activate the created virtual environment. +# Return a non-zero value (false) otherwise. +# +# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null +# to silence these messages. +is_venv_capable() ( + set -o errexit + set -o pipefail + + local -r bin="${1:?'is_venv_capable requires a name or path of a python binary to test'}" + + # Use a temporary directory to avoid polluting the caller's environment. + local -r tmp="$(mktemp -d)" + trap 'rm -rf "$tmp"' EXIT + + if [[ "${OSTYPE:?}" == cygwin ]]; then + local -r real_path="$(cygpath -aw "$tmp")" || return + else + local -r real_path="$tmp" + fi + + "$bin" -m venv "$real_path" || return + + if [[ -f "$tmp/bin/activate" ]]; then + # shellcheck source=/dev/null + . "$tmp/bin/activate" + elif [[ -f "$tmp/Scripts/activate" ]]; then + # Workaround https://github.com/python/cpython/issues/76632: + # activate: line 3: $'\r': command not found + dos2unix -q "$tmp/Scripts/activate" || true + # shellcheck source=/dev/null + . "$tmp/Scripts/activate" + else + echo "Could not find an activation script in $tmp!" + return 1 + fi +) 1>&2 + +# is_virtualenv_capable +# +# Usage: +# is_virtualenv_capable python +# is_virtualenv_capable /path/to/python +# +# Parameters: +# "$1": The name or path of the python binary to test. +# +# Return 0 (true) if the given argument $1 can successfully evaluate the +# command: +# "$1" -m virtualenv -p "$1" venv +# and activate the created virtual environment. +# Return a non-zero value (false) otherwise. +# +# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null +# to silence these messages. +is_virtualenv_capable() ( + set -o errexit + set -o pipefail + + local -r bin="${1:?'is_virtualenv_capable requires a name or path of a python binary to test'}" + + # Use a temporary directory to avoid polluting the caller's environment. + local tmp + tmp="$(mktemp -d)" + trap 'rm -rf "$tmp"' EXIT + + local real_path + if [[ "${OSTYPE:?}" == cygwin ]]; then + real_path="$(cygpath -aw "$tmp")" || return + else + real_path="$tmp" + fi + + # -p: some old versions of virtualenv (e.g. installed on Debian 10) are buggy. + # Without -p, the created virtual environment may use the wrong Python binary + # (e.g. using a Python 2 binary even if it was created by a Python 3 binary). 
+ "$bin" -m virtualenv -p "$bin" "$real_path" || return + + if [[ -f "$tmp/bin/activate" ]]; then + # shellcheck source=/dev/null + . "$tmp/bin/activate" + elif [[ -f "$tmp/Scripts/activate" ]]; then + # Workaround https://github.com/python/cpython/issues/76632: + # activate: line 3: $'\r': command not found + dos2unix -q "$tmp/Scripts/activate" || true + # shellcheck source=/dev/null + . "$tmp/Scripts/activate" + else + echo "Could not find an activation script in $tmp!" + return 1 + fi +) 1>&2 + +# find_python3 +# +# Usage: +# find_python3 +# PYTHON_BINARY=$(find_python3) +# PYTHON_BINARY=$(find_python3 2>/dev/null) +# +# Return 0 (true) if a Python 3 binary capable of creating a virtual environment +# with either venv or virtualenv can be found. +# Return a non-zero (false) value otherwise. +# +# If successful, print the name of the binary stdout (pipe 1). +# Otherwise, no output is printed to stdout (pipe 1). +# +# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null +# with `2>/dev/null` to silence these messages. +# +# Example: +# PYTHON_BINARY=$(find_python3) +# if [[ -z "$PYTHON_BINARY" ]]; then +# # Handle missing Python binary situation. +# fi +# +# if "$PYTHON_BINARY" -m venv -h; then +# "$PYTHON_BINARY" -m venv venv +# else +# "$PYTHON_BINARY" -m virtualenv -p "$PYTHON_BINARY" venv +# fi +find_python3() ( + set -o errexit + set -o pipefail + + # Prefer Python Toolchain Current version. + declare python_binary="" + case "${OSTYPE:?}" in + cygwin) + python_binary="C:/python/Current/python.exe" + ;; + darwin*) + python_binary="/Library/Frameworks/Python.Framework/Versions/Current/bin/python3" + ;; + *) + python_binary="/opt/python/Current/bin/python3" + ;; + esac + if is_python3 "${python_binary:?}"; then + echo "Using Python binary ${python_binary:?}" >&2 + echo "${python_binary:?}" + return + fi + + test_bins() { + local -r bin_path="${1:?'missing path'}" + shift + local -r pattern="${1:?'missing pattern'}" + shift + local -ar suffixes=("${@:?'missing suffixes'}") + + for dir in $(find "$bin_path" -maxdepth 1 -name "$pattern" -type d 2>/dev/null | sort -rV); do + for bin in "${suffixes[@]}"; do + if is_python3 "$dir/$bin"; then + echo "$dir/$bin" + return + fi + done + done + } + + # Look in other standard locations. + case "${OSTYPE:?}" in + cygwin) + # Python toolchain: C:/python/Python3X/bin/python + python_binary="$(test_bins "C:/python" "Python3[0-9]*" "python3.exe" "python.exe")" + ;; + darwin*) + # Standard location: /Library/Frameworks/Python.Framework/Versions/XXX/bin/python3 + python_binary="$(test_bins "/Library/Frameworks/Python.Framework/Versions/" "[0-9]*" "bin/python3")" + ;; + *) + # MongoDB toolchain: /opt/mongodbtoolchain/vX/bin/python + python_binary="$(test_bins "/opt/mongodbtoolchain" "v[0-9]*" "bin/python3" "bin/python")" + if [ -z "$python_binary" ]; then + # Python toolchain: /opt/python/3.X/bin/python + python_binary=$(test_bins "/opt/python" "3.[0-9]*" "bin/python3" "bin/python") + fi + ;; + esac + + # Fall back to looking for a system python. + if [ -z "${python_binary}" ]; then + if is_python3 "python3"; then + python_binary="python3" + elif is_python3 "python"; then + python_binary="python" + fi + fi + + # Handle success. + if [ -n "${python_binary}" ]; then + echo "Using Python binary ${python_binary:?}" >&2 + echo "${python_binary:?}" + return + else + echo "No valid pythons found!" 
>&2 + fi +) + +# +# Usage: +# ensure_python3 +# PYTHON_BINARY=$(ensure_python3) +# PYTHON_BINARY=$(ensure_python3 2>/dev/null) +# +# If successful, print the name of the binary stdout (pipe 1). +# Otherwise, no output is printed to stdout (pipe 1). +# +# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null +# with `2>/dev/null` to silence these messages. +# +# If DRIVERS_TOOLS_PYTHON is set, it will return that value. Otherwise +# it will fall back to using find_python3 to return a suitable value. +# +ensure_python3() { + # Use "$DRIVERS_TOOLS_PYTHON". + if command -v "${DRIVERS_TOOLS_PYTHON:-}" >/dev/null; then + echo "Using Python binary ${DRIVERS_TOOLS_PYTHON:?}" >&2 + echo "${DRIVERS_TOOLS_PYTHON:?}" + return + fi + + # Use find_python3. + declare python_binary="" + { + echo "Finding Python3 binary..." + python_binary="$(find_python3 2>/dev/null)" + echo "Finding Python3 binary... done." + } 1>&2 + echo "${python_binary}" +} diff --git a/.evergreen/mongodl.py b/.evergreen/mongodl.py deleted file mode 120000 index ce13a359c1..0000000000 --- a/.evergreen/mongodl.py +++ /dev/null @@ -1 +0,0 @@ -../.mod/drivers-evergreen-tools/.evergreen/mongodl.py \ No newline at end of file diff --git a/.evergreen/mongodl.py b/.evergreen/mongodl.py new file mode 100755 index 0000000000..aebaa5d06c --- /dev/null +++ b/.evergreen/mongodl.py @@ -0,0 +1,1258 @@ +#!/usr/bin/env python3 +""" +Download and extract MongoDB components. + +Can also be imported and used as a module. Refer: + +- class Cache - Manage, query, and use a cache +- class CacheDB - Manage and query a cache db +- func infer_target() - Infer the download target of the host OS +- func infer_arch() - Infer the architecture of the host OS +- user_caches_root() - Where programs should put their cache data +- default_cache_dir() - Default directory for mongodl cache data + +Use '--help' for more information. +""" + +import argparse +import enum +import hashlib +import json +import logging +import os +import platform +import re +import shutil +import sqlite3 +import ssl +import sys +import tarfile +import textwrap +import time +import urllib.error +import urllib.request +import warnings +import zipfile +from collections import namedtuple +from contextlib import contextmanager +from fnmatch import fnmatch +from pathlib import Path, PurePath, PurePosixPath +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Iterable, + Iterator, + NamedTuple, + Optional, + cast, +) + +LOGGER = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s") + +SSL_CONTEXT = ssl.create_default_context() +try: + import certifi + + SSL_CONTEXT.load_verify_locations(certifi.where()) +except ImportError: + pass + +# These versions are used for performance benchmarking. Do not update to a newer version. 
+PERF_VERSIONS = {"v6.0-perf": "6.0.6", "v8.0-perf": "8.0.1"} + +#: Map common distribution names to the distribution named used in the MongoDB download list +DISTRO_ID_MAP = { + "elementary": "ubuntu", + "fedora": "rhel", + "centos": "rhel", + "mint": "ubuntu", + "linuxmint": "ubuntu", + "opensuse-leap": "sles", + "opensuse": "sles", + "pop": "ubuntu", + "redhat": "rhel", + "rocky": "rhel", +} + +#: Map derived distro versions to their base distribution versions +DISTRO_VERSION_MAP = { + "elementary": { + "6": "20.04", + "6.*": "20.04", + }, + "fedora": { + "32": "8", + "33": "8", + "34": "8", + "35": "8", + "36": "8", + }, + "linuxmint": { + "19": "18.04", + "19.*": "18.04", + "20": "20.04", + "20.*": "20.04", + "21": "22.04", + "21.*": "22.04", + }, + "pop": { + "20.04": "20.04", + "22.04": "22.04", + }, +} + +#: Map distribution IDs with version fnmatch() patterns to download platform targets +DISTRO_ID_TO_TARGET = { + "ubuntu": { + "24.*": "ubuntu2404", + "22.*": "ubuntu2204", + "20.*": "ubuntu2004", + "18.*": "ubuntu1804", + "16.*": "ubuntu1604", + "14.*": "ubuntu1404", + }, + "debian": { + "9": "debian92", + "10": "debian10", + "11": "debian11", + "12": "debian12", + }, + "rhel": { + "6": "rhel6", + "6.*": "rhel6", + "7": "rhel7", + "7.*": "rhel7", + "8": "rhel8", + "8.*": "rhel8", + "9": "rhel9", + "9.*": "rhel9", + }, + "sles": { + "10.*": "suse10", + "11.*": "suse11", + "12.*": "suse12", + "13.*": "suse13", + "15.*": "suse15", + }, + "amzn": { + "2023": "amazon2023", + "2018.*": "amzn64", + "2": "amazon2", + }, +} + +# The list of valid targets that are not related to a specific Linux distro. +TARGETS_THAT_ARE_NOT_DISTROS = ["linux_i686", "linux_x86_64", "osx", "macos", "windows"] + + +def infer_target(version: Optional[str] = None) -> str: + """ + Infer the download target of the current host system. + """ + if sys.platform == "win32": + return "windows" + if sys.platform == "darwin": + # Older versions of the server used 'osx' as the target. + if version is not None: + if version.startswith("4.0") or version[0] == "3": + return "osx" + return "macos" + # Now the tricky bit + cands = (Path(p) for p in ["/etc/os-release", "/usr/lib/os-release"]) + existing = (p for p in cands if p.is_file()) + found = next(iter(existing), None) + if found: + return infer_target_from_os_release(found) + raise RuntimeError( + "We don't know how to find the default '--target'" + " option for this system. Please contribute!" + ) + + +def infer_target_from_os_release(osr: Path) -> str: + """ + Infer the download target based on the content of os-release + """ + with osr.open("r", encoding="utf-8") as f: + os_rel = f.read() + # Extract the "ID" field + id_re = re.compile(r'\bID=("?)(.*)\1') + mat = id_re.search(os_rel) + assert mat, f"Unable to detect ID from [{osr}] content:\n{os_rel}" + os_id = mat.group(2) + if os_id == "arch": + # There are no Archlinux-specific MongoDB downloads, so we'll just use + # the build for RHEL8, which is reasonably compatible with other modern + # distributions (including Arch). 
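To make the mapping tables above concrete: a Linux Mint 21.2 host reports ID=linuxmint and VERSION_ID=21.2 in /etc/os-release; DISTRO_ID_MAP rewrites the distro to ubuntu, DISTRO_VERSION_MAP matches the "21.*" pattern and rewrites the version to 22.04, and DISTRO_ID_TO_TARGET then matches "22.*" to yield the ubuntu2204 download target. A condensed sketch of that lookup, assuming mongodl.py is importable (the full logic, including its error paths, continues in infer_target_from_os_release below):

# Illustrative sketch only: resolve a download target from os-release fields
# using the mapping tables defined above.
from fnmatch import fnmatch

from mongodl import DISTRO_ID_MAP, DISTRO_ID_TO_TARGET, DISTRO_VERSION_MAP


def resolve_target(os_id, ver_id):
    mapped_id = DISTRO_ID_MAP.get(os_id)
    if mapped_id:
        # Map the derived distro's version onto its base distro's version.
        ver_map = DISTRO_VERSION_MAP.get(os_id, {})
        ver_id = next((v for pat, v in ver_map.items() if fnmatch(ver_id, pat)), ver_id)
        os_id = mapped_id
    # Match the (possibly rewritten) version against the target patterns.
    for pattern, target in DISTRO_ID_TO_TARGET[os_id].items():
        if fnmatch(ver_id, pattern):
            return target
    raise RuntimeError(f"no download target for {os_id} {ver_id}")


assert resolve_target("linuxmint", "21.2") == "ubuntu2204"
assert resolve_target("rocky", "9.3") == "rhel9"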
+ return "rhel80" + # Extract the "VERSION_ID" field + ver_id_re = re.compile(r'VERSION_ID=("?)(.*)\1') + mat = ver_id_re.search(os_rel) + assert mat, f"Unable to detect VERSION_ID from [{osr}] content:\n{os_rel}" + ver_id = mat.group(2) + # Map the ID to the download ID + mapped_id = DISTRO_ID_MAP.get(os_id) + if mapped_id: + # Map the distro version to its upstream version + ver_mapper = DISTRO_VERSION_MAP.get(os_id, {}) + # Find the version based on a fnmatch pattern: + matching = (ver for pat, ver in ver_mapper.items() if fnmatch(ver_id, pat)) + # The default is to keep the version ID. + mapped_version = next(iter(matching), None) + if mapped_version is None: + # If this raises, a version/pattern needs to be added + # to DISTRO_VERSION_MAP + raise RuntimeError( + f"We don't know how to map {os_id} version '{ver_id}' " + f"to an upstream {mapped_id} version. Please contribute!" + ) + ver_id = mapped_version + os_id = mapped_id + os_id = os_id.lower() + if os_id not in DISTRO_ID_TO_TARGET: + raise RuntimeError( + f"We don't know how to map '{os_id}' to a distribution " + "download target. Please contribute!" + ) + # Find the download target based on a filename-style pattern: + ver_table = DISTRO_ID_TO_TARGET[os_id] + for pattern, target in ver_table.items(): + if fnmatch(ver_id, pattern): + return target + raise RuntimeError( + f"We don't know how to map '{os_id}' version '{ver_id}' to a distribution " + "download target. Please contribute!" + ) + + +def user_caches_root() -> Path: + """ + Obtain the directory for user-local caches + """ + if sys.platform == "win32": + return Path(os.environ["LocalAppData"]) + if sys.platform == "darwin": + return Path(os.environ["HOME"] + "/Library/Caches") + xdg_cache = os.getenv("XDG_CACHE_HOME") + if xdg_cache: + return Path(xdg_cache) + return Path(os.environ["HOME"] + "/.cache") + + +def default_cache_dir() -> Path: + """ + Get the path to the default directory of mongodl caches. 
+ """ + return user_caches_root().joinpath("mongodl").absolute() + + +if TYPE_CHECKING: + DownloadResult = NamedTuple( + "DownloadResult", [("is_changed", bool), ("path", Path)] + ) + DownloadableComponent = NamedTuple( + "DownloadableComponent", + [ + ("version", str), + ("target", str), + ("arch", str), + ("edition", str), + ("key", str), + ("data_json", str), + ], + ) +else: + DownloadResult = namedtuple("DownloadResult", ["is_changed", "path"]) + DownloadableComponent = namedtuple( + "DownloadableComponent", + ["version", "target", "arch", "edition", "key", "data_json"], + ) + +#: Regular expression that matches the version numbers from 'full.json' +VERSION_RE = re.compile(r"(\d+)\.(\d+)\.(\d+)(?:-([a-z]+)(\d+))?") +MAJOR_VERSION_RE = re.compile(r"(\d+)\.(\d+)$") +STABLE_MAX_RC = 9999 + + +def version_tup(version: str) -> "tuple[int, int, int, int, int]": + if MAJOR_VERSION_RE.match(version): + maj, min = version.split(".") + return tuple([int(maj), int(min), 0, 0, 0]) + + mat = VERSION_RE.match(version) + assert mat, f'Failed to parse "{version}" as a version number' + major, minor, patch, tag, tagnum = list(mat.groups()) + if tag is None: + # No rc tag is greater than an equal base version with any rc tag + tag = STABLE_MAX_RC + tagnum = 0 + else: + tag = { + "alpha": 1, + "beta": 2, + "rc": 3, + }[tag] + return tuple(map(int, (major, minor, patch, tag, tagnum))) + + +def collate_mdb_version(left: str, right: str) -> int: + lhs = version_tup(left) + rhs = version_tup(right) + if lhs < rhs: + return -1 + if lhs > rhs: + return 1 + return 0 + + +def mdb_version_not_rc(version: str) -> bool: + tup = version_tup(version) + return tup[-1] == STABLE_MAX_RC + + +def mdb_version_rapid(version: str) -> bool: + tup = version_tup(version) + return tup[1] > 0 + + +class DownloadRetrier: + """Class that handles retry logic. It performs exponential backoff with a maximum delay of 10 minutes between retry attempts.""" + + def __init__(self, retries: int) -> None: + self.retries = retries + self.attempt = 0 + assert self.retries >= 0 + + def retry(self) -> bool: + if self.attempt >= self.retries: + return False + self.attempt += 1 + LOGGER.warning( + f"Download attempt failed, retrying attempt {self.attempt} of {self.retries}" + ) + ten_minutes = 600 + time.sleep(min(2 ** (self.attempt - 1), ten_minutes)) + return True + + +class CacheDB: + """ + Abstract a mongodl cache SQLite database. + """ + + def __init__(self, db: sqlite3.Connection) -> None: + self._db = db + # Use a cursor to get access to lastrowid + self._cursor = self._db.cursor() + + @staticmethod + def open(fpath: Path) -> "CacheDB": + """ + Open a caching database at the given filepath. + """ + db = sqlite3.connect(str(fpath), isolation_level=None) + db.execute(r""" + CREATE TABLE IF NOT EXISTS mdl_http_downloads ( + url TEXT NOT NULL UNIQUE, + etag TEXT, + last_modified TEXT + )""") + db.create_collation("mdb_version", collate_mdb_version) + db.create_function("mdb_version_not_rc", 1, mdb_version_not_rc) + db.create_function("mdb_version_rapid", 1, mdb_version_rapid) + return CacheDB(db) + + def __call__( + self, query: str, **params: "str | int | bool | float | None" + ) -> "Iterable[sqlite3.Row]": + """ + Execute a query with the given named parameters. + """ + return self._cursor.execute(query, params) + + @contextmanager + def transaction(self) -> "Iterator[None]": + """ + Create a context for a database transaction. 
+ """ + if self._db.in_transaction: + yield + return + + with self._db: + # Must do an explicit BEGIN because isolation_level=None + self("BEGIN") + yield + + def import_json_file(self, json_file: Path) -> None: + """ + Import the given downloads content from the given JSON file + """ + with json_file.open("r", encoding="utf-8") as f: + data = json.load(f) + self.import_json_data(data) + + def import_json_data(self, data: "Any") -> None: + """ + Import the given downloads content from the given JSON-like data + """ + with self.transaction(): + self._import_json_data(data) + + def _import_json_data(self, data: "Any") -> None: + # We're reloading everything, so just drop and re-create the tables. + # Bonus: We don't have to worry about schema changes + self("DROP TABLE IF EXISTS mdl_components") + self("DROP TABLE IF EXISTS mdl_downloads") + self("DROP TABLE IF EXISTS mdl_versions") + self(r""" + CREATE TABLE mdl_versions ( + version_id INTEGER PRIMARY KEY, + date TEXT NOT NULL, + version TEXT NOT NULL, + githash TEXT NOT NULL + ) + """) + self(r""" + CREATE TABLE mdl_downloads ( + download_id INTEGER PRIMARY KEY, + version_id INTEGER NOT NULL REFERENCES mdl_versions, + target TEXT NOT NULL, + arch TEXT NOT NULL, + edition TEXT NOT NULL, + ar_url TEXT NOT NULL, + ar_debug_url TEXT, + data TEXT NOT NULL + ) + """) + self(r""" + CREATE TABLE mdl_components ( + component_id INTEGER PRIMARY KEY, + key TEXT NOT NULL, + download_id INTEGER NOT NULL REFERENCES mdl_downloads, + data NOT NULL, + UNIQUE(key, download_id) + ) + """) + + for ver in data["versions"]: + version = ver["version"] + githash = ver["githash"] + date = ver["date"] + self( + r""" + INSERT INTO mdl_versions (date, version, githash) + VALUES (:date, :version, :githash) + """, + date=date, + version=version, + githash=githash, + ) + version_id = self._cursor.lastrowid + missing = set() + for dl in ver["downloads"]: + arch = dl.get("arch", "null") + target = dl.get("target", "null") + # Normalize RHEL target names to include just the major version. + if target.startswith("rhel") and len(target) == 6: + target = target[:-1] + found = False + for distro in DISTRO_ID_TO_TARGET.values(): + if target in list(distro.values()): + found = True + if not found and target not in TARGETS_THAT_ARE_NOT_DISTROS: + missing.add(target) + edition = dl["edition"] + ar_url = dl["archive"]["url"] + ar_debug_url = dl["archive"].get("debug_symbols") + self( + r""" + INSERT INTO mdl_downloads (version_id, + target, + arch, + edition, + ar_url, + ar_debug_url, + data) + VALUES (:version_id, + :target, + :arch, + :edition, + :ar_url, + :ar_debug_url, + :data) + """, + version_id=version_id, + target=target, + arch=arch, + edition=edition, + ar_url=ar_url, + ar_debug_url=ar_debug_url, + data=json.dumps(dl), + ) + dl_id = self._cursor.lastrowid + for key, data in dl.items(): + if "url" not in data: + # Some fields aren't downloadable items. 
Skip them + continue + self( + r""" + INSERT INTO mdl_components (key, download_id, data) + VALUES (:key, :dl_id, :data) + """, + key=key, + dl_id=dl_id, + data=json.dumps(data), + ) + if missing: + LOGGER.error("Missing targets in DISTRO_ID_TO_TARGET:") + for item in missing: + LOGGER.error(f" - {item}") + if os.environ.get("VALIDATE_DISTROS") == "1": + sys.exit(1) + + def iter_available( + self, + *, + version: "str | None" = None, + target: "str | None" = None, + arch: "str | None" = None, + edition: "str | None" = None, + component: "str | None" = None, + ) -> "Iterable[DownloadableComponent]": + """ + Iterate over the matching downloadable components according to the + given attribute filters. + """ + rows = self( + r""" + SELECT version, target, arch, edition, key, mdl_components.data + FROM mdl_components, + mdl_downloads USING(download_id), + mdl_versions USING(version_id) + WHERE (:component IS NULL OR key=:component) + AND (:target IS NULL OR target=:target) + AND (:arch IS NULL OR arch=:arch) + AND (:edition IS NULL OR edition=:edition) + AND ( + CASE + WHEN :version='latest-release' + THEN 1 + WHEN :version='latest-stable' + THEN mdb_version_not_rc(version) + WHEN :version='rapid' + THEN mdb_version_rapid(version) + WHEN :version IS NULL + THEN 1 + ELSE version=:version OR version LIKE :version_pattern + END) + ORDER BY version COLLATE mdb_version DESC + """, + version=version, + version_pattern=f"{version}.%", + target=target, + arch=arch, + edition=edition, + component=component, + ) + for row in rows: + yield DownloadableComponent(*row) # type: ignore + + +class Cache: + """ + Abstraction over a mongodl downloads cache directory. + """ + + def __init__(self, dirpath: Path, db: CacheDB) -> None: + self._dirpath = dirpath + self._db = db + + @staticmethod + def open_default() -> "Cache": + """ + Open the default user-local cache directory + """ + return Cache.open_in(default_cache_dir()) + + @staticmethod + def open_in(dirpath: Path) -> "Cache": + """ + Open or create a cache directory at the given path. + """ + _mkdir(dirpath) + db = CacheDB.open(dirpath / "data.db") + return Cache(dirpath, db) + + @property + def db(self): + """The backing cache database""" + return self._db + + def download_file(self, url: str) -> DownloadResult: + """ + Obtain a local copy of the file at the given URL. 
+ """ + info = self._db( + "SELECT etag, last_modified " "FROM mdl_http_downloads WHERE url=:url", + url=url, + ) + etag = None # type: str|None + modtime = None # type: str|None + etag, modtime = next(iter(info), (None, None)) # type: ignore + headers = {} # type: dict[str, str] + if etag: + headers["If-None-Match"] = etag + if modtime: + headers["If-Modified-Since"] = modtime + digest = hashlib.sha256(url.encode("utf-8")).hexdigest()[:4] + file_name = PurePosixPath(url).name + dest = self._dirpath / "files" / digest / file_name + if not dest.exists(): + headers = {} + req = urllib.request.Request(url, headers=headers) + + try: + resp = urllib.request.urlopen(req, context=SSL_CONTEXT, timeout=30) + except urllib.error.HTTPError as e: + if e.code != 304: + raise RuntimeError(f"Failed to download [{url}]") from e + assert dest.is_file(), ( + "The download cache is missing an expected file", + dest, + ) + LOGGER.info("Using cached file %s", file_name) + return DownloadResult(False, dest) + + _mkdir(dest.parent) + got_etag = resp.getheader("ETag") + got_modtime = resp.getheader("Last-Modified") + got_len = int(resp.getheader("Content-Length")) + with dest.open("wb") as of: + shutil.copyfileobj(resp, of, length=got_len) + file_size = dest.stat().st_size + if file_size != got_len: + raise RuntimeError( + f"File size: {file_size} does not match download size: {got_len}" + ) + self._db( + "INSERT OR REPLACE INTO mdl_http_downloads (url, etag, last_modified) " + "VALUES (:url, :etag, :mtime)", + url=url, + etag=got_etag, + mtime=got_modtime, + ) + return DownloadResult(True, dest) + + def refresh_full_json(self) -> None: + """ + Sync the content of the MongoDB full.json downloads list. + """ + default_source = "https://downloads.mongodb.org/full.json" + download_source = os.environ.get("MONGODB_DOWNLOAD_SOURCE", default_source) + with self._db.transaction(): + dl = self.download_file(download_source) + if not dl.is_changed: + # We still have a good cache + return + self._db.import_json_file(dl.path) + + +def _mkdir(dirpath: Path) -> None: + """ + Ensure a directory at ``dirpath``, and all parent directories thereof. + + (Cannot using Path.mkdir(parents, exist_ok) on some Python versions that + we need to support.) 
+ """ + if dirpath.is_dir(): + return + par = dirpath.parent + if par != dirpath: + _mkdir(par) + try: + dirpath.mkdir() + except FileExistsError: + pass + + +def _print_list( + db: CacheDB, + version: "str | None", + target: "str | None", + arch: "str | None", + edition: "str | None", + component: "str | None", +): + if version or target or arch or edition or component: + counter = 0 + matching = db.iter_available( + version=version, + target=target, + arch=arch, + edition=edition, + component=component, + ) + for version, target, arch, edition, comp_key, comp_data in matching: + counter += 1 + print( + f"Download: {comp_key}\n" + f" Version: {version}\n" + f" Target: {target}\n" + f" Arch: {arch}\n" + f" Edition: {edition}\n" + f" Info: {comp_data}\n\n" + ) + if counter == 1: + print("Only one matching item") + elif counter == 0: + print("No items matched the listed filters") + else: + print(f"{counter} available downloadable components") + print("(Omit filter arguments for a list of available filters)") + return + + tup = next( + iter( # type: ignore + db(r""" + VALUES( + (select group_concat(arch, ', ') from (select distinct arch from mdl_downloads)), + (select group_concat(target, ', ') from (select distinct target from mdl_downloads)), + (select group_concat(edition, ', ') from (select distinct edition from mdl_downloads)), + (select group_concat(version, ', ') from ( + select distinct version from mdl_versions + ORDER BY version COLLATE mdb_version)), + (select group_concat(key, ', ') from (select distinct key from mdl_components)) + ) + """) + ) + ) # type: tuple[str, str, str, str, str] + arches, targets, editions, versions, components = tup + if "archive" in components: + components = components.split(", ") + components.append("archive-debug") + components = ", ".join(sorted(components)) + versions = "\n".join( + textwrap.wrap(versions, width=78, initial_indent=" ", subsequent_indent=" ") + ) + targets = "\n".join( + textwrap.wrap(targets, width=78, initial_indent=" ", subsequent_indent=" ") + ) + print( + "Architectures:\n" + f" {arches}\n" + "Targets:\n" + f"{targets}\n" + "Editions:\n" + f" {editions}\n" + "Versions:\n" + f"{versions}\n" + "Components:\n" + f" {components}\n" + ) + + +def infer_arch(): + a = platform.machine() or platform.processor() + # Remap platform names to the names used for downloads + return { + "AMD64": "x86_64", + }.get(a, a) + + +class ExpandResult(enum.Enum): + Empty = 0 + "No files were/would be extracted" + Okay = 1 + "One or more files were/would be extracted" + + +def _published_build_url( + cache: Cache, version: str, target: str, arch: str, edition: str, component: str +) -> tuple[str, str]: + """ + Get the URL and SHASUM for a "published" build (that is: a build that was published in full.json) + """ + value = "url" + if component == "archive-debug": + component = "archive" + value = "debug_symbols" + matching = cache.db.iter_available( + version=version, target=target, arch=arch, edition=edition, component=component + ) + tup = next(iter(matching), None) + if tup is None: + raise ValueError( + "No download was found for " + f'version="{version}" target="{target}" arch="{arch}" edition="{edition}" component="{component}"' + ) + data = json.loads(tup.data_json) + return data[value], data["sha256"] + + +def _latest_build_url( + cache: Cache, + target: str, + arch: str, + edition: str, + component: str, + branch: "str|None", +) -> str: + """ + Get the URL for an "unpublished" "latest" build. 
+ + These builds aren't published in a JSON manifest, so we have to form the URL + according to the user's parameters. We might fail to download a build if + there is no matching file. + """ + # Normalize the filename components based on the download target + platform = { + "windows": "windows", + "win32": "win32", + "macos": "osx", + }.get(target, "linux") + typ = { + "windows": "windows", + "win32": "win32", + "macos": "macos", + }.get(target, "linux") + component_name = { + "archive": "mongodb", + "crypt_shared": "mongo_crypt_shared_v1", + }.get(component, component) + base = f"https://downloads.10gen.com/{platform}" + # Windows has Zip files + ext = "zip" if target == "windows" else "tgz" + # Enterprise builds have an "enterprise" infix + ent_infix = "enterprise-" if edition == "enterprise" else "" + if "rhel" in target: + # Some RHEL targets include a minor version, like "rhel93". Check the URL of the latest release. + latest_release_url, _ = _published_build_url( + cache, "latest-release", target, arch, edition, component + ) + got = re.search(r"rhel[0-9][0-9]", latest_release_url) + if got is not None: + # Rewrite target like "rhel9" to "rhel93" to match published URL. + target = got.group(0) + # Some platforms have a filename infix + tgt_infix = (target + "-") if target not in ("windows", "win32", "macos") else "" + # Non-master branch uses a filename infix + br_infix = (branch + "-") if (branch is not None and branch != "master") else "" + filename = ( + f"{component_name}-{typ}-{arch}-{ent_infix}{tgt_infix}{br_infix}latest.{ext}" + ) + return f"{base}/{filename}" + + +def _dl_component( + cache: Cache, + out_dir: Path, + version: str, + target: str, + arch: str, + edition: str, + component: str, + pattern: "str | None", + strip_components: int, + test: bool, + no_download: bool, + latest_build_branch: "str|None", + retries: int, +) -> ExpandResult: + LOGGER.info(f"Download {component} {version}-{edition} for {target}-{arch}") + if version in ("latest-build", "latest"): + dl_url = _latest_build_url( + cache, target, arch, edition, component, latest_build_branch + ) + sha256 = None + else: + try: + dl_url, sha256 = _published_build_url( + cache, version, target, arch, edition, component + ) + except ValueError: + if component == "crypt_shared" and version != "latest-release": + warnings.warn( + "No matching version of crypt_shared found, using 'latest-release'", + stacklevel=2, + ) + version = "latest-release" + # The target will be macos on latest-release. + if target == "osx": + target = "macos" + else: + raise + dl_url, sha256 = _published_build_url( + cache, version, target, arch, edition, component + ) + + # This must go to stdout to be consumed by the calling program. + print(dl_url) + + LOGGER.info("Download url: %s", dl_url) + + if no_download: + return None + + retrier = DownloadRetrier(retries) + while True: + try: + cached = cache.download_file(dl_url).path + if sha256 is not None and not _check_shasum256(cached, sha256): + raise ValueError("Incorrect shasum256 for %s", cached) + return _expand_archive( + cached, out_dir, pattern, strip_components, test=test + ) + except Exception as e: + LOGGER.exception(e) + if not retrier.retry(): + raise + + +def _check_shasum256(filename, shasum): + """Check if the file with the name "filename" matches the SHA-256 sum + in "shasum".""" + h = hashlib.sha256() + # This will raise an exception if the file doesn't exist. Catching + # and handling it is left as an exercise for the reader. 
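For the "latest" builds described above there is no manifest to consult, so the URL is assembled purely from the requested parameters; an enterprise archive for ubuntu2204/x86_64, for example, resolves to https://downloads.10gen.com/linux/mongodb-linux-x86_64-enterprise-ubuntu2204-latest.tgz (whether that file actually exists depends on what the build system has published). A minimal sketch of the call, assuming mongodl.py is importable:

# Illustrative sketch only: the cache argument is consulted solely for rhel
# targets (to recover the minor version), so None is safe for this example.
from mongodl import _latest_build_url

url = _latest_build_url(
    cache=None,
    target="ubuntu2204",
    arch="x86_64",
    edition="enterprise",
    component="archive",
    branch=None,
)
assert url == (
    "https://downloads.10gen.com/linux/"
    "mongodb-linux-x86_64-enterprise-ubuntu2204-latest.tgz"
)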
+ with open(filename, "rb") as fh: + # Read and hash the file in 4K chunks. Reading the whole + # file at once might consume a lot of memory if it is + # large. + while True: + data = fh.read(4096) + if len(data) == 0: + break + h.update(data) + return shasum == h.hexdigest() + + +def _pathjoin(items: "Iterable[str]") -> PurePath: + """ + Return a path formed by joining the given path components + """ + return PurePath("/".join(items)) + + +def _test_pattern(path: PurePath, pattern: "PurePath | None") -> bool: + """ + Test whether the given 'path' string matches the globbing pattern 'pattern'. + + Supports the '**' pattern to match any number of intermediate directories. + """ + if pattern is None: + return True + # Split pattern into parts + pattern_parts = pattern.parts + if not pattern_parts: + # An empty pattern always matches + return True + path_parts = path.parts + if not path_parts: + # Non-empty pattern requires more path components + return False + pattern_head = pattern_parts[0] + pattern_tail = _pathjoin(pattern_parts[1:]) + if pattern_head == "**": + # Special "**" pattern matches any suffix of the path + # Generate each suffix: + tails = (path_parts[i:] for i in range(len(path_parts))) + # Test if any of the suffixes match the remainder of the pattern: + return any(_test_pattern(_pathjoin(t), pattern_tail) for t in tails) + if not fnmatch(path.parts[0], pattern_head): + # Leading path component cannot match + return False + # The first component matches. Test the remainder: + return _test_pattern(_pathjoin(path_parts[1:]), pattern_tail) + + +def _expand_archive( + ar: Path, dest: Path, pattern: "str | None", strip_components: int, test: bool +) -> ExpandResult: + """ + Expand the archive members from 'ar' into 'dest'. If 'pattern' is not-None, + only extracts members that match the pattern. + """ + LOGGER.debug(f"Extract from: [{ar.name}]") + LOGGER.debug(f" into: [{dest}]") + if ar.suffix == ".zip": + n_extracted = _expand_zip(ar, dest, pattern, strip_components, test=test) + elif ar.suffix == ".tgz": + n_extracted = _expand_tgz(ar, dest, pattern, strip_components, test=test) + else: + raise RuntimeError("Unknown archive file extension: " + ar.suffix) + verb = "would be" if test else "were" + if n_extracted == 0: + if pattern and strip_components: + LOGGER.warning( + f"NOTE: No files {verb} extracted. Likely all files {verb} " + f'excluded by "--only={pattern}" and/or "--strip-components={strip_components}"' + ) + elif pattern: + LOGGER.warning( + f"NOTE: No files {verb} extracted. Likely all files {verb} " + f'excluded by the "--only={pattern}" filter' + ) + elif strip_components: + LOGGER.warning( + f"NOTE: No files {verb} extracted. Likely all files {verb} " + f'excluded by "--strip-components={strip_components}"' + ) + else: + LOGGER.warning(f"NOTE: No files {verb} extracted. 
Empty archive?") + return ExpandResult.Empty + if n_extracted == 1: + LOGGER.info(f"One file {verb} extracted") + return ExpandResult.Okay + LOGGER.info(f"{n_extracted} files {verb} extracted") + return ExpandResult.Okay + + +def _expand_tgz( + ar: Path, dest: Path, pattern: "str | None", strip_components: int, test: bool +) -> int: + "Expand a tar.gz archive" + n_extracted = 0 + with tarfile.open(str(ar), "r:*") as tf: + for mem in tf.getmembers(): + n_extracted += _maybe_extract_member( + dest, + PurePath(mem.name), + pattern, + strip_components, + mem.isdir(), + lambda: cast("IO[bytes]", tf.extractfile(mem)), # noqa: B023 + mem.mode | 0o222, # make sure file is writable + test=test, + ) + return n_extracted + + +def _expand_zip( + ar: Path, dest: Path, pattern: "str | None", strip_components: int, test: bool +) -> int: + "Expand a .zip archive." + n_extracted = 0 + with zipfile.ZipFile(str(ar), "r") as zf: + for item in zf.infolist(): + n_extracted += _maybe_extract_member( + dest, + PurePath(item.filename), + pattern, + strip_components, + item.filename.endswith("/"), ## Equivalent to: item.is_dir(), + lambda: zf.open(item, "r"), # noqa: B023 + 0o777, + test=test, + ) + return n_extracted + + +def _maybe_extract_member( + out: Path, + relpath: PurePath, + pattern: "str | None", + strip: int, + is_dir: bool, + opener: "Callable[[], IO[bytes]]", + modebits: int, + test: bool, +) -> int: + """ + Try to extract an archive member according to the given arguments. + + :return: Zero if the file was excluded by filters, one otherwise. + """ + relpath = PurePath(relpath) + LOGGER.debug(" | {:-<65} |".format(str(relpath) + " ")) + if len(relpath.parts) <= strip: + # Not enough path components + LOGGER.debug(" (Excluded by --strip-components)") + return 0 + if not _test_pattern(relpath, PurePath(pattern) if pattern else None): + # Doesn't match our pattern + LOGGER.debug(" (excluded by pattern)") + return 0 + stripped = _pathjoin(relpath.parts[strip:]) + dest = Path(out) / stripped + LOGGER.debug(f"-> [{dest}]") + if test: + # We are running in test-only mode: Do not do anything + return 1 + if is_dir: + _mkdir(dest) + return 1 + with opener() as infile: + _mkdir(dest.parent) + with dest.open("wb") as outfile: + shutil.copyfileobj(infile, outfile) + os.chmod(str(dest), modebits) + return 1 + + +def main(argv=None): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument( + "--cache-dir", + type=Path, + default=default_cache_dir(), + help="Directory where download caches and metadata will be stored", + ) + parser.add_argument( + "--verbose", "-v", action="store_true", help="Whether to log at the DEBUG level" + ) + parser.add_argument( + "--quiet", "-q", action="store_true", help="Whether to log at the WARNING level" + ) + grp = parser.add_argument_group("List arguments") + grp.add_argument( + "--list", + action="store_true", + help="List available components, targets, editions, and " + "architectures. Download arguments will act as filters.", + ) + dl_grp = parser.add_argument_group( + "Download arguments", + description="Select what to download and extract. " + "Some arguments will be inferred " + "based on the host system.", + ) + dl_grp.add_argument( + "--target", + "-T", + default="auto", + help="The target platform for which to download. 
" + 'Use "--list" to list available targets.', + ) + dl_grp.add_argument( + "--arch", "-A", default="auto", help="The architecture for which to download" + ) + dl_grp.add_argument( + "--edition", + "-E", + default="enterprise", + help='The edition of the product to download (Default is "enterprise"). ' + 'Use "--list" to list available editions.', + ) + dl_grp.add_argument( + "--out", + "-o", + help="The directory in which to download components.", + type=Path, + ) + dl_grp.add_argument( + "--version", + "-V", + default="latest-build", + help='The product version to download. Use "latest-release" to download ' + "the newest available version (including release candidates). Use " + '"latest-stable" to download the newest version, excluding release ' + 'candidates. Use "rapid" to download the latest rapid release. ' + ' Use "latest-build" or "latest" to download the most recent build of ' + 'the named component. Use "--list" to list available versions.', + ) + dl_grp.add_argument( + "--component", + "-C", + default="archive", + help="The component to download. " 'Use "--list" to list available components.', + ) + dl_grp.add_argument( + "--only", + help="Restrict extraction to items that match the given globbing expression. " + 'The full archive member path is matched, so a pattern like "*.exe" ' + 'will only match "*.exe" at the top level of the archive. To match ' + 'recursively, use the "**" pattern to match any number of ' + "intermediate directories.", + ) + dl_grp.add_argument( + "--strip-path-components", + "-p", + dest="strip_components", + metavar="N", + default=0, + type=int, + help="Strip the given number of path components from archive members before " + "extracting into the destination. The relative path of the archive " + "member will be used to form the destination path. For example, a " + "member named [bin/mongod.exe] will be extracted to [/bin/mongod.exe]. " + "Using --strip-components=1 will remove the first path component, extracting " + "such an item to [/mongod.exe]. If the path has fewer than N components, " + "that archive member will be ignored.", + ) + dl_grp.add_argument( + "--no-download", + action="store_true", + help="Do not download the file, only print its url.", + ) + dl_grp.add_argument( + "--test", + action="store_true", + help="Do not extract or place any files/directories. 
" + "Only print what will be extracted without placing any files.", + ) + dl_grp.add_argument( + "--empty-is-error", + action="store_true", + help="If all files are excluded by other filters, " + "treat that situation as an error and exit non-zero.", + ) + dl_grp.add_argument( + "--latest-build-branch", + help="Specify the name of the branch to " + 'download the with "--version=latest-build"', + metavar="BRANCH_NAME", + ) + dl_grp.add_argument("--retries", help="The number of times to retry", default=0) + args = parser.parse_args(argv) + cache = Cache.open_in(args.cache_dir) + cache.refresh_full_json() + + version = args.version + if version in PERF_VERSIONS: + version = PERF_VERSIONS[version] + target = args.target + if target == "auto": + target = infer_target(version) + arch = args.arch + if arch == "auto": + arch = infer_arch() + + if args.verbose: + LOGGER.setLevel(logging.DEBUG) + elif args.quiet: + LOGGER.setLevel(logging.WARNING) + + if args.list: + _print_list(cache.db, version, target, arch, args.edition, args.component) + return + + out = args.out or Path.cwd() + out = out.absolute() + + result = _dl_component( + cache, + out, + version=version, + target=target, + arch=arch, + edition=args.edition, + component=args.component, + pattern=args.only, + strip_components=args.strip_components, + test=args.test, + no_download=args.no_download, + latest_build_branch=args.latest_build_branch, + retries=int(args.retries), + ) + if result is ExpandResult.Empty and args.empty_is_error: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/.evergreen/mongosh_dl.py b/.evergreen/mongosh_dl.py deleted file mode 120000 index 9a234910ec..0000000000 --- a/.evergreen/mongosh_dl.py +++ /dev/null @@ -1 +0,0 @@ -../.mod/drivers-evergreen-tools/.evergreen/mongosh_dl.py \ No newline at end of file diff --git a/.evergreen/mongosh_dl.py b/.evergreen/mongosh_dl.py new file mode 100755 index 0000000000..3e2a2e6968 --- /dev/null +++ b/.evergreen/mongosh_dl.py @@ -0,0 +1,211 @@ +#!/usr/bin/env python3 +""" +Download and extract MongoSH. + +Use '--help' for more information. 
+""" + +import argparse +import json +import logging +import re +import subprocess +import sys +from pathlib import Path + +LOGGER = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s") + +HERE = Path(__file__).absolute().parent +sys.path.insert(0, str(HERE)) +from mongodl import LOGGER as DL_LOGGER +from mongodl import ( + Cache, + DownloadRetrier, + ExpandResult, + _expand_archive, + default_cache_dir, + infer_arch, +) + + +def _get_latest_version(cache: Cache, retries: int) -> str: + dl_url = "https://downloads.mongodb.com/compass/mongosh.json" + retrier = DownloadRetrier(retries) + while True: + try: + cached = cache.download_file(dl_url).path + data = json.loads(cached.read_text()) + return data["versions"][0]["version"] + except Exception as e: + LOGGER.exception(e) + if not retrier.retry(): + raise + + +def _download( + cache: Cache, + out_dir: Path, + version: str, + target: str, + arch: str, + pattern: "str | None", + strip_components: int, + test: bool, + no_download: bool, + retries: int, +) -> int: + LOGGER.info(f"Download {version} mongosh for {target}-{arch}") + if version == "latest": + version = _get_latest_version(cache, retries) + if arch == "x86_64": + arch = "x64" + elif arch == "aarch64": + arch = "arm64" + if target == "linux": + suffix = ".tgz" + if sys.platform == "linux" and arch in ["x64", "arm64"]: + openssl = subprocess.check_output(["openssl", "version"]) + if "3." in openssl.decode("utf-8"): + suffix = "-openssl3.tgz" + elif re.match("1.1.1[e-w] ", openssl.decode("utf-8")): + suffix = "-openssl11.tgz" + else: + suffix = ".zip" + dl_url = f"https://downloads.mongodb.com/compass/mongosh-{version}-{target}-{arch}{suffix}" + # This must go to stdout to be consumed by the calling program. + print(dl_url) + LOGGER.info("Download url: %s", dl_url) + + if no_download: + return ExpandResult.Okay + + retrier = DownloadRetrier(retries) + while True: + try: + cached = cache.download_file(dl_url).path + return _expand_archive( + cached, out_dir, pattern, strip_components, test=test + ) + except Exception as e: + LOGGER.exception(e) + if not retrier.retry(): + raise + + +def main(argv=None): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument( + "--verbose", "-v", action="store_true", help="Whether to log at the DEBUG level" + ) + parser.add_argument( + "--quiet", "-q", action="store_true", help="Whether to log at the WARNING level" + ) + parser.add_argument( + "--cache-dir", + type=Path, + default=default_cache_dir(), + help="Directory where download caches and metadata will be stored", + ) + dl_grp = parser.add_argument_group( + "Download arguments", + description="Select what to download and extract. " + "Some arguments will be inferred " + "based on the host system.", + ) + dl_grp.add_argument( + "--target", + "-T", + default="auto", + help="The target platform for which to download. " + 'Use "--list" to list available targets.', + ) + dl_grp.add_argument( + "--arch", "-A", default="auto", help="The architecture for which to download" + ) + dl_grp.add_argument( + "--out", + "-o", + help="The directory in which to download components.", + type=Path, + ) + dl_grp.add_argument( + "--version", + "-V", + default="latest", + help='The product version to download. 
Use "latest" to download ' + "the newest available stable version.", + ) + dl_grp.add_argument( + "--only", + help="Restrict extraction to items that match the given globbing expression. " + 'The full archive member path is matched, so a pattern like "*.exe" ' + 'will only match "*.exe" at the top level of the archive. To match ' + 'recursively, use the "**" pattern to match any number of ' + "intermediate directories.", + ) + dl_grp.add_argument( + "--strip-path-components", + "-p", + dest="strip_components", + metavar="N", + default=0, + type=int, + help="Strip the given number of path components from archive members before " + "extracting into the destination. The relative path of the archive " + "member will be used to form the destination path. For example, a " + "member named [bin/mongod.exe] will be extracted to [/bin/mongod.exe]. " + "Using --strip-components=1 will remove the first path component, extracting " + "such an item to [/mongod.exe]. If the path has fewer than N components, " + "that archive member will be ignored.", + ) + dl_grp.add_argument( + "--no-download", + action="store_true", + help="Do not download the file, only print its url.", + ) + dl_grp.add_argument( + "--test", + action="store_true", + help="Do not extract or place any files/directories. " + "Only print what will be extracted without placing any files.", + ) + dl_grp.add_argument("--retries", help="The number of times to retry", default=0) + args = parser.parse_args(argv) + + target = args.target + if target == "auto": + target = sys.platform + arch = args.arch + if arch == "auto": + arch = infer_arch() + out = args.out or Path.cwd() + out = out.absolute() + if args.verbose: + LOGGER.setLevel(logging.DEBUG) + DL_LOGGER.setLevel(logging.DEBUG) + elif args.quiet: + LOGGER.setLevel(logging.WARNING) + DL_LOGGER.setLevel(logging.WARNING) + + cache = Cache.open_in(args.cache_dir) + result = _download( + cache, + out, + version=args.version, + target=target, + arch=arch, + pattern=args.only, + strip_components=args.strip_components, + test=args.test, + no_download=args.no_download, + retries=int(args.retries), + ) + if result is ExpandResult.Empty: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/gemfiles/standard.rb b/gemfiles/standard.rb index 5c45d27260..3e51fe0c3f 100644 --- a/gemfiles/standard.rb +++ b/gemfiles/standard.rb @@ -41,9 +41,7 @@ def standard_dependencies gem 'ruby-debug-ide' end end - if RUBY_VERSION >= '3.4' - gem 'ostruct' - end + gem 'ostruct' if RUBY_VERSION >= '3.4' end group :testing do From 313819366a53e39872e6d72e1a21715a3a6ff5be Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Thu, 15 Jan 2026 14:22:23 +0100 Subject: [PATCH 6/9] wip --- .mod/drivers-evergreen-tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.mod/drivers-evergreen-tools b/.mod/drivers-evergreen-tools index 5514d6a1c1..9142b7055e 160000 --- a/.mod/drivers-evergreen-tools +++ b/.mod/drivers-evergreen-tools @@ -1 +1 @@ -Subproject commit 5514d6a1c1887693c898073aa31f5be98cab53ea +Subproject commit 9142b7055ea5940e59ad41c4b069376f867031da From acbccf79b3e4bdc06ebca80f26a969288ed97212 Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Thu, 15 Jan 2026 14:43:32 +0100 Subject: [PATCH 7/9] Revert "wip" This reverts commit 313819366a53e39872e6d72e1a21715a3a6ff5be. 
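Before the reverts below back these scripts out again, one detail of mongosh_dl.py above is worth spelling out: the archive it requests depends on the host architecture (x86_64/aarch64 become the x64/arm64 labels mongosh publishes) and, on Linux, on the locally installed OpenSSL version. A condensed, illustrative sketch of that selection (the canonical logic is the _download function above):

# Illustrative sketch of how mongosh_dl._download chooses the archive name.
import re
import subprocess
import sys


def mongosh_archive_name(version, target, arch):
    # mongosh publishes x64/arm64 rather than x86_64/aarch64.
    arch = {"x86_64": "x64", "aarch64": "arm64"}.get(arch, arch)
    if target != "linux":
        return f"mongosh-{version}-{target}-{arch}.zip"
    suffix = ".tgz"
    if sys.platform == "linux" and arch in ("x64", "arm64"):
        # Prefer the OpenSSL 3 or OpenSSL 1.1.1 build to match the host library.
        openssl = subprocess.check_output(["openssl", "version"]).decode("utf-8")
        if "3." in openssl:
            suffix = "-openssl3.tgz"
        elif re.match("1.1.1[e-w] ", openssl):
            suffix = "-openssl11.tgz"
    return f"mongosh-{version}-{target}-{arch}{suffix}"


# e.g. "mongosh-2.3.1-linux-x64-openssl3.tgz" on a host with OpenSSL 3.x
# ("2.3.1" is only an example version string)
print(mongosh_archive_name("2.3.1", "linux", "x86_64"))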
--- .mod/drivers-evergreen-tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.mod/drivers-evergreen-tools b/.mod/drivers-evergreen-tools index 9142b7055e..5514d6a1c1 160000 --- a/.mod/drivers-evergreen-tools +++ b/.mod/drivers-evergreen-tools @@ -1 +1 @@ -Subproject commit 9142b7055ea5940e59ad41c4b069376f867031da +Subproject commit 5514d6a1c1887693c898073aa31f5be98cab53ea From ad393b7bbf4c5dddd724f26623f547ba13c6b76d Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Thu, 15 Jan 2026 14:43:47 +0100 Subject: [PATCH 8/9] Revert "Copy scripts" This reverts commit 489638829b25d072be6414904351f1ad33faddc0. --- .evergreen/download-mongodb.sh | 233 +----- .evergreen/find-python3.sh | 295 +------- .evergreen/mongodl.py | 1259 +------------------------------- .evergreen/mongosh_dl.py | 212 +----- gemfiles/standard.rb | 4 +- 5 files changed, 7 insertions(+), 1996 deletions(-) mode change 100755 => 120000 .evergreen/download-mongodb.sh mode change 100755 => 120000 .evergreen/find-python3.sh mode change 100755 => 120000 .evergreen/mongodl.py mode change 100755 => 120000 .evergreen/mongosh_dl.py diff --git a/.evergreen/download-mongodb.sh b/.evergreen/download-mongodb.sh deleted file mode 100755 index f5bce8a686..0000000000 --- a/.evergreen/download-mongodb.sh +++ /dev/null @@ -1,232 +0,0 @@ -#!/usr/bin/env bash -# shellcheck shell=sh - -# This file is no longer used directly by drivers-evergreen-tools. -# If using this file to download mongodb binaries, you should consider instead using `mongodl.py` and `mongosh-dl.py`. -# If using this file for get_distro, use `get-distro.sh`. -set -o errexit # Exit the script with error if any of the commands fail - -get_distro () -{ - # shellcheck disable=SC3028 - _script_dir="$(dirname ${BASH_SOURCE:-$0})" - . ${_script_dir}/get-distro.sh -} - -# get_mongodb_download_url_for "linux-distro-version-architecture" "latest|44|42|40|36|34|32|30|28|26|24" "true|false" -# Sets EXTRACT to appropriate extract command -# Sets MONGODB_DOWNLOAD_URL to the appropriate download url -# Sets MONGO_CRYPT_SHARED_DOWNLOAD_URL to the corresponding URL to a crypt_shared library archive -get_mongodb_download_url_for () -{ - _DISTRO=$1 - _VERSION=$2 - _DEBUG=$3 - - EXTRACT="tar zxf" - EXTRACT_MONGOSH=$EXTRACT - - case "$_DEBUG" in - true) - _component="archive-debug" - ;; - *) - _component="archive" - ;; - esac - - case "$_DISTRO" in - darwin-*) - EXTRACT_MONGOSH="unzip -q" - ;; - windows32* | cygwin*-i686) - EXTRACT="/cygdrive/c/Progra~1/7-Zip/7z.exe x" - EXTRACT_MONGOSH="/cygdrive/c/Progra~1/7-Zip/7z.exe x" - ;; - windows64* | cygwin*-x86_64) - EXTRACT="/cygdrive/c/Progra~2/7-Zip/7z.exe x" - EXTRACT_MONGOSH="/cygdrive/c/Progra~2/7-Zip/7z.exe x" - ;; - # Windows on GitHub Actions - mingw64_nt-*-x86_64) - EXTRACT="7z.exe x" - EXTRACT_MONGOSH="7z.exe x" - ;; - esac - - # Get the download url for the latest MongoSH. - # shellcheck disable=SC3028 - _script_dir="$(dirname ${BASH_SOURCE:-$0})" - _python3=$(bash -c ". $_script_dir/find-python3.sh && ensure_python3 2>/dev/null") - MONGOSH_DOWNLOAD_URL=$($_python3 "${_script_dir}/mongosh_dl.py" --no-download | tr -d '\r') - - # Get the download url for MongoDB for the given version. 
- MONGODB_DOWNLOAD_URL="$($_python3 "${_script_dir}/mongodl.py" --version $_VERSION --component $_component --no-download | tr -d '\r')" - - if [ -z "$MONGODB_DOWNLOAD_URL" ]; then - echo "Unknown version: $_VERSION for $_DISTRO" - exit 1 - fi - - MONGO_CRYPT_SHARED_DOWNLOAD_URL=$($_python3 "${_script_dir}/mongodl.py" --version $_VERSION --component crypt_shared --no-download | tr -d '\r') - - echo "$MONGODB_DOWNLOAD_URL" -} - -# curl_retry emulates running curl with `--retry 5` and `--retry-all-errors`. -curl_retry () -{ - for i in 1 2 4 8 16; do - { curl --fail -sS --max-time 300 "$@" && return 0; } || sleep $i - done - return 1 -} - -# download_and_extract_package downloads a MongoDB server package. -download_and_extract_package () -{ - MONGODB_DOWNLOAD_URL=$1 - EXTRACT=$2 - - if [ -n "${MONGODB_BINARIES:-}" ]; then - cd "$(dirname "$(dirname "${MONGODB_BINARIES:?}")")" - else - cd $DRIVERS_TOOLS - fi - - echo "Installing server binaries..." - curl_retry "$MONGODB_DOWNLOAD_URL" --output mongodb-binaries.tgz - - $EXTRACT mongodb-binaries.tgz - echo "Installing server binaries... done." - - rm -f mongodb-binaries.tgz - mv mongodb* mongodb - chmod -R +x mongodb - # Clear the environment to avoid "find: The environment is too large for exec()" - # error on Windows. - env -i PATH="$PATH" find . -name vcredist_x64.exe -exec {} /install /quiet \; - echo "MongoDB server version: $(./mongodb/bin/mongod --version)" - cd - -} - -download_and_extract_mongosh () -{ - MONGOSH_DOWNLOAD_URL=$1 - EXTRACT_MONGOSH=${2:-"tar zxf"} - - if [ -z "$MONGOSH_DOWNLOAD_URL" ]; then - get_mongodb_download_url_for "$(get_distro)" latest false - fi - - if [ -n "${MONGODB_BINARIES:-}" ]; then - cd "$(dirname "$(dirname "${MONGODB_BINARIES:?}")")" - else - cd $DRIVERS_TOOLS - fi - - echo "Installing MongoDB shell..." - curl_retry $MONGOSH_DOWNLOAD_URL --output mongosh.tgz - $EXTRACT_MONGOSH mongosh.tgz - - rm -f mongosh.tgz - mv mongosh-* mongosh - mkdir -p mongodb/bin - mv mongosh/bin/* mongodb/bin - rm -rf mongosh - chmod -R +x mongodb/bin - echo "Installing MongoDB shell... done." - echo "MongoDB shell version: $(./mongodb/bin/mongosh --version)" - cd - -} - -# download_and_extract downloads a requested MongoDB server package. -# If the legacy shell is not included in the download, the legacy shell is also downloaded from the 5.0 package. -download_and_extract () -{ - MONGODB_DOWNLOAD_URL=$1 - EXTRACT=$2 - MONGOSH_DOWNLOAD_URL=$3 - EXTRACT_MONGOSH=$4 - - download_and_extract_package "$MONGODB_DOWNLOAD_URL" "$EXTRACT" - - if [ "$MONGOSH_DOWNLOAD_URL" ]; then - download_and_extract_mongosh "$MONGOSH_DOWNLOAD_URL" "$EXTRACT_MONGOSH" - fi - - if [ ! -z "${INSTALL_LEGACY_SHELL:-}" ] && [ ! -e $DRIVERS_TOOLS/mongodb/bin/mongo ] && [ ! -e $DRIVERS_TOOLS/mongodb/bin/mongo.exe ]; then - # The legacy mongo shell is not included in server downloads of 6.0.0-rc6 or later. Refer: SERVER-64352. - # Some test scripts use the mongo shell for setup. - # Download 5.0 package to get the legacy mongo shell as a workaround until DRIVERS-2328 is addressed. - echo "Legacy 'mongo' shell not detected." - echo "Download legacy shell from 5.0 ... begin" - # Use a subshell to avoid overwriting MONGODB_DOWNLOAD_URL and MONGO_CRYPT_SHARED_DOWNLOAD_URL. 
- MONGODB50_DOWNLOAD_URL=$( - get_mongodb_download_url_for "$DISTRO" "5.0" > /dev/null - echo "$MONGODB_DOWNLOAD_URL" - ) - - SAVED_DRIVERS_TOOLS=$DRIVERS_TOOLS - mkdir $DRIVERS_TOOLS/legacy-shell-download - DRIVERS_TOOLS=$DRIVERS_TOOLS/legacy-shell-download - download_and_extract_package "$MONGODB50_DOWNLOAD_URL" "$EXTRACT" - if [ -e $DRIVERS_TOOLS/mongodb/bin/mongo ]; then - cp $DRIVERS_TOOLS/mongodb/bin/mongo $SAVED_DRIVERS_TOOLS/mongodb/bin - elif [ -e $DRIVERS_TOOLS/mongodb/bin/mongo.exe ]; then - cp $DRIVERS_TOOLS/mongodb/bin/mongo.exe $SAVED_DRIVERS_TOOLS/mongodb/bin - fi - DRIVERS_TOOLS=$SAVED_DRIVERS_TOOLS - rm -rf $DRIVERS_TOOLS/legacy-shell-download - echo "Download legacy shell from 5.0 ... end" - fi - - # Define SKIP_CRYPT_SHARED=1 to skip downloading crypt_shared. This is useful for platforms that have a - # server release but don't ship a corresponding crypt_shared release, like Amazon 2018. - if [ -z "${SKIP_CRYPT_SHARED:-}" ]; then - if [ -z "$MONGO_CRYPT_SHARED_DOWNLOAD_URL" ]; then - echo "There is no crypt_shared library for distro='$DISTRO' and version='$MONGODB_VERSION'". - else - echo "Downloading crypt_shared package from $MONGO_CRYPT_SHARED_DOWNLOAD_URL" - download_and_extract_crypt_shared "$MONGO_CRYPT_SHARED_DOWNLOAD_URL" "$EXTRACT" CRYPT_SHARED_LIB_PATH - echo "CRYPT_SHARED_LIB_PATH:" $CRYPT_SHARED_LIB_PATH - if [ -z $CRYPT_SHARED_LIB_PATH ]; then - echo "CRYPT_SHARED_LIB_PATH must be assigned, but wasn't" 1>&2 # write to stderr" - exit 1 - fi - fi - fi -} - -# download_and_extract_crypt_shared downloads and extracts a crypt_shared package into the current directory. -# Use get_mongodb_download_url_for to get a MONGO_CRYPT_SHARED_DOWNLOAD_URL. -download_and_extract_crypt_shared () -{ - MONGO_CRYPT_SHARED_DOWNLOAD_URL=$1 - EXTRACT=$2 - __CRYPT_SHARED_LIB_PATH=${3:-CRYPT_SHARED_LIB_PATH} - rm -rf crypt_shared_download - mkdir crypt_shared_download - cd crypt_shared_download - - curl_retry $MONGO_CRYPT_SHARED_DOWNLOAD_URL --output crypt_shared-binaries.tgz - $EXTRACT crypt_shared-binaries.tgz - - LIBRARY_NAME="mongo_crypt_v1" - # Windows package includes .dll in 'bin' directory. - if [ -d ./bin ]; then - cp bin/$LIBRARY_NAME.* .. - else - cp lib/$LIBRARY_NAME.* .. - fi - cd .. - rm -rf crypt_shared_download - - RELATIVE_CRYPT_SHARED_LIB_PATH="$(find . -maxdepth 1 -type f \( -name "$LIBRARY_NAME.dll" -o -name "$LIBRARY_NAME.so" -o -name "$LIBRARY_NAME.dylib" \))" - ABSOLUTE_CRYPT_SHARED_LIB_PATH=$(pwd)/$(basename $RELATIVE_CRYPT_SHARED_LIB_PATH) - if [ "Windows_NT" = "$OS" ]; then - # If we're on Windows, convert the "cygdrive" path to Windows-style paths. - ABSOLUTE_CRYPT_SHARED_LIB_PATH=$(cygpath -m $ABSOLUTE_CRYPT_SHARED_LIB_PATH) - fi - eval $__CRYPT_SHARED_LIB_PATH=$ABSOLUTE_CRYPT_SHARED_LIB_PATH -} diff --git a/.evergreen/download-mongodb.sh b/.evergreen/download-mongodb.sh new file mode 120000 index 0000000000..d0b2306cc8 --- /dev/null +++ b/.evergreen/download-mongodb.sh @@ -0,0 +1 @@ +../.mod/drivers-evergreen-tools/.evergreen/download-mongodb.sh \ No newline at end of file diff --git a/.evergreen/find-python3.sh b/.evergreen/find-python3.sh deleted file mode 100755 index fe8eba8702..0000000000 --- a/.evergreen/find-python3.sh +++ /dev/null @@ -1,294 +0,0 @@ -#!/usr/bin/env bash -# -# find-python3.sh -# -# Usage: -# . /path/to/find-python3.sh -# -# This file defines the following utility functions: -# - is_python3 -# - is_venv_capable -# - is_virtualenv_capable -# - find_python3 -# These functions may be invoked from any working directory. 
- -if [ -z "$BASH" ]; then - echo "find-python3.sh must be run in a Bash shell!" 1>&2 - return 1 -fi - -# is_python3 -# -# Usage: -# is_python3 python -# is_python3 /path/to/python -# -# Parameters: -# "$1": The name or path of the python binary to test. -# -# Return 0 (true) if the given argument "$1" is a viable Python 3 binary. -# Return a non-zero value (false) otherwise. -# -# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null -# to silence these messages. -is_python3() ( - set -o errexit - set -o pipefail - - HERE="$(dirname "${BASH_SOURCE[0]}")" - - # Binary to use, e.g. "python". - local -r bin="${1:?'is_python3 requires a name or path of a python binary to test'}" - - # Binary must be executable. - command -V "$bin" &>/dev/null || return - - echo "- ${bin}" - - "$bin" "${HERE:?}/is_python3.py" || return -) 1>&2 - -# is_venv_capable -# -# Usage: -# is_venv_capable python -# is_venv_capable /path/to/python -# -# Parameters: -# "$1": The name or path of the python binary to test. -# -# Return 0 (true) if the given argument "$1" can successfully evaluate the -# command: -# "$1" -m venv venv -# and activate the created virtual environment. -# Return a non-zero value (false) otherwise. -# -# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null -# to silence these messages. -is_venv_capable() ( - set -o errexit - set -o pipefail - - local -r bin="${1:?'is_venv_capable requires a name or path of a python binary to test'}" - - # Use a temporary directory to avoid polluting the caller's environment. - local -r tmp="$(mktemp -d)" - trap 'rm -rf "$tmp"' EXIT - - if [[ "${OSTYPE:?}" == cygwin ]]; then - local -r real_path="$(cygpath -aw "$tmp")" || return - else - local -r real_path="$tmp" - fi - - "$bin" -m venv "$real_path" || return - - if [[ -f "$tmp/bin/activate" ]]; then - # shellcheck source=/dev/null - . "$tmp/bin/activate" - elif [[ -f "$tmp/Scripts/activate" ]]; then - # Workaround https://github.com/python/cpython/issues/76632: - # activate: line 3: $'\r': command not found - dos2unix -q "$tmp/Scripts/activate" || true - # shellcheck source=/dev/null - . "$tmp/Scripts/activate" - else - echo "Could not find an activation script in $tmp!" - return 1 - fi -) 1>&2 - -# is_virtualenv_capable -# -# Usage: -# is_virtualenv_capable python -# is_virtualenv_capable /path/to/python -# -# Parameters: -# "$1": The name or path of the python binary to test. -# -# Return 0 (true) if the given argument $1 can successfully evaluate the -# command: -# "$1" -m virtualenv -p "$1" venv -# and activate the created virtual environment. -# Return a non-zero value (false) otherwise. -# -# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null -# to silence these messages. -is_virtualenv_capable() ( - set -o errexit - set -o pipefail - - local -r bin="${1:?'is_virtualenv_capable requires a name or path of a python binary to test'}" - - # Use a temporary directory to avoid polluting the caller's environment. - local tmp - tmp="$(mktemp -d)" - trap 'rm -rf "$tmp"' EXIT - - local real_path - if [[ "${OSTYPE:?}" == cygwin ]]; then - real_path="$(cygpath -aw "$tmp")" || return - else - real_path="$tmp" - fi - - # -p: some old versions of virtualenv (e.g. installed on Debian 10) are buggy. - # Without -p, the created virtual environment may use the wrong Python binary - # (e.g. using a Python 2 binary even if it was created by a Python 3 binary). 
- "$bin" -m virtualenv -p "$bin" "$real_path" || return - - if [[ -f "$tmp/bin/activate" ]]; then - # shellcheck source=/dev/null - . "$tmp/bin/activate" - elif [[ -f "$tmp/Scripts/activate" ]]; then - # Workaround https://github.com/python/cpython/issues/76632: - # activate: line 3: $'\r': command not found - dos2unix -q "$tmp/Scripts/activate" || true - # shellcheck source=/dev/null - . "$tmp/Scripts/activate" - else - echo "Could not find an activation script in $tmp!" - return 1 - fi -) 1>&2 - -# find_python3 -# -# Usage: -# find_python3 -# PYTHON_BINARY=$(find_python3) -# PYTHON_BINARY=$(find_python3 2>/dev/null) -# -# Return 0 (true) if a Python 3 binary capable of creating a virtual environment -# with either venv or virtualenv can be found. -# Return a non-zero (false) value otherwise. -# -# If successful, print the name of the binary stdout (pipe 1). -# Otherwise, no output is printed to stdout (pipe 1). -# -# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null -# with `2>/dev/null` to silence these messages. -# -# Example: -# PYTHON_BINARY=$(find_python3) -# if [[ -z "$PYTHON_BINARY" ]]; then -# # Handle missing Python binary situation. -# fi -# -# if "$PYTHON_BINARY" -m venv -h; then -# "$PYTHON_BINARY" -m venv venv -# else -# "$PYTHON_BINARY" -m virtualenv -p "$PYTHON_BINARY" venv -# fi -find_python3() ( - set -o errexit - set -o pipefail - - # Prefer Python Toolchain Current version. - declare python_binary="" - case "${OSTYPE:?}" in - cygwin) - python_binary="C:/python/Current/python.exe" - ;; - darwin*) - python_binary="/Library/Frameworks/Python.Framework/Versions/Current/bin/python3" - ;; - *) - python_binary="/opt/python/Current/bin/python3" - ;; - esac - if is_python3 "${python_binary:?}"; then - echo "Using Python binary ${python_binary:?}" >&2 - echo "${python_binary:?}" - return - fi - - test_bins() { - local -r bin_path="${1:?'missing path'}" - shift - local -r pattern="${1:?'missing pattern'}" - shift - local -ar suffixes=("${@:?'missing suffixes'}") - - for dir in $(find "$bin_path" -maxdepth 1 -name "$pattern" -type d 2>/dev/null | sort -rV); do - for bin in "${suffixes[@]}"; do - if is_python3 "$dir/$bin"; then - echo "$dir/$bin" - return - fi - done - done - } - - # Look in other standard locations. - case "${OSTYPE:?}" in - cygwin) - # Python toolchain: C:/python/Python3X/bin/python - python_binary="$(test_bins "C:/python" "Python3[0-9]*" "python3.exe" "python.exe")" - ;; - darwin*) - # Standard location: /Library/Frameworks/Python.Framework/Versions/XXX/bin/python3 - python_binary="$(test_bins "/Library/Frameworks/Python.Framework/Versions/" "[0-9]*" "bin/python3")" - ;; - *) - # MongoDB toolchain: /opt/mongodbtoolchain/vX/bin/python - python_binary="$(test_bins "/opt/mongodbtoolchain" "v[0-9]*" "bin/python3" "bin/python")" - if [ -z "$python_binary" ]; then - # Python toolchain: /opt/python/3.X/bin/python - python_binary=$(test_bins "/opt/python" "3.[0-9]*" "bin/python3" "bin/python") - fi - ;; - esac - - # Fall back to looking for a system python. - if [ -z "${python_binary}" ]; then - if is_python3 "python3"; then - python_binary="python3" - elif is_python3 "python"; then - python_binary="python" - fi - fi - - # Handle success. - if [ -n "${python_binary}" ]; then - echo "Using Python binary ${python_binary:?}" >&2 - echo "${python_binary:?}" - return - else - echo "No valid pythons found!" 
>&2 - fi -) - -# -# Usage: -# ensure_python3 -# PYTHON_BINARY=$(ensure_python3) -# PYTHON_BINARY=$(ensure_python3 2>/dev/null) -# -# If successful, print the name of the binary stdout (pipe 1). -# Otherwise, no output is printed to stdout (pipe 1). -# -# Diagnostic messages may be printed to stderr (pipe 2). Redirect to /dev/null -# with `2>/dev/null` to silence these messages. -# -# If DRIVERS_TOOLS_PYTHON is set, it will return that value. Otherwise -# it will fall back to using find_python3 to return a suitable value. -# -ensure_python3() { - # Use "$DRIVERS_TOOLS_PYTHON". - if command -v "${DRIVERS_TOOLS_PYTHON:-}" >/dev/null; then - echo "Using Python binary ${DRIVERS_TOOLS_PYTHON:?}" >&2 - echo "${DRIVERS_TOOLS_PYTHON:?}" - return - fi - - # Use find_python3. - declare python_binary="" - { - echo "Finding Python3 binary..." - python_binary="$(find_python3 2>/dev/null)" - echo "Finding Python3 binary... done." - } 1>&2 - echo "${python_binary}" -} diff --git a/.evergreen/find-python3.sh b/.evergreen/find-python3.sh new file mode 120000 index 0000000000..65027fd574 --- /dev/null +++ b/.evergreen/find-python3.sh @@ -0,0 +1 @@ +../.mod/drivers-evergreen-tools/.evergreen/find-python3.sh \ No newline at end of file diff --git a/.evergreen/mongodl.py b/.evergreen/mongodl.py deleted file mode 100755 index aebaa5d06c..0000000000 --- a/.evergreen/mongodl.py +++ /dev/null @@ -1,1258 +0,0 @@ -#!/usr/bin/env python3 -""" -Download and extract MongoDB components. - -Can also be imported and used as a module. Refer: - -- class Cache - Manage, query, and use a cache -- class CacheDB - Manage and query a cache db -- func infer_target() - Infer the download target of the host OS -- func infer_arch() - Infer the architecture of the host OS -- user_caches_root() - Where programs should put their cache data -- default_cache_dir() - Default directory for mongodl cache data - -Use '--help' for more information. -""" - -import argparse -import enum -import hashlib -import json -import logging -import os -import platform -import re -import shutil -import sqlite3 -import ssl -import sys -import tarfile -import textwrap -import time -import urllib.error -import urllib.request -import warnings -import zipfile -from collections import namedtuple -from contextlib import contextmanager -from fnmatch import fnmatch -from pathlib import Path, PurePath, PurePosixPath -from typing import ( - IO, - TYPE_CHECKING, - Any, - Callable, - Iterable, - Iterator, - NamedTuple, - Optional, - cast, -) - -LOGGER = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s") - -SSL_CONTEXT = ssl.create_default_context() -try: - import certifi - - SSL_CONTEXT.load_verify_locations(certifi.where()) -except ImportError: - pass - -# These versions are used for performance benchmarking. Do not update to a newer version. 
-PERF_VERSIONS = {"v6.0-perf": "6.0.6", "v8.0-perf": "8.0.1"} - -#: Map common distribution names to the distribution named used in the MongoDB download list -DISTRO_ID_MAP = { - "elementary": "ubuntu", - "fedora": "rhel", - "centos": "rhel", - "mint": "ubuntu", - "linuxmint": "ubuntu", - "opensuse-leap": "sles", - "opensuse": "sles", - "pop": "ubuntu", - "redhat": "rhel", - "rocky": "rhel", -} - -#: Map derived distro versions to their base distribution versions -DISTRO_VERSION_MAP = { - "elementary": { - "6": "20.04", - "6.*": "20.04", - }, - "fedora": { - "32": "8", - "33": "8", - "34": "8", - "35": "8", - "36": "8", - }, - "linuxmint": { - "19": "18.04", - "19.*": "18.04", - "20": "20.04", - "20.*": "20.04", - "21": "22.04", - "21.*": "22.04", - }, - "pop": { - "20.04": "20.04", - "22.04": "22.04", - }, -} - -#: Map distribution IDs with version fnmatch() patterns to download platform targets -DISTRO_ID_TO_TARGET = { - "ubuntu": { - "24.*": "ubuntu2404", - "22.*": "ubuntu2204", - "20.*": "ubuntu2004", - "18.*": "ubuntu1804", - "16.*": "ubuntu1604", - "14.*": "ubuntu1404", - }, - "debian": { - "9": "debian92", - "10": "debian10", - "11": "debian11", - "12": "debian12", - }, - "rhel": { - "6": "rhel6", - "6.*": "rhel6", - "7": "rhel7", - "7.*": "rhel7", - "8": "rhel8", - "8.*": "rhel8", - "9": "rhel9", - "9.*": "rhel9", - }, - "sles": { - "10.*": "suse10", - "11.*": "suse11", - "12.*": "suse12", - "13.*": "suse13", - "15.*": "suse15", - }, - "amzn": { - "2023": "amazon2023", - "2018.*": "amzn64", - "2": "amazon2", - }, -} - -# The list of valid targets that are not related to a specific Linux distro. -TARGETS_THAT_ARE_NOT_DISTROS = ["linux_i686", "linux_x86_64", "osx", "macos", "windows"] - - -def infer_target(version: Optional[str] = None) -> str: - """ - Infer the download target of the current host system. - """ - if sys.platform == "win32": - return "windows" - if sys.platform == "darwin": - # Older versions of the server used 'osx' as the target. - if version is not None: - if version.startswith("4.0") or version[0] == "3": - return "osx" - return "macos" - # Now the tricky bit - cands = (Path(p) for p in ["/etc/os-release", "/usr/lib/os-release"]) - existing = (p for p in cands if p.is_file()) - found = next(iter(existing), None) - if found: - return infer_target_from_os_release(found) - raise RuntimeError( - "We don't know how to find the default '--target'" - " option for this system. Please contribute!" - ) - - -def infer_target_from_os_release(osr: Path) -> str: - """ - Infer the download target based on the content of os-release - """ - with osr.open("r", encoding="utf-8") as f: - os_rel = f.read() - # Extract the "ID" field - id_re = re.compile(r'\bID=("?)(.*)\1') - mat = id_re.search(os_rel) - assert mat, f"Unable to detect ID from [{osr}] content:\n{os_rel}" - os_id = mat.group(2) - if os_id == "arch": - # There are no Archlinux-specific MongoDB downloads, so we'll just use - # the build for RHEL8, which is reasonably compatible with other modern - # distributions (including Arch). 
- return "rhel80" - # Extract the "VERSION_ID" field - ver_id_re = re.compile(r'VERSION_ID=("?)(.*)\1') - mat = ver_id_re.search(os_rel) - assert mat, f"Unable to detect VERSION_ID from [{osr}] content:\n{os_rel}" - ver_id = mat.group(2) - # Map the ID to the download ID - mapped_id = DISTRO_ID_MAP.get(os_id) - if mapped_id: - # Map the distro version to its upstream version - ver_mapper = DISTRO_VERSION_MAP.get(os_id, {}) - # Find the version based on a fnmatch pattern: - matching = (ver for pat, ver in ver_mapper.items() if fnmatch(ver_id, pat)) - # The default is to keep the version ID. - mapped_version = next(iter(matching), None) - if mapped_version is None: - # If this raises, a version/pattern needs to be added - # to DISTRO_VERSION_MAP - raise RuntimeError( - f"We don't know how to map {os_id} version '{ver_id}' " - f"to an upstream {mapped_id} version. Please contribute!" - ) - ver_id = mapped_version - os_id = mapped_id - os_id = os_id.lower() - if os_id not in DISTRO_ID_TO_TARGET: - raise RuntimeError( - f"We don't know how to map '{os_id}' to a distribution " - "download target. Please contribute!" - ) - # Find the download target based on a filename-style pattern: - ver_table = DISTRO_ID_TO_TARGET[os_id] - for pattern, target in ver_table.items(): - if fnmatch(ver_id, pattern): - return target - raise RuntimeError( - f"We don't know how to map '{os_id}' version '{ver_id}' to a distribution " - "download target. Please contribute!" - ) - - -def user_caches_root() -> Path: - """ - Obtain the directory for user-local caches - """ - if sys.platform == "win32": - return Path(os.environ["LocalAppData"]) - if sys.platform == "darwin": - return Path(os.environ["HOME"] + "/Library/Caches") - xdg_cache = os.getenv("XDG_CACHE_HOME") - if xdg_cache: - return Path(xdg_cache) - return Path(os.environ["HOME"] + "/.cache") - - -def default_cache_dir() -> Path: - """ - Get the path to the default directory of mongodl caches. 
- """ - return user_caches_root().joinpath("mongodl").absolute() - - -if TYPE_CHECKING: - DownloadResult = NamedTuple( - "DownloadResult", [("is_changed", bool), ("path", Path)] - ) - DownloadableComponent = NamedTuple( - "DownloadableComponent", - [ - ("version", str), - ("target", str), - ("arch", str), - ("edition", str), - ("key", str), - ("data_json", str), - ], - ) -else: - DownloadResult = namedtuple("DownloadResult", ["is_changed", "path"]) - DownloadableComponent = namedtuple( - "DownloadableComponent", - ["version", "target", "arch", "edition", "key", "data_json"], - ) - -#: Regular expression that matches the version numbers from 'full.json' -VERSION_RE = re.compile(r"(\d+)\.(\d+)\.(\d+)(?:-([a-z]+)(\d+))?") -MAJOR_VERSION_RE = re.compile(r"(\d+)\.(\d+)$") -STABLE_MAX_RC = 9999 - - -def version_tup(version: str) -> "tuple[int, int, int, int, int]": - if MAJOR_VERSION_RE.match(version): - maj, min = version.split(".") - return tuple([int(maj), int(min), 0, 0, 0]) - - mat = VERSION_RE.match(version) - assert mat, f'Failed to parse "{version}" as a version number' - major, minor, patch, tag, tagnum = list(mat.groups()) - if tag is None: - # No rc tag is greater than an equal base version with any rc tag - tag = STABLE_MAX_RC - tagnum = 0 - else: - tag = { - "alpha": 1, - "beta": 2, - "rc": 3, - }[tag] - return tuple(map(int, (major, minor, patch, tag, tagnum))) - - -def collate_mdb_version(left: str, right: str) -> int: - lhs = version_tup(left) - rhs = version_tup(right) - if lhs < rhs: - return -1 - if lhs > rhs: - return 1 - return 0 - - -def mdb_version_not_rc(version: str) -> bool: - tup = version_tup(version) - return tup[-1] == STABLE_MAX_RC - - -def mdb_version_rapid(version: str) -> bool: - tup = version_tup(version) - return tup[1] > 0 - - -class DownloadRetrier: - """Class that handles retry logic. It performs exponential backoff with a maximum delay of 10 minutes between retry attempts.""" - - def __init__(self, retries: int) -> None: - self.retries = retries - self.attempt = 0 - assert self.retries >= 0 - - def retry(self) -> bool: - if self.attempt >= self.retries: - return False - self.attempt += 1 - LOGGER.warning( - f"Download attempt failed, retrying attempt {self.attempt} of {self.retries}" - ) - ten_minutes = 600 - time.sleep(min(2 ** (self.attempt - 1), ten_minutes)) - return True - - -class CacheDB: - """ - Abstract a mongodl cache SQLite database. - """ - - def __init__(self, db: sqlite3.Connection) -> None: - self._db = db - # Use a cursor to get access to lastrowid - self._cursor = self._db.cursor() - - @staticmethod - def open(fpath: Path) -> "CacheDB": - """ - Open a caching database at the given filepath. - """ - db = sqlite3.connect(str(fpath), isolation_level=None) - db.execute(r""" - CREATE TABLE IF NOT EXISTS mdl_http_downloads ( - url TEXT NOT NULL UNIQUE, - etag TEXT, - last_modified TEXT - )""") - db.create_collation("mdb_version", collate_mdb_version) - db.create_function("mdb_version_not_rc", 1, mdb_version_not_rc) - db.create_function("mdb_version_rapid", 1, mdb_version_rapid) - return CacheDB(db) - - def __call__( - self, query: str, **params: "str | int | bool | float | None" - ) -> "Iterable[sqlite3.Row]": - """ - Execute a query with the given named parameters. - """ - return self._cursor.execute(query, params) - - @contextmanager - def transaction(self) -> "Iterator[None]": - """ - Create a context for a database transaction. 
- """ - if self._db.in_transaction: - yield - return - - with self._db: - # Must do an explicit BEGIN because isolation_level=None - self("BEGIN") - yield - - def import_json_file(self, json_file: Path) -> None: - """ - Import the given downloads content from the given JSON file - """ - with json_file.open("r", encoding="utf-8") as f: - data = json.load(f) - self.import_json_data(data) - - def import_json_data(self, data: "Any") -> None: - """ - Import the given downloads content from the given JSON-like data - """ - with self.transaction(): - self._import_json_data(data) - - def _import_json_data(self, data: "Any") -> None: - # We're reloading everything, so just drop and re-create the tables. - # Bonus: We don't have to worry about schema changes - self("DROP TABLE IF EXISTS mdl_components") - self("DROP TABLE IF EXISTS mdl_downloads") - self("DROP TABLE IF EXISTS mdl_versions") - self(r""" - CREATE TABLE mdl_versions ( - version_id INTEGER PRIMARY KEY, - date TEXT NOT NULL, - version TEXT NOT NULL, - githash TEXT NOT NULL - ) - """) - self(r""" - CREATE TABLE mdl_downloads ( - download_id INTEGER PRIMARY KEY, - version_id INTEGER NOT NULL REFERENCES mdl_versions, - target TEXT NOT NULL, - arch TEXT NOT NULL, - edition TEXT NOT NULL, - ar_url TEXT NOT NULL, - ar_debug_url TEXT, - data TEXT NOT NULL - ) - """) - self(r""" - CREATE TABLE mdl_components ( - component_id INTEGER PRIMARY KEY, - key TEXT NOT NULL, - download_id INTEGER NOT NULL REFERENCES mdl_downloads, - data NOT NULL, - UNIQUE(key, download_id) - ) - """) - - for ver in data["versions"]: - version = ver["version"] - githash = ver["githash"] - date = ver["date"] - self( - r""" - INSERT INTO mdl_versions (date, version, githash) - VALUES (:date, :version, :githash) - """, - date=date, - version=version, - githash=githash, - ) - version_id = self._cursor.lastrowid - missing = set() - for dl in ver["downloads"]: - arch = dl.get("arch", "null") - target = dl.get("target", "null") - # Normalize RHEL target names to include just the major version. - if target.startswith("rhel") and len(target) == 6: - target = target[:-1] - found = False - for distro in DISTRO_ID_TO_TARGET.values(): - if target in list(distro.values()): - found = True - if not found and target not in TARGETS_THAT_ARE_NOT_DISTROS: - missing.add(target) - edition = dl["edition"] - ar_url = dl["archive"]["url"] - ar_debug_url = dl["archive"].get("debug_symbols") - self( - r""" - INSERT INTO mdl_downloads (version_id, - target, - arch, - edition, - ar_url, - ar_debug_url, - data) - VALUES (:version_id, - :target, - :arch, - :edition, - :ar_url, - :ar_debug_url, - :data) - """, - version_id=version_id, - target=target, - arch=arch, - edition=edition, - ar_url=ar_url, - ar_debug_url=ar_debug_url, - data=json.dumps(dl), - ) - dl_id = self._cursor.lastrowid - for key, data in dl.items(): - if "url" not in data: - # Some fields aren't downloadable items. 
Skip them - continue - self( - r""" - INSERT INTO mdl_components (key, download_id, data) - VALUES (:key, :dl_id, :data) - """, - key=key, - dl_id=dl_id, - data=json.dumps(data), - ) - if missing: - LOGGER.error("Missing targets in DISTRO_ID_TO_TARGET:") - for item in missing: - LOGGER.error(f" - {item}") - if os.environ.get("VALIDATE_DISTROS") == "1": - sys.exit(1) - - def iter_available( - self, - *, - version: "str | None" = None, - target: "str | None" = None, - arch: "str | None" = None, - edition: "str | None" = None, - component: "str | None" = None, - ) -> "Iterable[DownloadableComponent]": - """ - Iterate over the matching downloadable components according to the - given attribute filters. - """ - rows = self( - r""" - SELECT version, target, arch, edition, key, mdl_components.data - FROM mdl_components, - mdl_downloads USING(download_id), - mdl_versions USING(version_id) - WHERE (:component IS NULL OR key=:component) - AND (:target IS NULL OR target=:target) - AND (:arch IS NULL OR arch=:arch) - AND (:edition IS NULL OR edition=:edition) - AND ( - CASE - WHEN :version='latest-release' - THEN 1 - WHEN :version='latest-stable' - THEN mdb_version_not_rc(version) - WHEN :version='rapid' - THEN mdb_version_rapid(version) - WHEN :version IS NULL - THEN 1 - ELSE version=:version OR version LIKE :version_pattern - END) - ORDER BY version COLLATE mdb_version DESC - """, - version=version, - version_pattern=f"{version}.%", - target=target, - arch=arch, - edition=edition, - component=component, - ) - for row in rows: - yield DownloadableComponent(*row) # type: ignore - - -class Cache: - """ - Abstraction over a mongodl downloads cache directory. - """ - - def __init__(self, dirpath: Path, db: CacheDB) -> None: - self._dirpath = dirpath - self._db = db - - @staticmethod - def open_default() -> "Cache": - """ - Open the default user-local cache directory - """ - return Cache.open_in(default_cache_dir()) - - @staticmethod - def open_in(dirpath: Path) -> "Cache": - """ - Open or create a cache directory at the given path. - """ - _mkdir(dirpath) - db = CacheDB.open(dirpath / "data.db") - return Cache(dirpath, db) - - @property - def db(self): - """The backing cache database""" - return self._db - - def download_file(self, url: str) -> DownloadResult: - """ - Obtain a local copy of the file at the given URL. 
- """ - info = self._db( - "SELECT etag, last_modified " "FROM mdl_http_downloads WHERE url=:url", - url=url, - ) - etag = None # type: str|None - modtime = None # type: str|None - etag, modtime = next(iter(info), (None, None)) # type: ignore - headers = {} # type: dict[str, str] - if etag: - headers["If-None-Match"] = etag - if modtime: - headers["If-Modified-Since"] = modtime - digest = hashlib.sha256(url.encode("utf-8")).hexdigest()[:4] - file_name = PurePosixPath(url).name - dest = self._dirpath / "files" / digest / file_name - if not dest.exists(): - headers = {} - req = urllib.request.Request(url, headers=headers) - - try: - resp = urllib.request.urlopen(req, context=SSL_CONTEXT, timeout=30) - except urllib.error.HTTPError as e: - if e.code != 304: - raise RuntimeError(f"Failed to download [{url}]") from e - assert dest.is_file(), ( - "The download cache is missing an expected file", - dest, - ) - LOGGER.info("Using cached file %s", file_name) - return DownloadResult(False, dest) - - _mkdir(dest.parent) - got_etag = resp.getheader("ETag") - got_modtime = resp.getheader("Last-Modified") - got_len = int(resp.getheader("Content-Length")) - with dest.open("wb") as of: - shutil.copyfileobj(resp, of, length=got_len) - file_size = dest.stat().st_size - if file_size != got_len: - raise RuntimeError( - f"File size: {file_size} does not match download size: {got_len}" - ) - self._db( - "INSERT OR REPLACE INTO mdl_http_downloads (url, etag, last_modified) " - "VALUES (:url, :etag, :mtime)", - url=url, - etag=got_etag, - mtime=got_modtime, - ) - return DownloadResult(True, dest) - - def refresh_full_json(self) -> None: - """ - Sync the content of the MongoDB full.json downloads list. - """ - default_source = "https://downloads.mongodb.org/full.json" - download_source = os.environ.get("MONGODB_DOWNLOAD_SOURCE", default_source) - with self._db.transaction(): - dl = self.download_file(download_source) - if not dl.is_changed: - # We still have a good cache - return - self._db.import_json_file(dl.path) - - -def _mkdir(dirpath: Path) -> None: - """ - Ensure a directory at ``dirpath``, and all parent directories thereof. - - (Cannot using Path.mkdir(parents, exist_ok) on some Python versions that - we need to support.) 
- """ - if dirpath.is_dir(): - return - par = dirpath.parent - if par != dirpath: - _mkdir(par) - try: - dirpath.mkdir() - except FileExistsError: - pass - - -def _print_list( - db: CacheDB, - version: "str | None", - target: "str | None", - arch: "str | None", - edition: "str | None", - component: "str | None", -): - if version or target or arch or edition or component: - counter = 0 - matching = db.iter_available( - version=version, - target=target, - arch=arch, - edition=edition, - component=component, - ) - for version, target, arch, edition, comp_key, comp_data in matching: - counter += 1 - print( - f"Download: {comp_key}\n" - f" Version: {version}\n" - f" Target: {target}\n" - f" Arch: {arch}\n" - f" Edition: {edition}\n" - f" Info: {comp_data}\n\n" - ) - if counter == 1: - print("Only one matching item") - elif counter == 0: - print("No items matched the listed filters") - else: - print(f"{counter} available downloadable components") - print("(Omit filter arguments for a list of available filters)") - return - - tup = next( - iter( # type: ignore - db(r""" - VALUES( - (select group_concat(arch, ', ') from (select distinct arch from mdl_downloads)), - (select group_concat(target, ', ') from (select distinct target from mdl_downloads)), - (select group_concat(edition, ', ') from (select distinct edition from mdl_downloads)), - (select group_concat(version, ', ') from ( - select distinct version from mdl_versions - ORDER BY version COLLATE mdb_version)), - (select group_concat(key, ', ') from (select distinct key from mdl_components)) - ) - """) - ) - ) # type: tuple[str, str, str, str, str] - arches, targets, editions, versions, components = tup - if "archive" in components: - components = components.split(", ") - components.append("archive-debug") - components = ", ".join(sorted(components)) - versions = "\n".join( - textwrap.wrap(versions, width=78, initial_indent=" ", subsequent_indent=" ") - ) - targets = "\n".join( - textwrap.wrap(targets, width=78, initial_indent=" ", subsequent_indent=" ") - ) - print( - "Architectures:\n" - f" {arches}\n" - "Targets:\n" - f"{targets}\n" - "Editions:\n" - f" {editions}\n" - "Versions:\n" - f"{versions}\n" - "Components:\n" - f" {components}\n" - ) - - -def infer_arch(): - a = platform.machine() or platform.processor() - # Remap platform names to the names used for downloads - return { - "AMD64": "x86_64", - }.get(a, a) - - -class ExpandResult(enum.Enum): - Empty = 0 - "No files were/would be extracted" - Okay = 1 - "One or more files were/would be extracted" - - -def _published_build_url( - cache: Cache, version: str, target: str, arch: str, edition: str, component: str -) -> tuple[str, str]: - """ - Get the URL and SHASUM for a "published" build (that is: a build that was published in full.json) - """ - value = "url" - if component == "archive-debug": - component = "archive" - value = "debug_symbols" - matching = cache.db.iter_available( - version=version, target=target, arch=arch, edition=edition, component=component - ) - tup = next(iter(matching), None) - if tup is None: - raise ValueError( - "No download was found for " - f'version="{version}" target="{target}" arch="{arch}" edition="{edition}" component="{component}"' - ) - data = json.loads(tup.data_json) - return data[value], data["sha256"] - - -def _latest_build_url( - cache: Cache, - target: str, - arch: str, - edition: str, - component: str, - branch: "str|None", -) -> str: - """ - Get the URL for an "unpublished" "latest" build. 
- - These builds aren't published in a JSON manifest, so we have to form the URL - according to the user's parameters. We might fail to download a build if - there is no matching file. - """ - # Normalize the filename components based on the download target - platform = { - "windows": "windows", - "win32": "win32", - "macos": "osx", - }.get(target, "linux") - typ = { - "windows": "windows", - "win32": "win32", - "macos": "macos", - }.get(target, "linux") - component_name = { - "archive": "mongodb", - "crypt_shared": "mongo_crypt_shared_v1", - }.get(component, component) - base = f"https://downloads.10gen.com/{platform}" - # Windows has Zip files - ext = "zip" if target == "windows" else "tgz" - # Enterprise builds have an "enterprise" infix - ent_infix = "enterprise-" if edition == "enterprise" else "" - if "rhel" in target: - # Some RHEL targets include a minor version, like "rhel93". Check the URL of the latest release. - latest_release_url, _ = _published_build_url( - cache, "latest-release", target, arch, edition, component - ) - got = re.search(r"rhel[0-9][0-9]", latest_release_url) - if got is not None: - # Rewrite target like "rhel9" to "rhel93" to match published URL. - target = got.group(0) - # Some platforms have a filename infix - tgt_infix = (target + "-") if target not in ("windows", "win32", "macos") else "" - # Non-master branch uses a filename infix - br_infix = (branch + "-") if (branch is not None and branch != "master") else "" - filename = ( - f"{component_name}-{typ}-{arch}-{ent_infix}{tgt_infix}{br_infix}latest.{ext}" - ) - return f"{base}/{filename}" - - -def _dl_component( - cache: Cache, - out_dir: Path, - version: str, - target: str, - arch: str, - edition: str, - component: str, - pattern: "str | None", - strip_components: int, - test: bool, - no_download: bool, - latest_build_branch: "str|None", - retries: int, -) -> ExpandResult: - LOGGER.info(f"Download {component} {version}-{edition} for {target}-{arch}") - if version in ("latest-build", "latest"): - dl_url = _latest_build_url( - cache, target, arch, edition, component, latest_build_branch - ) - sha256 = None - else: - try: - dl_url, sha256 = _published_build_url( - cache, version, target, arch, edition, component - ) - except ValueError: - if component == "crypt_shared" and version != "latest-release": - warnings.warn( - "No matching version of crypt_shared found, using 'latest-release'", - stacklevel=2, - ) - version = "latest-release" - # The target will be macos on latest-release. - if target == "osx": - target = "macos" - else: - raise - dl_url, sha256 = _published_build_url( - cache, version, target, arch, edition, component - ) - - # This must go to stdout to be consumed by the calling program. - print(dl_url) - - LOGGER.info("Download url: %s", dl_url) - - if no_download: - return None - - retrier = DownloadRetrier(retries) - while True: - try: - cached = cache.download_file(dl_url).path - if sha256 is not None and not _check_shasum256(cached, sha256): - raise ValueError("Incorrect shasum256 for %s", cached) - return _expand_archive( - cached, out_dir, pattern, strip_components, test=test - ) - except Exception as e: - LOGGER.exception(e) - if not retrier.retry(): - raise - - -def _check_shasum256(filename, shasum): - """Check if the file with the name "filename" matches the SHA-256 sum - in "shasum".""" - h = hashlib.sha256() - # This will raise an exception if the file doesn't exist. Catching - # and handling it is left as an exercise for the reader. 
- with open(filename, "rb") as fh: - # Read and hash the file in 4K chunks. Reading the whole - # file at once might consume a lot of memory if it is - # large. - while True: - data = fh.read(4096) - if len(data) == 0: - break - h.update(data) - return shasum == h.hexdigest() - - -def _pathjoin(items: "Iterable[str]") -> PurePath: - """ - Return a path formed by joining the given path components - """ - return PurePath("/".join(items)) - - -def _test_pattern(path: PurePath, pattern: "PurePath | None") -> bool: - """ - Test whether the given 'path' string matches the globbing pattern 'pattern'. - - Supports the '**' pattern to match any number of intermediate directories. - """ - if pattern is None: - return True - # Split pattern into parts - pattern_parts = pattern.parts - if not pattern_parts: - # An empty pattern always matches - return True - path_parts = path.parts - if not path_parts: - # Non-empty pattern requires more path components - return False - pattern_head = pattern_parts[0] - pattern_tail = _pathjoin(pattern_parts[1:]) - if pattern_head == "**": - # Special "**" pattern matches any suffix of the path - # Generate each suffix: - tails = (path_parts[i:] for i in range(len(path_parts))) - # Test if any of the suffixes match the remainder of the pattern: - return any(_test_pattern(_pathjoin(t), pattern_tail) for t in tails) - if not fnmatch(path.parts[0], pattern_head): - # Leading path component cannot match - return False - # The first component matches. Test the remainder: - return _test_pattern(_pathjoin(path_parts[1:]), pattern_tail) - - -def _expand_archive( - ar: Path, dest: Path, pattern: "str | None", strip_components: int, test: bool -) -> ExpandResult: - """ - Expand the archive members from 'ar' into 'dest'. If 'pattern' is not-None, - only extracts members that match the pattern. - """ - LOGGER.debug(f"Extract from: [{ar.name}]") - LOGGER.debug(f" into: [{dest}]") - if ar.suffix == ".zip": - n_extracted = _expand_zip(ar, dest, pattern, strip_components, test=test) - elif ar.suffix == ".tgz": - n_extracted = _expand_tgz(ar, dest, pattern, strip_components, test=test) - else: - raise RuntimeError("Unknown archive file extension: " + ar.suffix) - verb = "would be" if test else "were" - if n_extracted == 0: - if pattern and strip_components: - LOGGER.warning( - f"NOTE: No files {verb} extracted. Likely all files {verb} " - f'excluded by "--only={pattern}" and/or "--strip-components={strip_components}"' - ) - elif pattern: - LOGGER.warning( - f"NOTE: No files {verb} extracted. Likely all files {verb} " - f'excluded by the "--only={pattern}" filter' - ) - elif strip_components: - LOGGER.warning( - f"NOTE: No files {verb} extracted. Likely all files {verb} " - f'excluded by "--strip-components={strip_components}"' - ) - else: - LOGGER.warning(f"NOTE: No files {verb} extracted. 
Empty archive?") - return ExpandResult.Empty - if n_extracted == 1: - LOGGER.info(f"One file {verb} extracted") - return ExpandResult.Okay - LOGGER.info(f"{n_extracted} files {verb} extracted") - return ExpandResult.Okay - - -def _expand_tgz( - ar: Path, dest: Path, pattern: "str | None", strip_components: int, test: bool -) -> int: - "Expand a tar.gz archive" - n_extracted = 0 - with tarfile.open(str(ar), "r:*") as tf: - for mem in tf.getmembers(): - n_extracted += _maybe_extract_member( - dest, - PurePath(mem.name), - pattern, - strip_components, - mem.isdir(), - lambda: cast("IO[bytes]", tf.extractfile(mem)), # noqa: B023 - mem.mode | 0o222, # make sure file is writable - test=test, - ) - return n_extracted - - -def _expand_zip( - ar: Path, dest: Path, pattern: "str | None", strip_components: int, test: bool -) -> int: - "Expand a .zip archive." - n_extracted = 0 - with zipfile.ZipFile(str(ar), "r") as zf: - for item in zf.infolist(): - n_extracted += _maybe_extract_member( - dest, - PurePath(item.filename), - pattern, - strip_components, - item.filename.endswith("/"), ## Equivalent to: item.is_dir(), - lambda: zf.open(item, "r"), # noqa: B023 - 0o777, - test=test, - ) - return n_extracted - - -def _maybe_extract_member( - out: Path, - relpath: PurePath, - pattern: "str | None", - strip: int, - is_dir: bool, - opener: "Callable[[], IO[bytes]]", - modebits: int, - test: bool, -) -> int: - """ - Try to extract an archive member according to the given arguments. - - :return: Zero if the file was excluded by filters, one otherwise. - """ - relpath = PurePath(relpath) - LOGGER.debug(" | {:-<65} |".format(str(relpath) + " ")) - if len(relpath.parts) <= strip: - # Not enough path components - LOGGER.debug(" (Excluded by --strip-components)") - return 0 - if not _test_pattern(relpath, PurePath(pattern) if pattern else None): - # Doesn't match our pattern - LOGGER.debug(" (excluded by pattern)") - return 0 - stripped = _pathjoin(relpath.parts[strip:]) - dest = Path(out) / stripped - LOGGER.debug(f"-> [{dest}]") - if test: - # We are running in test-only mode: Do not do anything - return 1 - if is_dir: - _mkdir(dest) - return 1 - with opener() as infile: - _mkdir(dest.parent) - with dest.open("wb") as outfile: - shutil.copyfileobj(infile, outfile) - os.chmod(str(dest), modebits) - return 1 - - -def main(argv=None): - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter - ) - parser.add_argument( - "--cache-dir", - type=Path, - default=default_cache_dir(), - help="Directory where download caches and metadata will be stored", - ) - parser.add_argument( - "--verbose", "-v", action="store_true", help="Whether to log at the DEBUG level" - ) - parser.add_argument( - "--quiet", "-q", action="store_true", help="Whether to log at the WARNING level" - ) - grp = parser.add_argument_group("List arguments") - grp.add_argument( - "--list", - action="store_true", - help="List available components, targets, editions, and " - "architectures. Download arguments will act as filters.", - ) - dl_grp = parser.add_argument_group( - "Download arguments", - description="Select what to download and extract. " - "Some arguments will be inferred " - "based on the host system.", - ) - dl_grp.add_argument( - "--target", - "-T", - default="auto", - help="The target platform for which to download. 
" - 'Use "--list" to list available targets.', - ) - dl_grp.add_argument( - "--arch", "-A", default="auto", help="The architecture for which to download" - ) - dl_grp.add_argument( - "--edition", - "-E", - default="enterprise", - help='The edition of the product to download (Default is "enterprise"). ' - 'Use "--list" to list available editions.', - ) - dl_grp.add_argument( - "--out", - "-o", - help="The directory in which to download components.", - type=Path, - ) - dl_grp.add_argument( - "--version", - "-V", - default="latest-build", - help='The product version to download. Use "latest-release" to download ' - "the newest available version (including release candidates). Use " - '"latest-stable" to download the newest version, excluding release ' - 'candidates. Use "rapid" to download the latest rapid release. ' - ' Use "latest-build" or "latest" to download the most recent build of ' - 'the named component. Use "--list" to list available versions.', - ) - dl_grp.add_argument( - "--component", - "-C", - default="archive", - help="The component to download. " 'Use "--list" to list available components.', - ) - dl_grp.add_argument( - "--only", - help="Restrict extraction to items that match the given globbing expression. " - 'The full archive member path is matched, so a pattern like "*.exe" ' - 'will only match "*.exe" at the top level of the archive. To match ' - 'recursively, use the "**" pattern to match any number of ' - "intermediate directories.", - ) - dl_grp.add_argument( - "--strip-path-components", - "-p", - dest="strip_components", - metavar="N", - default=0, - type=int, - help="Strip the given number of path components from archive members before " - "extracting into the destination. The relative path of the archive " - "member will be used to form the destination path. For example, a " - "member named [bin/mongod.exe] will be extracted to [/bin/mongod.exe]. " - "Using --strip-components=1 will remove the first path component, extracting " - "such an item to [/mongod.exe]. If the path has fewer than N components, " - "that archive member will be ignored.", - ) - dl_grp.add_argument( - "--no-download", - action="store_true", - help="Do not download the file, only print its url.", - ) - dl_grp.add_argument( - "--test", - action="store_true", - help="Do not extract or place any files/directories. 
" - "Only print what will be extracted without placing any files.", - ) - dl_grp.add_argument( - "--empty-is-error", - action="store_true", - help="If all files are excluded by other filters, " - "treat that situation as an error and exit non-zero.", - ) - dl_grp.add_argument( - "--latest-build-branch", - help="Specify the name of the branch to " - 'download the with "--version=latest-build"', - metavar="BRANCH_NAME", - ) - dl_grp.add_argument("--retries", help="The number of times to retry", default=0) - args = parser.parse_args(argv) - cache = Cache.open_in(args.cache_dir) - cache.refresh_full_json() - - version = args.version - if version in PERF_VERSIONS: - version = PERF_VERSIONS[version] - target = args.target - if target == "auto": - target = infer_target(version) - arch = args.arch - if arch == "auto": - arch = infer_arch() - - if args.verbose: - LOGGER.setLevel(logging.DEBUG) - elif args.quiet: - LOGGER.setLevel(logging.WARNING) - - if args.list: - _print_list(cache.db, version, target, arch, args.edition, args.component) - return - - out = args.out or Path.cwd() - out = out.absolute() - - result = _dl_component( - cache, - out, - version=version, - target=target, - arch=arch, - edition=args.edition, - component=args.component, - pattern=args.only, - strip_components=args.strip_components, - test=args.test, - no_download=args.no_download, - latest_build_branch=args.latest_build_branch, - retries=int(args.retries), - ) - if result is ExpandResult.Empty and args.empty_is_error: - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/.evergreen/mongodl.py b/.evergreen/mongodl.py new file mode 120000 index 0000000000..ce13a359c1 --- /dev/null +++ b/.evergreen/mongodl.py @@ -0,0 +1 @@ +../.mod/drivers-evergreen-tools/.evergreen/mongodl.py \ No newline at end of file diff --git a/.evergreen/mongosh_dl.py b/.evergreen/mongosh_dl.py deleted file mode 100755 index 3e2a2e6968..0000000000 --- a/.evergreen/mongosh_dl.py +++ /dev/null @@ -1,211 +0,0 @@ -#!/usr/bin/env python3 -""" -Download and extract MongoSH. - -Use '--help' for more information. 
-""" - -import argparse -import json -import logging -import re -import subprocess -import sys -from pathlib import Path - -LOGGER = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s") - -HERE = Path(__file__).absolute().parent -sys.path.insert(0, str(HERE)) -from mongodl import LOGGER as DL_LOGGER -from mongodl import ( - Cache, - DownloadRetrier, - ExpandResult, - _expand_archive, - default_cache_dir, - infer_arch, -) - - -def _get_latest_version(cache: Cache, retries: int) -> str: - dl_url = "https://downloads.mongodb.com/compass/mongosh.json" - retrier = DownloadRetrier(retries) - while True: - try: - cached = cache.download_file(dl_url).path - data = json.loads(cached.read_text()) - return data["versions"][0]["version"] - except Exception as e: - LOGGER.exception(e) - if not retrier.retry(): - raise - - -def _download( - cache: Cache, - out_dir: Path, - version: str, - target: str, - arch: str, - pattern: "str | None", - strip_components: int, - test: bool, - no_download: bool, - retries: int, -) -> int: - LOGGER.info(f"Download {version} mongosh for {target}-{arch}") - if version == "latest": - version = _get_latest_version(cache, retries) - if arch == "x86_64": - arch = "x64" - elif arch == "aarch64": - arch = "arm64" - if target == "linux": - suffix = ".tgz" - if sys.platform == "linux" and arch in ["x64", "arm64"]: - openssl = subprocess.check_output(["openssl", "version"]) - if "3." in openssl.decode("utf-8"): - suffix = "-openssl3.tgz" - elif re.match("1.1.1[e-w] ", openssl.decode("utf-8")): - suffix = "-openssl11.tgz" - else: - suffix = ".zip" - dl_url = f"https://downloads.mongodb.com/compass/mongosh-{version}-{target}-{arch}{suffix}" - # This must go to stdout to be consumed by the calling program. - print(dl_url) - LOGGER.info("Download url: %s", dl_url) - - if no_download: - return ExpandResult.Okay - - retrier = DownloadRetrier(retries) - while True: - try: - cached = cache.download_file(dl_url).path - return _expand_archive( - cached, out_dir, pattern, strip_components, test=test - ) - except Exception as e: - LOGGER.exception(e) - if not retrier.retry(): - raise - - -def main(argv=None): - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter - ) - parser.add_argument( - "--verbose", "-v", action="store_true", help="Whether to log at the DEBUG level" - ) - parser.add_argument( - "--quiet", "-q", action="store_true", help="Whether to log at the WARNING level" - ) - parser.add_argument( - "--cache-dir", - type=Path, - default=default_cache_dir(), - help="Directory where download caches and metadata will be stored", - ) - dl_grp = parser.add_argument_group( - "Download arguments", - description="Select what to download and extract. " - "Some arguments will be inferred " - "based on the host system.", - ) - dl_grp.add_argument( - "--target", - "-T", - default="auto", - help="The target platform for which to download. " - 'Use "--list" to list available targets.', - ) - dl_grp.add_argument( - "--arch", "-A", default="auto", help="The architecture for which to download" - ) - dl_grp.add_argument( - "--out", - "-o", - help="The directory in which to download components.", - type=Path, - ) - dl_grp.add_argument( - "--version", - "-V", - default="latest", - help='The product version to download. 
Use "latest" to download ' - "the newest available stable version.", - ) - dl_grp.add_argument( - "--only", - help="Restrict extraction to items that match the given globbing expression. " - 'The full archive member path is matched, so a pattern like "*.exe" ' - 'will only match "*.exe" at the top level of the archive. To match ' - 'recursively, use the "**" pattern to match any number of ' - "intermediate directories.", - ) - dl_grp.add_argument( - "--strip-path-components", - "-p", - dest="strip_components", - metavar="N", - default=0, - type=int, - help="Strip the given number of path components from archive members before " - "extracting into the destination. The relative path of the archive " - "member will be used to form the destination path. For example, a " - "member named [bin/mongod.exe] will be extracted to [/bin/mongod.exe]. " - "Using --strip-components=1 will remove the first path component, extracting " - "such an item to [/mongod.exe]. If the path has fewer than N components, " - "that archive member will be ignored.", - ) - dl_grp.add_argument( - "--no-download", - action="store_true", - help="Do not download the file, only print its url.", - ) - dl_grp.add_argument( - "--test", - action="store_true", - help="Do not extract or place any files/directories. " - "Only print what will be extracted without placing any files.", - ) - dl_grp.add_argument("--retries", help="The number of times to retry", default=0) - args = parser.parse_args(argv) - - target = args.target - if target == "auto": - target = sys.platform - arch = args.arch - if arch == "auto": - arch = infer_arch() - out = args.out or Path.cwd() - out = out.absolute() - if args.verbose: - LOGGER.setLevel(logging.DEBUG) - DL_LOGGER.setLevel(logging.DEBUG) - elif args.quiet: - LOGGER.setLevel(logging.WARNING) - DL_LOGGER.setLevel(logging.WARNING) - - cache = Cache.open_in(args.cache_dir) - result = _download( - cache, - out, - version=args.version, - target=target, - arch=arch, - pattern=args.only, - strip_components=args.strip_components, - test=args.test, - no_download=args.no_download, - retries=int(args.retries), - ) - if result is ExpandResult.Empty: - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/.evergreen/mongosh_dl.py b/.evergreen/mongosh_dl.py new file mode 120000 index 0000000000..9a234910ec --- /dev/null +++ b/.evergreen/mongosh_dl.py @@ -0,0 +1 @@ +../.mod/drivers-evergreen-tools/.evergreen/mongosh_dl.py \ No newline at end of file diff --git a/gemfiles/standard.rb b/gemfiles/standard.rb index 3e51fe0c3f..5c45d27260 100644 --- a/gemfiles/standard.rb +++ b/gemfiles/standard.rb @@ -41,7 +41,9 @@ def standard_dependencies gem 'ruby-debug-ide' end end - gem 'ostruct' if RUBY_VERSION >= '3.4' + if RUBY_VERSION >= '3.4' + gem 'ostruct' + end end group :testing do From 77717f90cf74cc4049e92d1af7654352316ab96d Mon Sep 17 00:00:00 2001 From: Dmitry Rybakov Date: Thu, 15 Jan 2026 14:44:45 +0100 Subject: [PATCH 9/9] Fix rubocop complaints --- gemfiles/standard.rb | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/gemfiles/standard.rb b/gemfiles/standard.rb index 5c45d27260..3e51fe0c3f 100644 --- a/gemfiles/standard.rb +++ b/gemfiles/standard.rb @@ -41,9 +41,7 @@ def standard_dependencies gem 'ruby-debug-ide' end end - if RUBY_VERSION >= '3.4' - gem 'ostruct' - end + gem 'ostruct' if RUBY_VERSION >= '3.4' end group :testing do
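Since the vendored copies above are replaced by symlinks into .mod/drivers-evergreen-tools, callers keep using the same .evergreen/ paths as before. A rough way to sanity-check the symlinked helpers locally (a sketch, not part of the patch; it assumes the drivers-evergreen-tools checkout exists under .mod/ and that the upstream scripts keep the function and flag names shown in the deleted copies above):

    # Resolve a server download URL without downloading anything, via the symlinked mongodl.py:
    python3 .evergreen/mongodl.py --version 8.2 --component archive --no-download

    # The shell helpers are still sourced from the same path as before:
    . .evergreen/download-mongodb.sh
    get_mongodb_download_url_for "$(get_distro)" "8.2" > /dev/null
    echo "$MONGODB_DOWNLOAD_URL"

Pointing the old paths at the shared drivers-evergreen-tools copies keeps existing Evergreen tasks working unchanged while removing the duplicated scripts from this repository.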