From 2bbe04ef903edea7efd32018bb492dd27da11376 Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 15:15:59 -0600 Subject: [PATCH 01/33] more efficient --- .github/actions/setup-go/action.yml | 3 +- .github/workflows/ci.yml | 361 ++++++++++++++++------------ go.mod | 12 +- go.sum | 24 +- harmony/harmonydb/harmonydb.go | 9 + harmony/harmonydb/itest_registry.go | 35 +++ harmony/harmonydb/testutil/setup.go | 177 ++++++++++++++ itests/alertnow_test.go | 3 +- itests/curio_test.go | 3 +- itests/dyncfg_test.go | 3 +- itests/harmonydb_test.go | 7 +- lib/paths/local_test.go | 3 +- lib/paths/remote_test.go | 3 +- tools/tools.go | 14 ++ 14 files changed, 483 insertions(+), 174 deletions(-) create mode 100644 harmony/harmonydb/itest_registry.go create mode 100644 harmony/harmonydb/testutil/setup.go create mode 100644 tools/tools.go diff --git a/.github/actions/setup-go/action.yml b/.github/actions/setup-go/action.yml index b23c1bea5..150106dc6 100644 --- a/.github/actions/setup-go/action.yml +++ b/.github/actions/setup-go/action.yml @@ -13,4 +13,5 @@ runs: uses: actions/setup-go@v5 with: go-version: ${{ inputs.go-version }} - cache: false + cache: true + cache-dependency-path: go.sum diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3be08478f..ab7a91a1e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,24 +24,20 @@ jobs: go-version: ${{ env.GO_VERSION }} - name: Install actionlint - run: go install github.com/rhysd/actionlint/cmd/actionlint@latest + run: go install github.com/rhysd/actionlint/cmd/actionlint - name: Run actionlint run: actionlint -shellcheck= -pyflakes= - setup-params: - runs-on: [self-hosted, docker] - needs: [ci-lint] - steps: - - name: Fetch parameters - run: lotus fetch-params 8388608 - shell: bash - - build-mainnet: + # Build FFI (filecoin-ffi) - rarely changes, heavily cached + # Go modules are cached separately via setup-go action (based on go.sum) + build-ffi: runs-on: ubuntu-latest needs: [ci-lint] steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -51,47 +47,64 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} + - name: Generate FFI cache key + id: ffi-cache-key run: | - make deps - shell: bash + FFI_COMMIT=$(git -C extern/filecoin-ffi rev-parse HEAD) + echo "key=ffi-${{ runner.os }}-${{ env.GO_VERSION }}-${FFI_COMMIT}" >> $GITHUB_OUTPUT - - name: Build Go - run: make build - shell: bash - - build-calibnet: - runs-on: ubuntu-latest - needs: [ ci-lint ] - steps: - - uses: actions/checkout@v4 - - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Cache FFI build + id: cache-ffi + uses: actions/cache@v4 with: - go-version: ${{ env.GO_VERSION }} - - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Install FFI + path: | + extern/filecoin-ffi/.install-filcrypto + extern/filecoin-ffi/filcrypto.h + extern/filecoin-ffi/libfilcrypto.a + extern/filecoin-ffi/filcrypto.pc + build/.filecoin-install + build/.blst-install + extern/supraseal/.install-blst + extern/supraseal/deps/blst + key: ${{ steps.ffi-cache-key.outputs.key }} + + - name: Build FFI + if: steps.cache-ffi.outputs.cache-hit != 'true' env: GITHUB_TOKEN: ${{ github.token }} - run: | - make deps - shell: bash + run: make deps - - name: Build Go - run: make build + - name: Upload FFI artifacts + uses: actions/upload-artifact@v4 + with: + name: ffi-deps + path: | + 
extern/filecoin-ffi/.install-filcrypto + extern/filecoin-ffi/filcrypto.h + extern/filecoin-ffi/libfilcrypto.a + extern/filecoin-ffi/filcrypto.pc + build/.filecoin-install + build/.blst-install + extern/supraseal/.install-blst + extern/supraseal/deps/blst + retention-days: 1 + + setup-params: + runs-on: [self-hosted, docker] + needs: [ci-lint] + steps: + - name: Fetch parameters + run: lotus fetch-params 8388608 shell: bash + # Debug build - separate so tests can depend on it without waiting for other variants build-debug: runs-on: ubuntu-latest - needs: [ ci-lint ] + needs: [build-ffi] steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -101,22 +114,47 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - make deps - shell: bash + - name: Download FFI artifacts + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . - - name: Build Go - run: make build - shell: bash + - name: Restore FFI marker files + run: touch build/.filecoin-install build/.blst-install || true + + - name: Build debug + run: make debug - build-2k: + - name: Upload build artifacts + uses: actions/upload-artifact@v4 + with: + name: binaries-debug + path: | + curio + sptool + retention-days: 1 + + # Other build variants - run in parallel, tests don't wait for these + build: runs-on: ubuntu-latest - needs: [ ci-lint ] + needs: [build-ffi] + strategy: + fail-fast: false + matrix: + variant: + - name: mainnet + target: build + - name: calibnet + target: calibnet + - name: 2k + target: 2k + - name: forest + target: forest-test steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -126,22 +164,35 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - make deps - shell: bash + - name: Download FFI artifacts + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . - - name: Build Go - run: make build - shell: bash + - name: Restore FFI marker files + run: touch build/.filecoin-install build/.blst-install || true - build-forest: - runs-on: ubuntu-latest - needs: [ ci-lint ] + - name: Build ${{ matrix.variant.name }} + run: make ${{ matrix.variant.target }} + + - name: Upload build artifacts + uses: actions/upload-artifact@v4 + with: + name: binaries-${{ matrix.variant.name }} + path: | + curio + sptool + retention-days: 1 + + # Unit tests - no database needed + test-unit: + runs-on: [self-hosted, docker] + needs: [build-debug, setup-params] steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -151,38 +202,48 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} + - name: Download FFI artifacts + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . + + - name: Download debug binaries + uses: actions/download-artifact@v4 + with: + name: binaries-debug + path: . + + - name: Restore FFI marker files run: | - make deps - shell: bash + touch build/.filecoin-install build/.blst-install || true + chmod +x curio sptool || true - - name: Build Forest - run: make forest-test - shell: bash + - name: Run unit tests + run: go test -v --tags=debug -timeout 30m `go list ./... 
| grep -v curio/itests` - test: + # Integration tests - need YugabyteDB + test-itest: runs-on: [self-hosted, docker] - needs: [setup-params] + needs: [build-debug, setup-params] env: CONTAINER_NAME: yugabyte-${{ github.run_id }}-${{ matrix.test-suite.name }} strategy: - fail-fast: false # Continue running even if one test fails + fail-fast: false matrix: test-suite: - - name: test-itest-curio + - name: itest-curio target: "./itests/curio_test.go" - - name: test-all - target: "`go list ./... | grep -v curio/itests`" - - name: test-itest-harmonyDB + - name: itest-harmonyDB target: "./itests/harmonydb_test.go" - - name: test-itest-alertnow + - name: itest-alertnow target: "./itests/alertnow_test.go" - - name: test-itest-pdp-prove + - name: itest-pdp-prove target: "./itests/pdp_prove_test.go" steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -192,17 +253,26 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} + - name: Download FFI artifacts + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . + + - name: Download debug binaries + uses: actions/download-artifact@v4 + with: + name: binaries-debug + path: . + + - name: Restore FFI marker files run: | - make deps - shell: bash + touch build/.filecoin-install build/.blst-install || true + chmod +x curio sptool || true - - name: Start YugabyteDB container with dynamic ports + - name: Start YugabyteDB container id: start-yugabyte run: | - # Start YugabyteDB container with dynamic port mapping for PostgreSQL and YCQL docker run --rm --name ${{ env.CONTAINER_NAME }} -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false - name: Wait for YugabyteDB to start @@ -218,29 +288,28 @@ jobs: - name: Get YugabyteDB container IP id: get-yb-ip run: | - # Retrieve internal bridge IP of YugabyteDB container YB_IP=$(docker inspect $CONTAINER_NAME --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT - - name: Run tests + - name: Run integration tests env: - CURIO_HARMONYDB_HOSTS: ${{ steps.get-yb-ip.outputs.yb_ip }} # Use internal IP for DB host + CURIO_HARMONYDB_HOSTS: ${{ steps.get-yb-ip.outputs.yb_ip }} LOTUS_HARMONYDB_HOSTS: ${{ steps.get-yb-ip.outputs.yb_ip }} run: | - echo "Using YugabyteDB Container IP: ${{env.CURIO_HARMONYDB_HOSTS}}" - export CURIO_HARMONYDB_HOSTS=${{ env.CURIO_HARMONYDB_HOSTS }} - export LOTUS_HARMONYDB_HOSTS=${{ env.CURIO_HARMONYDB_HOSTS }} + echo "Using YugabyteDB Container IP: ${{ env.CURIO_HARMONYDB_HOSTS }}" go test -v --tags=debug -timeout 30m ${{ matrix.test-suite.target }} - name: Stop YugabyteDB container - if: always() # Ensure this runs even if the tests fail + if: always() run: docker stop ${{ env.CONTAINER_NAME }} lint: runs-on: ubuntu-latest - needs: [ci-lint] + needs: [build-ffi] steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -250,12 +319,14 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - make deps - shell: bash + - name: Download FFI artifacts + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . 
+ + - name: Restore FFI marker files + run: touch build/.filecoin-install build/.blst-install || true - name: Install golangci-lint run: | @@ -279,8 +350,7 @@ jobs: go-version: ${{ env.GO_VERSION }} - name: Check gofmt - run: | - go fmt ./... + run: go fmt ./... shell: bash - name: Git diff check @@ -294,25 +364,22 @@ jobs: build-supraseal-ubuntu24: runs-on: ubuntu-24.04 needs: [ci-lint] - env: GCC_VERSION: "12" - steps: - name: Checkout repository uses: actions/checkout@v4 with: submodules: recursive - + - name: Free up disk space run: | - # Remove unnecessary packages to free up space for CUDA installation sudo apt-get clean sudo rm -rf /usr/share/dotnet sudo rm -rf /opt/ghc sudo rm -rf "/usr/local/share/boost" sudo rm -rf "$AGENT_TOOLSDIRECTORY" - + - name: Install system dependencies run: | sudo apt-get update @@ -333,13 +400,11 @@ jobs: python3 python3-pip python3-dev \ curl wget git \ xxd - + - name: Set up Python virtual environment run: | - # Python tools will be installed in venv by build.sh - # Just ensure python3-venv is available python3 -m venv --help > /dev/null || sudo apt-get install -y python3-venv - + - name: Set up GCC 12 as default run: | sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 100 @@ -348,7 +413,7 @@ jobs: sudo update-alternatives --set g++ /usr/bin/g++-12 gcc --version g++ --version - + - name: Cache CUDA installation id: cache-cuda uses: actions/cache@v4 @@ -357,23 +422,17 @@ jobs: /usr/local/cuda /usr/local/cuda-* key: cuda-toolkit-ubuntu-24.04-${{ runner.os }}-v1 - + - name: Install CUDA Toolkit from NVIDIA Repository if: steps.cache-cuda.outputs.cache-hit != 'true' run: | - # Install CUDA using official NVIDIA repository for Ubuntu 24.04 - # Source: https://developer.nvidia.com/cuda-downloads?target_os=Linux&target_arch=x86_64&Distribution=Ubuntu&target_version=24.04&target_type=deb_local - - # Download and install the CUDA keyring package wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/x86_64/cuda-keyring_1.1-1_all.deb sudo dpkg -i cuda-keyring_1.1-1_all.deb rm cuda-keyring_1.1-1_all.deb - # Update package list and install CUDA toolkit sudo apt-get update sudo apt-get -y install cuda-toolkit - # Verify installation and find CUDA location if [ -d "/usr/local/cuda" ]; then echo "CUDA installed at /usr/local/cuda" ls -la /usr/local/cuda*/bin/nvcc || true @@ -381,7 +440,7 @@ jobs: echo "ERROR: CUDA installation not found" exit 1 fi - + - name: Set up CUDA environment run: | # Verify CUDA installation exists @@ -395,18 +454,15 @@ jobs: export CUDA_HOME=/usr/local/cuda export LD_LIBRARY_PATH="/usr/local/cuda/lib64:${LD_LIBRARY_PATH}" - # Verify nvcc is available nvcc --version - # Set environment for subsequent steps echo "/usr/local/cuda/bin" >> $GITHUB_PATH echo "CUDA_HOME=/usr/local/cuda" >> $GITHUB_ENV echo "LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH" >> $GITHUB_ENV - + - name: Install libconfig++ run: | sudo apt-get install -y libconfig++-dev || { - # If not available in repos, build from source wget https://hyperrealm.github.io/libconfig/dist/libconfig-1.7.3.tar.gz tar -xzf libconfig-1.7.3.tar.gz cd libconfig-1.7.3 @@ -417,11 +473,10 @@ jobs: cd .. 
rm -rf libconfig-1.7.3* } - + - name: Install GMP library - run: | - sudo apt-get install -y libgmp-dev - + run: sudo apt-get install -y libgmp-dev + - name: Cache Python venv id: cache-venv uses: actions/cache@v4 @@ -430,7 +485,7 @@ jobs: key: supraseal-venv-ubuntu24-${{ hashFiles('extern/supraseal/build.sh') }} restore-keys: | supraseal-venv-ubuntu24- - + - name: Cache SPDK build id: cache-spdk uses: actions/cache@v4 @@ -439,24 +494,21 @@ jobs: key: spdk-v24.05-gcc12-ubuntu24-${{ hashFiles('extern/supraseal/build.sh') }} restore-keys: | spdk-v24.05-gcc12-ubuntu24- - + - name: Build Supraseal working-directory: extern/supraseal run: | - # Ensure we're using GCC 12 and CUDA export CC=gcc-12 export CXX=g++-12 export CUDA=/usr/local/cuda export PATH=/usr/local/cuda/bin:$PATH export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH - # Verify CUDA is accessible which nvcc nvcc --version - # Run the build script (creates and uses Python venv internally) ./build.sh - + - name: Verify binaries working-directory: extern/supraseal run: | @@ -477,7 +529,7 @@ jobs: echo "" echo "✅ All binaries built successfully!" - + - name: Upload build artifacts uses: actions/upload-artifact@v4 with: @@ -489,14 +541,14 @@ jobs: extern/supraseal/bin/tree_r_cpu extern/supraseal/bin/tree_d_cpu retention-days: 30 - + - name: Upload library artifact uses: actions/upload-artifact@v4 with: name: supraseal-library-ubuntu24-gcc12-cuda path: extern/supraseal/obj/libsupraseal.a retention-days: 30 - + - name: Build summary run: | echo "### 🎉 Supraseal Build Summary" >> $GITHUB_STEP_SUMMARY @@ -515,9 +567,11 @@ jobs: gen-check: runs-on: ubuntu-latest - needs: [ci-lint] + needs: [build-ffi] steps: - uses: actions/checkout@v4 + with: + submodules: recursive - name: Setup Go uses: ./.github/actions/setup-go @@ -527,27 +581,22 @@ jobs: - name: Install Dependencies uses: ./.github/actions/install-deps - - name: Install goimports - run: go install golang.org/x/tools/cmd/goimports - shell: bash - - - name: Install cbor-gen-for - run: go install github.com/hannahhoward/cbor-gen-for - shell: bash - - - name: Install swag cli - run: go install github.com/swaggo/swag/cmd/swag@v1.16.4 - shell: bash + - name: Download FFI artifacts + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . 
-# - name: Install gotext -# run: go install golang.org/x/text/cmd/gotext -# shell: bash + - name: Restore FFI marker files + run: touch build/.filecoin-install build/.blst-install || true - - name: Install FFI - env: - GITHUB_TOKEN: ${{ github.token }} + - name: Install Go tools run: | - make deps + # Tools are defined in tools/tools.go and versioned in go.mod + go install golang.org/x/tools/cmd/goimports & + go install github.com/hannahhoward/cbor-gen-for & + go install github.com/swaggo/swag/cmd/swag & + wait shell: bash - name: Generate Code diff --git a/go.mod b/go.mod index f065850a9..86464cc43 100644 --- a/go.mod +++ b/go.mod @@ -97,6 +97,7 @@ require ( github.com/prometheus/client_golang v1.23.2 github.com/puzpuzpuz/xsync/v2 v2.5.1 github.com/raulk/clock v1.1.0 + github.com/rhysd/actionlint v1.7.9 github.com/samber/lo v1.47.0 github.com/schollz/progressbar/v3 v3.18.0 github.com/sirupsen/logrus v1.9.3 @@ -115,8 +116,8 @@ require ( golang.org/x/crypto v0.43.0 golang.org/x/exp v0.0.0-20251009144603-d2f985daa21b golang.org/x/net v0.46.0 - golang.org/x/sync v0.17.0 - golang.org/x/sys v0.37.0 + golang.org/x/sync v0.18.0 + golang.org/x/sys v0.38.0 golang.org/x/term v0.36.0 golang.org/x/text v0.30.0 golang.org/x/tools v0.38.0 @@ -140,6 +141,7 @@ require ( github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bits-and-blooms/bitset v1.20.0 // indirect + github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect github.com/buger/jsonparser v1.1.1 // indirect github.com/cespare/xxhash v1.1.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -273,7 +275,8 @@ require ( github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/mattn/go-runewidth v0.0.17 // indirect + github.com/mattn/go-shellwords v1.0.12 // indirect github.com/mattn/go-sqlite3 v1.14.32 // indirect github.com/miekg/dns v1.1.68 // indirect github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect @@ -323,6 +326,7 @@ require ( github.com/quic-go/webtransport-go v0.9.0 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect + github.com/robfig/cron/v3 v3.0.1 // indirect github.com/rogpeppe/go-internal v1.14.1 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect @@ -368,6 +372,7 @@ require ( go.uber.org/fx v1.24.0 // indirect go.uber.org/mock v0.6.0 // indirect go.yaml.in/yaml/v2 v2.4.3 // indirect + go.yaml.in/yaml/v4 v4.0.0-rc.3 // indirect go4.org v0.0.0-20230225012048-214862532bf5 // indirect golang.org/x/mod v0.29.0 // indirect golang.org/x/telemetry v0.0.0-20251009181524-91c411e14f39 // indirect @@ -382,6 +387,7 @@ require ( gopkg.in/yaml.v3 v3.0.1 // indirect howett.net/plist v0.0.0-20181124034731-591f970eefbb // indirect lukechampine.com/blake3 v1.4.1 // indirect + sigs.k8s.io/yaml v1.3.0 // indirect ) replace github.com/filecoin-project/filecoin-ffi => ./extern/filecoin-ffi diff --git a/go.sum b/go.sum index 0a9ac8d38..f4b6fefc1 100644 --- a/go.sum +++ b/go.sum @@ -116,6 +116,8 @@ github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932 h1:mXoPYz/Ul5HYE github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= github.com/bits-and-blooms/bitset v1.20.0 
h1:2F+rfL86jE2d/bmw7OhqUg2Sj/1rURkBn3MdfoPyRVU= github.com/bits-and-blooms/bitset v1.20.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE= +github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= @@ -993,8 +995,10 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= -github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= -github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ= +github.com/mattn/go-runewidth v0.0.17/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs= github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= @@ -1241,10 +1245,14 @@ github.com/raulk/clock v1.1.0 h1:dpb29+UKMbLqiU/jqIJptgLR1nn23HLgMY0sTCDza5Y= github.com/raulk/clock v1.1.0/go.mod h1:3MpVxdZ/ODBQDxbN+kzshf5OSZwPjtMDx6BBXBmOeY0= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU= +github.com/rhysd/actionlint v1.7.9 h1:oq4uFwcW6pRTk8BhAS4+RhYoUddUkbvRMcqndja0CT0= +github.com/rhysd/actionlint v1.7.9/go.mod h1:H3q8YpD2es7K4c+mibw3OhTXGQQ7HkZX1u+DXaHLwfE= github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= @@ -1535,6 +1543,8 @@ go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8= 
+go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go= +go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0= go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= go4.org v0.0.0-20200411211856-f5505b9728dd/go.mod h1:CIiUVy99QCPfoE13bO4EZaz5GZMZXMSBGhxRdsvzbkg= go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc= @@ -1686,8 +1696,8 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180202135801-37707fdb30a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1767,8 +1777,8 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20251009181524-91c411e14f39 h1:jHQt1JBuPc+c/cAlupnkce8or0E04hX2Oqmnqq1XCVA= golang.org/x/telemetry v0.0.0-20251009181524-91c411e14f39/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -2013,5 +2023,7 @@ lukechampine.com/blake3 v1.4.1/go.mod h1:QFosUxmjB8mnrWFSNwKmvxHpfY72bmD2tQ0kBMM rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= sourcegraph.com/sourcegraph/go-diff v0.5.0/go.mod h1:kuch7UrkMzY0X+p9CRK03kfuPQ2zzQcaEFbx8wA8rck= sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= diff --git a/harmony/harmonydb/harmonydb.go b/harmony/harmonydb/harmonydb.go index 3e3d0ff35..7ab000695 100644 --- a/harmony/harmonydb/harmonydb.go +++ b/harmony/harmonydb/harmonydb.go @@ -196,6 +196,15 @@ func New(hosts []string, username, password, database, port string, loadBalance return &db, db.upgrade() } +// Close releases the underlying connection pool. 
It is safe to call multiple times. +func (db *DB) Close() { + if db == nil || db.pgx == nil { + return + } + db.pgx.Close() + db.pgx = nil +} + type tracer struct { } diff --git a/harmony/harmonydb/itest_registry.go b/harmony/harmonydb/itest_registry.go new file mode 100644 index 000000000..a0b87f914 --- /dev/null +++ b/harmony/harmonydb/itest_registry.go @@ -0,0 +1,35 @@ +package harmonydb + +import "sync" + +var itestDatabaseRegistry sync.Map + +// RegisterITestDatabase allows test helpers to declare that a given ITestID +// should connect to the provided physical database instead of creating a fresh +// schema inside the default database. This is used by the integration-test +// template cloning logic. +func RegisterITestDatabase(id ITestID, database string) { + if id == "" || database == "" { + return + } + itestDatabaseRegistry.Store(string(id), database) +} + +func unregisterITestDatabase(id ITestID) { + if id == "" { + return + } + itestDatabaseRegistry.Delete(string(id)) +} + +func lookupITestDatabase(id ITestID) (string, bool) { + if id == "" { + return "", false + } + if v, ok := itestDatabaseRegistry.Load(string(id)); ok { + if s, ok2 := v.(string); ok2 { + return s, true + } + } + return "", false +} diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go new file mode 100644 index 000000000..5827a41db --- /dev/null +++ b/harmony/harmonydb/testutil/setup.go @@ -0,0 +1,177 @@ +package testutil + +import ( + "context" + "fmt" + "net/url" + "os" + "strings" + "sync" + "testing" + "time" + + "github.com/yugabyte/pgx/v5" + + "github.com/filecoin-project/curio/harmony/harmonydb" +) + +const ( + templateSchemaID harmonydb.ITestID = "template" + templateDBName = "curio_itest_template" + testDBPrefix = "curio_itest" +) + +var ( + templateOnce sync.Once + templateErr error + baseConnCfg connConfig +) + +type connConfig struct { + host string + port string + username string + password string + baseDB string +} + +// SetupTestDB prepares a reusable template database once, then rapidly clones it +// for every test invocation using PostgreSQL's template mechanism. It returns +// an ITestID that can be passed to harmonydb.NewFromConfigWithITestID. 
+func SetupTestDB(t *testing.T) harmonydb.ITestID { + t.Helper() + + templateOnce.Do(func() { + baseConnCfg = loadConnConfig() + templateErr = prepareTemplateDatabase() + }) + if templateErr != nil { + t.Fatalf("preparing template database: %v", templateErr) + } + + id := harmonydb.ITestNewID() + dbName := fmt.Sprintf("%s_%s", testDBPrefix, string(id)) + if err := cloneTemplateDatabase(id, dbName); err != nil { + t.Fatalf("cloning template database: %v", err) + } + + harmonydb.RegisterITestDatabase(id, dbName) + return id +} + +func loadConnConfig() connConfig { + return connConfig{ + host: firstNonEmpty(splitFirst(os.Getenv("CURIO_HARMONYDB_HOSTS")), os.Getenv("CURIO_DB_HOST"), "127.0.0.1"), + port: firstNonEmpty(os.Getenv("CURIO_HARMONYDB_PORT"), os.Getenv("CURIO_DB_PORT"), "5433"), + username: firstNonEmpty(os.Getenv("CURIO_HARMONYDB_USERNAME"), os.Getenv("CURIO_DB_USER"), "yugabyte"), + password: firstNonEmpty(os.Getenv("CURIO_HARMONYDB_PASSWORD"), os.Getenv("CURIO_DB_PASSWORD"), "yugabyte"), + baseDB: firstNonEmpty(os.Getenv("CURIO_HARMONYDB_NAME"), os.Getenv("CURIO_DB_NAME"), "yugabyte"), + } +} + +func prepareTemplateDatabase() error { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) + defer cancel() + + adminConn, err := pgx.Connect(ctx, baseConnCfg.connString(baseConnCfg.baseDB)) + if err != nil { + return fmt.Errorf("connecting to yugabyte admin database: %w", err) + } + defer adminConn.Close(ctx) + + if err := dropDatabaseIfExists(ctx, adminConn, templateDBName); err != nil { + return fmt.Errorf("dropping existing template database: %w", err) + } + + if _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(templateDBName)+" WITH TEMPLATE template1"); err != nil { + return fmt.Errorf("creating template database: %w", err) + } + + db, err := harmonydb.New([]string{baseConnCfg.host}, baseConnCfg.username, baseConnCfg.password, templateDBName, baseConnCfg.port, false, templateSchemaID) + if err != nil { + return fmt.Errorf("initializing template schema: %w", err) + } + db.Close() + + return nil +} + +func cloneTemplateDatabase(id harmonydb.ITestID, targetDB string) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + adminConn, err := pgx.Connect(ctx, baseConnCfg.connString(baseConnCfg.baseDB)) + if err != nil { + return fmt.Errorf("connecting to yugabyte admin database: %w", err) + } + defer adminConn.Close(ctx) + + if err := dropDatabaseIfExists(ctx, adminConn, targetDB); err != nil { + return fmt.Errorf("dropping target database: %w", err) + } + + if _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(targetDB)+" WITH TEMPLATE "+quoteIdentifier(templateDBName)); err != nil { + return fmt.Errorf("creating cloned database: %w", err) + } + + cloneConn, err := pgx.Connect(ctx, baseConnCfg.connString(targetDB)) + if err != nil { + return fmt.Errorf("connecting to cloned database: %w", err) + } + defer cloneConn.Close(ctx) + + oldSchema := fmt.Sprintf("itest_%s", templateSchemaID) + newSchema := fmt.Sprintf("itest_%s", id) + if _, err := cloneConn.Exec(ctx, "ALTER SCHEMA "+quoteIdentifier(oldSchema)+" RENAME TO "+quoteIdentifier(newSchema)); err != nil { + return fmt.Errorf("renaming cloned schema: %w", err) + } + + return nil +} + +func dropDatabaseIfExists(ctx context.Context, conn *pgx.Conn, name string) error { + _, _ = conn.Exec(ctx, `SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = $1`, name) + _, err := conn.Exec(ctx, "DROP DATABASE IF EXISTS 
"+quoteIdentifier(name)) + return err +} + +func (c connConfig) connString(database string) string { + u := url.URL{ + Scheme: "postgresql", + Host: fmt.Sprintf("%s:%s", c.host, c.port), + Path: "/" + database, + RawQuery: "sslmode=disable", + } + if c.password == "" { + u.User = url.User(c.username) + } else { + u.User = url.UserPassword(c.username, c.password) + } + return u.String() +} + +func firstNonEmpty(values ...string) string { + for _, v := range values { + if strings.TrimSpace(v) != "" { + return strings.TrimSpace(v) + } + } + return "" +} + +func splitFirst(hosts string) string { + if hosts == "" { + return "" + } + for _, part := range strings.Split(hosts, ",") { + part = strings.TrimSpace(part) + if part != "" { + return part + } + } + return "" +} + +func quoteIdentifier(name string) string { + return `"` + strings.ReplaceAll(name, `"`, `""`) + `"` +} diff --git a/itests/alertnow_test.go b/itests/alertnow_test.go index 0eef39730..cf1ac843d 100644 --- a/itests/alertnow_test.go +++ b/itests/alertnow_test.go @@ -10,6 +10,7 @@ import ( "github.com/filecoin-project/curio/alertmanager/plugin" "github.com/filecoin-project/curio/deps/config" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) func TestAlertNow(t *testing.T) { @@ -20,7 +21,7 @@ func TestAlertNow(t *testing.T) { tp, } // Create dependencies - sharedITestID := harmonydb.ITestNewID() + sharedITestID := testutil.SetupTestDB(t) db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) require.NoError(t, err) diff --git a/itests/curio_test.go b/itests/curio_test.go index 10f8f47ec..db530aca3 100644 --- a/itests/curio_test.go +++ b/itests/curio_test.go @@ -31,6 +31,7 @@ import ( "github.com/filecoin-project/curio/deps" "github.com/filecoin-project/curio/deps/config" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" "github.com/filecoin-project/curio/lib/ffiselect" "github.com/filecoin-project/curio/lib/storiface" "github.com/filecoin-project/curio/lib/testutils" @@ -73,7 +74,7 @@ func TestCurioHappyPath(t *testing.T) { fapi := fmt.Sprintf("%s:%s", string(token), full.ListenAddr) - sharedITestID := harmonydb.ITestNewID() + sharedITestID := testutil.SetupTestDB(t) t.Logf("sharedITestID: %s", sharedITestID) db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) diff --git a/itests/dyncfg_test.go b/itests/dyncfg_test.go index 8b70249f1..6de070943 100644 --- a/itests/dyncfg_test.go +++ b/itests/dyncfg_test.go @@ -10,13 +10,14 @@ import ( "github.com/filecoin-project/curio/deps" "github.com/filecoin-project/curio/deps/config" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) func TestDynamicConfig(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() - sharedITestID := harmonydb.ITestNewID() + sharedITestID := testutil.SetupTestDB(t) cdb, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) require.NoError(t, err) diff --git a/itests/harmonydb_test.go b/itests/harmonydb_test.go index b62b17883..214060c5f 100644 --- a/itests/harmonydb_test.go +++ b/itests/harmonydb_test.go @@ -11,13 +11,14 @@ import ( "golang.org/x/xerrors" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) func TestCrud(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() - sharedITestID := 
harmonydb.ITestNewID() + sharedITestID := testutil.SetupTestDB(t) cdb, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) require.NoError(t, err) @@ -49,7 +50,7 @@ func TestTransaction(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() - testID := harmonydb.ITestNewID() + testID := testutil.SetupTestDB(t) cdb, err := harmonydb.NewFromConfigWithITestID(t, testID) require.NoError(t, err) _, err = cdb.Exec(ctx, "INSERT INTO itest_scratch (some_int) VALUES (4), (5), (6)") @@ -99,7 +100,7 @@ func TestPartialWalk(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() - testID := harmonydb.ITestNewID() + testID := testutil.SetupTestDB(t) cdb, err := harmonydb.NewFromConfigWithITestID(t, testID) require.NoError(t, err) _, err = cdb.Exec(ctx, ` diff --git a/lib/paths/local_test.go b/lib/paths/local_test.go index 24282c4df..41b053c5f 100644 --- a/lib/paths/local_test.go +++ b/lib/paths/local_test.go @@ -11,6 +11,7 @@ import ( "github.com/stretchr/testify/require" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" "github.com/filecoin-project/curio/lib/storiface" "github.com/filecoin-project/lotus/storage/sealer/fsutil" @@ -82,7 +83,7 @@ func TestLocalStorage(t *testing.T) { root: root, } - sharedITestID := harmonydb.ITestNewID() + sharedITestID := testutil.SetupTestDB(t) db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) require.NoError(t, err) diff --git a/lib/paths/remote_test.go b/lib/paths/remote_test.go index 682ded8da..64f4b057b 100644 --- a/lib/paths/remote_test.go +++ b/lib/paths/remote_test.go @@ -22,6 +22,7 @@ import ( "github.com/filecoin-project/go-state-types/abi" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" "github.com/filecoin-project/curio/lib/partialfile" "github.com/filecoin-project/curio/lib/paths" "github.com/filecoin-project/curio/lib/paths/mocks" @@ -59,7 +60,7 @@ func createTestStorage(t *testing.T, p string, seal bool, att ...*paths.Local) s func TestMoveShared(t *testing.T) { logging.SetAllLoggers(logging.LevelDebug) - sharedITestID := harmonydb.ITestNewID() + sharedITestID := testutil.SetupTestDB(t) db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) require.NoError(t, err) diff --git a/tools/tools.go b/tools/tools.go new file mode 100644 index 000000000..80a64c8b5 --- /dev/null +++ b/tools/tools.go @@ -0,0 +1,14 @@ +//go:build tools + +// Package tools tracks dev/CI tool dependencies. +// This file is not compiled into the binary but ensures tools are tracked in go.mod. +// Install all tools with: go install ./tools/... 
+// Or individually: go install golang.org/x/tools/cmd/goimports +package tools + +import ( + _ "github.com/hannahhoward/cbor-gen-for" + _ "github.com/rhysd/actionlint/cmd/actionlint" + _ "github.com/swaggo/swag/cmd/swag" + _ "golang.org/x/tools/cmd/goimports" +) From 77223b33d65473ff4b761fb59eee485a6d5da6d6 Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 15:29:08 -0600 Subject: [PATCH 02/33] latest ffi --- extern/filecoin-ffi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extern/filecoin-ffi b/extern/filecoin-ffi index 586063e9c..a7d5488a8 160000 --- a/extern/filecoin-ffi +++ b/extern/filecoin-ffi @@ -1 +1 @@ -Subproject commit 586063e9cfa45147d554f176759520398715ba41 +Subproject commit a7d5488a8e5c0f732ba24ff8768ac5903e3d38b1 From 7d3de9281c17463f1ea2b87bcf6cf9ca70ce46b6 Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 15:35:43 -0600 Subject: [PATCH 03/33] fiddling --- .github/actions/install-deps/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/install-deps/action.yml b/.github/actions/install-deps/action.yml index 0fa119037..bf7c8cbe0 100644 --- a/.github/actions/install-deps/action.yml +++ b/.github/actions/install-deps/action.yml @@ -11,7 +11,7 @@ runs: shell: bash - name: Fetch all tags - run: git fetch --all + run: git fetch --all --no-recurse-submodules shell: bash - name: Sync submodules From 893ad2bcb60dfb10856c8953c18acf7482af8d47 Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 15:49:03 -0600 Subject: [PATCH 04/33] version lock --- .github/workflows/ci.yml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ab7a91a1e..015070faa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ on: - '**' env: - GO_VERSION: 1.24 + GO_VERSION: 1.24.7 jobs: ci-lint: @@ -33,7 +33,6 @@ jobs: # Go modules are cached separately via setup-go action (based on go.sum) build-ffi: runs-on: ubuntu-latest - needs: [ci-lint] steps: - uses: actions/checkout@v4 with: @@ -87,12 +86,17 @@ jobs: build/.blst-install extern/supraseal/.install-blst extern/supraseal/deps/blst - retention-days: 1 + retention-days: 3 setup-params: runs-on: [self-hosted, docker] - needs: [ci-lint] steps: + - name: Cache proof parameters + uses: actions/cache@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-8388608 + - name: Fetch parameters run: lotus fetch-params 8388608 shell: bash From 9d9dc587a17a5fbb02f230bf13d3d9292122c408 Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 15:59:49 -0600 Subject: [PATCH 05/33] mod stuff --- go.mod | 8 ++++---- go.sum | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/go.mod b/go.mod index 86464cc43..5d5d501ce 100644 --- a/go.mod +++ b/go.mod @@ -113,13 +113,13 @@ require ( go.opencensus.io v0.24.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 - golang.org/x/crypto v0.43.0 + golang.org/x/crypto v0.45.0 golang.org/x/exp v0.0.0-20251009144603-d2f985daa21b - golang.org/x/net v0.46.0 + golang.org/x/net v0.47.0 golang.org/x/sync v0.18.0 golang.org/x/sys v0.38.0 - golang.org/x/term v0.36.0 - golang.org/x/text v0.30.0 + golang.org/x/term v0.37.0 + golang.org/x/text v0.31.0 golang.org/x/tools v0.38.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da ) diff --git a/go.sum b/go.sum index f4b6fefc1..895e05ead 100644 --- a/go.sum +++ b/go.sum @@ 
-1574,8 +1574,8 @@ golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1670,8 +1670,8 @@ golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1790,8 +1790,8 @@ golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1804,8 +1804,8 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod 
h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= From 89cfd55555a5baa6b36af17bd9827e0fe0ba68e2 Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 16:17:06 -0600 Subject: [PATCH 06/33] dedup setup --- .github/actions/setup-build-env/action.yml | 62 +++++++++++ .github/images/build-env/Dockerfile | 24 +++++ .github/workflows/build-env-image.yml | 48 +++++++++ .github/workflows/ci.yml | 115 +++++---------------- cmd/curio/ffi.go | 2 +- harmony/harmonydb/itest_registry.go | 19 ---- harmony/harmonydb/testutil/setup.go | 10 +- {lib/paths => itests}/local_test.go | 11 +- 8 files changed, 171 insertions(+), 120 deletions(-) create mode 100644 .github/actions/setup-build-env/action.yml create mode 100644 .github/images/build-env/Dockerfile create mode 100644 .github/workflows/build-env-image.yml rename {lib/paths => itests}/local_test.go (88%) diff --git a/.github/actions/setup-build-env/action.yml b/.github/actions/setup-build-env/action.yml new file mode 100644 index 000000000..56c0efdaf --- /dev/null +++ b/.github/actions/setup-build-env/action.yml @@ -0,0 +1,62 @@ +name: 'Setup Build Environment' +description: 'Setup Go, install deps, download FFI artifacts - everything needed to build' + +inputs: + go-version: + description: 'Go version to use' + required: true + download-ffi: + description: 'Whether to download FFI artifacts' + required: false + default: 'true' + +runs: + using: 'composite' + steps: + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: ${{ inputs.go-version }} + cache: true + cache-dependency-path: go.sum + + # Cache apt packages to avoid repeated downloads + - name: Cache apt packages + uses: actions/cache@v4 + with: + path: | + /var/cache/apt/archives + key: apt-${{ runner.os }}-${{ hashFiles('.github/actions/install-deps/action.yml') }} + restore-keys: | + apt-${{ runner.os }}- + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y curl ca-certificates gnupg ocl-icd-opencl-dev libhwloc-dev + shell: bash + + - name: Fetch all tags + run: git fetch --all --no-recurse-submodules + shell: bash + + - name: Sync submodules + run: git submodule sync + shell: bash + + - name: Update submodules + run: git submodule update --init + shell: bash + + - name: Download FFI artifacts + if: inputs.download-ffi == 'true' + uses: actions/download-artifact@v4 + with: + name: ffi-deps + path: . 
+ + - name: Restore FFI marker files + if: inputs.download-ffi == 'true' + run: touch build/.filecoin-install build/.blst-install || true + shell: bash + diff --git a/.github/images/build-env/Dockerfile b/.github/images/build-env/Dockerfile new file mode 100644 index 000000000..1d1717287 --- /dev/null +++ b/.github/images/build-env/Dockerfile @@ -0,0 +1,24 @@ +# Build environment for Curio CI +# Contains: Go, system dependencies, and common tools +# Rebuild when: Go version changes, system deps change + +ARG GO_VERSION=1.24.7 + +FROM golang:${GO_VERSION}-bookworm + +# Install system dependencies (same as install-deps action) +RUN apt-get update && apt-get install -y \ + curl \ + ca-certificates \ + gnupg \ + ocl-icd-opencl-dev \ + libhwloc-dev \ + git \ + && rm -rf /var/lib/apt/lists/* + +# Pre-warm Go module cache directory +RUN mkdir -p /go/pkg/mod + +# Set up workspace +WORKDIR /workspace + diff --git a/.github/workflows/build-env-image.yml b/.github/workflows/build-env-image.yml new file mode 100644 index 000000000..dfb32e7ad --- /dev/null +++ b/.github/workflows/build-env-image.yml @@ -0,0 +1,48 @@ +name: Build CI Environment Image + +on: + push: + branches: [main] + paths: + - '.github/images/build-env/**' + - '.github/workflows/build-env-image.yml' + workflow_dispatch: # Allow manual trigger + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }}/build-env + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - uses: actions/checkout@v4 + + - name: Log in to Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=raw,value=latest + type=sha,prefix= + + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: .github/images/build-env + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 015070faa..2bf64d747 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,13 +38,11 @@ jobs: with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - - name: Install Dependencies - uses: ./.github/actions/install-deps + download-ffi: 'false' - name: Generate FFI cache key id: ffi-cache-key @@ -110,23 +108,11 @@ jobs: with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Download FFI artifacts - uses: actions/download-artifact@v4 - with: - name: ffi-deps - path: . 
- - - name: Restore FFI marker files - run: touch build/.filecoin-install build/.blst-install || true - - name: Build debug run: make debug @@ -160,23 +146,11 @@ jobs: with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Download FFI artifacts - uses: actions/download-artifact@v4 - with: - name: ffi-deps - path: . - - - name: Restore FFI marker files - run: touch build/.filecoin-install build/.blst-install || true - - name: Build ${{ matrix.variant.name }} run: make ${{ matrix.variant.target }} @@ -193,19 +167,18 @@ jobs: test-unit: runs-on: [self-hosted, docker] needs: [build-debug, setup-params] + container: + image: ghcr.io/${{ github.repository }}/build-env:latest + credentials: + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + volumes: + - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go - with: - go-version: ${{ env.GO_VERSION }} - - - name: Install Dependencies - uses: ./.github/actions/install-deps - - name: Download FFI artifacts uses: actions/download-artifact@v4 with: @@ -218,15 +191,14 @@ jobs: name: binaries-debug path: . - - name: Restore FFI marker files - run: | - touch build/.filecoin-install build/.blst-install || true - chmod +x curio sptool || true + - name: Make binaries executable + run: chmod +x curio sptool || true - name: Run unit tests run: go test -v --tags=debug -timeout 30m `go list ./... | grep -v curio/itests` # Integration tests - need YugabyteDB + # Note: Cannot use job container because we need Docker access to run YugabyteDB test-itest: runs-on: [self-hosted, docker] needs: [build-debug, setup-params] @@ -249,33 +221,21 @@ jobs: with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Download FFI artifacts - uses: actions/download-artifact@v4 - with: - name: ffi-deps - path: . - - name: Download debug binaries uses: actions/download-artifact@v4 with: name: binaries-debug path: . - - name: Restore FFI marker files - run: | - touch build/.filecoin-install build/.blst-install || true - chmod +x curio sptool || true + - name: Make binaries executable + run: chmod +x curio sptool || true - name: Start YugabyteDB container - id: start-yugabyte run: | docker run --rm --name ${{ env.CONTAINER_NAME }} -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false @@ -287,7 +247,6 @@ jobs: echo $status | grep Running && break; sleep 1; done - shell: bash - name: Get YugabyteDB container IP id: get-yb-ip @@ -315,23 +274,11 @@ jobs: with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Download FFI artifacts - uses: actions/download-artifact@v4 - with: - name: ffi-deps - path: . 
- - - name: Restore FFI marker files - run: touch build/.filecoin-install build/.blst-install || true - - name: Install golangci-lint run: | curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.4.0 @@ -577,23 +524,11 @@ jobs: with: submodules: recursive - - name: Setup Go - uses: ./.github/actions/setup-go + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Download FFI artifacts - uses: actions/download-artifact@v4 - with: - name: ffi-deps - path: . - - - name: Restore FFI marker files - run: touch build/.filecoin-install build/.blst-install || true - - name: Install Go tools run: | # Tools are defined in tools/tools.go and versioned in go.mod diff --git a/cmd/curio/ffi.go b/cmd/curio/ffi.go index 976fecf93..22fb2dcf3 100644 --- a/cmd/curio/ffi.go +++ b/cmd/curio/ffi.go @@ -1,13 +1,13 @@ package main import ( + "context" "fmt" "os" "github.com/ipfs/go-cid" "github.com/snadrus/must" "github.com/urfave/cli/v2" - "golang.org/x/net/context" "github.com/filecoin-project/go-jsonrpc" diff --git a/harmony/harmonydb/itest_registry.go b/harmony/harmonydb/itest_registry.go index a0b87f914..f7879036f 100644 --- a/harmony/harmonydb/itest_registry.go +++ b/harmony/harmonydb/itest_registry.go @@ -14,22 +14,3 @@ func RegisterITestDatabase(id ITestID, database string) { } itestDatabaseRegistry.Store(string(id), database) } - -func unregisterITestDatabase(id ITestID) { - if id == "" { - return - } - itestDatabaseRegistry.Delete(string(id)) -} - -func lookupITestDatabase(id ITestID) (string, bool) { - if id == "" { - return "", false - } - if v, ok := itestDatabaseRegistry.Load(string(id)); ok { - if s, ok2 := v.(string); ok2 { - return s, true - } - } - return "", false -} diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index 5827a41db..916fe931e 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -17,8 +17,8 @@ import ( const ( templateSchemaID harmonydb.ITestID = "template" - templateDBName = "curio_itest_template" - testDBPrefix = "curio_itest" + templateDBName string = "curio_itest_template" + testDBPrefix string = "curio_itest" ) var ( @@ -77,7 +77,7 @@ func prepareTemplateDatabase() error { if err != nil { return fmt.Errorf("connecting to yugabyte admin database: %w", err) } - defer adminConn.Close(ctx) + defer func() { _ = adminConn.Close(ctx) }() if err := dropDatabaseIfExists(ctx, adminConn, templateDBName); err != nil { return fmt.Errorf("dropping existing template database: %w", err) @@ -104,7 +104,7 @@ func cloneTemplateDatabase(id harmonydb.ITestID, targetDB string) error { if err != nil { return fmt.Errorf("connecting to yugabyte admin database: %w", err) } - defer adminConn.Close(ctx) + defer func() { _ = adminConn.Close(ctx) }() if err := dropDatabaseIfExists(ctx, adminConn, targetDB); err != nil { return fmt.Errorf("dropping target database: %w", err) @@ -118,7 +118,7 @@ func cloneTemplateDatabase(id harmonydb.ITestID, targetDB string) error { if err != nil { return fmt.Errorf("connecting to cloned database: %w", err) } - defer cloneConn.Close(ctx) + defer func() { _ = cloneConn.Close(ctx) }() oldSchema := fmt.Sprintf("itest_%s", templateSchemaID) newSchema := fmt.Sprintf("itest_%s", id) diff --git a/lib/paths/local_test.go b/itests/local_test.go similarity index 88% rename from 
lib/paths/local_test.go rename to itests/local_test.go index 41b053c5f..412da659d 100644 --- a/lib/paths/local_test.go +++ b/itests/local_test.go @@ -1,4 +1,4 @@ -package paths +package itests import ( "context" @@ -12,6 +12,7 @@ import ( "github.com/filecoin-project/curio/harmony/harmonydb" "github.com/filecoin-project/curio/harmony/harmonydb/testutil" + "github.com/filecoin-project/curio/lib/paths" "github.com/filecoin-project/curio/lib/storiface" "github.com/filecoin-project/lotus/storage/sealer/fsutil" @@ -51,7 +52,7 @@ func (t *TestingLocalStorage) init(subpath string) error { return err } - metaFile := filepath.Join(path, MetaFile) + metaFile := filepath.Join(path, paths.MetaFile) meta := &storiface.LocalStorageMeta{ ID: storiface.ID(uuid.New().String()), @@ -72,7 +73,7 @@ func (t *TestingLocalStorage) init(subpath string) error { return nil } -var _ LocalStorage = &TestingLocalStorage{} +var _ paths.LocalStorage = &TestingLocalStorage{} func TestLocalStorage(t *testing.T) { ctx := context.TODO() @@ -88,9 +89,9 @@ func TestLocalStorage(t *testing.T) { db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) require.NoError(t, err) - index := NewDBIndex(nil, db) + index := paths.NewDBIndex(nil, db) - st, err := NewLocal(ctx, tstor, index, "") + st, err := paths.NewLocal(ctx, tstor, index, "") require.NoError(t, err) p1 := "1" From e4125cb4134c0365b5cb818556afae07f6328c3d Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 16:41:49 -0600 Subject: [PATCH 07/33] giving this a try --- .github/images/build-env/Dockerfile | 23 ++++++++++++++--- .github/workflows/build-env-image.yml | 5 +++- .github/workflows/ci.yml | 37 +++++++++++---------------- 3 files changed, 39 insertions(+), 26 deletions(-) diff --git a/.github/images/build-env/Dockerfile b/.github/images/build-env/Dockerfile index 1d1717287..cb7096bed 100644 --- a/.github/images/build-env/Dockerfile +++ b/.github/images/build-env/Dockerfile @@ -1,6 +1,6 @@ # Build environment for Curio CI # Contains: Go, system dependencies, and common tools -# Rebuild when: Go version changes, system deps change +# Rebuild when: Go version changes, system deps change, go.mod/go.sum change ARG GO_VERSION=1.24.7 @@ -16,9 +16,26 @@ RUN apt-get update && apt-get install -y \ git \ && rm -rf /var/lib/apt/lists/* -# Pre-warm Go module cache directory -RUN mkdir -p /go/pkg/mod +# Install Docker CLI (for test-itest to communicate with host Docker daemon) +RUN install -m 0755 -d /etc/apt/keyrings && \ + curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc && \ + chmod a+r /etc/apt/keyrings/docker.asc && \ + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bookworm stable" > /etc/apt/sources.list.d/docker.list && \ + apt-get update && \ + apt-get install -y docker-ce-cli && \ + rm -rf /var/lib/apt/lists/* # Set up workspace WORKDIR /workspace +# Copy go.mod and go.sum to pre-download modules +COPY go.mod go.sum ./ + +# Pre-download Go modules (will be cached in /go/pkg/mod) +RUN go mod download + +# Install common dev/CI tools (from tools/tools.go) +RUN go install golang.org/x/tools/cmd/goimports@latest && \ + go install github.com/hannahhoward/cbor-gen-for@latest && \ + go install github.com/swaggo/swag/cmd/swag@latest + diff --git a/.github/workflows/build-env-image.yml b/.github/workflows/build-env-image.yml index dfb32e7ad..50cfaefc5 100644 --- a/.github/workflows/build-env-image.yml +++ 
b/.github/workflows/build-env-image.yml @@ -6,6 +6,8 @@ on: paths: - '.github/images/build-env/**' - '.github/workflows/build-env-image.yml' + - 'go.mod' + - 'go.sum' workflow_dispatch: # Allow manual trigger env: @@ -41,7 +43,8 @@ jobs: - name: Build and push uses: docker/build-push-action@v5 with: - context: .github/images/build-env + context: . + file: .github/images/build-env/Dockerfile push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2bf64d747..139fa55d0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -164,9 +164,10 @@ jobs: retention-days: 1 # Unit tests - no database needed + # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-unit: runs-on: [self-hosted, docker] - needs: [build-debug, setup-params] + needs: [build-ffi, setup-params] container: image: ghcr.io/${{ github.repository }}/build-env:latest credentials: @@ -185,23 +186,23 @@ jobs: name: ffi-deps path: . - - name: Download debug binaries - uses: actions/download-artifact@v4 - with: - name: binaries-debug - path: . - - - name: Make binaries executable - run: chmod +x curio sptool || true - - name: Run unit tests run: go test -v --tags=debug -timeout 30m `go list ./... | grep -v curio/itests` # Integration tests - need YugabyteDB - # Note: Cannot use job container because we need Docker access to run YugabyteDB + # Uses container with Docker socket mounted to access host Docker daemon + # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-itest: runs-on: [self-hosted, docker] - needs: [build-debug, setup-params] + needs: [build-ffi, setup-params] + container: + image: ghcr.io/${{ github.repository }}/build-env:latest + credentials: + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + volumes: + - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters + - /var/run/docker.sock:/var/run/docker.sock env: CONTAINER_NAME: yugabyte-${{ github.run_id }}-${{ matrix.test-suite.name }} strategy: @@ -221,20 +222,12 @@ jobs: with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} - - - name: Download debug binaries + - name: Download FFI artifacts uses: actions/download-artifact@v4 with: - name: binaries-debug + name: ffi-deps path: . 
- - name: Make binaries executable - run: chmod +x curio sptool || true - - name: Start YugabyteDB container run: | docker run --rm --name ${{ env.CONTAINER_NAME }} -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false From 3a599e10a78daf4093bc6f11b9fad66dc5f128de Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 16:46:59 -0600 Subject: [PATCH 08/33] cache params --- .github/workflows/ci.yml | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 139fa55d0..0a0e6ce39 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,12 +90,14 @@ jobs: runs-on: [self-hosted, docker] steps: - name: Cache proof parameters + id: cache-params uses: actions/cache@v4 with: path: /var/tmp/filecoin-proof-parameters key: proof-params-8388608 - name: Fetch parameters + if: steps.cache-params.outputs.cache-hit != 'true' run: lotus fetch-params 8388608 shell: bash @@ -544,21 +546,3 @@ jobs: - name: Git diff quiet run: git --no-pager diff --quiet shell: bash - - mod-tidy-check: - runs-on: ubuntu-latest - needs: [ci-lint] - steps: - - uses: actions/checkout@v4 - - - name: Setup Go - uses: ./.github/actions/setup-go - with: - go-version: ${{ env.GO_VERSION }} - - - name: Install Dependencies - uses: ./.github/actions/install-deps - - - name: Run mod tidy check - run: go mod tidy -v - shell: bash From fa8b9beb50b1044c4a92e131ee8cb3bec2d862fc Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 16:52:16 -0600 Subject: [PATCH 09/33] docker build --- .github/workflows/ci.yml | 42 ++++++++++++++++++++++++++++++++++++++-- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0a0e6ce39..d8e9072aa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -101,6 +101,44 @@ jobs: run: lotus fetch-params 8388608 shell: bash + # Ensure build-env container image exists, build if missing + ensure-build-env: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + env: + IMAGE: ghcr.io/${{ github.repository }}/build-env:latest + steps: + - name: Log in to Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Check if image exists + id: check-image + run: | + if docker manifest inspect ${{ env.IMAGE }} > /dev/null 2>&1; then + echo "exists=true" >> $GITHUB_OUTPUT + else + echo "exists=false" >> $GITHUB_OUTPUT + fi + + - name: Checkout repository + if: steps.check-image.outputs.exists == 'false' + uses: actions/checkout@v4 + + - name: Build and push image + if: steps.check-image.outputs.exists == 'false' + uses: docker/build-push-action@v5 + with: + context: . 
+ file: .github/images/build-env/Dockerfile + push: true + tags: ${{ env.IMAGE }} + # Debug build - separate so tests can depend on it without waiting for other variants build-debug: runs-on: ubuntu-latest @@ -169,7 +207,7 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-unit: runs-on: [self-hosted, docker] - needs: [build-ffi, setup-params] + needs: [build-ffi, setup-params, ensure-build-env] container: image: ghcr.io/${{ github.repository }}/build-env:latest credentials: @@ -196,7 +234,7 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-itest: runs-on: [self-hosted, docker] - needs: [build-ffi, setup-params] + needs: [build-ffi, setup-params, ensure-build-env] container: image: ghcr.io/${{ github.repository }}/build-env:latest credentials: From aba2eaa66067ccacf34be64042eea1296e483c2b Mon Sep 17 00:00:00 2001 From: "Andrew Jackson (Ajax)" Date: Tue, 2 Dec 2025 17:01:57 -0600 Subject: [PATCH 10/33] reuse without containers --- .github/actions/setup-build-env/action.yml | 46 +++++-- .github/workflows/ci.yml | 135 ++------------------- 2 files changed, 42 insertions(+), 139 deletions(-) diff --git a/.github/actions/setup-build-env/action.yml b/.github/actions/setup-build-env/action.yml index 56c0efdaf..1c278107b 100644 --- a/.github/actions/setup-build-env/action.yml +++ b/.github/actions/setup-build-env/action.yml @@ -1,14 +1,10 @@ name: 'Setup Build Environment' -description: 'Setup Go, install deps, download FFI artifacts - everything needed to build' +description: 'Setup Go, install deps, cache/build FFI - everything needed to build' inputs: go-version: description: 'Go version to use' required: true - download-ffi: - description: 'Whether to download FFI artifacts' - required: false - default: 'true' runs: using: 'composite' @@ -48,15 +44,39 @@ runs: run: git submodule update --init shell: bash - - name: Download FFI artifacts - if: inputs.download-ffi == 'true' - uses: actions/download-artifact@v4 + # Cache FFI build based on submodule commit + - name: Generate FFI cache key + id: ffi-cache-key + run: | + FFI_COMMIT=$(git -C extern/filecoin-ffi rev-parse HEAD) + echo "key=ffi-${{ runner.os }}-${{ inputs.go-version }}-${FFI_COMMIT}" >> $GITHUB_OUTPUT + shell: bash + + - name: Cache FFI build + id: cache-ffi + uses: actions/cache@v4 with: - name: ffi-deps - path: . 
+ path: | + extern/filecoin-ffi/.install-filcrypto + extern/filecoin-ffi/filcrypto.h + extern/filecoin-ffi/libfilcrypto.a + extern/filecoin-ffi/filcrypto.pc + build/.filecoin-install + build/.blst-install + extern/supraseal/.install-blst + extern/supraseal/deps/blst + key: ${{ steps.ffi-cache-key.outputs.key }} - - name: Restore FFI marker files - if: inputs.download-ffi == 'true' - run: touch build/.filecoin-install build/.blst-install || true + - name: Build FFI + if: steps.cache-ffi.outputs.cache-hit != 'true' + run: make deps shell: bash + env: + GITHUB_TOKEN: ${{ github.token }} + - name: Restore FFI marker files + if: steps.cache-ffi.outputs.cache-hit == 'true' + run: | + mkdir -p build + touch build/.filecoin-install build/.blst-install || true + shell: bash diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d8e9072aa..1ef3d675a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,63 +29,6 @@ jobs: - name: Run actionlint run: actionlint -shellcheck= -pyflakes= - # Build FFI (filecoin-ffi) - rarely changes, heavily cached - # Go modules are cached separately via setup-go action (based on go.sum) - build-ffi: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} - download-ffi: 'false' - - - name: Generate FFI cache key - id: ffi-cache-key - run: | - FFI_COMMIT=$(git -C extern/filecoin-ffi rev-parse HEAD) - echo "key=ffi-${{ runner.os }}-${{ env.GO_VERSION }}-${FFI_COMMIT}" >> $GITHUB_OUTPUT - - - name: Cache FFI build - id: cache-ffi - uses: actions/cache@v4 - with: - path: | - extern/filecoin-ffi/.install-filcrypto - extern/filecoin-ffi/filcrypto.h - extern/filecoin-ffi/libfilcrypto.a - extern/filecoin-ffi/filcrypto.pc - build/.filecoin-install - build/.blst-install - extern/supraseal/.install-blst - extern/supraseal/deps/blst - key: ${{ steps.ffi-cache-key.outputs.key }} - - - name: Build FFI - if: steps.cache-ffi.outputs.cache-hit != 'true' - env: - GITHUB_TOKEN: ${{ github.token }} - run: make deps - - - name: Upload FFI artifacts - uses: actions/upload-artifact@v4 - with: - name: ffi-deps - path: | - extern/filecoin-ffi/.install-filcrypto - extern/filecoin-ffi/filcrypto.h - extern/filecoin-ffi/libfilcrypto.a - extern/filecoin-ffi/filcrypto.pc - build/.filecoin-install - build/.blst-install - extern/supraseal/.install-blst - extern/supraseal/deps/blst - retention-days: 3 - setup-params: runs-on: [self-hosted, docker] steps: @@ -101,48 +44,9 @@ jobs: run: lotus fetch-params 8388608 shell: bash - # Ensure build-env container image exists, build if missing - ensure-build-env: - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - env: - IMAGE: ghcr.io/${{ github.repository }}/build-env:latest - steps: - - name: Log in to Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Check if image exists - id: check-image - run: | - if docker manifest inspect ${{ env.IMAGE }} > /dev/null 2>&1; then - echo "exists=true" >> $GITHUB_OUTPUT - else - echo "exists=false" >> $GITHUB_OUTPUT - fi - - - name: Checkout repository - if: steps.check-image.outputs.exists == 'false' - uses: actions/checkout@v4 - - - name: Build and push image - if: steps.check-image.outputs.exists == 'false' - uses: docker/build-push-action@v5 - with: - context: . 
- file: .github/images/build-env/Dockerfile - push: true - tags: ${{ env.IMAGE }} - # Debug build - separate so tests can depend on it without waiting for other variants build-debug: runs-on: ubuntu-latest - needs: [build-ffi] steps: - uses: actions/checkout@v4 with: @@ -168,7 +72,6 @@ jobs: # Other build variants - run in parallel, tests don't wait for these build: runs-on: ubuntu-latest - needs: [build-ffi] strategy: fail-fast: false matrix: @@ -207,42 +110,25 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-unit: runs-on: [self-hosted, docker] - needs: [build-ffi, setup-params, ensure-build-env] - container: - image: ghcr.io/${{ github.repository }}/build-env:latest - credentials: - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - volumes: - - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters + needs: [setup-params] steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Download FFI artifacts - uses: actions/download-artifact@v4 + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: - name: ffi-deps - path: . + go-version: ${{ env.GO_VERSION }} - name: Run unit tests - run: go test -v --tags=debug -timeout 30m `go list ./... | grep -v curio/itests` + run: go test -v --tags=debug -timeout 30m $(go list ./... | grep -v curio/itests) # Integration tests - need YugabyteDB - # Uses container with Docker socket mounted to access host Docker daemon # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-itest: runs-on: [self-hosted, docker] - needs: [build-ffi, setup-params, ensure-build-env] - container: - image: ghcr.io/${{ github.repository }}/build-env:latest - credentials: - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - volumes: - - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters - - /var/run/docker.sock:/var/run/docker.sock + needs: [setup-params] env: CONTAINER_NAME: yugabyte-${{ github.run_id }}-${{ matrix.test-suite.name }} strategy: @@ -262,11 +148,10 @@ jobs: with: submodules: recursive - - name: Download FFI artifacts - uses: actions/download-artifact@v4 + - name: Setup build environment + uses: ./.github/actions/setup-build-env with: - name: ffi-deps - path: . 
+ go-version: ${{ env.GO_VERSION }} - name: Start YugabyteDB container run: | @@ -301,7 +186,6 @@ jobs: lint: runs-on: ubuntu-latest - needs: [build-ffi] steps: - uses: actions/checkout@v4 with: @@ -551,7 +435,6 @@ jobs: gen-check: runs-on: ubuntu-latest - needs: [build-ffi] steps: - uses: actions/checkout@v4 with: From d67a235ed3b90e8e2fddd58828121331025b8ca2 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 17:03:04 -0600 Subject: [PATCH 11/33] yugabyte cant clone --- extern/filecoin-ffi | 2 +- go.mod | 2 +- harmony/harmonydb/harmonydb.go | 15 ++-- harmony/harmonydb/testutil/setup.go | 131 +++++++++++++++++++--------- itests/alertnow_test.go | 2 +- 5 files changed, 103 insertions(+), 49 deletions(-) diff --git a/extern/filecoin-ffi b/extern/filecoin-ffi index a7d5488a8..586063e9c 160000 --- a/extern/filecoin-ffi +++ b/extern/filecoin-ffi @@ -1 +1 @@ -Subproject commit a7d5488a8e5c0f732ba24ff8768ac5903e3d38b1 +Subproject commit 586063e9cfa45147d554f176759520398715ba41 diff --git a/go.mod b/go.mod index 5d5d501ce..523bd8d29 100644 --- a/go.mod +++ b/go.mod @@ -115,7 +115,6 @@ require ( go.uber.org/zap v1.27.0 golang.org/x/crypto v0.45.0 golang.org/x/exp v0.0.0-20251009144603-d2f985daa21b - golang.org/x/net v0.47.0 golang.org/x/sync v0.18.0 golang.org/x/sys v0.38.0 golang.org/x/term v0.37.0 @@ -375,6 +374,7 @@ require ( go.yaml.in/yaml/v4 v4.0.0-rc.3 // indirect go4.org v0.0.0-20230225012048-214862532bf5 // indirect golang.org/x/mod v0.29.0 // indirect + golang.org/x/net v0.47.0 // indirect golang.org/x/telemetry v0.0.0-20251009181524-91c411e14f39 // indirect golang.org/x/time v0.14.0 // indirect gonum.org/v1/gonum v0.16.0 // indirect diff --git a/harmony/harmonydb/harmonydb.go b/harmony/harmonydb/harmonydb.go index 7ab000695..16cadf756 100644 --- a/harmony/harmonydb/harmonydb.go +++ b/harmony/harmonydb/harmonydb.go @@ -86,13 +86,18 @@ func envElse(env, els string) string { } func NewFromConfigWithITestID(t *testing.T, id ITestID) (*DB, error) { - fmt.Printf("CURIO_HARMONYDB_HOSTS: %s\n", os.Getenv("CURIO_HARMONYDB_HOSTS")) + // Look up the database name from the registry, or fall back to default + database := "yugabyte" + if v, ok := itestDatabaseRegistry.Load(string(id)); ok { + database = v.(string) + } + db, err := New( []string{envElse("CURIO_HARMONYDB_HOSTS", "127.0.0.1")}, - "yugabyte", - "yugabyte", - "yugabyte", - "5433", + envElse("CURIO_HARMONYDB_USERNAME", "yugabyte"), + envElse("CURIO_HARMONYDB_PASSWORD", "yugabyte"), + database, + envElse("CURIO_HARMONYDB_PORT", "5433"), false, id, ) diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index 916fe931e..38c5d6ec1 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -17,8 +17,7 @@ import ( const ( templateSchemaID harmonydb.ITestID = "template" - templateDBName string = "curio_itest_template" - testDBPrefix string = "curio_itest" + testDBName string = "curio_itest" ) var ( @@ -35,27 +34,28 @@ type connConfig struct { baseDB string } -// SetupTestDB prepares a reusable template database once, then rapidly clones it -// for every test invocation using PostgreSQL's template mechanism. It returns -// an ITestID that can be passed to harmonydb.NewFromConfigWithITestID. +// SetupTestDB prepares a reusable template schema once, then rapidly clones it +// for every test invocation using CREATE TABLE ... (LIKE ... INCLUDING ALL). 
+// YugabyteDB doesn't support custom database templates, so we use schema-based +// isolation within a single shared test database. +// It returns an ITestID that can be passed to harmonydb.NewFromConfigWithITestID. func SetupTestDB(t *testing.T) harmonydb.ITestID { t.Helper() templateOnce.Do(func() { baseConnCfg = loadConnConfig() - templateErr = prepareTemplateDatabase() + templateErr = prepareTemplateSchema() }) if templateErr != nil { - t.Fatalf("preparing template database: %v", templateErr) + t.Fatalf("preparing template schema: %v", templateErr) } id := harmonydb.ITestNewID() - dbName := fmt.Sprintf("%s_%s", testDBPrefix, string(id)) - if err := cloneTemplateDatabase(id, dbName); err != nil { - t.Fatalf("cloning template database: %v", err) + if err := cloneTemplateSchema(id); err != nil { + t.Fatalf("cloning template schema: %v", err) } - harmonydb.RegisterITestDatabase(id, dbName) + harmonydb.RegisterITestDatabase(id, testDBName) return id } @@ -69,25 +69,45 @@ func loadConnConfig() connConfig { } } -func prepareTemplateDatabase() error { +// prepareTemplateSchema creates the shared test database (if needed) and +// applies all migrations to a template schema that will be cloned for each test. +func prepareTemplateSchema() error { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() + // Create the shared test database if it doesn't exist adminConn, err := pgx.Connect(ctx, baseConnCfg.connString(baseConnCfg.baseDB)) if err != nil { - return fmt.Errorf("connecting to yugabyte admin database: %w", err) + return fmt.Errorf("connecting to admin database: %w", err) } - defer func() { _ = adminConn.Close(ctx) }() - if err := dropDatabaseIfExists(ctx, adminConn, templateDBName); err != nil { - return fmt.Errorf("dropping existing template database: %w", err) + // Check if database exists + var exists bool + err = adminConn.QueryRow(ctx, "SELECT EXISTS(SELECT 1 FROM pg_database WHERE datname = $1)", testDBName).Scan(&exists) + if err != nil { + _ = adminConn.Close(ctx) + return fmt.Errorf("checking if test database exists: %w", err) + } + + if !exists { + if _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(testDBName)); err != nil { + _ = adminConn.Close(ctx) + return fmt.Errorf("creating test database: %w", err) + } } + _ = adminConn.Close(ctx) - if _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(templateDBName)+" WITH TEMPLATE template1"); err != nil { - return fmt.Errorf("creating template database: %w", err) + // Connect to the test database and drop old template schema if it exists + testConn, err := pgx.Connect(ctx, baseConnCfg.connString(testDBName)) + if err != nil { + return fmt.Errorf("connecting to test database: %w", err) } + templateSchema := fmt.Sprintf("itest_%s", templateSchemaID) + _, _ = testConn.Exec(ctx, "DROP SCHEMA IF EXISTS "+quoteIdentifier(templateSchema)+" CASCADE") + _ = testConn.Close(ctx) - db, err := harmonydb.New([]string{baseConnCfg.host}, baseConnCfg.username, baseConnCfg.password, templateDBName, baseConnCfg.port, false, templateSchemaID) + // Use harmonydb.New to create the template schema and apply all migrations + db, err := harmonydb.New([]string{baseConnCfg.host}, baseConnCfg.username, baseConnCfg.password, testDBName, baseConnCfg.port, false, templateSchemaID) if err != nil { return fmt.Errorf("initializing template schema: %w", err) } @@ -96,43 +116,72 @@ func prepareTemplateDatabase() error { return nil } -func cloneTemplateDatabase(id harmonydb.ITestID, targetDB string) error { 
+// cloneTemplateSchema creates a new schema for the test by copying all table +// structures from the template schema using CREATE TABLE ... (LIKE ... INCLUDING ALL). +func cloneTemplateSchema(id harmonydb.ITestID) error { ctx, cancel := context.WithTimeout(context.Background(), time.Minute) defer cancel() - adminConn, err := pgx.Connect(ctx, baseConnCfg.connString(baseConnCfg.baseDB)) + conn, err := pgx.Connect(ctx, baseConnCfg.connString(testDBName)) if err != nil { - return fmt.Errorf("connecting to yugabyte admin database: %w", err) + return fmt.Errorf("connecting to test database: %w", err) } - defer func() { _ = adminConn.Close(ctx) }() + defer func() { _ = conn.Close(ctx) }() - if err := dropDatabaseIfExists(ctx, adminConn, targetDB); err != nil { - return fmt.Errorf("dropping target database: %w", err) - } + templateSchema := fmt.Sprintf("itest_%s", templateSchemaID) + newSchema := fmt.Sprintf("itest_%s", id) - if _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(targetDB)+" WITH TEMPLATE "+quoteIdentifier(templateDBName)); err != nil { - return fmt.Errorf("creating cloned database: %w", err) + // Create the new schema + if _, err := conn.Exec(ctx, "CREATE SCHEMA "+quoteIdentifier(newSchema)); err != nil { + return fmt.Errorf("creating schema: %w", err) } - cloneConn, err := pgx.Connect(ctx, baseConnCfg.connString(targetDB)) + // Get all tables from template schema + rows, err := conn.Query(ctx, ` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = $1 AND table_type = 'BASE TABLE' + `, templateSchema) if err != nil { - return fmt.Errorf("connecting to cloned database: %w", err) + return fmt.Errorf("querying template tables: %w", err) } - defer func() { _ = cloneConn.Close(ctx) }() - oldSchema := fmt.Sprintf("itest_%s", templateSchemaID) - newSchema := fmt.Sprintf("itest_%s", id) - if _, err := cloneConn.Exec(ctx, "ALTER SCHEMA "+quoteIdentifier(oldSchema)+" RENAME TO "+quoteIdentifier(newSchema)); err != nil { - return fmt.Errorf("renaming cloned schema: %w", err) + var tables []string + for rows.Next() { + var tableName string + if err := rows.Scan(&tableName); err != nil { + rows.Close() + return fmt.Errorf("scanning table name: %w", err) + } + tables = append(tables, tableName) + } + rows.Close() + if err := rows.Err(); err != nil { + return fmt.Errorf("iterating template tables: %w", err) + } + + // Clone each table structure + for _, table := range tables { + createSQL := fmt.Sprintf( + "CREATE TABLE %s.%s (LIKE %s.%s INCLUDING ALL)", + quoteIdentifier(newSchema), quoteIdentifier(table), + quoteIdentifier(templateSchema), quoteIdentifier(table), + ) + if _, err := conn.Exec(ctx, createSQL); err != nil { + return fmt.Errorf("cloning table %s: %w", table, err) + } } - return nil -} + // Copy data from base table (migration tracking) so harmonydb doesn't re-run migrations + _, err = conn.Exec(ctx, fmt.Sprintf( + "INSERT INTO %s.base SELECT * FROM %s.base", + quoteIdentifier(newSchema), quoteIdentifier(templateSchema), + )) + if err != nil { + return fmt.Errorf("copying base table data: %w", err) + } -func dropDatabaseIfExists(ctx context.Context, conn *pgx.Conn, name string) error { - _, _ = conn.Exec(ctx, `SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = $1`, name) - _, err := conn.Exec(ctx, "DROP DATABASE IF EXISTS "+quoteIdentifier(name)) - return err + return nil } func (c connConfig) connString(database string) string { diff --git a/itests/alertnow_test.go b/itests/alertnow_test.go index cf1ac843d..c07e9ebf2 
100644 --- a/itests/alertnow_test.go +++ b/itests/alertnow_test.go @@ -14,7 +14,7 @@ import ( ) func TestAlertNow(t *testing.T) { - // TestAlertNow tests alerting system + // tests alerting system tp := &testPlugin{} plugin.TestPlugins = []plugin.Plugin{ From 07655f05894cc4a7a897dc996b0cfb06f8e55886 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 17:37:11 -0600 Subject: [PATCH 12/33] CLEANUPS --- harmony/harmonydb/testutil/setup.go | 20 ++-- itests/move_shared_test.go | 137 ++++++++++++++++++++++++++++ lib/paths/remote_test.go | 119 ------------------------ 3 files changed, 149 insertions(+), 127 deletions(-) create mode 100644 itests/move_shared_test.go diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index 38c5d6ec1..ed3e7029f 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -117,7 +117,8 @@ func prepareTemplateSchema() error { } // cloneTemplateSchema creates a new schema for the test by copying all table -// structures from the template schema using CREATE TABLE ... (LIKE ... INCLUDING ALL). +// structures and data from the template schema. This includes seed data that +// was inserted during migrations (e.g., harmony_config entries). func cloneTemplateSchema(id harmonydb.ITestID) error { ctx, cancel := context.WithTimeout(context.Background(), time.Minute) defer cancel() @@ -172,13 +173,16 @@ func cloneTemplateSchema(id harmonydb.ITestID) error { } } - // Copy data from base table (migration tracking) so harmonydb doesn't re-run migrations - _, err = conn.Exec(ctx, fmt.Sprintf( - "INSERT INTO %s.base SELECT * FROM %s.base", - quoteIdentifier(newSchema), quoteIdentifier(templateSchema), - )) - if err != nil { - return fmt.Errorf("copying base table data: %w", err) + // Copy data from all tables (includes migration tracking in 'base' and seed data from migrations) + for _, table := range tables { + _, err = conn.Exec(ctx, fmt.Sprintf( + "INSERT INTO %s.%s SELECT * FROM %s.%s", + quoteIdentifier(newSchema), quoteIdentifier(table), + quoteIdentifier(templateSchema), quoteIdentifier(table), + )) + if err != nil { + return fmt.Errorf("copying data for table %s: %w", table, err) + } } return nil diff --git a/itests/move_shared_test.go b/itests/move_shared_test.go new file mode 100644 index 000000000..84c88e0a0 --- /dev/null +++ b/itests/move_shared_test.go @@ -0,0 +1,137 @@ +package itests + +import ( + "context" + "encoding/json" + "fmt" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/google/uuid" + "github.com/gorilla/mux" + logging "github.com/ipfs/go-log/v2" + "github.com/stretchr/testify/require" + + "github.com/filecoin-project/go-state-types/abi" + + "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" + "github.com/filecoin-project/curio/lib/paths" + "github.com/filecoin-project/curio/lib/storiface" +) + +const metaFile = "sectorstore.json" + +func createTestStorage(t *testing.T, p string, seal bool, att ...*paths.Local) storiface.ID { + if err := os.MkdirAll(p, 0755); err != nil { + if !os.IsExist(err) { + require.NoError(t, err) + } + } + + cfg := &storiface.LocalStorageMeta{ + ID: storiface.ID(uuid.New().String()), + Weight: 10, + CanSeal: seal, + CanStore: !seal, + } + + b, err := json.MarshalIndent(cfg, "", " ") + require.NoError(t, err) + + require.NoError(t, os.WriteFile(filepath.Join(p, metaFile), b, 0644)) + + for _, s := range att { + require.NoError(t, 
s.OpenPath(context.Background(), p)) + } + + return cfg.ID +} + +func TestMoveShared(t *testing.T) { + logging.SetAllLoggers(logging.LevelDebug) + + sharedITestID := testutil.SetupTestDB(t) + + db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) + require.NoError(t, err) + + index := paths.NewDBIndex(nil, db) + + ctx := context.Background() + + dir := t.TempDir() + + openRepo := func(dir string) paths.LocalStorage { + bls := &paths.BasicLocalStorage{PathToJSON: filepath.Join(t.TempDir(), "storage.json")} + return bls + } + + // setup two repos with two storage paths: + // repo 1 with both paths + // repo 2 with one path (shared) + + lr1 := openRepo(filepath.Join(dir, "l1")) + lr2 := openRepo(filepath.Join(dir, "l2")) + + mux1 := mux.NewRouter() + mux2 := mux.NewRouter() + hs1 := httptest.NewServer(mux1) + hs2 := httptest.NewServer(mux2) + + ls1, err := paths.NewLocal(ctx, lr1, index, hs1.URL+"/remote") + require.NoError(t, err) + ls2, err := paths.NewLocal(ctx, lr2, index, hs2.URL+"/remote") + require.NoError(t, err) + + dirStor := filepath.Join(dir, "stor") + dirSeal := filepath.Join(dir, "seal") + + id1 := createTestStorage(t, dirStor, false, ls1, ls2) + id2 := createTestStorage(t, dirSeal, true, ls1) + + rs1, err := paths.NewRemote(ls1, index, nil, 20, &paths.DefaultPartialFileHandler{}) + require.NoError(t, err) + rs2, err := paths.NewRemote(ls2, index, nil, 20, &paths.DefaultPartialFileHandler{}) + require.NoError(t, err) + _ = rs2 + mux1.PathPrefix("/").Handler(&paths.FetchHandler{Local: ls1, PfHandler: &paths.DefaultPartialFileHandler{}}) + mux2.PathPrefix("/").Handler(&paths.FetchHandler{Local: ls2, PfHandler: &paths.DefaultPartialFileHandler{}}) + + // add a sealed replica file to the sealing (non-shared) path + + s1ref := storiface.SectorRef{ + ID: abi.SectorID{ + Miner: 12, + Number: 1, + }, + ProofType: abi.RegisteredSealProof_StackedDrg2KiBV1, + } + + sp, sid, err := rs1.AcquireSector(ctx, s1ref, storiface.FTNone, storiface.FTSealed, storiface.PathSealing, storiface.AcquireMove) + require.NoError(t, err) + require.Equal(t, id2, storiface.ID(sid.Sealed)) + + data := make([]byte, 2032) + data[1] = 54 + require.NoError(t, os.WriteFile(sp.Sealed, data, 0666)) + fmt.Println("write to ", sp.Sealed) + + require.NoError(t, index.StorageDeclareSector(ctx, storiface.ID(sid.Sealed), s1ref.ID, storiface.FTSealed, true)) + + // move to the shared path from the second node (remote move / delete) + + require.NoError(t, rs2.MoveStorage(ctx, s1ref, storiface.FTSealed)) + + // check that the file still exists + sp, sid, err = rs2.AcquireSector(ctx, s1ref, storiface.FTSealed, storiface.FTNone, storiface.PathStorage, storiface.AcquireMove) + require.NoError(t, err) + require.Equal(t, id1, storiface.ID(sid.Sealed)) + fmt.Println("read from ", sp.Sealed) + + read, err := os.ReadFile(sp.Sealed) + require.NoError(t, err) + require.EqualValues(t, data, read) +} diff --git a/lib/paths/remote_test.go b/lib/paths/remote_test.go index 64f4b057b..a18069773 100644 --- a/lib/paths/remote_test.go +++ b/lib/paths/remote_test.go @@ -3,17 +3,14 @@ package paths_test import ( "context" - "encoding/json" "fmt" "io" "net/http" "net/http/httptest" "os" - "path/filepath" "testing" "github.com/golang/mock/gomock" - "github.com/google/uuid" "github.com/gorilla/mux" logging "github.com/ipfs/go-log/v2" "github.com/stretchr/testify/require" @@ -21,128 +18,12 @@ import ( "github.com/filecoin-project/go-state-types/abi" - "github.com/filecoin-project/curio/harmony/harmonydb" - 
"github.com/filecoin-project/curio/harmony/harmonydb/testutil" "github.com/filecoin-project/curio/lib/partialfile" "github.com/filecoin-project/curio/lib/paths" "github.com/filecoin-project/curio/lib/paths/mocks" "github.com/filecoin-project/curio/lib/storiface" ) -const metaFile = "sectorstore.json" - -func createTestStorage(t *testing.T, p string, seal bool, att ...*paths.Local) storiface.ID { - if err := os.MkdirAll(p, 0755); err != nil { - if !os.IsExist(err) { - require.NoError(t, err) - } - } - - cfg := &storiface.LocalStorageMeta{ - ID: storiface.ID(uuid.New().String()), - Weight: 10, - CanSeal: seal, - CanStore: !seal, - } - - b, err := json.MarshalIndent(cfg, "", " ") - require.NoError(t, err) - - require.NoError(t, os.WriteFile(filepath.Join(p, metaFile), b, 0644)) - - for _, s := range att { - require.NoError(t, s.OpenPath(context.Background(), p)) - } - - return cfg.ID -} - -func TestMoveShared(t *testing.T) { - logging.SetAllLoggers(logging.LevelDebug) - - sharedITestID := testutil.SetupTestDB(t) - - db, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) - require.NoError(t, err) - - index := paths.NewDBIndex(nil, db) - - ctx := context.Background() - - dir := t.TempDir() - - openRepo := func(dir string) paths.LocalStorage { - bls := &paths.BasicLocalStorage{PathToJSON: filepath.Join(t.TempDir(), "storage.json")} - return bls - } - - // setup two repos with two storage paths: - // repo 1 with both paths - // repo 2 with one path (shared) - - lr1 := openRepo(filepath.Join(dir, "l1")) - lr2 := openRepo(filepath.Join(dir, "l2")) - - mux1 := mux.NewRouter() - mux2 := mux.NewRouter() - hs1 := httptest.NewServer(mux1) - hs2 := httptest.NewServer(mux2) - - ls1, err := paths.NewLocal(ctx, lr1, index, hs1.URL+"/remote") - require.NoError(t, err) - ls2, err := paths.NewLocal(ctx, lr2, index, hs2.URL+"/remote") - require.NoError(t, err) - - dirStor := filepath.Join(dir, "stor") - dirSeal := filepath.Join(dir, "seal") - - id1 := createTestStorage(t, dirStor, false, ls1, ls2) - id2 := createTestStorage(t, dirSeal, true, ls1) - - rs1, err := paths.NewRemote(ls1, index, nil, 20, &paths.DefaultPartialFileHandler{}) - require.NoError(t, err) - rs2, err := paths.NewRemote(ls2, index, nil, 20, &paths.DefaultPartialFileHandler{}) - require.NoError(t, err) - _ = rs2 - mux1.PathPrefix("/").Handler(&paths.FetchHandler{Local: ls1, PfHandler: &paths.DefaultPartialFileHandler{}}) - mux2.PathPrefix("/").Handler(&paths.FetchHandler{Local: ls2, PfHandler: &paths.DefaultPartialFileHandler{}}) - - // add a sealed replica file to the sealing (non-shared) path - - s1ref := storiface.SectorRef{ - ID: abi.SectorID{ - Miner: 12, - Number: 1, - }, - ProofType: abi.RegisteredSealProof_StackedDrg2KiBV1, - } - - sp, sid, err := rs1.AcquireSector(ctx, s1ref, storiface.FTNone, storiface.FTSealed, storiface.PathSealing, storiface.AcquireMove) - require.NoError(t, err) - require.Equal(t, id2, storiface.ID(sid.Sealed)) - - data := make([]byte, 2032) - data[1] = 54 - require.NoError(t, os.WriteFile(sp.Sealed, data, 0666)) - fmt.Println("write to ", sp.Sealed) - - require.NoError(t, index.StorageDeclareSector(ctx, storiface.ID(sid.Sealed), s1ref.ID, storiface.FTSealed, true)) - - // move to the shared path from the second node (remote move / delete) - - require.NoError(t, rs2.MoveStorage(ctx, s1ref, storiface.FTSealed)) - - // check that the file still exists - sp, sid, err = rs2.AcquireSector(ctx, s1ref, storiface.FTSealed, storiface.FTNone, storiface.PathStorage, storiface.AcquireMove) - require.NoError(t, err) 
- require.Equal(t, id1, storiface.ID(sid.Sealed)) - fmt.Println("read from ", sp.Sealed) - - read, err := os.ReadFile(sp.Sealed) - require.NoError(t, err) - require.EqualValues(t, data, read) -} - func TestReader(t *testing.T) { //stm: @STORAGE_INFO_001 logging.SetAllLoggers(logging.LevelDebug) From 7cc440e94ed17ddaaeaef099a593cbf350337544 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 17:56:47 -0600 Subject: [PATCH 13/33] itest --- .../indexstore => itests}/indexstore_test.go | 39 +++++-------------- 1 file changed, 10 insertions(+), 29 deletions(-) rename {market/indexstore => itests}/indexstore_test.go (83%) diff --git a/market/indexstore/indexstore_test.go b/itests/indexstore_test.go similarity index 83% rename from market/indexstore/indexstore_test.go rename to itests/indexstore_test.go index eafcd6594..b3bbc18d9 100644 --- a/market/indexstore/indexstore_test.go +++ b/itests/indexstore_test.go @@ -1,4 +1,4 @@ -package indexstore +package itests import ( "context" @@ -18,22 +18,16 @@ import ( "github.com/filecoin-project/curio/deps/config" "github.com/filecoin-project/curio/lib/savecache" "github.com/filecoin-project/curio/lib/testutils" + "github.com/filecoin-project/curio/market/indexstore" ) -func envElse(env, els string) string { - if v := os.Getenv(env); v != "" { - return v - } - return els -} - func TestNewIndexStore(t *testing.T) { // Set up the indexStore for testing ctx := context.Background() cfg := config.DefaultCurioConfig() - idxStore := NewIndexStore([]string{envElse("CURIO_HARMONYDB_HOSTS", "127.0.0.1")}, 9042, cfg) + idxStore := indexstore.NewIndexStore([]string{testutils.EnvElse("CURIO_HARMONYDB_HOSTS", "127.0.0.1")}, 9042, cfg) err := idxStore.Start(ctx, true) require.NoError(t, err) @@ -89,7 +83,7 @@ func TestNewIndexStore(t *testing.T) { dealCfg := cfg.Market.StorageMarketConfig chanSize := dealCfg.Indexing.InsertConcurrency * dealCfg.Indexing.InsertBatchSize - recs := make(chan Record, chanSize) + recs := make(chan indexstore.Record, chanSize) opts := []carv2.Option{carv2.ZeroLengthSectionAsEOF(true)} blockReader, err := carv2.NewBlockReader(f, opts...) 
require.NoError(t, err) @@ -109,7 +103,7 @@ func TestNewIndexStore(t *testing.T) { if i == 0 { m = blockMetadata.Hash() } - recs <- Record{ + recs <- indexstore.Record{ Cid: blockMetadata.Cid, Offset: blockMetadata.SourceOffset, Size: blockMetadata.Size, @@ -141,10 +135,8 @@ func TestNewIndexStore(t *testing.T) { err = idxStore.RemoveIndexes(ctx, pcids[0].PieceCid) require.NoError(t, err) - err = idxStore.session.Query("SELECT * FROM piece_by_aggregate").Exec() - require.NoError(t, err) - - aggrRec := []Record{ + // Test aggregate index + aggrRec := []indexstore.Record{ { Cid: pcid1, Offset: 0, @@ -174,9 +166,9 @@ func TestNewIndexStore(t *testing.T) { require.NoError(t, err) // Test PDP layer - leafs := make([]NodeDigest, len(layer)) + leafs := make([]indexstore.NodeDigest, len(layer)) for i, s := range layer { - leafs[i] = NodeDigest{ + leafs[i] = indexstore.NodeDigest{ Layer: layerIdx, Hash: s.Hash, Index: int64(i), @@ -213,16 +205,5 @@ func TestNewIndexStore(t *testing.T) { err = idxStore.DeletePDPLayer(ctx, pcid2) require.NoError(t, err) - - // Drop the tables - err = idxStore.session.Query("DROP TABLE PayloadToPieces").Exec() - require.NoError(t, err) - err = idxStore.session.Query("DROP TABLE PieceBlockOffsetSize").Exec() - require.NoError(t, err) - err = idxStore.session.Query("DROP TABLE aggregate_by_piece").Exec() - require.NoError(t, err) - err = idxStore.session.Query("DROP TABLE piece_by_aggregate").Exec() - require.NoError(t, err) - err = idxStore.session.Query("DROP TABLE pdp_cache_layer").Exec() - require.NoError(t, err) } + From 2ad599a49dceb16dd88311420c672fcd03697f76 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 18:02:49 -0600 Subject: [PATCH 14/33] try docker file again --- .github/actions/setup-ffi/action.yml | 55 +++++++++++++++++++++++ .github/workflows/ci.yml | 67 ++++++++++++++-------------- itests/indexstore_test.go | 1 - 3 files changed, 89 insertions(+), 34 deletions(-) create mode 100644 .github/actions/setup-ffi/action.yml diff --git a/.github/actions/setup-ffi/action.yml b/.github/actions/setup-ffi/action.yml new file mode 100644 index 000000000..af8b56f4c --- /dev/null +++ b/.github/actions/setup-ffi/action.yml @@ -0,0 +1,55 @@ +name: 'Setup FFI' +description: 'Cache and build FFI dependencies (for use with build-env container)' + +runs: + using: 'composite' + steps: + - name: Fetch all tags + run: git fetch --all --no-recurse-submodules + shell: bash + + - name: Sync submodules + run: git submodule sync + shell: bash + + - name: Update submodules + run: git submodule update --init + shell: bash + + # Cache FFI build based on submodule commit + - name: Generate FFI cache key + id: ffi-cache-key + run: | + FFI_COMMIT=$(git -C extern/filecoin-ffi rev-parse HEAD) + echo "key=ffi-${{ runner.os }}-${FFI_COMMIT}" >> $GITHUB_OUTPUT + shell: bash + + - name: Cache FFI build + id: cache-ffi + uses: actions/cache@v4 + with: + path: | + extern/filecoin-ffi/.install-filcrypto + extern/filecoin-ffi/filcrypto.h + extern/filecoin-ffi/libfilcrypto.a + extern/filecoin-ffi/filcrypto.pc + build/.filecoin-install + build/.blst-install + extern/supraseal/.install-blst + extern/supraseal/deps/blst + key: ${{ steps.ffi-cache-key.outputs.key }} + + - name: Build FFI + if: steps.cache-ffi.outputs.cache-hit != 'true' + run: make deps + shell: bash + env: + GITHUB_TOKEN: ${{ github.token }} + + - name: Restore FFI marker files + if: steps.cache-ffi.outputs.cache-hit == 'true' + run: | + mkdir -p build + touch build/.filecoin-install build/.blst-install || true + 
shell: bash + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1ef3d675a..cca907d9f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,6 +10,7 @@ on: env: GO_VERSION: 1.24.7 + BUILD_ENV_IMAGE: ghcr.io/${{ github.repository }}/build-env:latest jobs: ci-lint: @@ -47,15 +48,15 @@ jobs: # Debug build - separate so tests can depend on it without waiting for other variants build-debug: runs-on: ubuntu-latest + container: + image: ghcr.io/filecoin-project/curio/build-env:latest steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} + - name: Setup FFI (cached) + uses: ./.github/actions/setup-ffi - name: Build debug run: make debug @@ -72,6 +73,8 @@ jobs: # Other build variants - run in parallel, tests don't wait for these build: runs-on: ubuntu-latest + container: + image: ghcr.io/filecoin-project/curio/build-env:latest strategy: fail-fast: false matrix: @@ -89,10 +92,8 @@ jobs: with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} + - name: Setup FFI (cached) + uses: ./.github/actions/setup-ffi - name: Build ${{ matrix.variant.name }} run: make ${{ matrix.variant.target }} @@ -110,16 +111,18 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-unit: runs-on: [self-hosted, docker] + container: + image: ghcr.io/filecoin-project/curio/build-env:latest + volumes: + - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters needs: [setup-params] steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} + - name: Setup FFI (cached) + uses: ./.github/actions/setup-ffi - name: Run unit tests run: go test -v --tags=debug -timeout 30m $(go list ./... 
| grep -v curio/itests) @@ -128,6 +131,11 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-itest: runs-on: [self-hosted, docker] + container: + image: ghcr.io/filecoin-project/curio/build-env:latest + volumes: + - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters + - /var/run/docker.sock:/var/run/docker.sock needs: [setup-params] env: CONTAINER_NAME: yugabyte-${{ github.run_id }}-${{ matrix.test-suite.name }} @@ -143,15 +151,17 @@ jobs: target: "./itests/alertnow_test.go" - name: itest-pdp-prove target: "./itests/pdp_prove_test.go" + - name: itest-move-shared + target: "./itests/move_shared_test.go" + - name: itest-indexstore + target: "./itests/indexstore_test.go" steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} + - name: Setup FFI (cached) + uses: ./.github/actions/setup-ffi - name: Start YugabyteDB container run: | @@ -186,15 +196,15 @@ jobs: lint: runs-on: ubuntu-latest + container: + image: ghcr.io/filecoin-project/curio/build-env:latest steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} + - name: Setup FFI (cached) + uses: ./.github/actions/setup-ffi - name: Install golangci-lint run: | @@ -435,24 +445,15 @@ jobs: gen-check: runs-on: ubuntu-latest + container: + image: ghcr.io/filecoin-project/curio/build-env:latest steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} - - - name: Install Go tools - run: | - # Tools are defined in tools/tools.go and versioned in go.mod - go install golang.org/x/tools/cmd/goimports & - go install github.com/hannahhoward/cbor-gen-for & - go install github.com/swaggo/swag/cmd/swag & - wait - shell: bash + - name: Setup FFI (cached) + uses: ./.github/actions/setup-ffi - name: Generate Code env: diff --git a/itests/indexstore_test.go b/itests/indexstore_test.go index b3bbc18d9..f4e5d15b4 100644 --- a/itests/indexstore_test.go +++ b/itests/indexstore_test.go @@ -206,4 +206,3 @@ func TestNewIndexStore(t *testing.T) { err = idxStore.DeletePDPLayer(ctx, pcid2) require.NoError(t, err) } - From 49f1c9344ab4fbbf0c81eef11094be1e71302cc7 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 18:10:20 -0600 Subject: [PATCH 15/33] reuse compromise --- .github/actions/setup-ffi/action.yml | 55 ---------------- .github/workflows/ci.yml | 96 ++++++++++++++++++---------- 2 files changed, 63 insertions(+), 88 deletions(-) delete mode 100644 .github/actions/setup-ffi/action.yml diff --git a/.github/actions/setup-ffi/action.yml b/.github/actions/setup-ffi/action.yml deleted file mode 100644 index af8b56f4c..000000000 --- a/.github/actions/setup-ffi/action.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: 'Setup FFI' -description: 'Cache and build FFI dependencies (for use with build-env container)' - -runs: - using: 'composite' - steps: - - name: Fetch all tags - run: git fetch --all --no-recurse-submodules - shell: bash - - - name: Sync submodules - run: git submodule sync - shell: bash - - - name: Update submodules - run: git submodule update --init - shell: bash - - # Cache FFI build based on submodule commit - - name: Generate FFI cache key - id: ffi-cache-key - run: | - FFI_COMMIT=$(git -C 
extern/filecoin-ffi rev-parse HEAD) - echo "key=ffi-${{ runner.os }}-${FFI_COMMIT}" >> $GITHUB_OUTPUT - shell: bash - - - name: Cache FFI build - id: cache-ffi - uses: actions/cache@v4 - with: - path: | - extern/filecoin-ffi/.install-filcrypto - extern/filecoin-ffi/filcrypto.h - extern/filecoin-ffi/libfilcrypto.a - extern/filecoin-ffi/filcrypto.pc - build/.filecoin-install - build/.blst-install - extern/supraseal/.install-blst - extern/supraseal/deps/blst - key: ${{ steps.ffi-cache-key.outputs.key }} - - - name: Build FFI - if: steps.cache-ffi.outputs.cache-hit != 'true' - run: make deps - shell: bash - env: - GITHUB_TOKEN: ${{ github.token }} - - - name: Restore FFI marker files - if: steps.cache-ffi.outputs.cache-hit == 'true' - run: | - mkdir -p build - touch build/.filecoin-install build/.blst-install || true - shell: bash - diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cca907d9f..2352d32cb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,6 @@ on: env: GO_VERSION: 1.24.7 - BUILD_ENV_IMAGE: ghcr.io/${{ github.repository }}/build-env:latest jobs: ci-lint: @@ -30,7 +29,21 @@ jobs: - name: Run actionlint run: actionlint -shellcheck= -pyflakes= - setup-params: + # Build environment setup - runs once, caches FFI for other ubuntu-latest jobs + setup-build-env: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} + + # Self-hosted setup - proof params and FFI for test jobs + setup-self-hosted: runs-on: [self-hosted, docker] steps: - name: Cache proof parameters @@ -45,18 +58,28 @@ jobs: run: lotus fetch-params 8388608 shell: bash + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} + # Debug build - separate so tests can depend on it without waiting for other variants build-debug: runs-on: ubuntu-latest - container: - image: ghcr.io/filecoin-project/curio/build-env:latest + needs: [setup-build-env] steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup FFI (cached) - uses: ./.github/actions/setup-ffi + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} - name: Build debug run: make debug @@ -73,8 +96,7 @@ jobs: # Other build variants - run in parallel, tests don't wait for these build: runs-on: ubuntu-latest - container: - image: ghcr.io/filecoin-project/curio/build-env:latest + needs: [setup-build-env] strategy: fail-fast: false matrix: @@ -92,8 +114,10 @@ jobs: with: submodules: recursive - - name: Setup FFI (cached) - uses: ./.github/actions/setup-ffi + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} - name: Build ${{ matrix.variant.name }} run: make ${{ matrix.variant.target }} @@ -111,18 +135,16 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-unit: runs-on: [self-hosted, docker] - container: - image: ghcr.io/filecoin-project/curio/build-env:latest - volumes: - - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters - needs: [setup-params] + needs: [setup-self-hosted] steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup FFI (cached) - uses: ./.github/actions/setup-ffi + - name: Setup build 
environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} - name: Run unit tests run: go test -v --tags=debug -timeout 30m $(go list ./... | grep -v curio/itests) @@ -131,12 +153,7 @@ jobs: # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-itest: runs-on: [self-hosted, docker] - container: - image: ghcr.io/filecoin-project/curio/build-env:latest - volumes: - - /var/tmp/filecoin-proof-parameters:/var/tmp/filecoin-proof-parameters - - /var/run/docker.sock:/var/run/docker.sock - needs: [setup-params] + needs: [setup-self-hosted] env: CONTAINER_NAME: yugabyte-${{ github.run_id }}-${{ matrix.test-suite.name }} strategy: @@ -160,8 +177,10 @@ jobs: with: submodules: recursive - - name: Setup FFI (cached) - uses: ./.github/actions/setup-ffi + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} - name: Start YugabyteDB container run: | @@ -196,15 +215,16 @@ jobs: lint: runs-on: ubuntu-latest - container: - image: ghcr.io/filecoin-project/curio/build-env:latest + needs: [setup-build-env] steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup FFI (cached) - uses: ./.github/actions/setup-ffi + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} - name: Install golangci-lint run: | @@ -445,15 +465,25 @@ jobs: gen-check: runs-on: ubuntu-latest - container: - image: ghcr.io/filecoin-project/curio/build-env:latest + needs: [setup-build-env] steps: - uses: actions/checkout@v4 with: submodules: recursive - - name: Setup FFI (cached) - uses: ./.github/actions/setup-ffi + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} + + - name: Install Go tools + run: | + # Tools are defined in tools/tools.go and versioned in go.mod + go install golang.org/x/tools/cmd/goimports & + go install github.com/hannahhoward/cbor-gen-for & + go install github.com/swaggo/swag/cmd/swag & + wait + shell: bash - name: Generate Code env: From 2c6ab32ae258e3e88ae566f3421c5d6e1af15f48 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 18:27:11 -0600 Subject: [PATCH 16/33] greased lightning --- .github/actions/setup-build-env/action.yml | 44 +++++++++++----------- .github/workflows/ci.yml | 32 +++++++++++++++- 2 files changed, 52 insertions(+), 24 deletions(-) diff --git a/.github/actions/setup-build-env/action.yml b/.github/actions/setup-build-env/action.yml index 1c278107b..1825546ba 100644 --- a/.github/actions/setup-build-env/action.yml +++ b/.github/actions/setup-build-env/action.yml @@ -16,32 +16,32 @@ runs: cache: true cache-dependency-path: go.sum - # Cache apt packages to avoid repeated downloads - - name: Cache apt packages - uses: actions/cache@v4 - with: - path: | - /var/cache/apt/archives - key: apt-${{ runner.os }}-${{ hashFiles('.github/actions/install-deps/action.yml') }} - restore-keys: | - apt-${{ runner.os }}- - - - name: Install system dependencies + # Run apt install, git submodules, and go mod download in parallel + - name: Install deps, setup submodules, download Go modules (parallel) run: | - sudo apt-get update - sudo apt-get install -y curl ca-certificates gnupg ocl-icd-opencl-dev libhwloc-dev - shell: bash + # Start apt install in background + ( + sudo apt-get update + sudo apt-get install -y curl ca-certificates gnupg ocl-icd-opencl-dev libhwloc-dev + ) & + APT_PID=$! 
- - name: Fetch all tags - run: git fetch --all --no-recurse-submodules - shell: bash + # Start git operations in background + ( + git fetch --all --no-recurse-submodules + git submodule sync + git submodule update --init + ) & + GIT_PID=$! - - name: Sync submodules - run: git submodule sync - shell: bash + # Start go mod download in background + go mod download & + GO_MOD_PID=$! - - name: Update submodules - run: git submodule update --init + # Wait for all to complete + wait $APT_PID || exit 1 + wait $GIT_PID || exit 1 + wait $GO_MOD_PID || exit 1 shell: bash # Cache FFI build based on submodule commit diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2352d32cb..addd254c4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,15 +46,43 @@ jobs: setup-self-hosted: runs-on: [self-hosted, docker] steps: + # Check if proof params exist locally (self-hosted runners persist files) + - name: Check local proof parameters + id: local-params + run: | + PARAMS_DIR="/var/tmp/filecoin-proof-parameters" + if [ -d "$PARAMS_DIR" ] && [ "$(ls -A $PARAMS_DIR 2>/dev/null)" ]; then + echo "Proof parameters found locally" + echo "exists=true" >> $GITHUB_OUTPUT + else + echo "Proof parameters not found locally" + echo "exists=false" >> $GITHUB_OUTPUT + fi + shell: bash + + # Only try cache if local files don't exist + # Use restore-keys to read from main branch's cache (PRs inherit from base branch) - name: Cache proof parameters + if: steps.local-params.outputs.exists != 'true' id: cache-params - uses: actions/cache@v4 + uses: actions/cache/restore@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-8388608 + restore-keys: | + proof-params- + + # Save cache only on main branch (PRs just read from main's cache) + - name: Save proof parameters cache + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' && github.ref == 'refs/heads/main' + uses: actions/cache/save@v4 with: path: /var/tmp/filecoin-proof-parameters key: proof-params-8388608 + # Only fetch if neither local nor cache had params - name: Fetch parameters - if: steps.cache-params.outputs.cache-hit != 'true' + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' run: lotus fetch-params 8388608 shell: bash From 128388bbefc69f6b9df1fca2017a271aa1e2a9b8 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 18:39:29 -0600 Subject: [PATCH 17/33] itest parallel --- .github/workflows/ci.yml | 442 +++++++++------------------------- itests/alertnow_test.go | 1 + itests/curio_test.go | 1 + itests/dyncfg_test.go | 1 + itests/harmonydb_test.go | 3 + itests/indexstore_test.go | 1 + itests/local_test.go | 1 + itests/move_shared_test.go | 1 + itests/pdp_prove_test.go | 1 + itests/sql_idempotent_test.go | 1 + 10 files changed, 125 insertions(+), 328 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index addd254c4..28ca4d925 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,24 +12,39 @@ env: GO_VERSION: 1.24.7 jobs: - ci-lint: + # Fast checks - no FFI needed, run immediately + quick-checks: runs-on: ubuntu-latest steps: - - name: Checkout repository - uses: actions/checkout@v4 + - uses: actions/checkout@v4 - name: Setup Go - uses: ./.github/actions/setup-go + uses: actions/setup-go@v5 with: go-version: ${{ env.GO_VERSION }} + cache: true + cache-dependency-path: go.sum - - name: Install actionlint - run: go install 
github.com/rhysd/actionlint/cmd/actionlint - - - name: Run actionlint - run: actionlint -shellcheck= -pyflakes= - - # Build environment setup - runs once, caches FFI for other ubuntu-latest jobs + - name: Run checks in parallel + run: | + # gofmt check + ( + go fmt ./... + git diff --quiet || { echo "gofmt needed"; exit 1; } + ) & + FMT_PID=$! + + # actionlint + ( + go install github.com/rhysd/actionlint/cmd/actionlint + actionlint -shellcheck= -pyflakes= + ) & + LINT_PID=$! + + wait $FMT_PID || exit 1 + wait $LINT_PID || exit 1 + + # Setup FFI cache for ubuntu-latest jobs setup-build-env: runs-on: ubuntu-latest steps: @@ -42,9 +57,13 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - # Self-hosted setup - proof params and FFI for test jobs + # Self-hosted setup - proof params and FFI, starts YugabyteDB for all tests setup-self-hosted: runs-on: [self-hosted, docker] + outputs: + yugabyte-ip: ${{ steps.start-yb.outputs.yb_ip }} + env: + YB_CONTAINER: yugabyte-${{ github.run_id }} steps: # Check if proof params exist locally (self-hosted runners persist files) - name: Check local proof parameters @@ -52,16 +71,11 @@ jobs: run: | PARAMS_DIR="/var/tmp/filecoin-proof-parameters" if [ -d "$PARAMS_DIR" ] && [ "$(ls -A $PARAMS_DIR 2>/dev/null)" ]; then - echo "Proof parameters found locally" echo "exists=true" >> $GITHUB_OUTPUT else - echo "Proof parameters not found locally" echo "exists=false" >> $GITHUB_OUTPUT fi - shell: bash - # Only try cache if local files don't exist - # Use restore-keys to read from main branch's cache (PRs inherit from base branch) - name: Cache proof parameters if: steps.local-params.outputs.exists != 'true' id: cache-params @@ -69,10 +83,8 @@ jobs: with: path: /var/tmp/filecoin-proof-parameters key: proof-params-8388608 - restore-keys: | - proof-params- + restore-keys: proof-params- - # Save cache only on main branch (PRs just read from main's cache) - name: Save proof parameters cache if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' && github.ref == 'refs/heads/main' uses: actions/cache/save@v4 @@ -80,11 +92,32 @@ jobs: path: /var/tmp/filecoin-proof-parameters key: proof-params-8388608 - # Only fetch if neither local nor cache had params - name: Fetch parameters if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' run: lotus fetch-params 8388608 - shell: bash + + # Start YugabyteDB early (in parallel with checkout/setup) + - name: Start YugabyteDB + id: start-yb + run: | + # Stop any existing container + docker stop $YB_CONTAINER 2>/dev/null || true + docker rm $YB_CONTAINER 2>/dev/null || true + + # Start fresh container + docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false + + # Wait for it to be ready + for i in {1..60}; do + if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then + break + fi + sleep 1 + done + + YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') + echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT + echo "YugabyteDB ready at $YB_IP" - uses: actions/checkout@v4 with: @@ -95,8 +128,8 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - # Debug build - separate so tests can depend on it without waiting for other variants - build-debug: + # Lint - runs early, fast feedback + lint: runs-on: ubuntu-latest needs: [setup-build-env] steps: @@ -109,34 +142,19 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Build debug - run: make debug 
- - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: binaries-debug - path: | - curio - sptool - retention-days: 1 + - name: Install and run golangci-lint + run: | + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.4.0 + golangci-lint run -v --timeout 15m --concurrency 4 - # Other build variants - run in parallel, tests don't wait for these + # Single build job - just verify it compiles build: runs-on: ubuntu-latest needs: [setup-build-env] strategy: fail-fast: false matrix: - variant: - - name: mainnet - target: build - - name: calibnet - target: calibnet - - name: 2k - target: 2k - - name: forest - target: forest-test + variant: [debug, build, calibnet, 2k] steps: - uses: actions/checkout@v4 with: @@ -147,20 +165,10 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Build ${{ matrix.variant.name }} - run: make ${{ matrix.variant.target }} - - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: binaries-${{ matrix.variant.name }} - path: | - curio - sptool - retention-days: 1 + - name: Build ${{ matrix.variant }} + run: make ${{ matrix.variant }} # Unit tests - no database needed - # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking test-unit: runs-on: [self-hosted, docker] needs: [setup-self-hosted] @@ -177,29 +185,13 @@ jobs: - name: Run unit tests run: go test -v --tags=debug -timeout 30m $(go list ./... | grep -v curio/itests) - # Integration tests - need YugabyteDB - # Tests compile via `go test`, they don't need pre-built binaries - only FFI for linking + # Integration tests - single job, Go handles parallelism via t.Parallel() test-itest: runs-on: [self-hosted, docker] needs: [setup-self-hosted] env: - CONTAINER_NAME: yugabyte-${{ github.run_id }}-${{ matrix.test-suite.name }} - strategy: - fail-fast: false - matrix: - test-suite: - - name: itest-curio - target: "./itests/curio_test.go" - - name: itest-harmonyDB - target: "./itests/harmonydb_test.go" - - name: itest-alertnow - target: "./itests/alertnow_test.go" - - name: itest-pdp-prove - target: "./itests/pdp_prove_test.go" - - name: itest-move-shared - target: "./itests/move_shared_test.go" - - name: itest-indexstore - target: "./itests/indexstore_test.go" + CURIO_HARMONYDB_HOSTS: ${{ needs.setup-self-hosted.outputs.yugabyte-ip }} + LOTUS_HARMONYDB_HOSTS: ${{ needs.setup-self-hosted.outputs.yugabyte-ip }} steps: - uses: actions/checkout@v4 with: @@ -210,38 +202,24 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Start YugabyteDB container - run: | - docker run --rm --name ${{ env.CONTAINER_NAME }} -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false - - - name: Wait for YugabyteDB to start + - name: Run all integration tests (parallel via t.Parallel) run: | - while true; do - status=$(docker exec ${{ env.CONTAINER_NAME }} bin/yugabyted status); - echo $status; - echo $status | grep Running && break; - sleep 1; - done - - - name: Get YugabyteDB container IP - id: get-yb-ip - run: | - YB_IP=$(docker inspect $CONTAINER_NAME --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') - echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT + echo "Using YugabyteDB at: $CURIO_HARMONYDB_HOSTS" + go test -v --tags=debug -timeout 60m -parallel 4 ./itests/... 
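A note on the test invocation above: the -parallel 4 flag only caps how many tests within a package that have called t.Parallel() run at the same time; tests that never opt in still run one at a time. The shape the itests converge on in this series is sketched below. TestExample is an illustrative name, while testutil.SetupTestDB and harmonydb.NewFromConfigWithITestID are the helpers used elsewhere in these patches; treat the body as a sketch of the pattern, not a test taken from the repository.

    package itests

    import (
        "testing"

        "github.com/filecoin-project/curio/harmony/harmonydb"
        "github.com/filecoin-project/curio/harmony/harmonydb/testutil"
    )

    func TestExample(t *testing.T) {
        t.Parallel() // opt in: runs alongside other parallel itests, capped by -parallel

        // Each test clones the migrated template schema, so parallel tests sharing
        // one YugabyteDB instance never touch each other's tables.
        itestID := testutil.SetupTestDB(t)

        db, err := harmonydb.NewFromConfigWithITestID(t, itestID)
        if err != nil {
            t.Fatal(err)
        }
        _ = db // exercise the schema-scoped database here
    }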
- - name: Run integration tests - env: - CURIO_HARMONYDB_HOSTS: ${{ steps.get-yb-ip.outputs.yb_ip }} - LOTUS_HARMONYDB_HOSTS: ${{ steps.get-yb-ip.outputs.yb_ip }} - run: | - echo "Using YugabyteDB Container IP: ${{ env.CURIO_HARMONYDB_HOSTS }}" - go test -v --tags=debug -timeout 30m ${{ matrix.test-suite.target }} - - - name: Stop YugabyteDB container - if: always() - run: docker stop ${{ env.CONTAINER_NAME }} + # Cleanup YugabyteDB after all tests complete + cleanup: + runs-on: [self-hosted, docker] + needs: [test-unit, test-itest] + if: always() + env: + YB_CONTAINER: yugabyte-${{ github.run_id }} + steps: + - name: Stop YugabyteDB + run: docker stop $YB_CONTAINER 2>/dev/null || true - lint: + # Gen check + gen-check: runs-on: ubuntu-latest needs: [setup-build-env] steps: @@ -254,275 +232,83 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Install golangci-lint + - name: Install Go tools and generate run: | - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.4.0 - shell: bash + go install golang.org/x/tools/cmd/goimports & + go install github.com/hannahhoward/cbor-gen-for & + go install github.com/swaggo/swag/cmd/swag & + wait + make gen - - name: Lint + - name: Check for changes run: | - golangci-lint run -v --timeout 15m --concurrency 4 - shell: bash - - gofmt: - runs-on: ubuntu-latest - needs: [ci-lint] - steps: - - uses: actions/checkout@v4 - - - name: Setup Go - uses: ./.github/actions/setup-go - with: - go-version: ${{ env.GO_VERSION }} - - - name: Check gofmt - run: go fmt ./... - shell: bash - - - name: Git diff check - run: git --no-pager diff - shell: bash - - - name: Git diff quiet - run: git --no-pager diff --quiet - shell: bash + git diff --quiet || { git diff; exit 1; } + # Supraseal build (kept separate - long running, different runner) build-supraseal-ubuntu24: runs-on: ubuntu-24.04 - needs: [ci-lint] - env: - GCC_VERSION: "12" steps: - - name: Checkout repository - uses: actions/checkout@v4 + - uses: actions/checkout@v4 with: submodules: recursive - name: Free up disk space run: | sudo apt-get clean - sudo rm -rf /usr/share/dotnet - sudo rm -rf /opt/ghc - sudo rm -rf "/usr/local/share/boost" - sudo rm -rf "$AGENT_TOOLSDIRECTORY" + sudo rm -rf /usr/share/dotnet /opt/ghc "/usr/local/share/boost" "$AGENT_TOOLSDIRECTORY" - name: Install system dependencies run: | sudo apt-get update sudo apt-get install -y \ - build-essential \ - gcc-12 g++-12 \ - nasm \ - pkg-config \ - autoconf automake libtool \ - libssl-dev \ - libnuma-dev \ - uuid-dev \ - libaio-dev \ - libfuse3-dev \ - libarchive-dev \ - libkeyutils-dev \ - libncurses-dev \ - python3 python3-pip python3-dev \ - curl wget git \ - xxd - - - name: Set up Python virtual environment - run: | - python3 -m venv --help > /dev/null || sudo apt-get install -y python3-venv + build-essential gcc-12 g++-12 nasm pkg-config \ + autoconf automake libtool libssl-dev libnuma-dev \ + uuid-dev libaio-dev libfuse3-dev libarchive-dev \ + libkeyutils-dev libncurses-dev libgmp-dev libconfig++-dev \ + python3 python3-pip python3-dev curl wget git xxd - - name: Set up GCC 12 as default + - name: Set up GCC 12 run: | sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 100 sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 100 - sudo update-alternatives --set gcc /usr/bin/gcc-12 - sudo update-alternatives --set g++ /usr/bin/g++-12 - gcc --version - g++ --version - name: Cache CUDA installation id: cache-cuda uses: actions/cache@v4 
with: - path: | - /usr/local/cuda - /usr/local/cuda-* - key: cuda-toolkit-ubuntu-24.04-${{ runner.os }}-v1 + path: /usr/local/cuda + key: cuda-toolkit-ubuntu-24.04-v1 - - name: Install CUDA Toolkit from NVIDIA Repository + - name: Install CUDA Toolkit if: steps.cache-cuda.outputs.cache-hit != 'true' run: | wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/x86_64/cuda-keyring_1.1-1_all.deb sudo dpkg -i cuda-keyring_1.1-1_all.deb - rm cuda-keyring_1.1-1_all.deb - - sudo apt-get update - sudo apt-get -y install cuda-toolkit - - if [ -d "/usr/local/cuda" ]; then - echo "CUDA installed at /usr/local/cuda" - ls -la /usr/local/cuda*/bin/nvcc || true - else - echo "ERROR: CUDA installation not found" - exit 1 - fi + sudo apt-get update && sudo apt-get -y install cuda-toolkit - name: Set up CUDA environment run: | - # Verify CUDA installation exists - if [ ! -d "/usr/local/cuda" ]; then - echo "ERROR: /usr/local/cuda not found" - exit 1 - fi - - # Export PATH locally to verify nvcc works - export PATH="/usr/local/cuda/bin:$PATH" - export CUDA_HOME=/usr/local/cuda - export LD_LIBRARY_PATH="/usr/local/cuda/lib64:${LD_LIBRARY_PATH}" - - nvcc --version - echo "/usr/local/cuda/bin" >> $GITHUB_PATH echo "CUDA_HOME=/usr/local/cuda" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH" >> $GITHUB_ENV - - name: Install libconfig++ - run: | - sudo apt-get install -y libconfig++-dev || { - wget https://hyperrealm.github.io/libconfig/dist/libconfig-1.7.3.tar.gz - tar -xzf libconfig-1.7.3.tar.gz - cd libconfig-1.7.3 - ./configure - make -j$(nproc) - sudo make install - sudo ldconfig - cd .. - rm -rf libconfig-1.7.3* - } - - - name: Install GMP library - run: sudo apt-get install -y libgmp-dev - - - name: Cache Python venv - id: cache-venv - uses: actions/cache@v4 - with: - path: extern/supraseal/.venv - key: supraseal-venv-ubuntu24-${{ hashFiles('extern/supraseal/build.sh') }} - restore-keys: | - supraseal-venv-ubuntu24- - - - name: Cache SPDK build - id: cache-spdk + - name: Cache Python venv and SPDK uses: actions/cache@v4 with: - path: extern/supraseal/deps/spdk-v24.05 - key: spdk-v24.05-gcc12-ubuntu24-${{ hashFiles('extern/supraseal/build.sh') }} - restore-keys: | - spdk-v24.05-gcc12-ubuntu24- + path: | + extern/supraseal/.venv + extern/supraseal/deps/spdk-v24.05 + key: supraseal-deps-ubuntu24-${{ hashFiles('extern/supraseal/build.sh') }} - name: Build Supraseal working-directory: extern/supraseal run: | - export CC=gcc-12 - export CXX=g++-12 - export CUDA=/usr/local/cuda + export CC=gcc-12 CXX=g++-12 CUDA=/usr/local/cuda export PATH=/usr/local/cuda/bin:$PATH - export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH - - which nvcc - nvcc --version - ./build.sh - name: Verify binaries - working-directory: extern/supraseal - run: | - echo "=== Built binaries ===" - ls -lh bin/ - - echo "" - echo "=== Verifying binaries exist ===" - test -f bin/seal && echo "✓ seal binary created" || exit 1 - test -f bin/pc2 && echo "✓ pc2 binary created" || exit 1 - test -f bin/tree_r && echo "✓ tree_r binary created" || exit 1 - test -f bin/tree_r_cpu && echo "✓ tree_r_cpu binary created" || exit 1 - test -f bin/tree_d_cpu && echo "✓ tree_d_cpu binary created" || exit 1 - - echo "" - echo "=== Binary sizes ===" - du -h bin/* - - echo "" - echo "✅ All binaries built successfully!" 
- - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: supraseal-binaries-ubuntu24-gcc12-cuda - path: | - extern/supraseal/bin/seal - extern/supraseal/bin/pc2 - extern/supraseal/bin/tree_r - extern/supraseal/bin/tree_r_cpu - extern/supraseal/bin/tree_d_cpu - retention-days: 30 - - - name: Upload library artifact - uses: actions/upload-artifact@v4 - with: - name: supraseal-library-ubuntu24-gcc12-cuda - path: extern/supraseal/obj/libsupraseal.a - retention-days: 30 - - - name: Build summary run: | - echo "### 🎉 Supraseal Build Summary" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Configuration:**" >> $GITHUB_STEP_SUMMARY - echo "- OS: Ubuntu 24.04" >> $GITHUB_STEP_SUMMARY - echo "- GCC: $(gcc --version | head -1)" >> $GITHUB_STEP_SUMMARY - echo "- CUDA: $(nvcc --version | grep release)" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Built Binaries:**" >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - ls -lh extern/supraseal/bin/ >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "✅ All binaries compiled successfully with GCC 12 and CUDA!" >> $GITHUB_STEP_SUMMARY - - gen-check: - runs-on: ubuntu-latest - needs: [setup-build-env] - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} - - - name: Install Go tools - run: | - # Tools are defined in tools/tools.go and versioned in go.mod - go install golang.org/x/tools/cmd/goimports & - go install github.com/hannahhoward/cbor-gen-for & - go install github.com/swaggo/swag/cmd/swag & - wait - shell: bash - - - name: Generate Code - env: - LANG: en-US - run: make gen - shell: bash - - - name: Git diff check - run: git --no-pager diff - shell: bash - - - name: Git diff quiet - run: git --no-pager diff --quiet - shell: bash + for bin in seal pc2 tree_r tree_r_cpu tree_d_cpu; do + test -f extern/supraseal/bin/$bin || exit 1 + done + echo "✅ All Supraseal binaries built" diff --git a/itests/alertnow_test.go b/itests/alertnow_test.go index c07e9ebf2..4631d0712 100644 --- a/itests/alertnow_test.go +++ b/itests/alertnow_test.go @@ -14,6 +14,7 @@ import ( ) func TestAlertNow(t *testing.T) { + t.Parallel() // tests alerting system tp := &testPlugin{} diff --git a/itests/curio_test.go b/itests/curio_test.go index db530aca3..3831baa14 100644 --- a/itests/curio_test.go +++ b/itests/curio_test.go @@ -48,6 +48,7 @@ import ( ) func TestCurioHappyPath(t *testing.T) { + t.Parallel() ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/itests/dyncfg_test.go b/itests/dyncfg_test.go index 6de070943..67704ceb4 100644 --- a/itests/dyncfg_test.go +++ b/itests/dyncfg_test.go @@ -14,6 +14,7 @@ import ( ) func TestDynamicConfig(t *testing.T) { + t.Parallel() ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/itests/harmonydb_test.go b/itests/harmonydb_test.go index 214060c5f..ddacc9332 100644 --- a/itests/harmonydb_test.go +++ b/itests/harmonydb_test.go @@ -15,6 +15,7 @@ import ( ) func TestCrud(t *testing.T) { + t.Parallel() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -47,6 +48,7 @@ func TestCrud(t *testing.T) { } func TestTransaction(t *testing.T) { + t.Parallel() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -97,6 +99,7 @@ func TestTransaction(t *testing.T) { } func 
TestPartialWalk(t *testing.T) { + t.Parallel() ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/itests/indexstore_test.go b/itests/indexstore_test.go index f4e5d15b4..84b5b3302 100644 --- a/itests/indexstore_test.go +++ b/itests/indexstore_test.go @@ -22,6 +22,7 @@ import ( ) func TestNewIndexStore(t *testing.T) { + t.Parallel() // Set up the indexStore for testing ctx := context.Background() diff --git a/itests/local_test.go b/itests/local_test.go index 412da659d..0ba5c0e05 100644 --- a/itests/local_test.go +++ b/itests/local_test.go @@ -76,6 +76,7 @@ func (t *TestingLocalStorage) init(subpath string) error { var _ paths.LocalStorage = &TestingLocalStorage{} func TestLocalStorage(t *testing.T) { + t.Parallel() ctx := context.TODO() root := t.TempDir() diff --git a/itests/move_shared_test.go b/itests/move_shared_test.go index 84c88e0a0..1e3b202fc 100644 --- a/itests/move_shared_test.go +++ b/itests/move_shared_test.go @@ -51,6 +51,7 @@ func createTestStorage(t *testing.T, p string, seal bool, att ...*paths.Local) s } func TestMoveShared(t *testing.T) { + t.Parallel() logging.SetAllLoggers(logging.LevelDebug) sharedITestID := testutil.SetupTestDB(t) diff --git a/itests/pdp_prove_test.go b/itests/pdp_prove_test.go index cde14bbca..62644b1de 100644 --- a/itests/pdp_prove_test.go +++ b/itests/pdp_prove_test.go @@ -26,6 +26,7 @@ import ( // TestPDPProving verifies the functionality of generating and validating PDP proofs with a random file created in a temporary directory. func TestPDPProving(t *testing.T) { + t.Parallel() ctx := context.Background() cfg := config.DefaultCurioConfig() idxStore := indexstore.NewIndexStore([]string{testutils.EnvElse("CURIO_HARMONYDB_HOSTS", "127.0.0.1")}, 9042, cfg) diff --git a/itests/sql_idempotent_test.go b/itests/sql_idempotent_test.go index 2d11aa450..0d95d9a1c 100644 --- a/itests/sql_idempotent_test.go +++ b/itests/sql_idempotent_test.go @@ -15,6 +15,7 @@ import ( // The upgrader will fail unless everything has "IF NOT EXISTS" or "IF EXISTS" statements. // Or equivalent safety checks. 
func TestSQLIdempotent(t *testing.T) { + t.Parallel() harmonydb.ITestUpgradeFunc = func(db *pgxpool.Pool, name string, sql string) { _, err := db.Exec(context.Background(), sql) require.NoError(t, fmt.Errorf("SQL DDL file failed idempotent check: %s, %w", name, err)) From 40b5b99f99ce3c10f726215ea413c0b943a838cb Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 18:48:41 -0600 Subject: [PATCH 18/33] yb local to itests --- .github/workflows/ci.yml | 67 +++++++++++++++++----------------------- 1 file changed, 29 insertions(+), 38 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 28ca4d925..420a423de 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -57,13 +57,9 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - # Self-hosted setup - proof params and FFI, starts YugabyteDB for all tests + # Self-hosted setup - proof params and FFI cache only setup-self-hosted: runs-on: [self-hosted, docker] - outputs: - yugabyte-ip: ${{ steps.start-yb.outputs.yb_ip }} - env: - YB_CONTAINER: yugabyte-${{ github.run_id }} steps: # Check if proof params exist locally (self-hosted runners persist files) - name: Check local proof parameters @@ -96,29 +92,6 @@ jobs: if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' run: lotus fetch-params 8388608 - # Start YugabyteDB early (in parallel with checkout/setup) - - name: Start YugabyteDB - id: start-yb - run: | - # Stop any existing container - docker stop $YB_CONTAINER 2>/dev/null || true - docker rm $YB_CONTAINER 2>/dev/null || true - - # Start fresh container - docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false - - # Wait for it to be ready - for i in {1..60}; do - if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then - break - fi - sleep 1 - done - - YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') - echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT - echo "YugabyteDB ready at $YB_IP" - - uses: actions/checkout@v4 with: submodules: recursive @@ -190,32 +163,50 @@ jobs: runs-on: [self-hosted, docker] needs: [setup-self-hosted] env: - CURIO_HARMONYDB_HOSTS: ${{ needs.setup-self-hosted.outputs.yugabyte-ip }} - LOTUS_HARMONYDB_HOSTS: ${{ needs.setup-self-hosted.outputs.yugabyte-ip }} + YB_CONTAINER: yugabyte-${{ github.run_id }} steps: - uses: actions/checkout@v4 with: submodules: recursive + - name: Start YugabyteDB + id: start-yb + run: | + # Stop any existing container from previous runs + docker stop $YB_CONTAINER 2>/dev/null || true + docker rm $YB_CONTAINER 2>/dev/null || true + + # Start fresh container + docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false + + # Wait for it to be ready + for i in {1..60}; do + if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then + echo "YugabyteDB is ready" + break + fi + sleep 1 + done + + YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') + echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT + echo "YugabyteDB ready at $YB_IP" + - name: Setup build environment uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - name: Run all integration tests (parallel via t.Parallel) + env: + CURIO_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} + LOTUS_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} run: | echo "Using YugabyteDB at: 
$CURIO_HARMONYDB_HOSTS" go test -v --tags=debug -timeout 60m -parallel 4 ./itests/... - # Cleanup YugabyteDB after all tests complete - cleanup: - runs-on: [self-hosted, docker] - needs: [test-unit, test-itest] - if: always() - env: - YB_CONTAINER: yugabyte-${{ github.run_id }} - steps: - name: Stop YugabyteDB + if: always() run: docker stop $YB_CONTAINER 2>/dev/null || true # Gen check From 5961a1c676c15eb370680cec594f26e725c8e6c9 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 18:56:27 -0600 Subject: [PATCH 19/33] yb clone fuss --- harmony/harmonydb/testutil/setup.go | 61 +++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index ed3e7029f..bd0fc8d7d 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -3,6 +3,7 @@ package testutil import ( "context" "fmt" + "math/rand" "net/url" "os" "strings" @@ -15,6 +16,8 @@ import ( "github.com/filecoin-project/curio/harmony/harmonydb" ) +const maxRetries = 5 + const ( templateSchemaID harmonydb.ITestID = "template" testDBName string = "curio_itest" @@ -71,7 +74,28 @@ func loadConnConfig() connConfig { // prepareTemplateSchema creates the shared test database (if needed) and // applies all migrations to a template schema that will be cloned for each test. +// Retries on YugabyteDB serialization errors. func prepareTemplateSchema() error { + var lastErr error + for attempt := 0; attempt < maxRetries; attempt++ { + if attempt > 0 { + backoff := time.Duration(1< 0 { + // Exponential backoff with jitter + backoff := time.Duration(1< Date: Wed, 3 Dec 2025 19:05:25 -0600 Subject: [PATCH 20/33] mx the itest create to handle the thundering herd --- harmony/harmonydb/testutil/setup.go | 67 +++++++++++++++++------------ 1 file changed, 39 insertions(+), 28 deletions(-) diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index bd0fc8d7d..7f13f7095 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -24,9 +24,10 @@ const ( ) var ( - templateOnce sync.Once - templateErr error - baseConnCfg connConfig + templateOnce sync.Once + templateErr error + baseConnCfg connConfig + createDBMutex sync.Mutex ) type connConfig struct { @@ -105,37 +106,47 @@ func doPrepareTemplateSchema() error { return fmt.Errorf("connecting to admin database: %w", err) } - // Check if database exists - var exists bool - err = adminConn.QueryRow(ctx, "SELECT EXISTS(SELECT 1 FROM pg_database WHERE datname = $1)", testDBName).Scan(&exists) - if err != nil { - _ = adminConn.Close(ctx) - return fmt.Errorf("checking if test database exists: %w", err) - } - - if !exists { - if _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(testDBName)); err != nil { + err = func() error { + // Check if database exists + createDBMutex.Lock() + defer createDBMutex.Unlock() + var exists bool + err = adminConn.QueryRow(ctx, "SELECT EXISTS(SELECT 1 FROM pg_database WHERE datname = $1)", testDBName).Scan(&exists) + if err != nil { _ = adminConn.Close(ctx) - return fmt.Errorf("creating test database: %w", err) + return fmt.Errorf("checking if test database exists: %w", err) } - } - _ = adminConn.Close(ctx) - // Connect to the test database and drop old template schema if it exists - testConn, err := pgx.Connect(ctx, baseConnCfg.connString(testDBName)) - if err != nil { - return fmt.Errorf("connecting to test database: %w", err) - } - templateSchema := fmt.Sprintf("itest_%s", 
templateSchemaID) - _, _ = testConn.Exec(ctx, "DROP SCHEMA IF EXISTS "+quoteIdentifier(templateSchema)+" CASCADE") - _ = testConn.Close(ctx) + if !exists { + _, err := adminConn.Exec(ctx, "CREATE DATABASE "+quoteIdentifier(testDBName)) + // Ignore "already exists" errors (race condition with parallel tests or previous runs) + if err != nil && !strings.Contains(err.Error(), "already exists") { + _ = adminConn.Close(ctx) + return fmt.Errorf("creating test database: %w", err) + } + } + _ = adminConn.Close(ctx) + + // Connect to the test database and drop old template schema if it exists + testConn, err := pgx.Connect(ctx, baseConnCfg.connString(testDBName)) + if err != nil { + return fmt.Errorf("connecting to test database: %w", err) + } + templateSchema := fmt.Sprintf("itest_%s", templateSchemaID) + _, _ = testConn.Exec(ctx, "DROP SCHEMA IF EXISTS "+quoteIdentifier(templateSchema)+" CASCADE") + _ = testConn.Close(ctx) - // Use harmonydb.New to create the template schema and apply all migrations - db, err := harmonydb.New([]string{baseConnCfg.host}, baseConnCfg.username, baseConnCfg.password, testDBName, baseConnCfg.port, false, templateSchemaID) + // Use harmonydb.New to create the template schema and apply all migrations + db, err := harmonydb.New([]string{baseConnCfg.host}, baseConnCfg.username, baseConnCfg.password, testDBName, baseConnCfg.port, false, templateSchemaID) + if err != nil { + return fmt.Errorf("initializing template schema: %w", err) + } + db.Close() + return nil + }() if err != nil { - return fmt.Errorf("initializing template schema: %w", err) + return err } - db.Close() return nil } From 1017b0d4038e80ab08c1e28eff8135835fb10573 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 19:11:08 -0600 Subject: [PATCH 21/33] fix idempotent test --- itests/sql_idempotent_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/itests/sql_idempotent_test.go b/itests/sql_idempotent_test.go index 0d95d9a1c..8937f367f 100644 --- a/itests/sql_idempotent_test.go +++ b/itests/sql_idempotent_test.go @@ -14,8 +14,9 @@ import ( // TestSQLIdempotent tests that the SQL DDL files are idempotent. // The upgrader will fail unless everything has "IF NOT EXISTS" or "IF EXISTS" statements. // Or equivalent safety checks. +// NOTE: This test modifies harmonydb.ITestUpgradeFunc (global state), so it cannot run in parallel. func TestSQLIdempotent(t *testing.T) { - t.Parallel() + // Cannot use t.Parallel() - this test modifies global harmonydb.ITestUpgradeFunc harmonydb.ITestUpgradeFunc = func(db *pgxpool.Pool, name string, sql string) { _, err := db.Exec(context.Background(), sql) require.NoError(t, fmt.Errorf("SQL DDL file failed idempotent check: %s, %w", name, err)) From e611db42ea6f03a1abfc8202736937e7ee18fd61 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 19:34:43 -0600 Subject: [PATCH 22/33] parallel --- itests/sql_idempotent_test.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/itests/sql_idempotent_test.go b/itests/sql_idempotent_test.go index 8937f367f..0d705b592 100644 --- a/itests/sql_idempotent_test.go +++ b/itests/sql_idempotent_test.go @@ -17,6 +17,9 @@ import ( // NOTE: This test modifies harmonydb.ITestUpgradeFunc (global state), so it cannot run in parallel. 
func TestSQLIdempotent(t *testing.T) { // Cannot use t.Parallel() - this test modifies global harmonydb.ITestUpgradeFunc + defer func() { + harmonydb.ITestUpgradeFunc = nil + }() harmonydb.ITestUpgradeFunc = func(db *pgxpool.Pool, name string, sql string) { _, err := db.Exec(context.Background(), sql) require.NoError(t, fmt.Errorf("SQL DDL file failed idempotent check: %s, %w", name, err)) From 242ee2ae6e8282dcf6a5a7403560fffe71227ed4 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 19:49:24 -0600 Subject: [PATCH 23/33] fixes --- harmony/harmonydb/testutil/setup.go | 130 +++++++--------------------- itests/alertnow_test.go | 3 +- itests/dyncfg_test.go | 3 +- itests/sql_idempotent_test.go | 4 +- 4 files changed, 40 insertions(+), 100 deletions(-) diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index 7f13f7095..3edf8072e 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -3,7 +3,6 @@ package testutil import ( "context" "fmt" - "math/rand" "net/url" "os" "strings" @@ -16,18 +15,16 @@ import ( "github.com/filecoin-project/curio/harmony/harmonydb" ) -const maxRetries = 5 - const ( templateSchemaID harmonydb.ITestID = "template" testDBName string = "curio_itest" ) var ( - templateOnce sync.Once - templateErr error - baseConnCfg connConfig - createDBMutex sync.Mutex + templateOnce sync.Once + templateErr error + baseConnCfg connConfig + cloneMutex sync.Mutex // Serializes schema cloning to avoid YugabyteDB conflicts ) type connConfig struct { @@ -75,28 +72,7 @@ func loadConnConfig() connConfig { // prepareTemplateSchema creates the shared test database (if needed) and // applies all migrations to a template schema that will be cloned for each test. -// Retries on YugabyteDB serialization errors. func prepareTemplateSchema() error { - var lastErr error - for attempt := 0; attempt < maxRetries; attempt++ { - if attempt > 0 { - backoff := time.Duration(1< 0 { - // Exponential backoff with jitter - backoff := time.Duration(1< Date: Wed, 3 Dec 2025 21:06:24 -0600 Subject: [PATCH 24/33] serial vs parallel tests --- .github/workflows/ci.yml | 61 ++++++++++++++++++++-- itests/{ => serial}/alertnow_test.go | 11 ++-- itests/{ => serial}/sql_idempotent_test.go | 7 +-- 3 files changed, 67 insertions(+), 12 deletions(-) rename itests/{ => serial}/alertnow_test.go (87%) rename itests/{ => serial}/sql_idempotent_test.go (83%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 420a423de..2d30855c3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -158,12 +158,12 @@ jobs: - name: Run unit tests run: go test -v --tags=debug -timeout 30m $(go list ./... 
| grep -v curio/itests) - # Integration tests - single job, Go handles parallelism via t.Parallel() - test-itest: + # Integration tests - parallel tests (use t.Parallel()) + test-itest-parallel: runs-on: [self-hosted, docker] needs: [setup-self-hosted] env: - YB_CONTAINER: yugabyte-${{ github.run_id }} + YB_CONTAINER: yugabyte-parallel-${{ github.run_id }} steps: - uses: actions/checkout@v4 with: @@ -197,18 +197,71 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Run all integration tests (parallel via t.Parallel) + - name: Run parallel integration tests env: CURIO_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} LOTUS_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} run: | echo "Using YugabyteDB at: $CURIO_HARMONYDB_HOSTS" + # Serial tests require "serial" build tag, so they're excluded here go test -v --tags=debug -timeout 60m -parallel 4 ./itests/... - name: Stop YugabyteDB if: always() run: docker stop $YB_CONTAINER 2>/dev/null || true + # Integration tests - serial tests (modify global state, cannot use t.Parallel()) + test-itest-serial: + runs-on: [self-hosted, docker] + needs: [setup-self-hosted] + env: + YB_CONTAINER: yugabyte-serial-${{ github.run_id }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Start YugabyteDB + id: start-yb + run: | + # Stop any existing container from previous runs + docker stop $YB_CONTAINER 2>/dev/null || true + docker rm $YB_CONTAINER 2>/dev/null || true + + # Start fresh container + docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false + + # Wait for it to be ready + for i in {1..60}; do + if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then + echo "YugabyteDB is ready" + break + fi + sleep 1 + done + + YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') + echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT + echo "YugabyteDB ready at $YB_IP" + + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} + + - name: Run serial integration tests + env: + CURIO_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} + LOTUS_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} + run: | + echo "Using YugabyteDB at: $CURIO_HARMONYDB_HOSTS" + # Tests in serial/ modify global state and cannot run in parallel + go test -v --tags=debug,serial -timeout 30m ./itests/serial/... + + - name: Stop YugabyteDB + if: always() + run: docker stop $YB_CONTAINER 2>/dev/null || true + # Gen check gen-check: runs-on: ubuntu-latest diff --git a/itests/alertnow_test.go b/itests/serial/alertnow_test.go similarity index 87% rename from itests/alertnow_test.go rename to itests/serial/alertnow_test.go index 04366cd9b..d0a22ff19 100644 --- a/itests/alertnow_test.go +++ b/itests/serial/alertnow_test.go @@ -1,4 +1,6 @@ -package itests +//go:build serial + +package serial import ( "testing" @@ -13,11 +15,10 @@ import ( "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) +// TestAlertNow tests the alerting system. 
+// NOTE: Cannot run in parallel - modifies global variables: +// plugin.TestPlugins and alertmanager.AlertFuncs func TestAlertNow(t *testing.T) { - // NOTE: Cannot run in parallel - modifies global variables: - // plugin.TestPlugins and alertmanager.AlertFuncs - // tests alerting system - tp := &testPlugin{} plugin.TestPlugins = []plugin.Plugin{ tp, diff --git a/itests/sql_idempotent_test.go b/itests/serial/sql_idempotent_test.go similarity index 83% rename from itests/sql_idempotent_test.go rename to itests/serial/sql_idempotent_test.go index b35ab7cb5..0266edf93 100644 --- a/itests/sql_idempotent_test.go +++ b/itests/serial/sql_idempotent_test.go @@ -1,4 +1,6 @@ -package itests +//go:build serial + +package serial import ( "context" @@ -14,9 +16,8 @@ import ( // TestSQLIdempotent tests that the SQL DDL files are idempotent. // The upgrader will fail unless everything has "IF NOT EXISTS" or "IF EXISTS" statements. // Or equivalent safety checks. -// NOTE: This test modifies harmonydb.ITestUpgradeFunc (global state), so it cannot run in parallel. +// NOTE: Cannot run in parallel - modifies global harmonydb.ITestUpgradeFunc func TestSQLIdempotent(t *testing.T) { - // Cannot use t.Parallel() - this test modifies global harmonydb.ITestUpgradeFunc defer func() { harmonydb.ITestUpgradeFunc = nil }() From a4d4b652ae1dd508297fa604d16d1095ed666161 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 21:20:26 -0600 Subject: [PATCH 25/33] further op plus make gen timing debug --- .github/workflows/ci.yml | 161 +++++++++++++++------------ itests/serial/sql_idempotent_test.go | 3 +- 2 files changed, 94 insertions(+), 70 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2d30855c3..dcba177af 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,67 +44,9 @@ jobs: wait $FMT_PID || exit 1 wait $LINT_PID || exit 1 - # Setup FFI cache for ubuntu-latest jobs - setup-build-env: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} - - # Self-hosted setup - proof params and FFI cache only - setup-self-hosted: - runs-on: [self-hosted, docker] - steps: - # Check if proof params exist locally (self-hosted runners persist files) - - name: Check local proof parameters - id: local-params - run: | - PARAMS_DIR="/var/tmp/filecoin-proof-parameters" - if [ -d "$PARAMS_DIR" ] && [ "$(ls -A $PARAMS_DIR 2>/dev/null)" ]; then - echo "exists=true" >> $GITHUB_OUTPUT - else - echo "exists=false" >> $GITHUB_OUTPUT - fi - - - name: Cache proof parameters - if: steps.local-params.outputs.exists != 'true' - id: cache-params - uses: actions/cache/restore@v4 - with: - path: /var/tmp/filecoin-proof-parameters - key: proof-params-8388608 - restore-keys: proof-params- - - - name: Save proof parameters cache - if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' && github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 - with: - path: /var/tmp/filecoin-proof-parameters - key: proof-params-8388608 - - - name: Fetch parameters - if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' - run: lotus fetch-params 8388608 - - - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup build environment - uses: ./.github/actions/setup-build-env - with: - go-version: ${{ env.GO_VERSION }} - - # Lint - runs 
early, fast feedback + # Lint - runs immediately, no dependencies lint: runs-on: ubuntu-latest - needs: [setup-build-env] steps: - uses: actions/checkout@v4 with: @@ -120,10 +62,9 @@ jobs: curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.4.0 golangci-lint run -v --timeout 15m --concurrency 4 - # Single build job - just verify it compiles + # Build variants - run in parallel, no dependencies build: runs-on: ubuntu-latest - needs: [setup-build-env] strategy: fail-fast: false matrix: @@ -141,10 +82,9 @@ jobs: - name: Build ${{ matrix.variant }} run: make ${{ matrix.variant }} - # Unit tests - no database needed + # Unit tests - no database needed, runs immediately on self-hosted test-unit: runs-on: [self-hosted, docker] - needs: [setup-self-hosted] steps: - uses: actions/checkout@v4 with: @@ -161,10 +101,41 @@ jobs: # Integration tests - parallel tests (use t.Parallel()) test-itest-parallel: runs-on: [self-hosted, docker] - needs: [setup-self-hosted] env: YB_CONTAINER: yugabyte-parallel-${{ github.run_id }} steps: + - name: Check local proof parameters + id: local-params + run: | + PARAMS_DIR="/var/tmp/filecoin-proof-parameters" + if [ -d "$PARAMS_DIR" ] && [ "$(ls -A $PARAMS_DIR 2>/dev/null)" ]; then + echo "exists=true" >> $GITHUB_OUTPUT + else + echo "exists=false" >> $GITHUB_OUTPUT + fi + + - name: Restore proof parameters from cache + if: steps.local-params.outputs.exists != 'true' + id: cache-params + uses: actions/cache/restore@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-2k-v1 + restore-keys: proof-params- + + - name: Fetch proof parameters + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' + run: | + echo "Fetching proof parameters (cache miss)..." + lotus fetch-params 8388608 + + - name: Save proof parameters to cache + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' && github.ref == 'refs/heads/main' + uses: actions/cache/save@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-2k-v1 + - uses: actions/checkout@v4 with: submodules: recursive @@ -213,10 +184,41 @@ jobs: # Integration tests - serial tests (modify global state, cannot use t.Parallel()) test-itest-serial: runs-on: [self-hosted, docker] - needs: [setup-self-hosted] env: YB_CONTAINER: yugabyte-serial-${{ github.run_id }} steps: + - name: Check local proof parameters + id: local-params + run: | + PARAMS_DIR="/var/tmp/filecoin-proof-parameters" + if [ -d "$PARAMS_DIR" ] && [ "$(ls -A $PARAMS_DIR 2>/dev/null)" ]; then + echo "exists=true" >> $GITHUB_OUTPUT + else + echo "exists=false" >> $GITHUB_OUTPUT + fi + + - name: Restore proof parameters from cache + if: steps.local-params.outputs.exists != 'true' + id: cache-params + uses: actions/cache/restore@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-2k-v1 + restore-keys: proof-params- + + - name: Fetch proof parameters + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' + run: | + echo "Fetching proof parameters (cache miss)..." 
+ lotus fetch-params 8388608 + + - name: Save proof parameters to cache + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' && github.ref == 'refs/heads/main' + uses: actions/cache/save@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-2k-v1 + - uses: actions/checkout@v4 with: submodules: recursive @@ -262,10 +264,9 @@ jobs: if: always() run: docker stop $YB_CONTAINER 2>/dev/null || true - # Gen check + # Gen check - verify generated code is up to date gen-check: runs-on: ubuntu-latest - needs: [setup-build-env] steps: - uses: actions/checkout@v4 with: @@ -276,13 +277,35 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Install Go tools and generate + - name: Install Go tools run: | go install golang.org/x/tools/cmd/goimports & go install github.com/hannahhoward/cbor-gen-for & go install github.com/swaggo/swag/cmd/swag & wait - make gen + + - name: api-gen + run: make api-gen + + - name: go-generate + run: make go-generate + + - name: cfgdoc-gen + run: make cfgdoc-gen + + - name: docsgen (md + openrpc) + run: make docsgen + + - name: marketgen + run: make marketgen + + - name: docsgen-cli (builds curio + sptool) + run: make docsgen-cli + + - name: fiximports + go mod tidy + run: | + go run ./scripts/fiximports + go mod tidy - name: Check for changes run: | diff --git a/itests/serial/sql_idempotent_test.go b/itests/serial/sql_idempotent_test.go index 0266edf93..c90ce1e3a 100644 --- a/itests/serial/sql_idempotent_test.go +++ b/itests/serial/sql_idempotent_test.go @@ -11,6 +11,7 @@ import ( "github.com/yugabyte/pgx/v5/pgxpool" "github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) // TestSQLIdempotent tests that the SQL DDL files are idempotent. 
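Aside on the gen-check job in this patch: its bare `go install` calls (no @version suffix) resolve each generator's version from go.mod, which works because, as the workflow comments note, the tools are declared in tools/tools.go. The conventional shape of such a file is sketched below; the actual contents of tools/tools.go are not shown in these patches, so the build tag and import list are only a plausible guess based on the tools the job installs.

    //go:build tools

    package tools

    // Blank imports keep the code generators in go.mod so plain `go install` picks
    // up the pinned versions.
    import (
        _ "github.com/hannahhoward/cbor-gen-for"
        _ "github.com/swaggo/swag/cmd/swag"
        _ "golang.org/x/tools/cmd/goimports"
    )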
@@ -28,7 +29,7 @@ func TestSQLIdempotent(t *testing.T) { } } - testID := harmonydb.ITestNewID() + testID := testutil.SetupTestDB(t) cdb, err := harmonydb.NewFromConfigWithITestID(t, testID) require.NoError(t, err) From a6c3fd69096019379292b8f73819872d0cfa4237 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Wed, 3 Dec 2025 22:10:19 -0600 Subject: [PATCH 26/33] lets try something --- harmony/harmonydb/harmonydb.go | 22 ++++++++++++++++++---- itests/{ => serial}/dyncfg_test.go | 8 ++++++-- itests/serial/sql_idempotent_test.go | 5 +++-- 3 files changed, 27 insertions(+), 8 deletions(-) rename itests/{ => serial}/dyncfg_test.go (89%) diff --git a/harmony/harmonydb/harmonydb.go b/harmony/harmonydb/harmonydb.go index 16cadf756..3cab019d1 100644 --- a/harmony/harmonydb/harmonydb.go +++ b/harmony/harmonydb/harmonydb.go @@ -299,11 +299,25 @@ func (db *DB) ITestDeleteAll() { return } defer db.pgx.Close() - _, err := db.pgx.Exec(context.Background(), "DROP SCHEMA "+db.schema+" CASCADE") - if err != nil { - fmt.Println("warning: unclean itest shutdown: cannot delete schema: " + err.Error()) - return + + // Retry with exponential backoff for YugabyteDB serialization errors + retryWait := 100 * time.Millisecond + maxRetries := 5 + for i := 0; i < maxRetries; i++ { + _, err := db.pgx.Exec(context.Background(), "DROP SCHEMA "+db.schema+" CASCADE") + if err == nil { + return + } + // Check if it's a serialization error (40001) + if !strings.Contains(err.Error(), "40001") { + fmt.Println("warning: unclean itest shutdown: cannot delete schema: " + err.Error()) + return + } + // Serialization error - retry after backoff + time.Sleep(retryWait) + retryWait *= 2 } + fmt.Println("warning: unclean itest shutdown: cannot delete schema after retries") } var schemaREString = "^[A-Za-z0-9_]+$" diff --git a/itests/dyncfg_test.go b/itests/serial/dyncfg_test.go similarity index 89% rename from itests/dyncfg_test.go rename to itests/serial/dyncfg_test.go index c6eb9d5b4..5fe77fab4 100644 --- a/itests/dyncfg_test.go +++ b/itests/serial/dyncfg_test.go @@ -1,4 +1,6 @@ -package itests +//go:build serial + +package serial import ( "context" @@ -13,8 +15,10 @@ import ( "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) +// TestDynamicConfig tests the dynamic configuration change detection. +// NOTE: Cannot run in parallel - EnableChangeDetection starts a background +// goroutine that persists after the test and can interfere with other tests. func TestDynamicConfig(t *testing.T) { - t.Parallel() ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/itests/serial/sql_idempotent_test.go b/itests/serial/sql_idempotent_test.go index c90ce1e3a..3cf78f52b 100644 --- a/itests/serial/sql_idempotent_test.go +++ b/itests/serial/sql_idempotent_test.go @@ -11,13 +11,13 @@ import ( "github.com/yugabyte/pgx/v5/pgxpool" "github.com/filecoin-project/curio/harmony/harmonydb" - "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) // TestSQLIdempotent tests that the SQL DDL files are idempotent. // The upgrader will fail unless everything has "IF NOT EXISTS" or "IF EXISTS" statements. // Or equivalent safety checks. 
// NOTE: Cannot run in parallel - modifies global harmonydb.ITestUpgradeFunc +// NOTE: Does NOT use SetupTestDB because it needs fresh migrations to run func TestSQLIdempotent(t *testing.T) { defer func() { harmonydb.ITestUpgradeFunc = nil @@ -29,7 +29,8 @@ func TestSQLIdempotent(t *testing.T) { } } - testID := testutil.SetupTestDB(t) + // Use a fresh schema (not cloned) so migrations actually run and ITestUpgradeFunc is called + testID := harmonydb.ITestNewID() cdb, err := harmonydb.NewFromConfigWithITestID(t, testID) require.NoError(t, err) From 67531e7754569fbbd6410193a293bd9818becb74 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Thu, 4 Dec 2025 09:21:14 -0600 Subject: [PATCH 27/33] change-detect should be test-friendly --- deps/config/dynamic.go | 52 +++++++++++++++++++++++++++++++----- itests/serial/dyncfg_test.go | 11 ++++++-- 2 files changed, 54 insertions(+), 9 deletions(-) diff --git a/deps/config/dynamic.go b/deps/config/dynamic.go index 85cd438f5..e1f0eccee 100644 --- a/deps/config/dynamic.go +++ b/deps/config/dynamic.go @@ -91,17 +91,33 @@ type cfgRoot[T any] struct { layers []string treeCopy T fixupFn func(string, T) error + ctx context.Context + done chan struct{} } +// StopFunc is returned by EnableChangeDetectionWithContext and should be called +// to stop the change monitor goroutine and wait for it to exit. +type StopFunc func() + func EnableChangeDetection[T any](db *harmonydb.DB, obj T, layers []string, fixupFn func(string, T) error) error { + _, err := EnableChangeDetectionWithContext(context.Background(), db, obj, layers, fixupFn) + return err +} + +// EnableChangeDetectionWithContext starts a goroutine that monitors config changes. +// It returns a StopFunc that cancels the context and waits for the goroutine to exit. +// Call the StopFunc before cleaning up database resources. +func EnableChangeDetectionWithContext[T any](ctx context.Context, db *harmonydb.DB, obj T, layers []string, fixupFn func(string, T) error) (StopFunc, error) { var err error - r := &cfgRoot[T]{db: db, treeCopy: obj, layers: layers, fixupFn: fixupFn} + r := &cfgRoot[T]{db: db, treeCopy: obj, layers: layers, fixupFn: fixupFn, ctx: ctx, done: make(chan struct{})} r.treeCopy, err = CopyWithOriginalDynamics(obj) if err != nil { - return err + return nil, err } go r.changeMonitor() - return nil + return func() { + <-r.done // Wait for goroutine to exit + }, nil } // CopyWithOriginalDynamics copies the original dynamics from the original object to the new object. @@ -181,12 +197,25 @@ func isDynamicType(t reflect.Type) bool { } func (r *cfgRoot[T]) changeMonitor() { + defer close(r.done) // Signal that goroutine has exited + lastTimestamp := time.Time{} // lets do a read at startup for { + // Check if context is cancelled + select { + case <-r.ctx.Done(): + return + default: + } + configCount := 0 - err := r.db.QueryRow(context.Background(), `SELECT COUNT(*) FROM harmony_config WHERE timestamp > $1 AND title IN ($2)`, lastTimestamp, strings.Join(r.layers, ",")).Scan(&configCount) + err := r.db.QueryRow(r.ctx, `SELECT COUNT(*) FROM harmony_config WHERE timestamp > $1 AND title IN ($2)`, lastTimestamp, strings.Join(r.layers, ",")).Scan(&configCount) if err != nil { + // Exit if context was cancelled + if r.ctx.Err() != nil { + return + } logger.Errorf("error selecting configs: %s", err) continue } @@ -196,8 +225,11 @@ func (r *cfgRoot[T]) changeMonitor() { lastTimestamp = time.Now() // 1. 
get all configs - configs, err := GetConfigs(context.Background(), r.db, r.layers) + configs, err := GetConfigs(r.ctx, r.db, r.layers) if err != nil { + if r.ctx.Err() != nil { + return + } logger.Errorf("error getting configs: %s", err) continue } @@ -208,13 +240,19 @@ func (r *cfgRoot[T]) changeMonitor() { func() { dynamicLocker.Lock() defer dynamicLocker.Unlock() - err = ApplyLayers(context.Background(), r.treeCopy, configs, r.fixupFn) + err = ApplyLayers(r.ctx, r.treeCopy, configs, r.fixupFn) if err != nil { logger.Errorf("dynamic config failed to ApplyLayers: %s", err) return } }() - time.Sleep(30 * time.Second) + + // Sleep with context cancellation support + select { + case <-r.ctx.Done(): + return + case <-time.After(30 * time.Second): + } } } diff --git a/itests/serial/dyncfg_test.go b/itests/serial/dyncfg_test.go index 5fe77fab4..449be5dad 100644 --- a/itests/serial/dyncfg_test.go +++ b/itests/serial/dyncfg_test.go @@ -20,7 +20,6 @@ import ( // goroutine that persists after the test and can interfere with other tests. func TestDynamicConfig(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) - defer cancel() sharedITestID := testutil.SetupTestDB(t) cdb, err := harmonydb.NewFromConfigWithITestID(t, sharedITestID) @@ -47,7 +46,15 @@ func TestDynamicConfig(t *testing.T) { require.NoError(t, setTestConfig(ctx, cdb, databaseContents)) // "Start the server". This will immediately poll for a config update. - require.NoError(t, config.EnableChangeDetection(cdb, databaseContents, []string{"testcfg"}, config.FixTOML)) + // Get the stop function to properly shut down the goroutine before test cleanup + stopFn, err := config.EnableChangeDetectionWithContext(ctx, cdb, databaseContents, []string{"testcfg"}, config.FixTOML) + require.NoError(t, err) + + // Ensure we stop the change monitor BEFORE database cleanup happens + defer func() { + cancel() // Signal context cancellation + stopFn() // Wait for goroutine to exit + }() // Positive Test: the runtime config should have the new value require.Eventually(t, func() bool { From a5ce2ddf9029ac7c03d49dca2407db4d90eba8bd Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Thu, 4 Dec 2025 20:54:15 -0600 Subject: [PATCH 28/33] test fixes --- .../harmonydb/sql/20231110-mining_tasks.sql | 2 +- .../harmonydb/sql/20240404-machine_detail.sql | 2 +- .../sql/20240929-chain-sends-eth.sql | 2 +- .../harmonydb/sql/20241106-market-fixes.sql | 4 +- harmony/harmonydb/testutil/setup.go | 65 ++++++++++++++++++- 5 files changed, 68 insertions(+), 7 deletions(-) diff --git a/harmony/harmonydb/sql/20231110-mining_tasks.sql b/harmony/harmonydb/sql/20231110-mining_tasks.sql index 13c8419dd..43d289c74 100644 --- a/harmony/harmonydb/sql/20231110-mining_tasks.sql +++ b/harmony/harmonydb/sql/20231110-mining_tasks.sql @@ -38,4 +38,4 @@ create table if not exists mining_base_block unique (sp_id, task_id, block_cid) ); -CREATE UNIQUE INDEX mining_base_block_cid_k ON mining_base_block (sp_id, block_cid) WHERE no_win = false; +CREATE UNIQUE INDEX IF NOT EXISTS mining_base_block_cid_k ON mining_base_block (sp_id, block_cid) WHERE no_win = false; diff --git a/harmony/harmonydb/sql/20240404-machine_detail.sql b/harmony/harmonydb/sql/20240404-machine_detail.sql index 128d1ceda..c9a4784a9 100644 --- a/harmony/harmonydb/sql/20240404-machine_detail.sql +++ b/harmony/harmonydb/sql/20240404-machine_detail.sql @@ -8,5 +8,5 @@ CREATE TABLE IF NOT EXISTS harmony_machine_details ( FOREIGN KEY (machine_id) REFERENCES harmony_machines(id) ON DELETE CASCADE ); -CREATE UNIQUE 
INDEX machine_details_machine_id ON harmony_machine_details(machine_id); +CREATE UNIQUE INDEX IF NOT EXISTS machine_details_machine_id ON harmony_machine_details(machine_id); diff --git a/harmony/harmonydb/sql/20240929-chain-sends-eth.sql b/harmony/harmonydb/sql/20240929-chain-sends-eth.sql index fdb270660..4f3d5d5c9 100644 --- a/harmony/harmonydb/sql/20240929-chain-sends-eth.sql +++ b/harmony/harmonydb/sql/20240929-chain-sends-eth.sql @@ -39,7 +39,7 @@ COMMENT ON COLUMN message_sends_eth.send_time IS 'Time when the send task was ex COMMENT ON COLUMN message_sends_eth.send_success IS 'Whether this transaction was broadcasted to the network already, NULL if not yet attempted, TRUE if successful, FALSE if failed'; COMMENT ON COLUMN message_sends_eth.send_error IS 'Error message if send_success is FALSE'; -CREATE UNIQUE INDEX message_sends_eth_success_index +CREATE UNIQUE INDEX IF NOT EXISTS message_sends_eth_success_index ON message_sends_eth (from_address, nonce) WHERE send_success IS NOT FALSE; diff --git a/harmony/harmonydb/sql/20241106-market-fixes.sql b/harmony/harmonydb/sql/20241106-market-fixes.sql index c48723e6a..4be0f253b 100644 --- a/harmony/harmonydb/sql/20241106-market-fixes.sql +++ b/harmony/harmonydb/sql/20241106-market-fixes.sql @@ -1,9 +1,9 @@ ALTER TABLE ipni_peerid ADD UNIQUE (sp_id); -CREATE UNIQUE INDEX sectors_pipeline_events_task_history_id_uindex +CREATE UNIQUE INDEX IF NOT EXISTS sectors_pipeline_events_task_history_id_uindex ON sectors_pipeline_events (task_history_id, sp_id, sector_number); -CREATE UNIQUE INDEX market_piece_deal_piece_cid_id_uindex +CREATE UNIQUE INDEX IF NOT EXISTS market_piece_deal_piece_cid_id_uindex ON market_piece_deal (piece_cid, id); alter table market_mk12_deals diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index 3edf8072e..df9f08700 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -119,8 +119,8 @@ func prepareTemplateSchema() error { } // cloneTemplateSchema creates a new schema for the test by copying all table -// structures and data from the template schema. This includes seed data that -// was inserted during migrations (e.g., harmony_config entries). +// structures, data, and functions from the template schema. This includes seed +// data that was inserted during migrations (e.g., harmony_config entries). // Uses a mutex to serialize cloning and avoid YugabyteDB transaction conflicts. func cloneTemplateSchema(id harmonydb.ITestID) error { cloneMutex.Lock() @@ -193,6 +193,67 @@ func cloneTemplateSchema(id harmonydb.ITestID) error { } } + // Clone functions from template schema to new schema + if err := cloneFunctions(ctx, conn, templateSchema, newSchema); err != nil { + return fmt.Errorf("cloning functions: %w", err) + } + + return nil +} + +// cloneFunctions copies all functions from the template schema to the new schema. +// It retrieves function definitions using pg_get_functiondef and recreates them +// in the new schema by replacing the schema name in the function definition. 
+func cloneFunctions(ctx context.Context, conn *pgx.Conn, templateSchema, newSchema string) error { + // Query all functions in the template schema + rows, err := conn.Query(ctx, ` + SELECT p.oid, p.proname + FROM pg_proc p + JOIN pg_namespace n ON p.pronamespace = n.oid + WHERE n.nspname = $1 + `, templateSchema) + if err != nil { + return fmt.Errorf("querying functions: %w", err) + } + + type funcInfo struct { + oid uint32 + name string + } + var functions []funcInfo + for rows.Next() { + var f funcInfo + if err := rows.Scan(&f.oid, &f.name); err != nil { + rows.Close() + return fmt.Errorf("scanning function: %w", err) + } + functions = append(functions, f) + } + rows.Close() + if err := rows.Err(); err != nil { + return fmt.Errorf("iterating functions: %w", err) + } + + // Recreate each function in the new schema + for _, f := range functions { + var funcDef string + err := conn.QueryRow(ctx, "SELECT pg_get_functiondef($1)", f.oid).Scan(&funcDef) + if err != nil { + return fmt.Errorf("getting definition for function %s: %w", f.name, err) + } + + // Replace schema name in the function definition + // The function definition starts with "CREATE OR REPLACE FUNCTION schema.funcname" + funcDef = strings.Replace(funcDef, + quoteIdentifier(templateSchema)+".", + quoteIdentifier(newSchema)+".", + 1) + + if _, err := conn.Exec(ctx, funcDef); err != nil { + return fmt.Errorf("creating function %s: %w", f.name, err) + } + } + return nil } From adb3f1a8821a719e53b31f80f837aee62a87b3be Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Thu, 4 Dec 2025 22:12:14 -0600 Subject: [PATCH 29/33] fixes --- deps/config/dynamic.go | 7 +++--- .../sql/20240507-sdr-pipeline-fk-drop.sql | 22 +++++++++---------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/deps/config/dynamic.go b/deps/config/dynamic.go index e1f0eccee..3322a1dde 100644 --- a/deps/config/dynamic.go +++ b/deps/config/dynamic.go @@ -212,8 +212,8 @@ func (r *cfgRoot[T]) changeMonitor() { configCount := 0 err := r.db.QueryRow(r.ctx, `SELECT COUNT(*) FROM harmony_config WHERE timestamp > $1 AND title IN ($2)`, lastTimestamp, strings.Join(r.layers, ",")).Scan(&configCount) if err != nil { - // Exit if context was cancelled - if r.ctx.Err() != nil { + // Exit if context was cancelled or pool was closed (shutdown condition) + if r.ctx.Err() != nil || strings.Contains(err.Error(), "closed pool") { return } logger.Errorf("error selecting configs: %s", err) @@ -227,7 +227,8 @@ func (r *cfgRoot[T]) changeMonitor() { // 1. 
get all configs configs, err := GetConfigs(r.ctx, r.db, r.layers) if err != nil { - if r.ctx.Err() != nil { + // Exit if context was cancelled or pool was closed (shutdown condition) + if r.ctx.Err() != nil || strings.Contains(err.Error(), "closed pool") { return } logger.Errorf("error getting configs: %s", err) diff --git a/harmony/harmonydb/sql/20240507-sdr-pipeline-fk-drop.sql b/harmony/harmonydb/sql/20240507-sdr-pipeline-fk-drop.sql index daf7a4429..cbb0c2bd3 100644 --- a/harmony/harmonydb/sql/20240507-sdr-pipeline-fk-drop.sql +++ b/harmony/harmonydb/sql/20240507-sdr-pipeline-fk-drop.sql @@ -1,12 +1,12 @@ -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_commit_msg_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_finalize_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_move_storage_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_porep_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_precommit_msg_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_sdr_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_tree_c_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_tree_d_fkey; -ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT sectors_sdr_pipeline_task_id_tree_r_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_commit_msg_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_finalize_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_move_storage_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_porep_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_precommit_msg_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_sdr_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_tree_c_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_tree_d_fkey; +ALTER TABLE sectors_sdr_pipeline DROP CONSTRAINT IF EXISTS sectors_sdr_pipeline_task_id_tree_r_fkey; -ALTER TABLE parked_pieces DROP CONSTRAINT parked_pieces_cleanup_task_id_fkey; -ALTER TABLE parked_pieces DROP CONSTRAINT parked_pieces_task_id_fkey; +ALTER TABLE parked_pieces DROP CONSTRAINT IF EXISTS parked_pieces_cleanup_task_id_fkey; +ALTER TABLE parked_pieces DROP CONSTRAINT IF EXISTS parked_pieces_task_id_fkey; From 51a6a8c97197bebf98f420884106b23f6dd30894 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Fri, 5 Dec 2025 14:57:52 -0600 Subject: [PATCH 30/33] stuff to pass tests --- deps/config/dynamic.go | 10 +-- .../harmonydb/sql/20240611-snap-pipeline.sql | 3 +- harmony/harmonydb/sql/20241104-piece-info.sql | 2 +- harmony/harmonydb/sql/20250115-proofshare.sql | 4 +- .../sql/20250803-wallet-exporter.sql | 2 +- harmony/harmonydb/testutil/setup.go | 68 +++++++++++++++++++ 6 files changed, 80 insertions(+), 9 deletions(-) diff --git a/deps/config/dynamic.go b/deps/config/dynamic.go index 3322a1dde..0723f93cb 100644 --- a/deps/config/dynamic.go +++ b/deps/config/dynamic.go @@ -212,8 +212,9 @@ func (r *cfgRoot[T]) changeMonitor() { configCount := 0 err := r.db.QueryRow(r.ctx, `SELECT COUNT(*) FROM harmony_config WHERE timestamp > $1 AND 
title IN ($2)`, lastTimestamp, strings.Join(r.layers, ",")).Scan(&configCount) if err != nil { - // Exit if context was cancelled or pool was closed (shutdown condition) - if r.ctx.Err() != nil || strings.Contains(err.Error(), "closed pool") { + // Exit if context was cancelled, pool was closed, or table doesn't exist yet (shutdown/startup condition) + errStr := err.Error() + if r.ctx.Err() != nil || strings.Contains(errStr, "closed pool") || strings.Contains(errStr, "does not exist") { return } logger.Errorf("error selecting configs: %s", err) @@ -227,8 +228,9 @@ func (r *cfgRoot[T]) changeMonitor() { // 1. get all configs configs, err := GetConfigs(r.ctx, r.db, r.layers) if err != nil { - // Exit if context was cancelled or pool was closed (shutdown condition) - if r.ctx.Err() != nil || strings.Contains(err.Error(), "closed pool") { + // Exit if context was cancelled, pool was closed, or table doesn't exist yet (shutdown/startup condition) + errStr := err.Error() + if r.ctx.Err() != nil || strings.Contains(errStr, "closed pool") || strings.Contains(errStr, "does not exist") { return } logger.Errorf("error getting configs: %s", err) diff --git a/harmony/harmonydb/sql/20240611-snap-pipeline.sql b/harmony/harmonydb/sql/20240611-snap-pipeline.sql index 813e769d9..c25fd7688 100644 --- a/harmony/harmonydb/sql/20240611-snap-pipeline.sql +++ b/harmony/harmonydb/sql/20240611-snap-pipeline.sql @@ -105,7 +105,8 @@ INSERT INTO sectors_cc_values (reg_seal_proof, cur_unsealed_cid) VALUES (11, 'baga6ea4seaqgl4u6lwmnerwdrm4iz7ag3mpwwaqtapc2fciabpooqmvjypweeha'), (12, 'baga6ea4seaqdsvqopmj2soyhujb72jza76t4wpq5fzifvm3ctz47iyytkewnubq'), (13, 'baga6ea4seaqao7s73y24kcutaosvacpdjgfe5pw76ooefnyqw4ynr3d2y6x2mpq'), - (14, 'baga6ea4seaqomqafu276g53zko4k23xzh4h4uecjwicbmvhsuqi7o4bhthhm4aq'); + (14, 'baga6ea4seaqomqafu276g53zko4k23xzh4h4uecjwicbmvhsuqi7o4bhthhm4aq') +ON CONFLICT DO NOTHING; ALTER TABLE sectors_meta ADD COLUMN IF NOT EXISTS expiration_epoch BIGINT; diff --git a/harmony/harmonydb/sql/20241104-piece-info.sql b/harmony/harmonydb/sql/20241104-piece-info.sql index 49e36694f..a83a31a76 100644 --- a/harmony/harmonydb/sql/20241104-piece-info.sql +++ b/harmony/harmonydb/sql/20241104-piece-info.sql @@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS piece_summary ( ); -- Insert the initial row -INSERT INTO piece_summary (id) VALUES (TRUE); +INSERT INTO piece_summary (id) VALUES (TRUE) ON CONFLICT DO NOTHING; -- Function to update piece_summary when a new entry is added to market_piece_metadata CREATE OR REPLACE FUNCTION update_piece_summary() diff --git a/harmony/harmonydb/sql/20250115-proofshare.sql b/harmony/harmonydb/sql/20250115-proofshare.sql index 51358d3a9..f0761139e 100644 --- a/harmony/harmonydb/sql/20250115-proofshare.sql +++ b/harmony/harmonydb/sql/20250115-proofshare.sql @@ -42,7 +42,7 @@ CREATE TABLE IF NOT EXISTS proofshare_meta ( COMMENT ON COLUMN proofshare_meta.enabled IS 'Setting to TRUE indicates acceptance of provider TOS in lib/proofsvc/tos/provider.md and privacy.md'; -INSERT INTO proofshare_meta (singleton, enabled, wallet) VALUES (TRUE, FALSE, NULL); +INSERT INTO proofshare_meta (singleton, enabled, wallet) VALUES (TRUE, FALSE, NULL) ON CONFLICT DO NOTHING; CREATE TABLE IF NOT EXISTS proofshare_provider_payments ( provider_id BIGINT NOT NULL, -- wallet id @@ -106,7 +106,7 @@ CREATE TABLE IF NOT EXISTS proofshare_client_settings ( COMMENT ON COLUMN proofshare_client_settings.enabled IS 'Setting to TRUE indicates acceptance of client TOS in lib/proofsvc/tos/client.md and privacy.md'; -INSERT 
INTO proofshare_client_settings (enabled, sp_id, wallet, minimum_pending_seconds, do_porep, do_snap) VALUES (FALSE, 0, NULL, 0, FALSE, FALSE); +INSERT INTO proofshare_client_settings (enabled, sp_id, wallet, minimum_pending_seconds, do_porep, do_snap) VALUES (FALSE, 0, NULL, 0, FALSE, FALSE) ON CONFLICT DO NOTHING; CREATE TABLE IF NOT EXISTS proofshare_client_requests ( task_id BIGINT NOT NULL, diff --git a/harmony/harmonydb/sql/20250803-wallet-exporter.sql b/harmony/harmonydb/sql/20250803-wallet-exporter.sql index 5121bbac8..283908d0b 100644 --- a/harmony/harmonydb/sql/20250803-wallet-exporter.sql +++ b/harmony/harmonydb/sql/20250803-wallet-exporter.sql @@ -3,7 +3,7 @@ CREATE TABLE IF NOT EXISTS wallet_exporter_processing ( processed_until TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() ); -INSERT INTO wallet_exporter_processing (singleton) VALUES (TRUE); +INSERT INTO wallet_exporter_processing (singleton) VALUES (TRUE) ON CONFLICT DO NOTHING; -- presence of a message in this table means that we've already accounted the basic send CREATE TABLE IF NOT EXISTS wallet_exporter_watched_msgs ( diff --git a/harmony/harmonydb/testutil/setup.go b/harmony/harmonydb/testutil/setup.go index df9f08700..c098e2dd5 100644 --- a/harmony/harmonydb/testutil/setup.go +++ b/harmony/harmonydb/testutil/setup.go @@ -198,6 +198,11 @@ func cloneTemplateSchema(id harmonydb.ITestID) error { return fmt.Errorf("cloning functions: %w", err) } + // Clone triggers from template schema to new schema + if err := cloneTriggers(ctx, conn, templateSchema, newSchema); err != nil { + return fmt.Errorf("cloning triggers: %w", err) + } + return nil } @@ -244,10 +249,15 @@ func cloneFunctions(ctx context.Context, conn *pgx.Conn, templateSchema, newSche // Replace schema name in the function definition // The function definition starts with "CREATE OR REPLACE FUNCTION schema.funcname" + // Try both quoted and unquoted schema names since pg_get_functiondef output varies funcDef = strings.Replace(funcDef, quoteIdentifier(templateSchema)+".", quoteIdentifier(newSchema)+".", 1) + funcDef = strings.Replace(funcDef, + templateSchema+".", + newSchema+".", + 1) if _, err := conn.Exec(ctx, funcDef); err != nil { return fmt.Errorf("creating function %s: %w", f.name, err) @@ -257,6 +267,64 @@ func cloneFunctions(ctx context.Context, conn *pgx.Conn, templateSchema, newSche return nil } +// cloneTriggers copies all triggers from the template schema to the new schema. +// It retrieves trigger definitions and recreates them in the new schema. 
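+//
+// Illustrative only (assumed output shape): pg_get_triggerdef returns a
+// complete CREATE TRIGGER statement, roughly
+//
+//	CREATE TRIGGER ... AFTER INSERT ON <templateSchema>.market_piece_metadata
+//	FOR EACH ROW EXECUTE FUNCTION <templateSchema>.update_piece_summary()
+//
+// Both the target table and the invoked function may be schema-qualified,
+// which is why every "<templateSchema>." occurrence is replaced below rather
+// than only the first one.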
+func cloneTriggers(ctx context.Context, conn *pgx.Conn, templateSchema, newSchema string) error { + // Query all triggers in the template schema + rows, err := conn.Query(ctx, ` + SELECT + t.tgname AS trigger_name, + c.relname AS table_name, + pg_get_triggerdef(t.oid) AS trigger_def + FROM pg_trigger t + JOIN pg_class c ON t.tgrelid = c.oid + JOIN pg_namespace n ON c.relnamespace = n.oid + WHERE n.nspname = $1 + AND NOT t.tgisinternal + `, templateSchema) + if err != nil { + return fmt.Errorf("querying triggers: %w", err) + } + + type triggerInfo struct { + name string + tableName string + def string + } + var triggers []triggerInfo + for rows.Next() { + var t triggerInfo + if err := rows.Scan(&t.name, &t.tableName, &t.def); err != nil { + rows.Close() + return fmt.Errorf("scanning trigger: %w", err) + } + triggers = append(triggers, t) + } + rows.Close() + if err := rows.Err(); err != nil { + return fmt.Errorf("iterating triggers: %w", err) + } + + // Recreate each trigger in the new schema + for _, t := range triggers { + // Replace schema references in the trigger definition + // Try both quoted and unquoted schema names + triggerDef := t.def + triggerDef = strings.ReplaceAll(triggerDef, + quoteIdentifier(templateSchema)+".", + quoteIdentifier(newSchema)+".") + triggerDef = strings.ReplaceAll(triggerDef, + templateSchema+".", + newSchema+".") + + if _, err := conn.Exec(ctx, triggerDef); err != nil { + return fmt.Errorf("creating trigger %s on %s: %w", t.name, t.tableName, err) + } + } + + return nil +} + func (c connConfig) connString(database string) string { u := url.URL{ Scheme: "postgresql", From 52133dd69b49345dc534e92313af346b7902d64a Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Fri, 5 Dec 2025 22:26:10 -0600 Subject: [PATCH 31/33] sqlidem works --- harmony/harmonydb/sql/20240823-ipni.sql | 4 +- .../harmonydb/sql/20241106-market-fixes.sql | 6 +-- .../harmonydb/sql/20250505-market-mk20.sql | 49 ++++++++++--------- harmony/harmonydb/sql/20250727-balancemgr.sql | 10 +++- .../sql/20250801-proofshare-pipeline.sql | 16 ++++-- .../sql/20250926-harmony_config_timestamp.sql | 2 +- itests/serial/sql_idempotent_test.go | 25 +++++++--- 7 files changed, 70 insertions(+), 42 deletions(-) diff --git a/harmony/harmonydb/sql/20240823-ipni.sql b/harmony/harmonydb/sql/20240823-ipni.sql index a3966b932..031a11a18 100644 --- a/harmony/harmonydb/sql/20240823-ipni.sql +++ b/harmony/harmonydb/sql/20240823-ipni.sql @@ -35,10 +35,10 @@ CREATE TABLE IF NOT EXISTS ipni ( CREATE INDEX IF NOT EXISTS ipni_provider_order_number ON ipni(provider, order_number); -- This index will speed up lookups based on the ad_cid, which is frequently used to identify specific ads -CREATE UNIQUE INDEX ipni_ad_cid ON ipni(ad_cid); +CREATE UNIQUE INDEX IF NOT EXISTS ipni_ad_cid ON ipni(ad_cid); -- This index will speed up lookups based on the ad_cid, which is frequently used to identify specific ads -CREATE UNIQUE INDEX ipni_context_id ON ipni(context_id, ad_cid, is_rm); -- dropped in 20241106-market-fixes.sql +CREATE UNIQUE INDEX IF NOT EXISTS ipni_context_id ON ipni(context_id, ad_cid, is_rm); -- dropped in 20241106-market-fixes.sql -- 20241106-market-fixes.sql: -- CREATE INDEX ipni_context_id ON ipni(context_id, ad_cid, is_rm, is_skip) -- non-unique to allow multiple skips -- CREATE INDEX ipni_entries_skip ON ipni(entries, is_skip, piece_cid); diff --git a/harmony/harmonydb/sql/20241106-market-fixes.sql b/harmony/harmonydb/sql/20241106-market-fixes.sql index 4be0f253b..249d0a345 100644 --- 
a/harmony/harmonydb/sql/20241106-market-fixes.sql +++ b/harmony/harmonydb/sql/20241106-market-fixes.sql @@ -1,4 +1,4 @@ -ALTER TABLE ipni_peerid ADD UNIQUE (sp_id); +CREATE UNIQUE INDEX IF NOT EXISTS ipni_peerid_sp_id_unique ON ipni_peerid (sp_id); CREATE UNIQUE INDEX IF NOT EXISTS sectors_pipeline_events_task_history_id_uindex ON sectors_pipeline_events (task_history_id, sp_id, sector_number); @@ -6,8 +6,8 @@ CREATE UNIQUE INDEX IF NOT EXISTS sectors_pipeline_events_task_history_id_uindex CREATE UNIQUE INDEX IF NOT EXISTS market_piece_deal_piece_cid_id_uindex ON market_piece_deal (piece_cid, id); -alter table market_mk12_deals - add proposal_cid text not null; +ALTER TABLE market_mk12_deals + ADD COLUMN IF NOT EXISTS proposal_cid text not null; CREATE INDEX IF NOT EXISTS market_mk12_deals_proposal_cid_index ON market_mk12_deals (proposal_cid); diff --git a/harmony/harmonydb/sql/20250505-market-mk20.sql b/harmony/harmonydb/sql/20250505-market-mk20.sql index 9489cfe33..d07b4a3ec 100644 --- a/harmony/harmonydb/sql/20250505-market-mk20.sql +++ b/harmony/harmonydb/sql/20250505-market-mk20.sql @@ -144,7 +144,16 @@ BEGIN END $$; -- The order_number column must be completely sequential -ALTER SEQUENCE ipni_order_number_seq CACHE 1; +DO $$ +DECLARE + seq_name TEXT; +BEGIN + -- Find the sequence for ipni.order_number column (handles cloned schemas with different sequence names) + SELECT pg_get_serial_sequence('ipni', 'order_number') INTO seq_name; + IF seq_name IS NOT NULL THEN + EXECUTE format('ALTER SEQUENCE %s CACHE 1', seq_name); + END IF; +END $$; -- This function is used to insert piece metadata and piece deal (piece indexing) -- This makes it easy to keep the logic of how table is updated and fast (in DB). @@ -248,29 +257,21 @@ $$ LANGUAGE plpgsql; -- Update raw_size for existing deals (One time backfill migration) DO $$ BEGIN - UPDATE market_mk12_deals d - SET raw_size = mpd.raw_size - FROM market_piece_deal mpd - WHERE d.uuid = mpd.id; - - UPDATE market_direct_deals d - SET raw_size = mpd.raw_size - FROM market_piece_deal mpd - WHERE d.uuid = mpd.id; - - UPDATE market_mk12_deals d - SET raw_size = p.raw_size - FROM market_mk12_deal_pipeline p - WHERE d.uuid = p.uuid - AND d.raw_size IS NULL - AND p.raw_size IS NOT NULL; - - UPDATE market_direct_deals d - SET raw_size = p.raw_size - FROM market_mk12_deal_pipeline p - WHERE d.uuid = p.uuid - AND d.raw_size IS NULL - AND p.raw_size IS NOT NULL; + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'market_mk12_deals' AND column_name = 'raw_size') THEN + EXECUTE 'UPDATE market_mk12_deals d SET raw_size = mpd.raw_size FROM market_piece_deal mpd WHERE d.uuid = mpd.id'; + END IF; + + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'market_direct_deals' AND column_name = 'raw_size') THEN + EXECUTE 'UPDATE market_direct_deals d SET raw_size = mpd.raw_size FROM market_piece_deal mpd WHERE d.uuid = mpd.id'; + END IF; + + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'market_mk12_deals' AND column_name = 'raw_size') THEN + EXECUTE 'UPDATE market_mk12_deals d SET raw_size = p.raw_size FROM market_mk12_deal_pipeline p WHERE d.uuid = p.uuid AND d.raw_size IS NULL AND p.raw_size IS NOT NULL'; + END IF; + + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'market_direct_deals' AND column_name = 'raw_size') THEN + EXECUTE 'UPDATE market_direct_deals d SET raw_size = p.raw_size FROM market_mk12_deal_pipeline p WHERE d.uuid = p.uuid AND d.raw_size IS NULL AND p.raw_size 
IS NOT NULL'; + END IF; END $$; -- This is main MK20 Deal table. Rows are added per deal and some diff --git a/harmony/harmonydb/sql/20250727-balancemgr.sql b/harmony/harmonydb/sql/20250727-balancemgr.sql index b44adf651..b5d49217f 100644 --- a/harmony/harmonydb/sql/20250727-balancemgr.sql +++ b/harmony/harmonydb/sql/20250727-balancemgr.sql @@ -28,8 +28,14 @@ CREATE TABLE IF NOT EXISTS balance_manager_addresses ( CREATE INDEX IF NOT EXISTS balance_manager_addresses_last_msg_cid_idx ON balance_manager_addresses (last_msg_cid); -ALTER TABLE balance_manager_addresses ADD CONSTRAINT subject_not_equal_second CHECK (subject_address != second_address); -ALTER TABLE balance_manager_addresses ADD CONSTRAINT balance_manager_addresses_subject_address_second_address_unique UNIQUE (subject_address, second_address, action_type); +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'subject_not_equal_second') THEN + ALTER TABLE balance_manager_addresses ADD CONSTRAINT subject_not_equal_second CHECK (subject_address != second_address); + END IF; +END $$; + +CREATE UNIQUE INDEX IF NOT EXISTS balance_manager_addresses_subject_address_second_address_unique ON balance_manager_addresses (subject_address, second_address, action_type); CREATE OR REPLACE FUNCTION update_balance_manager_from_message_waits() RETURNS trigger AS $$ diff --git a/harmony/harmonydb/sql/20250801-proofshare-pipeline.sql b/harmony/harmonydb/sql/20250801-proofshare-pipeline.sql index e5b18b2d4..f90ae4473 100644 --- a/harmony/harmonydb/sql/20250801-proofshare-pipeline.sql +++ b/harmony/harmonydb/sql/20250801-proofshare-pipeline.sql @@ -9,10 +9,20 @@ CREATE TABLE IF NOT EXISTS proofshare_client_sender ALTER TABLE proofshare_client_requests ADD COLUMN IF NOT EXISTS request_type TEXT NOT NULL DEFAULT 'porep'; -ALTER TABLE proofshare_client_requests DROP CONSTRAINT proofshare_client_requests_pkey; -ALTER TABLE proofshare_client_requests ADD PRIMARY KEY (sp_id, sector_num, request_type); +ALTER TABLE proofshare_client_requests DROP CONSTRAINT IF EXISTS proofshare_client_requests_pkey; -ALTER TABLE proofshare_client_requests DROP COLUMN task_id; +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_name = 'proofshare_client_requests' + AND constraint_type = 'PRIMARY KEY' + ) THEN + ALTER TABLE proofshare_client_requests ADD PRIMARY KEY (sp_id, sector_num, request_type); + END IF; +END $$; + +ALTER TABLE proofshare_client_requests DROP COLUMN IF EXISTS task_id; ALTER TABLE proofshare_client_requests ADD COLUMN IF NOT EXISTS task_id_upload BIGINT; ALTER TABLE proofshare_client_requests ADD COLUMN IF NOT EXISTS task_id_poll BIGINT; diff --git a/harmony/harmonydb/sql/20250926-harmony_config_timestamp.sql b/harmony/harmonydb/sql/20250926-harmony_config_timestamp.sql index 05e025097..2fc9324db 100644 --- a/harmony/harmonydb/sql/20250926-harmony_config_timestamp.sql +++ b/harmony/harmonydb/sql/20250926-harmony_config_timestamp.sql @@ -1 +1 @@ -ALTER TABLE harmony_config ADD COLUMN timestamp TIMESTAMP NOT NULL DEFAULT NOW(); \ No newline at end of file +ALTER TABLE harmony_config ADD COLUMN IF NOT EXISTS timestamp TIMESTAMP NOT NULL DEFAULT NOW(); \ No newline at end of file diff --git a/itests/serial/sql_idempotent_test.go b/itests/serial/sql_idempotent_test.go index 3cf78f52b..34b0d2cac 100644 --- a/itests/serial/sql_idempotent_test.go +++ b/itests/serial/sql_idempotent_test.go @@ -11,30 +11,41 @@ import ( "github.com/yugabyte/pgx/v5/pgxpool" 
"github.com/filecoin-project/curio/harmony/harmonydb" + "github.com/filecoin-project/curio/harmony/harmonydb/testutil" ) // TestSQLIdempotent tests that the SQL DDL files are idempotent. // The upgrader will fail unless everything has "IF NOT EXISTS" or "IF EXISTS" statements. // Or equivalent safety checks. // NOTE: Cannot run in parallel - modifies global harmonydb.ITestUpgradeFunc -// NOTE: Does NOT use SetupTestDB because it needs fresh migrations to run func TestSQLIdempotent(t *testing.T) { defer func() { harmonydb.ITestUpgradeFunc = nil }() - harmonydb.ITestUpgradeFunc = func(db *pgxpool.Pool, name string, sql string) { - _, err := db.Exec(context.Background(), sql) + + // Use SetupTestDB to get a cloned schema quickly (all structures already exist) + testID := testutil.SetupTestDB(t) + cdb, err := harmonydb.NewFromConfigWithITestID(t, testID) + require.NoError(t, err) + + // Clear migration tracking so migrations will re-run + ctx := context.Background() + _, err = cdb.Exec(ctx, `DELETE FROM base`) + require.NoError(t, err) + + // Set up idempotency check - each migration SQL will be run twice + harmonydb.ITestUpgradeFunc = func(pool *pgxpool.Pool, name string, sql string) { + _, err := pool.Exec(context.Background(), sql) if err != nil { require.NoError(t, fmt.Errorf("SQL DDL file failed idempotent check: %s, %w", name, err)) } } - // Use a fresh schema (not cloned) so migrations actually run and ITestUpgradeFunc is called - testID := harmonydb.ITestNewID() - cdb, err := harmonydb.NewFromConfigWithITestID(t, testID) + // Create second connection - migrations re-run on existing structures (tests idempotency) + // Keep both connections open - cleanup handles closing + _, err = harmonydb.NewFromConfigWithITestID(t, testID) require.NoError(t, err) - ctx := context.Background() _, err = cdb.Exec(ctx, ` INSERT INTO itest_scratch (content, some_int) From d10bb08bb2ee480802f87c29ed56b7eda64750ed Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Fri, 5 Dec 2025 22:40:46 -0600 Subject: [PATCH 32/33] un-flake Crud --- itests/harmonydb_test.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/itests/harmonydb_test.go b/itests/harmonydb_test.go index ddacc9332..445fa21f6 100644 --- a/itests/harmonydb_test.go +++ b/itests/harmonydb_test.go @@ -37,12 +37,14 @@ func TestCrud(t *testing.T) { Animal string `db:"content"` Unpopulated int } - err = cdb.Select(ctx, &ints, "SELECT content, some_int FROM itest_scratch") + err = cdb.Select(ctx, &ints, "SELECT content, some_int FROM itest_scratch ORDER BY some_int DESC") require.NoError(t, err) require.Len(t, ints, 2, "unexpected count of returns. 
Want 2, Got ", len(ints)) - require.True(t, ints[0].Count == 11 || ints[1].Count == 5, "expected [11,5] got ", ints) - require.True(t, ints[0].Animal == "cows" || ints[1].Animal == "cats", "expected, [cows, cats] ", ints) + require.Equal(t, 11, ints[0].Count, "expected first row count to be 11") + require.Equal(t, 5, ints[1].Count, "expected second row count to be 5") + require.Equal(t, "cows", ints[0].Animal, "expected first row animal to be cows") + require.Equal(t, "cats", ints[1].Animal, "expected second row animal to be cats") fmt.Println("test completed") } From 56545f2476fd51ed98322f5497f7085e4e19d529 Mon Sep 17 00:00:00 2001 From: Andy Jackson Date: Fri, 5 Dec 2025 23:36:45 -0600 Subject: [PATCH 33/33] try docker --- .github/actions/install-deps/action.yml | 24 --- .github/actions/setup-build-env/action.yml | 32 +--- .github/actions/setup-go/action.yml | 17 -- .github/workflows/ci.yml | 179 +++++++++++++++------ 4 files changed, 134 insertions(+), 118 deletions(-) delete mode 100644 .github/actions/install-deps/action.yml delete mode 100644 .github/actions/setup-go/action.yml diff --git a/.github/actions/install-deps/action.yml b/.github/actions/install-deps/action.yml deleted file mode 100644 index bf7c8cbe0..000000000 --- a/.github/actions/install-deps/action.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: 'Install Dependencies' -description: 'Install common dependencies' - -runs: - using: 'composite' - steps: - - name: Install dependencies - run: | - sudo apt-get update - sudo apt-get install -y curl ca-certificates gnupg ocl-icd-opencl-dev libhwloc-dev - shell: bash - - - name: Fetch all tags - run: git fetch --all --no-recurse-submodules - shell: bash - - - name: Sync submodules - run: git submodule sync - shell: bash - - - name: Update submodules - run: git submodule update --init - shell: bash - diff --git a/.github/actions/setup-build-env/action.yml b/.github/actions/setup-build-env/action.yml index 1825546ba..316058263 100644 --- a/.github/actions/setup-build-env/action.yml +++ b/.github/actions/setup-build-env/action.yml @@ -16,39 +16,23 @@ runs: cache: true cache-dependency-path: go.sum - # Run apt install, git submodules, and go mod download in parallel - - name: Install deps, setup submodules, download Go modules (parallel) + - name: Install system dependencies (Ubuntu only) run: | - # Start apt install in background - ( + if command -v apt-get &> /dev/null; then sudo apt-get update sudo apt-get install -y curl ca-certificates gnupg ocl-icd-opencl-dev libhwloc-dev - ) & - APT_PID=$! - - # Start git operations in background - ( - git fetch --all --no-recurse-submodules - git submodule sync - git submodule update --init - ) & - GIT_PID=$! - - # Start go mod download in background - go mod download & - GO_MOD_PID=$! 
+ fi + shell: bash - # Wait for all to complete - wait $APT_PID || exit 1 - wait $GIT_PID || exit 1 - wait $GO_MOD_PID || exit 1 + - name: Download Go modules + run: go mod download shell: bash # Cache FFI build based on submodule commit - name: Generate FFI cache key id: ffi-cache-key run: | - FFI_COMMIT=$(git -C extern/filecoin-ffi rev-parse HEAD) + FFI_COMMIT=$(git -C extern/filecoin-ffi rev-parse HEAD 2>/dev/null || echo "unknown") echo "key=ffi-${{ runner.os }}-${{ inputs.go-version }}-${FFI_COMMIT}" >> $GITHUB_OUTPUT shell: bash @@ -71,8 +55,6 @@ runs: if: steps.cache-ffi.outputs.cache-hit != 'true' run: make deps shell: bash - env: - GITHUB_TOKEN: ${{ github.token }} - name: Restore FFI marker files if: steps.cache-ffi.outputs.cache-hit == 'true' diff --git a/.github/actions/setup-go/action.yml b/.github/actions/setup-go/action.yml deleted file mode 100644 index 150106dc6..000000000 --- a/.github/actions/setup-go/action.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: 'Setup Go' -description: 'Setup Go environment' - -inputs: - go-version: - description: 'Go version to use' - required: true - -runs: - using: 'composite' - steps: - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: ${{ inputs.go-version }} - cache: true - cache-dependency-path: go.sum diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dcba177af..7285142f1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,13 +36,21 @@ jobs: # actionlint ( - go install github.com/rhysd/actionlint/cmd/actionlint + go install github.com/rhysd/actionlint/cmd/actionlint@latest actionlint -shellcheck= -pyflakes= ) & LINT_PID=$! + # mod tidy check + ( + go mod tidy -v + git diff --quiet go.mod go.sum || { echo "go mod tidy needed"; exit 1; } + ) & + TIDY_PID=$! + wait $FMT_PID || exit 1 wait $LINT_PID || exit 1 + wait $TIDY_PID || exit 1 # Lint - runs immediately, no dependencies lint: @@ -62,7 +70,7 @@ jobs: curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.4.0 golangci-lint run -v --timeout 15m --concurrency 4 - # Build variants - run in parallel, no dependencies + # Build variants - run in parallel with matrix build: runs-on: ubuntu-latest strategy: @@ -82,7 +90,7 @@ jobs: - name: Build ${{ matrix.variant }} run: make ${{ matrix.variant }} - # Unit tests - no database needed, runs immediately on self-hosted + # Unit tests - no database needed test-unit: runs-on: [self-hosted, docker] steps: @@ -98,11 +106,11 @@ jobs: - name: Run unit tests run: go test -v --tags=debug -timeout 30m $(go list ./... 
| grep -v curio/itests) - # Integration tests - parallel tests (use t.Parallel()) - test-itest-parallel: + # Integration tests - curio_test.go (heavy, runs separately) + test-itest-curio: runs-on: [self-hosted, docker] env: - YB_CONTAINER: yugabyte-parallel-${{ github.run_id }} + YB_CONTAINER: yugabyte-curio-${{ github.run_id }} steps: - name: Check local proof parameters id: local-params @@ -143,14 +151,9 @@ jobs: - name: Start YugabyteDB id: start-yb run: | - # Stop any existing container from previous runs docker stop $YB_CONTAINER 2>/dev/null || true docker rm $YB_CONTAINER 2>/dev/null || true - - # Start fresh container docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false - - # Wait for it to be ready for i in {1..60}; do if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then echo "YugabyteDB is ready" @@ -158,30 +161,104 @@ jobs: fi sleep 1 done - YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT - echo "YugabyteDB ready at $YB_IP" - name: Setup build environment uses: ./.github/actions/setup-build-env with: go-version: ${{ env.GO_VERSION }} - - name: Run parallel integration tests + - name: Run curio integration test env: CURIO_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} LOTUS_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} + run: go test -v --tags=debug -timeout 60m ./itests/curio_test.go + + - name: Stop YugabyteDB + if: always() + run: docker stop $YB_CONTAINER 2>/dev/null || true + + # Integration tests - other parallel tests + test-itest-other: + runs-on: [self-hosted, docker] + env: + YB_CONTAINER: yugabyte-other-${{ github.run_id }} + steps: + - name: Check local proof parameters + id: local-params run: | - echo "Using YugabyteDB at: $CURIO_HARMONYDB_HOSTS" - # Serial tests require "serial" build tag, so they're excluded here - go test -v --tags=debug -timeout 60m -parallel 4 ./itests/... + PARAMS_DIR="/var/tmp/filecoin-proof-parameters" + if [ -d "$PARAMS_DIR" ] && [ "$(ls -A $PARAMS_DIR 2>/dev/null)" ]; then + echo "exists=true" >> $GITHUB_OUTPUT + else + echo "exists=false" >> $GITHUB_OUTPUT + fi + + - name: Restore proof parameters from cache + if: steps.local-params.outputs.exists != 'true' + id: cache-params + uses: actions/cache/restore@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-2k-v1 + restore-keys: proof-params- + + - name: Fetch proof parameters + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' + run: | + echo "Fetching proof parameters (cache miss)..." 
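+          # 8388608 bytes = 8MiB sector size, so only the small test-sector
+          # parameters are downloaded rather than the multi-GiB mainnet ones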
+ lotus fetch-params 8388608 + + - name: Save proof parameters to cache + if: steps.local-params.outputs.exists != 'true' && steps.cache-params.outputs.cache-hit != 'true' && github.ref == 'refs/heads/main' + uses: actions/cache/save@v4 + with: + path: /var/tmp/filecoin-proof-parameters + key: proof-params-2k-v1 + + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Start YugabyteDB + id: start-yb + run: | + docker stop $YB_CONTAINER 2>/dev/null || true + docker rm $YB_CONTAINER 2>/dev/null || true + docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false + for i in {1..60}; do + if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then + echo "YugabyteDB is ready" + break + fi + sleep 1 + done + YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') + echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT + + - name: Setup build environment + uses: ./.github/actions/setup-build-env + with: + go-version: ${{ env.GO_VERSION }} + + - name: Run other integration tests + env: + CURIO_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} + LOTUS_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} + run: | + go test -v --tags=debug -timeout 30m -parallel 4 \ + ./itests/harmonydb_test.go \ + ./itests/pdp_prove_test.go \ + ./itests/indexstore_test.go \ + ./itests/move_shared_test.go \ + ./itests/local_test.go - name: Stop YugabyteDB if: always() run: docker stop $YB_CONTAINER 2>/dev/null || true - # Integration tests - serial tests (modify global state, cannot use t.Parallel()) + # Integration tests - serial tests (modify global state) test-itest-serial: runs-on: [self-hosted, docker] env: @@ -226,14 +303,9 @@ jobs: - name: Start YugabyteDB id: start-yb run: | - # Stop any existing container from previous runs docker stop $YB_CONTAINER 2>/dev/null || true docker rm $YB_CONTAINER 2>/dev/null || true - - # Start fresh container docker run --rm --name $YB_CONTAINER -d yugabytedb/yugabyte:2024.1.2.0-b77 bin/yugabyted start --daemon=false - - # Wait for it to be ready for i in {1..60}; do if docker exec $YB_CONTAINER bin/yugabyted status 2>/dev/null | grep -q Running; then echo "YugabyteDB is ready" @@ -241,10 +313,8 @@ jobs: fi sleep 1 done - YB_IP=$(docker inspect $YB_CONTAINER --format '{{ .NetworkSettings.Networks.bridge.IPAddress }}') echo "yb_ip=$YB_IP" >> $GITHUB_OUTPUT - echo "YugabyteDB ready at $YB_IP" - name: Setup build environment uses: ./.github/actions/setup-build-env @@ -255,16 +325,14 @@ jobs: env: CURIO_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} LOTUS_HARMONYDB_HOSTS: ${{ steps.start-yb.outputs.yb_ip }} - run: | - echo "Using YugabyteDB at: $CURIO_HARMONYDB_HOSTS" - # Tests in serial/ modify global state and cannot run in parallel - go test -v --tags=debug,serial -timeout 30m ./itests/serial/... + run: go test -v --tags=debug,serial -timeout 30m ./itests/serial/... 
- name: Stop YugabyteDB if: always() run: docker stop $YB_CONTAINER 2>/dev/null || true # Gen check - verify generated code is up to date + # Optimized: run independent gen steps in parallel, then build once gen-check: runs-on: ubuntu-latest steps: @@ -277,39 +345,46 @@ jobs: with: go-version: ${{ env.GO_VERSION }} - - name: Install Go tools + - name: Install Go tools in parallel run: | - go install golang.org/x/tools/cmd/goimports & - go install github.com/hannahhoward/cbor-gen-for & - go install github.com/swaggo/swag/cmd/swag & + go install golang.org/x/tools/cmd/goimports@latest & + go install github.com/hannahhoward/cbor-gen-for@latest & + go install github.com/swaggo/swag/cmd/swag@latest & wait - - name: api-gen - run: make api-gen - - - name: go-generate - run: make go-generate - - - name: cfgdoc-gen - run: make cfgdoc-gen - - - name: docsgen (md + openrpc) - run: make docsgen - - - name: marketgen - run: make marketgen + - name: Run code generation (parallel where possible) + run: | + # These can run in parallel - they don't depend on each other + make api-gen & + API_PID=$! + + make cfgdoc-gen & + CFG_PID=$! + + make marketgen & + MKT_PID=$! + + wait $API_PID || exit 1 + wait $CFG_PID || exit 1 + wait $MKT_PID || exit 1 + + # go-generate depends on api-gen completing + make go-generate - - name: docsgen-cli (builds curio + sptool) - run: make docsgen-cli + - name: Generate docs (requires binaries) + run: | + # docsgen builds docgen-md and docgen-openrpc binaries + make docsgen + # docsgen-cli builds curio + sptool, then generates CLI docs + make docsgen-cli - - name: fiximports + go mod tidy + - name: Fix imports and tidy run: | go run ./scripts/fiximports go mod tidy - name: Check for changes - run: | - git diff --quiet || { git diff; exit 1; } + run: git diff --quiet || { git diff; exit 1; } # Supraseal build (kept separate - long running, different runner) build-supraseal-ubuntu24: