From 50d4ed03c6965a56061460d2e211deb1eedaad5a Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 00:53:13 +1000 Subject: [PATCH 01/14] Add pre-commit infrastructure --- .github/workflows/pre-commit.yml | 41 +++++++++++ .pre-commit-config.yaml | 96 ++++++++++++++++++++++++++ CONTRIBUTING.md | 75 ++++++++++++++++++-- _typos.toml | 28 ++++++++ contrib/verify_ffi.py | 114 +++++++++++++++++++++++++++++++ 5 files changed, 349 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/pre-commit.yml create mode 100644 .pre-commit-config.yaml create mode 100644 _typos.toml create mode 100755 contrib/verify_ffi.py diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 000000000..bb237ad8f --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,41 @@ +name: Pre-commit Checks + +on: + push: + branches: + - master + - 'v**-dev' + pull_request: + +jobs: + pre-commit: + name: Pre-commit (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + # TODO: Windows excluded - rs-x11-hash requires POSIX headers (unistd.h) + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.x" + + - name: Setup Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt, clippy + + - name: Cache cargo dependencies + uses: Swatinem/rust-cache@v2 + with: + shared-key: "rust-cache-${{ matrix.os }}" + + - name: Run pre-commit + uses: pre-commit/action@v3.0.1 + with: + extra_args: --all-files --hook-stage push --verbose diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..102980b54 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,96 @@ +# Pre-commit hooks for rust-dashcore +# See CONTRIBUTING.md for instructions on how to install and use +# And https://pre-commit.com for more information +repos: + # ============================================================================ + # FAST CHECKS - Run on every commit. 
+ # ============================================================================ + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + exclude: '\.md$' # Allow trailing spaces in markdown for line breaks + - id: end-of-file-fixer + exclude: | + (?x)^( + .*\.snap$| + .*/tests/data/.*| + fuzz/hfuzz_input/.* + )$ + - id: check-yaml + args: ['--allow-multiple-documents'] + - id: check-json + - id: check-toml + - id: check-merge-conflict + - id: mixed-line-ending + args: ['--fix=lf'] + - id: check-added-large-files + args: ['--maxkb=1000'] + - id: check-case-conflict + - id: check-executables-have-shebangs + - id: check-shebang-scripts-are-executable + exclude: '\.rs$' # Rust inner attributes (#![...]) look like shebangs + + - repo: https://github.com/rhysd/actionlint + rev: v1.7.8 + hooks: + - id: actionlint + + - repo: https://github.com/crate-ci/typos + rev: v1.16.23 + hooks: + - id: typos + args: ['--write-changes'] + exclude: | + (?x)^( + .*\.snap$| + .*\.lock$| + CHANGELOG\.md| + dash/tests/data/.*| + key-wallet/tests/data/.*| + fuzz/hfuzz_input/.* + )$ + + - repo: local + hooks: + - id: cargo-fmt + name: cargo fmt + description: Format Rust code with rustfmt + entry: cargo fmt --all + language: system + types: [rust] + pass_filenames: false + + # ============================================================================ + # SLOW CHECKS - Run on git push only. + # ============================================================================ + + - repo: local + hooks: + - id: verify-ffi + name: verify FFI + description: Verify FFI headers and documentation are up to date + entry: contrib/verify_ffi.py + language: python + pass_filenames: false + files: ^(key-wallet-ffi|dash-spv-ffi)/.*\.(rs|toml|py)$ + stages: [pre-push, manual] + + - id: clippy-workspace + name: clippy (workspace strict) + description: Strict clippy on entire workspace - deny all warnings + entry: cargo clippy --workspace --all-features --all-targets -- -D warnings + language: system + types: [rust] + pass_filenames: false + stages: [pre-push, manual] + +exclude: | + (?x)^( + .*\.snap$| + target/| + .*/target/| + \.cargo/| + \.git/ + )$ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cef006c1c..d0a06fab0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -77,13 +77,78 @@ Prerequisites that a PR must satisfy for merging into the `master` branch: * include inline docs for newly introduced APIs and pass doc tests; * be based on the recent tip of the target branch in this repository. -Reviewers may run additional scripts; passing CI is necessary but may not be sufficient for merge. To mirror CI locally: +### Pre-commit Hooks + +Reviewers may run additional scripts; passing CI is necessary but may not be sufficient for merge. This repo integrates +[pre-commit](https://pre-commit.com/) to mirror CI locally to run automated checks before commits and pushes. +This catches formatting issues, typos, and linting problems early before CI runs. + +#### Quick Setup + ```bash -# Full suite with optional knobs -DO_COV=true DO_LINT=true DO_FMT=true ./contrib/test.sh +# 1. Install pre-commit (one-time) +pip install pre-commit +# or: brew install pre-commit (macOS) +# or: pipx install pre-commit (isolated install) + +# 2. Install git hooks (in this repo) +pre-commit install # Runs on every commit +pre-commit install --hook-type pre-push # Runs on every push +``` + +That's it! Hooks run automatically from now on. 
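+
+To confirm the setup works, you can optionally run the commit-stage hooks once
+against the whole repository (a suggested one-off sanity check; the first run
+downloads the hook environments, so it takes a little longer):
+
+```bash
+# Optional sanity check: run the fast (commit-stage) hooks on every file
+pre-commit run --all-files
+```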
+ +#### What Runs Automatically + +**On every commit** (~2-5 seconds): +- `cargo fmt` — Rust code formatting (auto-fixes) +- `typos` — Spell checking in code/comments (auto-fixes) +- `actionlint` — GitHub Actions workflow validation +- File checks — Trailing whitespace, EOF newlines, YAML/JSON/TOML syntax (auto-fixes) + +**On git push** (~30-90 seconds additional): +- `cargo clippy` — Strict linting on entire workspace +- `verify-ffi-headers` — Ensures FFI C headers are up to date +- `verify-ffi-docs` — Ensures FFI API documentation is current + +**Note:** CI runs the exact same checks, so passing locally = passing in CI. + +#### Bash Aliases (Optional) + +Add these to your `~/.bashrc`, `~/.zshrc`, or `~/.bash_aliases`: + +```bash +# Pre-commit shortcuts +alias checks='pre-commit run --all-files' +alias checks-all='pre-commit run --all-files --hook-stage push' +alias checks-on='pre-commit install && pre-commit install --hook-type pre-push' +alias checks-off='pre-commit uninstall && pre-commit uninstall --hook-type pre-push' +``` + +**Usage:** +```bash +checks # Quick check before committing +checks-all # Full check (same as CI runs) +checks-on # Enable hooks +checks-off # Disable hooks +``` + +#### Bypassing Hooks (When You Need To) + +Sometimes you need to bypass checks (e.g., work-in-progress commits, fixing pre-commit itself): + +```bash +# Skip commit checks +git commit --no-verify + +# Skip push checks +git push --no-verify + +# Temporarily disable all hooks +checks-off # or: pre-commit uninstall --hook-type pre-commit --hook-type pre-push -# Or workspace-wide checks -cargo fmt --all && cargo clippy --workspace --all-targets -- -D warnings && cargo test --workspace --all-features +# Re-enable later +checks-on # or: pre-commit install && pre-commit install --hook-type pre-push ``` ### Peer review diff --git a/_typos.toml b/_typos.toml new file mode 100644 index 000000000..e964c5852 --- /dev/null +++ b/_typos.toml @@ -0,0 +1,28 @@ +[default] +extend-ignore-re = [ + # Dash addresses and hex strings contain random character sequences + # that may match typo patterns (e.g., "nce", "Ue", "LAF", "BA") + # These are intentionally excluded as they are valid cryptographic data + ".*[XYy][a-zA-Z0-9]{33}.*", # Dash addresses (base58) + ".*tb1q[a-z0-9]{58}.*", # Testnet bech32 addresses + "0x[0-9a-fA-F]+", # Hex strings with 0x prefix + '"[0-9a-fA-F]+"', # Quoted hex strings in code + '[0-9a-fA-F]{5,}', # Hex strings (5+ chars to catch "BA70D") + 'b"[^"]*"', # Byte strings - never check these for typos + "b'[^']*'", # Byte strings with single quotes +] + +[default.extend-words] +# Technical acronyms and abbreviations +FPR = "FPR" # False Positive Rate (bloom filter metric) + +[files] +# Exclude test data and binary files +extend-exclude = [ + "*.hex", + "*.snap", + "*.lock", + "fuzz/hfuzz_input/**", + "dash/tests/data/**", + "key-wallet/tests/data/**", +] diff --git a/contrib/verify_ffi.py b/contrib/verify_ffi.py new file mode 100755 index 000000000..694653b7c --- /dev/null +++ b/contrib/verify_ffi.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +"""Verify that FFI headers and documentation are up to date.""" + +import subprocess +import sys +from pathlib import Path +from concurrent.futures import ThreadPoolExecutor + + +def build_ffi_crate(crate_dir: Path) -> tuple[str, int]: + """Build crate to regenerate headers.""" + print(f" Building {crate_dir.name}...") + result = subprocess.run( + ["cargo", "build", "--quiet"], + cwd=crate_dir, + capture_output=True, + text=True + ) + return crate_dir.name, 
result.returncode + + +def generate_ffi_docs(crate_dir: Path) -> tuple[str, int]: + """Generate FFI documentation for a crate.""" + print(f" Generating {crate_dir.name} docs...") + result = subprocess.run( + [sys.executable, "scripts/generate_ffi_docs.py"], + cwd=crate_dir, + capture_output=True, + text=True + ) + if result.returncode == 0: + if result.stdout: + for line in result.stdout.strip().split('\n'): + print(f" {line}") + return crate_dir.name, result.returncode + + +def main(): + repo_root = Path(__file__).parent.parent + ffi_crates = [ + repo_root / "key-wallet-ffi", + repo_root / "dash-spv-ffi" + ] + + print("Regenerating FFI headers and documentation") + + # Build and generate docs for both crates in parallel + with ThreadPoolExecutor(max_workers=4) as executor: + build_futures = [executor.submit(build_ffi_crate, crate) for crate in ffi_crates] + doc_futures = [executor.submit(generate_ffi_docs, crate) for crate in ffi_crates] + + build_results = [f.result() for f in build_futures] + doc_results = [f.result() for f in doc_futures] + + # Check if any builds failed + for crate_name, returncode in build_results: + if returncode != 0: + print(f"Build failed for {crate_name}", file=sys.stderr) + sys.exit(1) + + # Check if any doc generation failed + for crate_name, returncode in doc_results: + if returncode != 0: + print(f"Documentation generation failed for {crate_name}", file=sys.stderr) + sys.exit(1) + + print(" Generation complete, checking for changes...") + + # Check if headers changed + headers_result = subprocess.run( + ["git", "diff", "--exit-code", "--quiet", "--", + "key-wallet-ffi/include/", "dash-spv-ffi/include/"], + cwd=repo_root + ) + + # Check if docs changed + docs_result = subprocess.run( + ["git", "diff", "--exit-code", "--quiet", "--", + "key-wallet-ffi/FFI_API.md", "dash-spv-ffi/FFI_API.md"], + cwd=repo_root + ) + + headers_changed = headers_result.returncode != 0 + docs_changed = docs_result.returncode != 0 + + if headers_changed or docs_changed: + print() + if headers_changed: + print("FFI headers are out of date!\n") + print("Header changes detected:") + subprocess.run( + ["git", "--no-pager", "diff", "--", + "key-wallet-ffi/include/", "dash-spv-ffi/include/"], + cwd=repo_root + ) + print() + + if docs_changed: + print("FFI documentation is out of date!\n") + print("Documentation changes detected:") + subprocess.run( + ["git", "--no-pager", "diff", "--", + "key-wallet-ffi/FFI_API.md", "dash-spv-ffi/FFI_API.md"], + cwd=repo_root + ) + print() + + sys.exit(1) + + print("FFI headers and documentation are up to date") + + +if __name__ == "__main__": + main() From eb3ae951b7f001c8cd8a649e549d66136b2224c3 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 19:30:56 +1000 Subject: [PATCH 02/14] Remove all steps in `rust.yml` which are now covered by `pre-commit` --- .github/workflows/rust.yml | 146 -------------------------- .github/workflows/verify-ffi-docs.yml | 126 ---------------------- 2 files changed, 272 deletions(-) delete mode 100644 .github/workflows/verify-ffi-docs.yml diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 074f22d63..9b9892c2c 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -176,139 +176,6 @@ jobs: - name: Run script-based tests run: ./contrib/test.sh - clippy: - name: Clippy (Non-strict) - runs-on: ubuntu-22.04-arm - steps: - - name: Checkout Crate - uses: actions/checkout@v4 - - name: Setup Rust toolchain - uses: dtolnay/rust-toolchain@stable - with: - components: clippy - - 
name: Run clippy (excluding strict-checked crates) - run: | - # Auto-discover all workspace crates and exclude strict-checked ones - STRICT_CRATES=("key-wallet" "key-wallet-manager" "key-wallet-ffi" "dashcore_hashes" "dashcore" "dash-spv" "dash-spv-ffi") - mapfile -t ALL_CRATES < <(cargo metadata --no-deps --format-version=1 | jq -r '.packages[].name' | sort -u) - for crate in "${ALL_CRATES[@]}"; do - if printf '%s\n' "${STRICT_CRATES[@]}" | grep -qx "$crate"; then - continue - fi - echo "Checking $crate (warnings allowed, errors will fail)..." - cargo clippy -p "$crate" --all-features --all-targets -- -W warnings - done - - strict-checks: - name: Strict Warnings and Clippy Checks - runs-on: ubuntu-22.04-arm - steps: - - name: Checkout Crate - uses: actions/checkout@v4 - - name: Setup Rust toolchain - uses: dtolnay/rust-toolchain@stable - with: - components: clippy - - name: Cache cargo dependencies - uses: Swatinem/rust-cache@v2 - - # Check key-wallet with strict warnings - - name: Check key-wallet (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p key-wallet --all-features --lib --bins --tests - cargo build -p key-wallet --all-features --lib --bins - cargo test -p key-wallet --all-features --lib --bins - - - name: Clippy key-wallet (deny all warnings) - run: cargo clippy -p key-wallet --all-features --lib --bins --tests -- -D warnings - - # Check key-wallet-manager with strict warnings - - name: Check key-wallet-manager (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p key-wallet-manager --all-features --lib --bins --tests - cargo build -p key-wallet-manager --all-features --lib --bins - cargo test -p key-wallet-manager --all-features --lib --bins - - - name: Clippy key-wallet-manager (deny all warnings) - run: cargo clippy -p key-wallet-manager --all-features --lib --bins --tests -- -D warnings - - # Check key-wallet-ffi with strict warnings - - name: Check key-wallet-ffi (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p key-wallet-ffi --all-features --lib --bins --tests - cargo build -p key-wallet-ffi --all-features --lib --bins - cargo test -p key-wallet-ffi --all-features --lib --bins - - - name: Clippy key-wallet-ffi (deny all warnings) - run: cargo clippy -p key-wallet-ffi --all-features --lib --bins --tests -- -D warnings - - # Check dashcore with strict warnings - - name: Check dashcore (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p dashcore --all-features --lib --bins --tests - cargo build -p dashcore --all-features --lib --bins - cargo test -p dashcore --all-features --lib --bins - - - name: Clippy dashcore (deny all warnings) - run: cargo clippy -p dashcore --all-features --lib --bins --tests -- -D warnings - - # Check dashcore_hashes with strict warnings - - name: Check dashcore_hashes (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p dashcore_hashes --all-features --lib --bins --tests - cargo build -p dashcore_hashes --all-features --lib --bins - cargo test -p dashcore_hashes --all-features --lib --bins - - - name: Clippy dashcore_hashes (deny all warnings) - run: cargo clippy -p dashcore_hashes --all-features --lib --bins --tests -- -D warnings - - # Check dash-spv with strict warnings - - name: Check dash-spv (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p dash-spv --all-features --lib --bins --tests - cargo build -p dash-spv --all-features --lib --bins - cargo test -p dash-spv --all-features --lib --bins - - - 
name: Clippy dash-spv (deny all warnings) - run: cargo clippy -p dash-spv --all-features --lib --bins --tests -- -D warnings - - # Check dash-spv-ffi with strict warnings - - name: Check dash-spv-ffi (deny warnings) - env: - RUSTFLAGS: "-D warnings" - run: | - cargo check -p dash-spv-ffi --all-features --lib --bins --tests - cargo build -p dash-spv-ffi --all-features --lib --bins - cargo test -p dash-spv-ffi --all-features --lib --bins - - - name: Clippy dash-spv-ffi (deny all warnings) - run: cargo clippy -p dash-spv-ffi --all-features --lib --bins --tests -- -D warnings - - fmt: - name: Format - runs-on: ubuntu-22.04-arm - steps: - - name: Checkout Crate - uses: actions/checkout@v4 - - name: Setup Rust toolchain - uses: dtolnay/rust-toolchain@stable - with: - components: rustfmt - - name: Check formatting - run: cargo fmt --all -- --check - rpc_tests: name: RPC Tests runs-on: ubuntu-22.04-arm @@ -344,16 +211,3 @@ jobs: env: DASHVERSION: ${{ matrix.dashversion }} run: ./contrib/test-rpc.sh - - actionlint: - name: Lint GitHub Actions - runs-on: ubuntu-22.04-arm - permissions: - contents: read - steps: - - name: Checkout Crate - uses: actions/checkout@v4 - - name: Run actionlint - uses: reviewdog/action-actionlint@v1 - with: - fail_on_error: true diff --git a/.github/workflows/verify-ffi-docs.yml b/.github/workflows/verify-ffi-docs.yml deleted file mode 100644 index e4fd4d08e..000000000 --- a/.github/workflows/verify-ffi-docs.yml +++ /dev/null @@ -1,126 +0,0 @@ -name: Verify FFI Documentation - -on: - push: - paths: - - 'key-wallet-ffi/src/**/*.rs' - - 'key-wallet-ffi/FFI_API.md' - - 'key-wallet-ffi/scripts/generate_ffi_docs.py' - - 'dash-spv-ffi/src/**/*.rs' - - 'dash-spv-ffi/FFI_API.md' - - 'dash-spv-ffi/scripts/generate_ffi_docs.py' - - '.github/workflows/verify-ffi-docs.yml' - pull_request: - paths: - - 'key-wallet-ffi/src/**/*.rs' - - 'key-wallet-ffi/FFI_API.md' - - 'key-wallet-ffi/scripts/generate_ffi_docs.py' - - 'dash-spv-ffi/src/**/*.rs' - - 'dash-spv-ffi/FFI_API.md' - - 'dash-spv-ffi/scripts/generate_ffi_docs.py' - - '.github/workflows/verify-ffi-docs.yml' - -jobs: - verify-key-wallet-docs: - runs-on: ubuntu-22.04-arm - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.10' - - name: Generate key-wallet-ffi documentation - run: | - cd key-wallet-ffi - python3 scripts/generate_ffi_docs.py - - name: Check if key-wallet-ffi documentation is up to date - run: | - cd key-wallet-ffi - if ! git diff --exit-code FFI_API.md; then - echo "❌ key-wallet-ffi documentation is out of date!" - echo "" - echo "The FFI_API.md file needs to be regenerated." - echo "Please run the following command and commit the changes:" - echo "" - echo " cd key-wallet-ffi && make update-docs" - echo "" - echo "Changes detected:" - git diff FFI_API.md - exit 1 - else - echo "✅ key-wallet-ffi documentation is up to date" - fi - - verify-dash-spv-docs: - runs-on: ubuntu-22.04-arm - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.10' - - name: Generate dash-spv-ffi documentation - run: | - cd dash-spv-ffi - python3 scripts/generate_ffi_docs.py - - name: Check if dash-spv-ffi documentation is up to date - run: | - cd dash-spv-ffi - if ! git diff --exit-code FFI_API.md; then - echo "❌ dash-spv-ffi documentation is out of date!" - echo "" - echo "The FFI_API.md file needs to be regenerated." 
- echo "Please run the following command and commit the changes:" - echo "" - echo " cd dash-spv-ffi && make update-docs" - echo "" - echo "Changes detected:" - git diff FFI_API.md - exit 1 - else - echo "✅ dash-spv-ffi documentation is up to date" - fi - - update-docs-comment: - runs-on: ubuntu-22.04-arm - if: failure() && github.event_name == 'pull_request' - needs: - - verify-key-wallet-docs - - verify-dash-spv-docs - permissions: - pull-requests: write - steps: - - name: Comment on PR - uses: actions/github-script@v7 - with: - script: | - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: `## ⚠️ FFI Documentation Update Required - - The FFI API documentation is out of date. Please regenerate it by running: - - For key-wallet-ffi: - \`\`\`bash - cd key-wallet-ffi - make update-docs - \`\`\` - - For dash-spv-ffi: - \`\`\`bash - cd dash-spv-ffi - make update-docs - \`\`\` - - Then commit the changes: - \`\`\`bash - git add */FFI_API.md - git commit -m "docs: update FFI API documentation" - \`\`\` - - This ensures the documentation stays in sync with the actual FFI functions.` - }) From da91ce218340d3ea618d3eea3c5a0512164e85cb Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 01:02:32 +1000 Subject: [PATCH 03/14] Fix `trailing-whitespace` checks --- contrib/test-rpc.sh | 18 +- dash-spv-ffi/examples/basic_usage.c | 12 +- dash-spv-ffi/scripts/generate_ffi_docs.py | 62 +-- dash-spv-ffi/src/callbacks.rs | 2 +- dash-spv-ffi/tests/c_tests/test_advanced.c | 124 +++--- dash-spv-ffi/tests/c_tests/test_basic.c | 122 +++--- dash-spv-ffi/tests/c_tests/test_integration.c | 106 ++--- .../tests/integration/test_cross_language.rs | 86 ++-- .../tests/integration/test_full_workflow.rs | 194 ++++----- .../tests/performance/test_benchmarks.rs | 154 ++++---- dash-spv-ffi/tests/security/test_security.rs | 144 +++---- dash-spv/src/chain/chainlock_manager.rs | 2 +- dash-spv/src/client/config.rs | 2 +- dash-spv/src/client/filter_sync.rs | 4 +- dash-spv/src/client/message_handler.rs | 4 +- dash-spv/src/lib.rs | 2 +- dash-spv/src/network/manager.rs | 4 +- dash-spv/src/network/peer.rs | 4 +- dash-spv/src/sync/headers.rs | 2 +- dash-spv/src/types.rs | 2 +- dash-spv/tests/storage_consistency_test.rs | 18 +- .../tests/transaction_calculation_test.rs | 2 +- key-wallet-ffi/cbindgen.toml | 4 +- key-wallet-ffi/examples/check_transaction.c | 20 +- key-wallet-ffi/generate_header.sh | 4 +- key-wallet-ffi/include/key_wallet_ffi.h | 4 +- key-wallet-ffi/include/key_wallet_ffi_test.h | 4 +- key-wallet-ffi/scripts/generate_ffi_docs.py | 58 +-- key-wallet-ffi/src/managed_wallet_tests.rs | 112 +++--- key-wallet/src/account/account_type.rs | 2 +- key-wallet/src/derivation.rs | 2 +- key-wallet/src/wallet/initialization.rs | 2 +- .../src/wallet/managed_wallet_info/mod.rs | 2 +- .../transaction_building.rs | 2 +- key-wallet/test_bip38.sh | 6 +- key-wallet/test_bip38_advanced.sh | 18 +- .../Examples/DashHDWalletApp_Template.swift | 8 +- .../CLIDemos/CLIDemo.swift | 8 +- .../CLIDemos/SimpleHDWalletDemo.swift | 64 +-- .../DashHDWalletExample/DashHDWalletApp.swift | 6 +- .../Models/HDWalletModels.swift | 40 +- .../Services/HDWalletService.swift | 110 +++--- .../Services/WalletService.swift | 358 ++++++++--------- .../StandaloneModels.swift | 4 +- .../DashHDWalletExample/TestContentView.swift | 4 +- .../DashHDWalletExample/Utils/Clipboard.swift | 8 +- .../Utils/ModelContainerHelper.swift | 52 +-- .../Utils/PlatformColor.swift | 18 +- 
.../Views/AccountDetailView.swift | 128 +++--- .../Views/ContentView.swift | 44 +-- .../Views/CreateAccountView.swift | 38 +- .../Views/CreateWalletView.swift | 128 +++--- .../Views/EnhancedSyncProgressView.swift | 84 ++-- .../Views/ReceiveAddressView.swift | 50 +-- .../Views/SendTransactionView.swift | 68 ++-- .../Views/SettingsView.swift | 14 +- .../Views/SyncProgressView.swift | 60 +-- .../Views/WalletDetailView.swift | 52 +-- .../Views/WatchStatusView.swift | 10 +- .../clean-simulator-data.sh | 4 +- .../DashHDWalletExample/fix-linking.sh | 4 +- .../DashHDWalletExample/test-link.swift | 6 +- .../DashWalletExample/ContentView.swift | 100 ++--- .../DashWalletExample/WalletViewModel.swift | 122 +++--- .../KeyWalletFFI/include/key_wallet_ffi.h | 4 +- .../SwiftDashCoreSDK/Core/AsyncBridge.swift | 64 +-- .../SwiftDashCoreSDK/Core/DashSDKError.swift | 4 +- .../SwiftDashCoreSDK/Core/FFIBridge.swift | 56 +-- .../SwiftDashCoreSDK/Core/FFITypes.swift | 2 +- .../Core/SPVClient+Verification.swift | 18 +- .../SwiftDashCoreSDK/Core/SPVClient.swift | 370 +++++++++--------- .../Core/SPVClientConfiguration.swift | 60 +-- .../Sources/SwiftDashCoreSDK/DashSDK.swift | 118 +++--- .../Errors/WatchAddressError.swift | 4 +- .../SwiftDashCoreSDK/Models/Balance.swift | 22 +- .../SwiftDashCoreSDK/Models/Network.swift | 10 +- .../SwiftDashCoreSDK/Models/SPVStats.swift | 20 +- .../Models/SyncProgress.swift | 20 +- .../SwiftDashCoreSDK/Models/Transaction.swift | 16 +- .../SwiftDashCoreSDK/Models/UTXO.swift | 12 +- .../Models/ValidationMode.swift | 6 +- .../Models/WatchedAddress.swift | 20 +- .../Storage/PersistentWalletManager.swift | 140 +++---- .../Storage/StorageManager.swift | 88 ++--- .../SwiftDashCoreSDK/Types/MempoolTypes.swift | 46 +-- .../SwiftDashCoreSDK/Types/WatchResult.swift | 2 +- .../SwiftDashCoreSDK/Utils/Extensions.swift | 16 +- .../Utils/WatchAddressRetryManager.swift | 36 +- .../Wallet/WalletManager.swift | 174 ++++---- .../SwiftDashCoreSDKTests/DashSDKTests.swift | 102 ++--- .../SwiftDashCoreSDKTests/MempoolTests.swift | 38 +- swift-dash-core-sdk/build-ios.sh | 4 +- swift-dash-core-sdk/build.sh | 4 +- test-utils/src/fixtures.rs | 2 +- 94 files changed, 2190 insertions(+), 2190 deletions(-) diff --git a/contrib/test-rpc.sh b/contrib/test-rpc.sh index 2c6190e0d..a5a27c6f9 100755 --- a/contrib/test-rpc.sh +++ b/contrib/test-rpc.sh @@ -23,41 +23,41 @@ fi # Integration test. if [ -n "$DASHVERSION" ]; then ASSET="dashcore-$DASHVERSION-x86_64-linux-gnu.tar.gz" - + # Download the Dash binary echo "Downloading $ASSET..." if ! wget "https://github.com/dashpay/dash/releases/download/v$DASHVERSION/$ASSET"; then echo "Error: Failed to download $ASSET" >&2 exit 1 fi - + # Verify the downloaded file exists if [ ! -f "$ASSET" ]; then echo "Error: Downloaded file $ASSET not found" >&2 exit 1 fi - + # Extract and determine the actual extracted directory echo "Extracting $ASSET..." if ! tar -xzvf "$ASSET"; then echo "Error: Failed to extract $ASSET" >&2 exit 1 fi - + # Find the extracted directory (should be dashcore-$DASHVERSION) EXTRACT_DIR="dashcore-$DASHVERSION" if [ ! 
-d "$EXTRACT_DIR" ]; then echo "Error: Expected directory $EXTRACT_DIR not found after extraction" >&2 exit 1 fi - + # Add the bin directory to PATH (avoid SC2155) DASH_BIN_PATH="$(pwd)/$EXTRACT_DIR/bin" PATH="$PATH:$DASH_BIN_PATH" export PATH - + echo "Added $DASH_BIN_PATH to PATH" - + # Change to the correct integration test directory if [ -d "rpc-integration-test" ]; then cd rpc-integration-test @@ -65,7 +65,7 @@ if [ -n "$DASHVERSION" ]; then echo "Error: rpc-integration-test directory not found" >&2 exit 1 fi - + # Run the integration tests if [ -f "./run.sh" ]; then ./run.sh @@ -73,7 +73,7 @@ if [ -n "$DASHVERSION" ]; then echo "Error: run.sh script not found in rpc-integration-test" >&2 exit 1 fi - + exit 0 else # Regular build/unit test. diff --git a/dash-spv-ffi/examples/basic_usage.c b/dash-spv-ffi/examples/basic_usage.c index 711fc69fe..bc3d6e0fb 100644 --- a/dash-spv-ffi/examples/basic_usage.c +++ b/dash-spv-ffi/examples/basic_usage.c @@ -8,21 +8,21 @@ int main() { fprintf(stderr, "Failed to initialize logging\n"); return 1; } - + // Create a configuration for testnet FFIClientConfig* config = dash_spv_ffi_config_testnet(); if (config == NULL) { fprintf(stderr, "Failed to create config\n"); return 1; } - + // Set data directory if (dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test") != 0) { fprintf(stderr, "Failed to set data dir\n"); dash_spv_ffi_config_destroy(config); return 1; } - + // Create the client FFIDashSpvClient* client = dash_spv_ffi_client_new(config); if (client == NULL) { @@ -31,12 +31,12 @@ int main() { dash_spv_ffi_config_destroy(config); return 1; } - + printf("Successfully created Dash SPV client!\n"); - + // Clean up dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + return 0; } \ No newline at end of file diff --git a/dash-spv-ffi/scripts/generate_ffi_docs.py b/dash-spv-ffi/scripts/generate_ffi_docs.py index 517e52b47..43ef6aab9 100644 --- a/dash-spv-ffi/scripts/generate_ffi_docs.py +++ b/dash-spv-ffi/scripts/generate_ffi_docs.py @@ -138,10 +138,10 @@ def categorize_functions(functions: List[FFIFunction]) -> Dict[str, List[FFIFunc 'Error Handling': [], 'Utility Functions': [], } - + for func in functions: name = func.name.lower() - + if 'client_new' in name or 'client_start' in name or 'client_stop' in name or 'client_destroy' in name: categories['Client Management'].append(func) elif 'config' in name: @@ -166,15 +166,15 @@ def categorize_functions(functions: List[FFIFunction]) -> Dict[str, List[FFIFunc categories['Error Handling'].append(func) else: categories['Utility Functions'].append(func) - + # Remove empty categories return {k: v for k, v in categories.items() if v} def generate_markdown(functions: List[FFIFunction]) -> str: """Generate markdown documentation from FFI functions.""" - + categories = categorize_functions(functions) - + md = [] md.append("# Dash SPV FFI API Documentation") md.append("") @@ -184,7 +184,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("") md.append(f"**Total Functions**: {len(functions)}") md.append("") - + # Table of Contents md.append("## Table of Contents") md.append("") @@ -192,45 +192,45 @@ def generate_markdown(functions: List[FFIFunction]) -> str: anchor = category.lower().replace(' ', '-').replace('&', 'and') md.append(f"- [{category}](#{anchor})") md.append("") - + # Function Reference md.append("## Function Reference") md.append("") - + for category, funcs in categories.items(): if not funcs: continue - + anchor = category.lower().replace(' ', 
'-').replace('&', 'and') md.append(f"### {category}") md.append("") md.append(f"Functions: {len(funcs)}") md.append("") - + # Create a table for each category md.append("| Function | Description | Module |") md.append("|----------|-------------|--------|") - + for func in sorted(funcs, key=lambda f: f.name): desc = func.doc_comment.split('.')[0] if func.doc_comment else "No description" desc = desc.replace('|', '\\|') # Escape pipes in description if len(desc) > 80: desc = desc[:77] + "..." md.append(f"| `{func.name}` | {desc} | {func.module} |") - + md.append("") - + # Detailed Function Documentation md.append("## Detailed Function Documentation") md.append("") - + for category, funcs in categories.items(): if not funcs: continue - + md.append(f"### {category} - Detailed") md.append("") - + for func in sorted(funcs, key=lambda f: f.name): md.append(f"#### `{func.name}`") md.append("") @@ -238,22 +238,22 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append(func.signature) md.append("```") md.append("") - + if func.doc_comment: md.append("**Description:**") md.append(func.doc_comment) md.append("") - + if func.safety_comment: md.append("**Safety:**") md.append(func.safety_comment) md.append("") - + md.append(f"**Module:** `{func.module}`") md.append("") md.append("---") md.append("") - + # Type Definitions md.append("## Type Definitions") md.append("") @@ -269,7 +269,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("- `FFIEventCallbacks` - Event callback structure") md.append("- `CoreSDKHandle` - Platform SDK integration handle") md.append("") - + md.append("### Enumerations") md.append("") md.append("- `FFINetwork` - Network type (Dash, Testnet, Regtest, Devnet)") @@ -277,7 +277,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("- `FFIMempoolStrategy` - Mempool strategy (FetchAll, BloomFilter, Selective)") md.append("- `FFISyncStage` - Synchronization stage") md.append("") - + # Memory Management md.append("## Memory Management") md.append("") @@ -292,7 +292,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: "that must be released with `dash_spv_ffi_wallet_manager_free()`" ) md.append("") - + # Usage Examples md.append("## Usage Examples") md.append("") @@ -323,7 +323,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("dash_spv_ffi_config_destroy(config);") md.append("```") md.append("") - + md.append("### Event Callbacks") md.append("") md.append("```c") @@ -347,30 +347,30 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("dash_spv_ffi_client_set_event_callbacks(client, callbacks);") md.append("```") md.append("") - + return '\n'.join(md) def main(): # Find all Rust source files src_dir = Path(__file__).parent.parent / "src" - + all_functions = [] - + for rust_file in src_dir.rglob("*.rs"): functions = extract_ffi_functions(rust_file) all_functions.extend(functions) - + # Generate markdown markdown = generate_markdown(all_functions) - + # Write to file output_file = Path(__file__).parent.parent / "FFI_API.md" with open(output_file, 'w') as f: f.write(markdown) - + print(f"Generated FFI documentation with {len(all_functions)} functions") print(f"Output: {output_file}") - + return 0 if __name__ == "__main__": diff --git a/dash-spv-ffi/src/callbacks.rs b/dash-spv-ffi/src/callbacks.rs index 5506b3ddf..dbc85dc8c 100644 --- a/dash-spv-ffi/src/callbacks.rs +++ b/dash-spv-ffi/src/callbacks.rs @@ -247,7 +247,7 @@ impl FFIEventCallbacks { is_instant_send: bool, ) { if 
let Some(callback) = self.on_mempool_transaction_added { - tracing::info!("🎯 Calling mempool transaction added callback: txid={}, amount={}, is_instant_send={}", + tracing::info!("🎯 Calling mempool transaction added callback: txid={}, amount={}, is_instant_send={}", txid, amount, is_instant_send); let txid_bytes = txid.as_byte_array(); let addresses_str = addresses.join(","); diff --git a/dash-spv-ffi/tests/c_tests/test_advanced.c b/dash-spv-ffi/tests/c_tests/test_advanced.c index 970fc5752..8d0162e78 100644 --- a/dash-spv-ffi/tests/c_tests/test_advanced.c +++ b/dash-spv-ffi/tests/c_tests/test_advanced.c @@ -20,25 +20,25 @@ // Test wallet operations void test_wallet_operations() { TEST_START("test_wallet_operations"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test-wallet"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Test watching addresses const char* test_addresses[] = { "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E", "XuQQkwA4FYkq2XERzMY2CiAZhJTEkgZ6uN", "XpAy3DUNod14KdJJh3XUjtkAiUkD2kd4JT" }; - + for (int i = 0; i < 3; i++) { int32_t result = dash_spv_ffi_client_watch_address(client, test_addresses[i]); TEST_ASSERT(result == FFIErrorCode_Success); } - + // Test getting balance FFIBalance* balance = dash_spv_ffi_client_get_address_balance(client, test_addresses[0]); if (balance != NULL) { @@ -47,7 +47,7 @@ void test_wallet_operations() { TEST_ASSERT(balance->pending == 0); dash_spv_ffi_balance_destroy(balance); } - + // Test getting UTXOs FFIArray* utxos = dash_spv_ffi_client_get_address_utxos(client, test_addresses[0]); if (utxos != NULL) { @@ -55,27 +55,27 @@ void test_wallet_operations() { TEST_ASSERT(utxos->len == 0); dash_spv_ffi_array_destroy(utxos); } - + // Test unwatching address int32_t result = dash_spv_ffi_client_unwatch_address(client, test_addresses[0]); TEST_ASSERT(result == FFIErrorCode_Success); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_wallet_operations"); } // Test sync progress void test_sync_progress() { TEST_START("test_sync_progress"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test-sync"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Get initial sync progress FFISyncProgress* progress = dash_spv_ffi_client_get_sync_progress(client); if (progress != NULL) { @@ -84,10 +84,10 @@ void test_sync_progress() { TEST_ASSERT(progress->filter_header_height >= 0); TEST_ASSERT(progress->masternode_height >= 0); TEST_ASSERT(progress->peer_count >= 0); - + dash_spv_ffi_sync_progress_destroy(progress); } - + // Get stats FFISpvStats* stats = dash_spv_ffi_client_get_stats(client); if (stats != NULL) { @@ -95,13 +95,13 @@ void test_sync_progress() { TEST_ASSERT(stats->filters_downloaded >= 0); TEST_ASSERT(stats->bytes_received >= 0); TEST_ASSERT(stats->bytes_sent >= 0); - + dash_spv_ffi_spv_stats_destroy(stats); } - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_sync_progress"); } @@ -115,7 +115,7 @@ typedef struct { // Thread function for concurrent operations void* concurrent_operations(void* arg) { ThreadData* data = (ThreadData*)arg; - + for (int i = 0; i < 100; i++) { // Perform various operations switch (i % 4) { @@ -138,7 +138,7 @@ void* concurrent_operations(void* arg) { case 2: { // Check address balance FFIBalance* 
balance = dash_spv_ffi_client_get_address_balance( - data->client, + data->client, "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E" ); if (balance != NULL) { @@ -155,112 +155,112 @@ void* concurrent_operations(void* arg) { break; } } - + data->operations_completed++; usleep(1000); // 1ms delay } - + return NULL; } // Test concurrent access void test_concurrent_access() { TEST_START("test_concurrent_access"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test-concurrent"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + const int num_threads = 4; pthread_t threads[num_threads]; ThreadData thread_data[num_threads]; - + // Start threads for (int i = 0; i < num_threads; i++) { thread_data[i].client = client; thread_data[i].thread_id = i; thread_data[i].operations_completed = 0; - + int result = pthread_create(&threads[i], NULL, concurrent_operations, &thread_data[i]); TEST_ASSERT(result == 0); } - + // Wait for threads to complete for (int i = 0; i < num_threads; i++) { pthread_join(threads[i], NULL); - printf("Thread %d completed %d operations\n", - thread_data[i].thread_id, + printf("Thread %d completed %d operations\n", + thread_data[i].thread_id, thread_data[i].operations_completed); } - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_concurrent_access"); } // Test memory management void test_memory_management() { TEST_START("test_memory_management"); - + // Test rapid allocation/deallocation for (int i = 0; i < 1000; i++) { FFIClientConfig* config = dash_spv_ffi_config_testnet(); - + char data_dir[256]; snprintf(data_dir, sizeof(data_dir), "/tmp/dash-spv-test-mem-%d", i); dash_spv_ffi_config_set_data_dir(config, data_dir); - + // Add some peers dash_spv_ffi_config_add_peer(config, "127.0.0.1:9999"); dash_spv_ffi_config_add_peer(config, "192.168.1.1:9999"); - + // Create and immediately destroy client FFIDashSpvClient* client = dash_spv_ffi_client_new(config); if (client != NULL) { dash_spv_ffi_client_destroy(client); } - + dash_spv_ffi_config_destroy(config); } - + TEST_SUCCESS("test_memory_management"); } // Test error conditions void test_error_conditions() { TEST_START("test_error_conditions"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test-errors"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Test invalid address int32_t result = dash_spv_ffi_client_watch_address(client, "invalid_address"); TEST_ASSERT(result == FFIErrorCode_InvalidArgument); - + // Check error was set const char* error = dash_spv_ffi_get_last_error(); TEST_ASSERT(error != NULL); - + // Clear error dash_spv_ffi_clear_error(); - + // Test invalid transaction ID FFITransaction* tx = dash_spv_ffi_client_get_transaction(client, "not_a_txid"); TEST_ASSERT(tx == NULL); - + // Test invalid script result = dash_spv_ffi_client_watch_script(client, "not_hex"); TEST_ASSERT(result == FFIErrorCode_InvalidArgument); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_error_conditions"); } @@ -275,7 +275,7 @@ void real_progress_callback(double progress, const char* message, void* user_dat CallbackData* data = (CallbackData*)user_data; data->progress_count++; data->last_progress = progress; - + if (message != NULL) { printf("Progress %.1f%%: %s\n", progress, message); } @@ -284,7 +284,7 @@ void 
real_progress_callback(double progress, const char* message, void* user_dat void real_completion_callback(int success, const char* error, void* user_data) { CallbackData* data = (CallbackData*)user_data; data->completion_called = 1; - + if (!success && error != NULL) { printf("Operation failed: %s\n", error); } @@ -292,35 +292,35 @@ void real_completion_callback(int success, const char* error, void* user_data) { void test_callbacks_with_operations() { TEST_START("test_callbacks_with_operations"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test-callbacks"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + CallbackData callback_data = {0}; - + FFICallbacks callbacks = {0}; callbacks.on_progress = real_progress_callback; callbacks.on_completion = real_completion_callback; callbacks.on_data = NULL; callbacks.user_data = &callback_data; - + // Start sync operation int32_t result = dash_spv_ffi_client_sync_to_tip(client, callbacks); - + // Wait a bit for callbacks usleep(100000); // 100ms - + // Callbacks might or might not be called depending on network - printf("Progress callbacks: %d, Completion: %d\n", - callback_data.progress_count, + printf("Progress callbacks: %d, Completion: %d\n", + callback_data.progress_count, callback_data.completion_called); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_callbacks_with_operations"); } @@ -328,16 +328,16 @@ void test_callbacks_with_operations() { int main() { printf("Running Dash SPV FFI Advanced C Tests\n"); printf("=====================================\n\n"); - + test_wallet_operations(); test_sync_progress(); test_concurrent_access(); test_memory_management(); test_error_conditions(); test_callbacks_with_operations(); - + printf("\n=====================================\n"); printf("All advanced tests passed!\n"); - + return 0; } \ No newline at end of file diff --git a/dash-spv-ffi/tests/c_tests/test_basic.c b/dash-spv-ffi/tests/c_tests/test_basic.c index 8e30be85a..a5a285224 100644 --- a/dash-spv-ffi/tests/c_tests/test_basic.c +++ b/dash-spv-ffi/tests/c_tests/test_basic.c @@ -19,228 +19,228 @@ // Test basic configuration void test_config_creation() { TEST_START("test_config_creation"); - + // Test creating config for each network FFIClientConfig* config_mainnet = dash_spv_ffi_config_new(FFINetwork_Dash); TEST_ASSERT(config_mainnet != NULL); - + FFIClientConfig* config_testnet = dash_spv_ffi_config_new(FFINetwork_Testnet); TEST_ASSERT(config_testnet != NULL); - + FFIClientConfig* config_regtest = dash_spv_ffi_config_new(FFINetwork_Regtest); TEST_ASSERT(config_regtest != NULL); - + // Test convenience constructors FFIClientConfig* config_testnet2 = dash_spv_ffi_config_testnet(); TEST_ASSERT(config_testnet2 != NULL); - + // Clean up dash_spv_ffi_config_destroy(config_mainnet); dash_spv_ffi_config_destroy(config_testnet); dash_spv_ffi_config_destroy(config_regtest); dash_spv_ffi_config_destroy(config_testnet2); - + TEST_SUCCESS("test_config_creation"); } // Test configuration setters void test_config_setters() { TEST_START("test_config_setters"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); TEST_ASSERT(config != NULL); - + // Test setting data directory int32_t result = dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test"); TEST_ASSERT(result == FFIErrorCode_Success); - + // Test setting validation mode result = 
dash_spv_ffi_config_set_validation_mode(config, FFIValidationMode_Basic); TEST_ASSERT(result == FFIErrorCode_Success); - + // Test setting max peers result = dash_spv_ffi_config_set_max_peers(config, 16); TEST_ASSERT(result == FFIErrorCode_Success); - + // Test adding peers result = dash_spv_ffi_config_add_peer(config, "127.0.0.1:9999"); TEST_ASSERT(result == FFIErrorCode_Success); - + result = dash_spv_ffi_config_add_peer(config, "192.168.1.1:9999"); TEST_ASSERT(result == FFIErrorCode_Success); - + // Test setting user agent result = dash_spv_ffi_config_set_user_agent(config, "TestClient/1.0"); TEST_ASSERT(result == FFIErrorCode_Success); - + // Test boolean setters result = dash_spv_ffi_config_set_relay_transactions(config, 1); TEST_ASSERT(result == FFIErrorCode_Success); - + result = dash_spv_ffi_config_set_filter_load(config, 1); TEST_ASSERT(result == FFIErrorCode_Success); - + dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_config_setters"); } // Test configuration getters void test_config_getters() { TEST_START("test_config_getters"); - + FFIClientConfig* config = dash_spv_ffi_config_new(FFINetwork_Testnet); TEST_ASSERT(config != NULL); - + // Set some values dash_spv_ffi_config_set_data_dir(config, "/tmp/test-dir"); - + // Test getting network FFINetwork network = dash_spv_ffi_config_get_network(config); TEST_ASSERT(network == FFINetwork_Testnet); - + // Test getting data directory FFIString data_dir = dash_spv_ffi_config_get_data_dir(config); if (data_dir.ptr != NULL) { TEST_ASSERT(strcmp(data_dir.ptr, "/tmp/test-dir") == 0); dash_spv_ffi_string_destroy(data_dir); } - + dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_config_getters"); } // Test error handling void test_error_handling() { TEST_START("test_error_handling"); - + // Clear any existing error dash_spv_ffi_clear_error(); - + // Test that no error is set initially const char* error = dash_spv_ffi_get_last_error(); TEST_ASSERT(error == NULL); - + // Trigger an error by using NULL config int32_t result = dash_spv_ffi_config_set_data_dir(NULL, "/tmp"); TEST_ASSERT(result == FFIErrorCode_NullPointer); - + // Check error was set error = dash_spv_ffi_get_last_error(); TEST_ASSERT(error != NULL); TEST_ASSERT(strlen(error) > 0); - + // Clear error dash_spv_ffi_clear_error(); error = dash_spv_ffi_get_last_error(); TEST_ASSERT(error == NULL); - + TEST_SUCCESS("test_error_handling"); } // Test client creation void test_client_creation() { TEST_START("test_client_creation"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); TEST_ASSERT(config != NULL); - + // Set required configuration dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-test"); - + // Create client FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Clean up dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_client_creation"); } // Test string operations void test_string_operations() { TEST_START("test_string_operations"); - + // Test creating and destroying strings FFIString str = {0}; str.ptr = strdup("Hello, FFI!"); TEST_ASSERT(str.ptr != NULL); - + // Note: In real usage, strings would come from FFI functions free(str.ptr); // Using free instead of dash_spv_ffi_string_destroy for test string - + TEST_SUCCESS("test_string_operations"); } // Test array operations void test_array_operations() { TEST_START("test_array_operations"); - + // Arrays would typically come from FFI functions // Here we just test the structure FFIArray array = {0}; 
array.data = NULL; array.len = 0; - + // Test destroying empty array dash_spv_ffi_array_destroy(array); - + TEST_SUCCESS("test_array_operations"); } // Test address validation void test_address_validation() { TEST_START("test_address_validation"); - + // Test valid mainnet address int32_t valid = dash_spv_ffi_validate_address("XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E", FFINetwork_Dash); TEST_ASSERT(valid == 1); - + // Test invalid address valid = dash_spv_ffi_validate_address("invalid_address", FFINetwork_Dash); TEST_ASSERT(valid == 0); - + // Test empty address valid = dash_spv_ffi_validate_address("", FFINetwork_Dash); TEST_ASSERT(valid == 0); - + // Test Bitcoin address (should be invalid for Dash) valid = dash_spv_ffi_validate_address("1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa", FFINetwork_Dash); TEST_ASSERT(valid == 0); - + TEST_SUCCESS("test_address_validation"); } // Test null pointer handling void test_null_pointer_handling() { TEST_START("test_null_pointer_handling"); - + // Test all functions with NULL pointers - + // Config functions TEST_ASSERT(dash_spv_ffi_config_set_data_dir(NULL, NULL) == FFIErrorCode_NullPointer); TEST_ASSERT(dash_spv_ffi_config_set_validation_mode(NULL, FFIValidationMode_Basic) == FFIErrorCode_NullPointer); TEST_ASSERT(dash_spv_ffi_config_set_max_peers(NULL, 10) == FFIErrorCode_NullPointer); TEST_ASSERT(dash_spv_ffi_config_add_peer(NULL, NULL) == FFIErrorCode_NullPointer); - + // Client functions TEST_ASSERT(dash_spv_ffi_client_new(NULL) == NULL); TEST_ASSERT(dash_spv_ffi_client_start(NULL) == FFIErrorCode_NullPointer); TEST_ASSERT(dash_spv_ffi_client_stop(NULL) == FFIErrorCode_NullPointer); - + // Destruction functions (should handle NULL gracefully) dash_spv_ffi_client_destroy(NULL); dash_spv_ffi_config_destroy(NULL); - + FFIString null_string = {0}; dash_spv_ffi_string_destroy(null_string); - + FFIArray null_array = {0}; dash_spv_ffi_array_destroy(null_array); - + TEST_SUCCESS("test_null_pointer_handling"); } @@ -248,7 +248,7 @@ void test_null_pointer_handling() { void progress_callback(double progress, const char* message, void* user_data) { int* called = (int*)user_data; *called = 1; - + TEST_ASSERT(progress >= 0.0 && progress <= 100.0); // Message can be NULL } @@ -256,7 +256,7 @@ void progress_callback(double progress, const char* message, void* user_data) { void completion_callback(int success, const char* error, void* user_data) { int* called = (int*)user_data; *called = 1; - + // Error should be NULL on success, non-NULL on failure if (success) { TEST_ASSERT(error == NULL); @@ -265,19 +265,19 @@ void completion_callback(int success, const char* error, void* user_data) { void test_callbacks() { TEST_START("test_callbacks"); - + int progress_called = 0; int completion_called = 0; - + FFICallbacks callbacks = {0}; callbacks.on_progress = progress_callback; callbacks.on_completion = completion_callback; callbacks.on_data = NULL; callbacks.user_data = &progress_called; // Simplified for test - + // In a real test, these callbacks would be invoked by FFI functions // Here we just test the structure - + TEST_SUCCESS("test_callbacks"); } @@ -285,7 +285,7 @@ void test_callbacks() { int main() { printf("Running Dash SPV FFI C Tests\n"); printf("=============================\n\n"); - + test_config_creation(); test_config_setters(); test_config_getters(); @@ -296,9 +296,9 @@ int main() { test_address_validation(); test_null_pointer_handling(); test_callbacks(); - + printf("\n=============================\n"); printf("All tests passed!\n"); - + return 0; } \ No newline at 
end of file diff --git a/dash-spv-ffi/tests/c_tests/test_integration.c b/dash-spv-ffi/tests/c_tests/test_integration.c index 37464ff46..f42c404f9 100644 --- a/dash-spv-ffi/tests/c_tests/test_integration.c +++ b/dash-spv-ffi/tests/c_tests/test_integration.c @@ -43,63 +43,63 @@ void on_transaction_event(const char* txid, int confirmed, void* user_data) { void on_balance_update_event(uint64_t confirmed, uint64_t unconfirmed, void* user_data) { IntegrationContext* ctx = (IntegrationContext*)user_data; ctx->total_balance = confirmed + unconfirmed; - printf("Balance update: confirmed=%llu, unconfirmed=%llu\n", + printf("Balance update: confirmed=%llu, unconfirmed=%llu\n", (unsigned long long)confirmed, (unsigned long long)unconfirmed); } // Test full workflow void test_full_workflow() { TEST_START("test_full_workflow"); - + IntegrationContext ctx = {0}; - + // Create configuration ctx.config = dash_spv_ffi_config_new(FFINetwork_Regtest); TEST_ASSERT(ctx.config != NULL); - + // Configure client dash_spv_ffi_config_set_data_dir(ctx.config, "/tmp/dash-spv-integration"); dash_spv_ffi_config_set_validation_mode(ctx.config, FFIValidationMode_Basic); dash_spv_ffi_config_set_max_peers(ctx.config, 8); - + // Add some test peers dash_spv_ffi_config_add_peer(ctx.config, "127.0.0.1:19999"); dash_spv_ffi_config_add_peer(ctx.config, "127.0.0.1:19998"); - + // Create client ctx.client = dash_spv_ffi_client_new(ctx.config); TEST_ASSERT(ctx.client != NULL); - + // Set up event callbacks FFIEventCallbacks event_callbacks = {0}; event_callbacks.on_block = on_block_event; event_callbacks.on_transaction = on_transaction_event; event_callbacks.on_balance_update = on_balance_update_event; event_callbacks.user_data = &ctx; - + int32_t result = dash_spv_ffi_client_set_event_callbacks(ctx.client, event_callbacks); TEST_ASSERT(result == FFIErrorCode_Success); - + // Add addresses to watch const char* addresses[] = { "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E", "XuQQkwA4FYkq2XERzMY2CiAZhJTEkgZ6uN", "XpAy3DUNod14KdJJh3XUjtkAiUkD2kd4JT" }; - + for (int i = 0; i < 3; i++) { result = dash_spv_ffi_client_watch_address(ctx.client, addresses[i]); TEST_ASSERT(result == FFIErrorCode_Success); } - + // Start the client result = dash_spv_ffi_client_start(ctx.client); printf("Client start result: %d\n", result); - + // Monitor for a while time_t start_time = time(NULL); time_t monitor_duration = 5; // 5 seconds - + while (time(NULL) - start_time < monitor_duration) { // Check sync progress FFISyncProgress* progress = dash_spv_ffi_client_get_sync_progress(ctx.client); @@ -110,7 +110,7 @@ void test_full_workflow() { progress->peer_count); dash_spv_ffi_sync_progress_destroy(progress); } - + // Check stats FFISpvStats* stats = dash_spv_ffi_client_get_stats(ctx.client); if (stats != NULL) { @@ -120,49 +120,49 @@ void test_full_workflow() { (unsigned long long)stats->bytes_received); dash_spv_ffi_spv_stats_destroy(stats); } - + sleep(1); } - + // Stop the client result = dash_spv_ffi_client_stop(ctx.client); TEST_ASSERT(result == FFIErrorCode_Success); - + // Print summary printf("\nWorkflow summary:\n"); printf(" Blocks received: %d\n", ctx.block_count); printf(" Transactions: %d\n", ctx.tx_count); printf(" Total balance: %llu\n", (unsigned long long)ctx.total_balance); - + // Clean up dash_spv_ffi_client_destroy(ctx.client); dash_spv_ffi_config_destroy(ctx.config); - + TEST_SUCCESS("test_full_workflow"); } // Test persistence void test_persistence() { TEST_START("test_persistence"); - + const char* data_dir = "/tmp/dash-spv-persistence"; - + // 
Phase 1: Create client and add data { FFIClientConfig* config = dash_spv_ffi_config_new(FFINetwork_Regtest); dash_spv_ffi_config_set_data_dir(config, data_dir); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Add watched addresses dash_spv_ffi_client_watch_address(client, "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E"); dash_spv_ffi_client_watch_address(client, "XuQQkwA4FYkq2XERzMY2CiAZhJTEkgZ6uN"); - + // Start and sync for a bit dash_spv_ffi_client_start(client); sleep(2); - + // Get current state FFISyncProgress* progress = dash_spv_ffi_client_get_sync_progress(client); uint32_t height1 = 0; @@ -170,60 +170,60 @@ void test_persistence() { height1 = progress->header_height; dash_spv_ffi_sync_progress_destroy(progress); } - + printf("Phase 1 height: %u\n", height1); - + dash_spv_ffi_client_stop(client); dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } - + // Phase 2: Create new client with same data directory { FFIClientConfig* config = dash_spv_ffi_config_new(FFINetwork_Regtest); dash_spv_ffi_config_set_data_dir(config, data_dir); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Check if state was persisted FFISyncProgress* progress = dash_spv_ffi_client_get_sync_progress(client); if (progress != NULL) { printf("Phase 2 height: %u\n", progress->header_height); dash_spv_ffi_sync_progress_destroy(progress); } - + // Check watched addresses FFIArray* watched = dash_spv_ffi_client_get_watched_addresses(client); if (watched != NULL) { printf("Persisted watched addresses: %zu\n", watched->len); dash_spv_ffi_array_destroy(*watched); } - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } - + TEST_SUCCESS("test_persistence"); } // Test transaction handling void test_transaction_handling() { TEST_START("test_transaction_handling"); - + FFIClientConfig* config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-tx-test"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Test transaction validation (minimal tx for testing) const char* test_tx_hex = "01000000000100000000000000001976a914000000000000000000000000000000000000000088ac00000000"; - + // Try to broadcast (will likely fail, but tests the API) int32_t result = dash_spv_ffi_client_broadcast_transaction(client, test_tx_hex); printf("Broadcast result: %d\n", result); - + // If failed, check error if (result != FFIErrorCode_Success) { const char* error = dash_spv_ffi_get_last_error(); @@ -232,7 +232,7 @@ void test_transaction_handling() { } dash_spv_ffi_clear_error(); } - + // Test transaction query const char* test_txid = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; FFITransaction* tx = dash_spv_ffi_client_get_transaction(client, test_txid); @@ -241,45 +241,45 @@ void test_transaction_handling() { } else { dash_spv_ffi_transaction_destroy(tx); } - + // Test confirmation status int32_t confirmations = dash_spv_ffi_client_get_transaction_confirmations(client, test_txid); printf("Transaction confirmations: %d\n", confirmations); - + int32_t is_confirmed = dash_spv_ffi_client_is_transaction_confirmed(client, test_txid); printf("Transaction confirmed: %d\n", is_confirmed); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_transaction_handling"); } // Test rescan functionality void test_rescan() { TEST_START("test_rescan"); - + FFIClientConfig* 
config = dash_spv_ffi_config_testnet(); dash_spv_ffi_config_set_data_dir(config, "/tmp/dash-spv-rescan-test"); - + FFIDashSpvClient* client = dash_spv_ffi_client_new(config); TEST_ASSERT(client != NULL); - + // Add addresses to watch dash_spv_ffi_client_watch_address(client, "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E"); dash_spv_ffi_client_watch_address(client, "XuQQkwA4FYkq2XERzMY2CiAZhJTEkgZ6uN"); - + // Start rescan from height 0 int32_t result = dash_spv_ffi_client_rescan_blockchain(client, 0); printf("Rescan from height 0 result: %d\n", result); - + // Start rescan from specific height result = dash_spv_ffi_client_rescan_blockchain(client, 100000); printf("Rescan from height 100000 result: %d\n", result); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + TEST_SUCCESS("test_rescan"); } @@ -287,14 +287,14 @@ void test_rescan() { int main() { printf("Running Dash SPV FFI Integration C Tests\n"); printf("========================================\n\n"); - + test_full_workflow(); test_persistence(); test_transaction_handling(); test_rescan(); - + printf("\n========================================\n"); printf("All integration tests completed!\n"); - + return 0; } \ No newline at end of file diff --git a/dash-spv-ffi/tests/integration/test_cross_language.rs b/dash-spv-ffi/tests/integration/test_cross_language.rs index da49d5165..a6a21cf4d 100644 --- a/dash-spv-ffi/tests/integration/test_cross_language.rs +++ b/dash-spv-ffi/tests/integration/test_cross_language.rs @@ -8,14 +8,14 @@ mod tests { use std::process::Command; use std::path::PathBuf; use std::fs; - + #[test] #[serial] fn test_c_header_generation() { // Verify that cbindgen can generate valid C headers let crate_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let header_path = crate_dir.join("dash_spv_ffi.h"); - + // Run cbindgen let output = Command::new("cbindgen") .current_dir(&crate_dir) @@ -26,25 +26,25 @@ mod tests { .arg("--output") .arg(&header_path) .output(); - + if let Ok(output) = output { if output.status.success() { // Verify header was created assert!(header_path.exists(), "C header file was not generated"); - + // Read and validate header content let header_content = fs::read_to_string(&header_path).unwrap(); - + // Check for essential function declarations assert!(header_content.contains("dash_spv_ffi_client_new")); assert!(header_content.contains("dash_spv_ffi_client_destroy")); assert!(header_content.contains("dash_spv_ffi_config_new")); assert!(header_content.contains("FFINetwork")); assert!(header_content.contains("FFIErrorCode")); - + // Check for proper extern "C" blocks assert!(header_content.contains("extern \"C\"") || header_content.contains("#ifdef __cplusplus")); - + println!("C header generated successfully with {} lines", header_content.lines().count()); } else { println!("cbindgen not available or failed: {}", String::from_utf8_lossy(&output.stderr)); @@ -53,7 +53,7 @@ mod tests { println!("cbindgen command not found, skipping header generation test"); } } - + #[test] #[serial] fn test_string_encoding_compatibility() { @@ -67,7 +67,7 @@ mod tests { "Null in middle: before\0after", // Will be truncated at null long_string.as_str(), ]; - + for test_str in &test_strings { // Simulate C string creation let c_string = CString::new(test_str.as_bytes()).unwrap_or_else(|_| { @@ -75,13 +75,13 @@ mod tests { let null_pos = test_str.find('\0').unwrap_or(test_str.len()); CString::new(&test_str[..null_pos]).unwrap() }); - + // Pass through FFI boundary let ffi_string = FFIString { ptr: 
c_string.as_ptr() as *mut c_char, length: test_str.len(), }; - + // Recover on Rust side if let Ok(recovered) = FFIString::from_ptr(ffi_string.ptr) { // Verify we can handle the string @@ -90,30 +90,30 @@ mod tests { } } } - + #[test] #[serial] fn test_struct_alignment_compatibility() { // Verify struct sizes and alignments match C expectations - + // Check size of enums (should be C int-compatible) assert_eq!(std::mem::size_of::(), std::mem::size_of::()); assert_eq!(std::mem::size_of::(), std::mem::size_of::()); assert_eq!(std::mem::size_of::(), std::mem::size_of::()); - + // Check alignment of structs assert!(std::mem::align_of::() <= 8); assert!(std::mem::align_of::() <= 8); assert!(std::mem::align_of::() <= 8); - + // Verify FFIString is pointer-sized assert_eq!(std::mem::size_of::(), std::mem::size_of::<*mut c_char>()); - + // Verify FFIArray has expected layout - assert_eq!(std::mem::size_of::(), + assert_eq!(std::mem::size_of::(), std::mem::size_of::<*mut c_void>() + std::mem::size_of::()); } - + #[test] #[serial] fn test_callback_calling_conventions() { @@ -121,30 +121,30 @@ mod tests { // Test that callbacks work with different calling conventions let mut callback_called = false; let mut received_progress = 0.0; - + extern "C" fn test_callback(progress: f64, msg: *const c_char, user_data: *mut c_void) { let data = user_data as *mut (bool, f64); let (called, prog) = &mut *data; *called = true; *prog = progress; - + // Verify we can safely access the message if !msg.is_null() { let _ = CStr::from_ptr(msg); } } - + let mut user_data = (callback_called, received_progress); let user_data_ptr = &mut user_data as *mut _ as *mut c_void; - + // Simulate callback invocation test_callback(50.0, std::ptr::null(), user_data_ptr); - + assert!(user_data.0); assert_eq!(user_data.1, 50.0); } } - + #[test] #[serial] fn test_error_code_consistency() { @@ -162,22 +162,22 @@ mod tests { FFIErrorCode::RuntimeError as i32, FFIErrorCode::Unknown as i32, ]; - + // Check all codes are unique let mut seen = std::collections::HashSet::new(); for code in &error_codes { assert!(seen.insert(*code), "Duplicate error code: {}", code); } - + // Verify Success is 0 (C convention) assert_eq!(FFIErrorCode::Success as i32, 0); - + // Verify other codes are positive for code in &error_codes[1..] 
{ assert!(*code > 0, "Error code should be positive: {}", code); } } - + #[test] #[serial] fn test_pointer_validity_across_calls() { @@ -186,12 +186,12 @@ mod tests { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + // Create client and store pointer let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); let client_addr = client as usize; - + // Use client multiple times - pointer should remain valid for _ in 0..10 { let progress = dash_spv_ffi_client_get_sync_progress(client); @@ -202,32 +202,32 @@ mod tests { dash_spv_ffi_sync_progress_destroy(progress); } } - + // Verify client pointer hasn't changed assert_eq!(client as usize, client_addr); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn test_thread_safety_annotations() { // This test verifies our thread safety assumptions // In a real C integration, these would be documented - + // Client should be Send (can be moved between threads) fn assert_send() {} assert_send::<*mut FFIDashSpvClient>(); - + // Config should be Send assert_send::<*mut FFIClientConfig>(); - + // But raw pointers are not Sync by default (correct) // This means C code needs proper synchronization for concurrent access } - + #[test] #[serial] fn test_null_termination_handling() { @@ -235,7 +235,7 @@ mod tests { // Test that all string functions properly null-terminate let test_str = "Test string"; let ffi_str = FFIString::new(test_str); - + // Manually verify null termination let c_str = ffi_str.ptr as *const c_char; let mut len = 0; @@ -243,14 +243,14 @@ mod tests { len += 1; } assert_eq!(len as usize, test_str.len()); - + // Verify the byte after the string is null assert_eq!(*c_str.offset(len), 0); - + dash_spv_ffi_string_destroy(ffi_str); } } - + #[test] #[serial] fn test_platform_specific_types() { @@ -258,11 +258,11 @@ mod tests { assert_eq!(std::mem::size_of::(), 1); // c_void is a zero-sized type in Rust (it's an opaque type) assert_eq!(std::mem::size_of::(), 0); - + // Verify pointer sizes (platform-dependent) let ptr_size = std::mem::size_of::<*mut c_void>(); assert!(ptr_size == 4 || ptr_size == 8); // 32-bit or 64-bit - + // Verify usize matches pointer size (important for FFI) assert_eq!(std::mem::size_of::(), ptr_size); } diff --git a/dash-spv-ffi/tests/integration/test_full_workflow.rs b/dash-spv-ffi/tests/integration/test_full_workflow.rs index ccf0039f5..fa79d8f7c 100644 --- a/dash-spv-ffi/tests/integration/test_full_workflow.rs +++ b/dash-spv-ffi/tests/integration/test_full_workflow.rs @@ -8,7 +8,7 @@ mod tests { use std::sync::{Arc, Mutex, atomic::{AtomicBool, AtomicU32, Ordering}}; use std::thread; use std::time::{Duration, Instant}; - + struct IntegrationTestContext { client: *mut FFIDashSpvClient, config: *mut FFIClientConfig, @@ -17,31 +17,31 @@ mod tests { errors: Arc>>, events: Arc>>, } - + impl IntegrationTestContext { unsafe fn new(network: FFINetwork) -> Self { let temp_dir = TempDir::new().unwrap(); let config = dash_spv_ffi_config_new(network); - + let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); dash_spv_ffi_config_set_validation_mode(config, FFIValidationMode::Basic); dash_spv_ffi_config_set_max_peers(config, 8); - + // Add some test peers if available let test_peers = [ "127.0.0.1:19999", "127.0.0.1:19998", ]; - + for peer in &test_peers { let 
c_peer = CString::new(*peer).unwrap(); dash_spv_ffi_config_add_peer(config, c_peer.as_ptr()); } - + let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); - + IntegrationTestContext { client, config, @@ -51,123 +51,123 @@ mod tests { events: Arc::new(Mutex::new(Vec::new())), } } - + unsafe fn cleanup(self) { dash_spv_ffi_client_destroy(self.client); dash_spv_ffi_config_destroy(self.config); } } - + #[test] #[serial] fn test_complete_sync_workflow() { unsafe { let mut ctx = IntegrationTestContext::new(FFINetwork::Regtest); - + // Set up callbacks let sync_completed = ctx.sync_completed.clone(); let errors = ctx.errors.clone(); - + extern "C" fn on_sync_progress(progress: f64, msg: *const c_char, user_data: *mut c_void) { let ctx = unsafe { &*(user_data as *const IntegrationTestContext) }; if progress >= 100.0 { ctx.sync_completed.store(true, Ordering::SeqCst); } - + if !msg.is_null() { let msg_str = unsafe { CStr::from_ptr(msg).to_str().unwrap() }; ctx.events.lock().unwrap().push(format!("Progress {:.1}%: {}", progress, msg_str)); } } - + extern "C" fn on_sync_complete(success: bool, error: *const c_char, user_data: *mut c_void) { let ctx = unsafe { &*(user_data as *const IntegrationTestContext) }; ctx.sync_completed.store(true, Ordering::SeqCst); - + if !success && !error.is_null() { let error_str = unsafe { CStr::from_ptr(error).to_str().unwrap() }; ctx.errors.lock().unwrap().push(error_str.to_string()); } } - + let callbacks = FFICallbacks { on_progress: Some(on_sync_progress), on_completion: Some(on_sync_complete), on_data: None, user_data: &ctx as *const _ as *mut c_void, }; - + // Start the client let result = dash_spv_ffi_client_start(ctx.client); - + // Start syncing let sync_result = dash_spv_ffi_client_sync_to_tip(ctx.client, callbacks); - + // Wait for sync to complete or timeout let start = Instant::now(); let timeout = Duration::from_secs(10); - + while !ctx.sync_completed.load(Ordering::SeqCst) && start.elapsed() < timeout { thread::sleep(Duration::from_millis(100)); - + // Check sync progress let progress = dash_spv_ffi_client_get_sync_progress(ctx.client); if !progress.is_null() { let p = &*progress; - println!("Sync progress: headers={}, filters={}, masternodes={}", + println!("Sync progress: headers={}, filters={}, masternodes={}", p.header_height, p.filter_header_height, p.masternode_height); dash_spv_ffi_sync_progress_destroy(progress); } } - + // Stop the client dash_spv_ffi_client_stop(ctx.client); - + // Check results let errors_vec = ctx.errors.lock().unwrap(); if !errors_vec.is_empty() { println!("Sync errors: {:?}", errors_vec); } - + let events_vec = ctx.events.lock().unwrap(); println!("Sync events: {} total", events_vec.len()); - + ctx.cleanup(); } } - + #[test] #[serial] fn test_wallet_monitoring_workflow() { unsafe { let mut ctx = IntegrationTestContext::new(FFINetwork::Regtest); - + // Add addresses to watch let test_addresses = [ "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E", "XuQQkwA4FYkq2XERzMY2CiAZhJTEkgZ6uN", "XpAy3DUNod14KdJJh3XUjtkAiUkD2kd4JT", ]; - + for addr in &test_addresses { let c_addr = CString::new(*addr).unwrap(); let result = dash_spv_ffi_client_watch_address(ctx.client, c_addr.as_ptr()); assert_eq!(result, FFIErrorCode::Success as i32); } - + // Set up event callbacks let events = ctx.events.clone(); - + extern "C" fn on_block(height: u32, hash: *const c_char, user_data: *mut c_void) { let ctx = unsafe { &*(user_data as *const IntegrationTestContext) }; - let hash_str = if hash.is_null() { - "null".to_string() - } else { + let 
hash_str = if hash.is_null() { + "null".to_string() + } else { unsafe { CStr::from_ptr(hash).to_str().unwrap().to_string() } }; ctx.events.lock().unwrap().push(format!("New block at height {}: {}", height, hash_str)); } - + extern "C" fn on_transaction(txid: *const c_char, confirmed: bool, user_data: *mut c_void) { let ctx = unsafe { &*(user_data as *const IntegrationTestContext) }; let txid_str = if txid.is_null() { @@ -179,14 +179,14 @@ mod tests { format!("Transaction {}: confirmed={}", txid_str, confirmed) ); } - + extern "C" fn on_balance(confirmed: u64, unconfirmed: u64, user_data: *mut c_void) { let ctx = unsafe { &*(user_data as *const IntegrationTestContext) }; ctx.events.lock().unwrap().push( format!("Balance update: confirmed={}, unconfirmed={}", confirmed, unconfirmed) ); } - + let event_callbacks = FFIEventCallbacks { on_block: Some(on_block), on_transaction: Some(on_transaction), @@ -198,66 +198,66 @@ mod tests { on_wallet_transaction: None, user_data: &ctx as *const _ as *mut c_void, }; - + dash_spv_ffi_client_set_event_callbacks(ctx.client, event_callbacks); - + // Start monitoring dash_spv_ffi_client_start(ctx.client); - + // Monitor for a while let monitor_duration = Duration::from_secs(5); let start = Instant::now(); - + while start.elapsed() < monitor_duration { // Check balances for addr in &test_addresses { let c_addr = CString::new(*addr).unwrap(); let balance = dash_spv_ffi_client_get_address_balance(ctx.client, c_addr.as_ptr()); - + if !balance.is_null() { let bal = &*balance; if bal.confirmed > 0 || bal.pending > 0 { - println!("Address {} has balance: confirmed={}, pending={}", + println!("Address {} has balance: confirmed={}, pending={}", addr, bal.confirmed, bal.pending); } dash_spv_ffi_balance_destroy(balance); } } - + thread::sleep(Duration::from_secs(1)); } - + dash_spv_ffi_client_stop(ctx.client); - + // Check events let events_vec = ctx.events.lock().unwrap(); println!("Wallet monitoring events: {} total", events_vec.len()); for event in events_vec.iter().take(10) { println!(" {}", event); } - + ctx.cleanup(); } } - + #[test] #[serial] fn test_transaction_broadcast_workflow() { unsafe { let mut ctx = IntegrationTestContext::new(FFINetwork::Regtest); - + // Start the client dash_spv_ffi_client_start(ctx.client); - + // Create a test transaction (this would normally come from wallet) // For testing, we'll use a minimal transaction hex let test_tx_hex = "01000000000100000000000000001976a914000000000000000000000000000000000000000088ac00000000"; let c_tx = CString::new(test_tx_hex).unwrap(); - + // Set up broadcast tracking let broadcast_result = Arc::new(Mutex::new(None)); let result_clone = broadcast_result.clone(); - + extern "C" fn on_broadcast_complete(success: bool, error: *const c_char, user_data: *mut c_void) { let result = unsafe { &*(user_data as *const Arc>>) }; let error_str = if error.is_null() { @@ -267,47 +267,47 @@ mod tests { }; *result.lock().unwrap() = Some((success, error_str)); } - + let callbacks = FFICallbacks { on_progress: None, on_completion: Some(on_broadcast_complete), on_data: None, user_data: &result_clone as *const _ as *mut c_void, }; - + // Broadcast transaction let result = dash_spv_ffi_client_broadcast_transaction(ctx.client, c_tx.as_ptr()); - + // In a real test, we'd wait for the broadcast result thread::sleep(Duration::from_secs(2)); - + // Check result if let Some((success, error)) = &*broadcast_result.lock().unwrap() { println!("Broadcast result: success={}, error={}", success, error); } - + 
dash_spv_ffi_client_stop(ctx.client); ctx.cleanup(); } } - + #[test] #[serial] fn test_concurrent_operations_workflow() { unsafe { let mut ctx = IntegrationTestContext::new(FFINetwork::Regtest); - + dash_spv_ffi_client_start(ctx.client); - + let client_ptr = Arc::new(Mutex::new(ctx.client)); let mut handles = vec![]; - + // Spawn multiple threads doing different operations for i in 0..5 { let client_clone = client_ptr.clone(); let handle = thread::spawn(move || { let client = *client_clone.lock().unwrap(); - + match i % 5 { 0 => { // Thread 1: Monitor sync progress @@ -365,116 +365,116 @@ mod tests { }); handles.push(handle); } - + // Wait for all threads for handle in handles { handle.join().unwrap(); } - + let client = *client_ptr.lock().unwrap(); dash_spv_ffi_client_stop(client); - + // Can't use cleanup() because client_ptr owns the client dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(ctx.config); } } - + #[test] #[serial] fn test_error_recovery_workflow() { unsafe { let mut ctx = IntegrationTestContext::new(FFINetwork::Regtest); - + // Test recovery from various error conditions - + // 1. Start without peers let result = dash_spv_ffi_client_start(ctx.client); - + // 2. Try to sync without being started (if not started above) let callbacks = FFICallbacks::default(); let sync_result = dash_spv_ffi_client_sync_to_tip(ctx.client, callbacks); - + // 3. Add invalid address let invalid_addr = CString::new("invalid_address").unwrap(); let watch_result = dash_spv_ffi_client_watch_address(ctx.client, invalid_addr.as_ptr()); assert_eq!(watch_result, FFIErrorCode::InvalidArgument as i32); - + // Check error was set let error_ptr = dash_spv_ffi_get_last_error(); if !error_ptr.is_null() { let error_str = CStr::from_ptr(error_ptr).to_str().unwrap(); println!("Expected error: {}", error_str); } - + // 4. Clear error and continue with valid operations dash_spv_ffi_clear_error(); - + let valid_addr = CString::new("XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E").unwrap(); let watch_result = dash_spv_ffi_client_watch_address(ctx.client, valid_addr.as_ptr()); assert_eq!(watch_result, FFIErrorCode::Success as i32); - + // 5. 
Test graceful shutdown dash_spv_ffi_client_stop(ctx.client); - + ctx.cleanup(); } } - + #[test] #[serial] fn test_persistence_workflow() { let temp_dir = TempDir::new().unwrap(); let data_path = temp_dir.path().to_str().unwrap(); - + unsafe { // Phase 1: Create client, add data, and shut down { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(data_path).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); - + // Add some watched addresses let addresses = [ "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1E", "XuQQkwA4FYkq2XERzMY2CiAZhJTEkgZ6uN", ]; - + for addr in &addresses { let c_addr = CString::new(*addr).unwrap(); dash_spv_ffi_client_watch_address(client, c_addr.as_ptr()); } - + // Perform some sync dash_spv_ffi_client_start(client); thread::sleep(Duration::from_secs(2)); - + // Get current state let progress1 = dash_spv_ffi_client_get_sync_progress(client); let height1 = if progress1.is_null() { 0 } else { (*progress1).header_height }; if !progress1.is_null() { dash_spv_ffi_sync_progress_destroy(progress1); } - + dash_spv_ffi_client_stop(client); dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); - + println!("Phase 1 complete, height: {}", height1); } - + // Phase 2: Create new client with same data directory { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(data_path).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); - + // Check if state was persisted let progress2 = dash_spv_ffi_client_get_sync_progress(client); if !progress2.is_null() { @@ -482,40 +482,40 @@ mod tests { println!("Phase 2 loaded, height: {}", height2); dash_spv_ffi_sync_progress_destroy(progress2); } - + // Check if watched addresses were persisted let watched = dash_spv_ffi_client_get_watched_addresses(client); if !watched.is_null() { println!("Watched addresses persisted: {} addresses", (*watched).len); dash_spv_ffi_array_destroy(*watched); } - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } } } - + #[test] #[serial] fn test_network_resilience_workflow() { unsafe { let mut ctx = IntegrationTestContext::new(FFINetwork::Regtest); - + // Add unreachable peers to test timeout handling let unreachable_peers = [ "192.0.2.1:9999", // TEST-NET-1 (unreachable) "198.51.100.1:9999", // TEST-NET-2 (unreachable) ]; - + for peer in &unreachable_peers { let c_peer = CString::new(*peer).unwrap(); dash_spv_ffi_config_add_peer(ctx.config, c_peer.as_ptr()); } - + // Start with network issues let start_result = dash_spv_ffi_client_start(ctx.client); - + // Try to sync with poor connectivity let sync_start = Instant::now(); let callbacks = FFICallbacks { @@ -524,19 +524,19 @@ mod tests { on_data: None, user_data: std::ptr::null_mut(), }; - + dash_spv_ffi_client_sync_to_tip(ctx.client, callbacks); - + // Should handle timeouts gracefully thread::sleep(Duration::from_secs(3)); - + // Check client is still responsive let stats = dash_spv_ffi_client_get_stats(ctx.client); if !stats.is_null() { println!("Client still responsive after network issues"); dash_spv_ffi_spv_stats_destroy(stats); } - + dash_spv_ffi_client_stop(ctx.client); ctx.cleanup(); } diff --git a/dash-spv-ffi/tests/performance/test_benchmarks.rs b/dash-spv-ffi/tests/performance/test_benchmarks.rs index 4096b9def..e4b1b0692 100644 --- 
a/dash-spv-ffi/tests/performance/test_benchmarks.rs +++ b/dash-spv-ffi/tests/performance/test_benchmarks.rs @@ -8,7 +8,7 @@ mod tests { use std::time::{Duration, Instant}; use std::sync::{Arc, Mutex}; use std::thread; - + struct BenchmarkResult { name: String, iterations: u64, @@ -18,7 +18,7 @@ mod tests { avg_time: Duration, ops_per_second: f64, } - + impl BenchmarkResult { fn new(name: &str, times: Vec) -> Self { let iterations = times.len() as u64; @@ -27,7 +27,7 @@ mod tests { let max_time = *times.iter().max().unwrap(); let avg_time = Duration::from_nanos((total_time.as_nanos() / iterations as u128) as u64); let ops_per_second = iterations as f64 / total_time.as_secs_f64(); - + BenchmarkResult { name: name.to_string(), iterations, @@ -38,7 +38,7 @@ mod tests { ops_per_second, } } - + fn print(&self) { println!("\nBenchmark: {}", self.name); println!(" Iterations: {}", self.iterations); @@ -49,7 +49,7 @@ mod tests { println!(" Ops/second: {:.2}", self.ops_per_second); } } - + #[test] #[serial] fn bench_string_allocation() { @@ -60,18 +60,18 @@ mod tests { &"x".repeat(1000), &"very long string ".repeat(1000), ]; - + for test_str in &test_strings { let mut times = Vec::new(); let iterations = 10000; - + for _ in 0..iterations { let start = Instant::now(); let ffi_str = FFIString::new(test_str); dash_spv_ffi_string_destroy(ffi_str); times.push(start.elapsed()); } - + let result = BenchmarkResult::new( &format!("String allocation (len={})", test_str.len()), times @@ -80,17 +80,17 @@ mod tests { } } } - + #[test] #[serial] fn bench_array_allocation() { unsafe { let sizes = vec![10, 100, 1000, 10000, 100000]; - + for size in sizes { let mut times = Vec::new(); let iterations = 1000; - + for _ in 0..iterations { let data: Vec = (0..size).collect(); let start = Instant::now(); @@ -98,7 +98,7 @@ mod tests { dash_spv_ffi_array_destroy(ffi_array); times.push(start.elapsed()); } - + let result = BenchmarkResult::new( &format!("Array allocation (size={})", size), times @@ -107,35 +107,35 @@ mod tests { } } } - + #[test] #[serial] fn bench_client_creation() { unsafe { let mut times = Vec::new(); let iterations = 100; - + for _ in 0..iterations { let temp_dir = TempDir::new().unwrap(); let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let start = Instant::now(); let client = dash_spv_ffi_client_new(config); let creation_time = start.elapsed(); - + times.push(creation_time); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } - + let result = BenchmarkResult::new("Client creation", times); result.print(); } } - + #[test] #[serial] fn bench_address_validation() { @@ -147,10 +147,10 @@ mod tests { "1BitcoinAddress", "XpAy3DUNod14KdJJh3XUjtkAiUkD2kd4JT", ]; - + let mut times = Vec::new(); let iterations = 10000; - + for _ in 0..iterations { for addr in &addresses { let c_addr = CString::new(*addr).unwrap(); @@ -159,12 +159,12 @@ mod tests { times.push(start.elapsed()); } } - + let result = BenchmarkResult::new("Address validation", times); result.print(); } } - + #[test] #[serial] fn bench_concurrent_operations() { @@ -173,71 +173,71 @@ mod tests { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); - + let client_ptr = 
Arc::new(Mutex::new(client)); let thread_count = 4; let ops_per_thread = 1000; - + let start = Instant::now(); let mut handles = vec![]; - + for _ in 0..thread_count { let client_clone = client_ptr.clone(); let handle = thread::spawn(move || { let mut times = Vec::new(); - + for _ in 0..ops_per_thread { let client = *client_clone.lock().unwrap(); let op_start = Instant::now(); - + // Perform various operations let progress = dash_spv_ffi_client_get_sync_progress(client); if !progress.is_null() { dash_spv_ffi_sync_progress_destroy(progress); } - + times.push(op_start.elapsed()); } - + times }); handles.push(handle); } - + let mut all_times = Vec::new(); for handle in handles { all_times.extend(handle.join().unwrap()); } - + let total_elapsed = start.elapsed(); - + let result = BenchmarkResult::new("Concurrent operations", all_times); result.print(); - + println!("Total concurrent execution time: {:?}", total_elapsed); println!("Total operations: {}", thread_count * ops_per_thread); - println!("Overall throughput: {:.2} ops/sec", + println!("Overall throughput: {:.2} ops/sec", (thread_count * ops_per_thread) as f64 / total_elapsed.as_secs_f64()); - + let client = *client_ptr.lock().unwrap(); dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn bench_callback_overhead() { unsafe { let iterations = 100000; let mut times = Vec::new(); - + // Minimal callback that does nothing extern "C" fn noop_callback(_: f64, _: *const c_char, _: *mut c_void) {} - + // Callback that does some work extern "C" fn work_callback(progress: f64, msg: *const c_char, user_data: *mut c_void) { if !user_data.is_null() { @@ -248,35 +248,35 @@ mod tests { let _ = CStr::from_ptr(msg); } } - + // Benchmark noop callback for _ in 0..iterations { let start = Instant::now(); noop_callback(50.0, std::ptr::null(), std::ptr::null_mut()); times.push(start.elapsed()); } - + let noop_result = BenchmarkResult::new("Noop callback", times.clone()); noop_result.print(); - + // Benchmark work callback times.clear(); let mut counter = 0u64; let msg = CString::new("Progress update").unwrap(); - + for _ in 0..iterations { let start = Instant::now(); work_callback(50.0, msg.as_ptr(), &mut counter as *mut _ as *mut c_void); times.push(start.elapsed()); } - + let work_result = BenchmarkResult::new("Work callback", times); work_result.print(); - + assert_eq!(counter, iterations); } } - + #[test] #[serial] fn bench_memory_churn() { @@ -286,17 +286,17 @@ mod tests { ("Sequential", false), ("Interleaved", true), ]; - + for (pattern_name, interleaved) in patterns { let mut times = Vec::new(); let iterations = 1000; let allocations_per_iteration = 100; - + let start = Instant::now(); - + for _ in 0..iterations { let iter_start = Instant::now(); - + if interleaved { // Interleaved allocation/deallocation for i in 0..allocations_per_iteration { @@ -317,36 +317,36 @@ mod tests { dash_spv_ffi_string_destroy(s); } } - + times.push(iter_start.elapsed()); } - + let total_elapsed = start.elapsed(); - + let result = BenchmarkResult::new( &format!("Memory churn - {}", pattern_name), times ); result.print(); - + println!("Total allocations: {}", iterations * allocations_per_iteration * 3); - println!("Allocations/sec: {:.2}", + println!("Allocations/sec: {:.2}", (iterations * allocations_per_iteration * 3) as f64 / total_elapsed.as_secs_f64()); } } } - + #[test] #[serial] fn bench_error_handling() { unsafe { let iterations = 100000; let mut times = Vec::new(); - + // Benchmark error setting and retrieval for i 
in 0..iterations { let error_msg = format!("Error number {}", i); - + let start = Instant::now(); set_last_error(&error_msg); let error_ptr = dash_spv_ffi_get_last_error(); @@ -356,26 +356,26 @@ mod tests { dash_spv_ffi_clear_error(); times.push(start.elapsed()); } - + let result = BenchmarkResult::new("Error handling cycle", times); result.print(); } } - + #[test] #[serial] fn bench_type_conversions() { let iterations = 100000; let mut times = Vec::new(); - + // Benchmark various type conversions for _ in 0..iterations { let start = Instant::now(); - + // Network enum conversions let net: dashcore::Network = FFINetwork::Dash.into(); let _ffi_net: FFINetwork = net.into(); - + // Create and convert complex types let progress = dash_spv::SyncProgress { header_height: 12345, @@ -388,60 +388,60 @@ mod tests { sync_start: std::time::SystemTime::now(), last_update: std::time::SystemTime::now(), }; - + let _ffi_progress = FFISyncProgress::from(progress); - + times.push(start.elapsed()); } - + let result = BenchmarkResult::new("Type conversions", times); result.print(); } - + #[test] #[serial] fn bench_large_data_handling() { unsafe { // Test performance with large data sets let sizes = vec![1_000, 10_000, 100_000, 1_000_000]; - + for size in sizes { // Large string handling let large_string = "X".repeat(size); let string_start = Instant::now(); let ffi_str = FFIString::new(&large_string); let string_alloc_time = string_start.elapsed(); - + let read_start = Instant::now(); let recovered = FFIString::from_ptr(ffi_str.ptr).unwrap(); let read_time = read_start.elapsed(); assert_eq!(recovered.len(), size); - + let destroy_start = Instant::now(); dash_spv_ffi_string_destroy(ffi_str); let destroy_time = destroy_start.elapsed(); - + println!("\nLarge string (size={}):", size); println!(" Allocation: {:?}", string_alloc_time); println!(" Read: {:?}", read_time); println!(" Destruction: {:?}", destroy_time); - println!(" MB/sec alloc: {:.2}", + println!(" MB/sec alloc: {:.2}", (size as f64 / 1_000_000.0) / string_alloc_time.as_secs_f64()); - + // Large array handling let large_array: Vec = (0..size as u64).collect(); let array_start = Instant::now(); let ffi_array = FFIArray::new(large_array); let array_alloc_time = array_start.elapsed(); - + let array_destroy_start = Instant::now(); dash_spv_ffi_array_destroy(ffi_array); let array_destroy_time = array_destroy_start.elapsed(); - + println!("Large array (size={}):", size); println!(" Allocation: {:?}", array_alloc_time); println!(" Destruction: {:?}", array_destroy_time); - println!(" Million elements/sec: {:.2}", + println!(" Million elements/sec: {:.2}", (size as f64 / 1_000_000.0) / array_alloc_time.as_secs_f64()); } } diff --git a/dash-spv-ffi/tests/security/test_security.rs b/dash-spv-ffi/tests/security/test_security.rs index 74d414b1c..db0755a8c 100644 --- a/dash-spv-ffi/tests/security/test_security.rs +++ b/dash-spv-ffi/tests/security/test_security.rs @@ -8,24 +8,24 @@ mod tests { use std::ptr; use std::sync::{Arc, Mutex}; use std::thread; - + #[test] #[serial] fn test_buffer_overflow_protection() { unsafe { // Test string handling with potential overflow scenarios - + // Very long string let long_string = "A".repeat(10_000_000); let ffi_str = FFIString::new(&long_string); assert!(!ffi_str.ptr.is_null()); - + // Verify we can read it back without corruption let recovered = FFIString::from_ptr(ffi_str.ptr).unwrap(); assert_eq!(recovered.len(), long_string.len()); - + dash_spv_ffi_string_destroy(ffi_str); - + // Test with strings containing special 
characters let special_chars = "\0\n\r\t\x01\x02\x03\xFF"; let c_string = CString::new(special_chars.replace('\0', "")).unwrap(); @@ -33,20 +33,20 @@ mod tests { ptr: c_string.as_ptr() as *mut c_char, length: special_chars.replace('\0', "").len(), }; - + if let Ok(recovered) = FFIString::from_ptr(ffi_special.ptr) { // Should handle special chars safely assert!(!recovered.is_empty()); } } } - + #[test] #[serial] fn test_null_pointer_dereferencing() { unsafe { // Test all functions with null pointers - + // Config functions assert_eq!(dash_spv_ffi_config_set_data_dir(ptr::null_mut(), ptr::null()), FFIErrorCode::NullPointer as i32); @@ -54,13 +54,13 @@ mod tests { FFIErrorCode::NullPointer as i32); assert_eq!(dash_spv_ffi_config_add_peer(ptr::null_mut(), ptr::null()), FFIErrorCode::NullPointer as i32); - + // Client functions assert!(dash_spv_ffi_client_new(ptr::null()).is_null()); assert_eq!(dash_spv_ffi_client_start(ptr::null_mut()), FFIErrorCode::NullPointer as i32); assert!(dash_spv_ffi_client_get_sync_progress(ptr::null_mut()).is_null()); - + // Destruction functions should handle null gracefully dash_spv_ffi_client_destroy(ptr::null_mut()); dash_spv_ffi_config_destroy(ptr::null_mut()); @@ -68,7 +68,7 @@ mod tests { dash_spv_ffi_array_destroy(FFIArray { data: ptr::null_mut(), len: 0, capacity: 0 }); } } - + #[test] #[serial] fn test_use_after_free_prevention() { @@ -77,38 +77,38 @@ mod tests { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); - + // Destroy the client dash_spv_ffi_client_destroy(client); - + // These operations should handle the freed pointer safely // (In a real implementation, these should check for validity) let result = dash_spv_ffi_client_start(client); assert_ne!(result, FFIErrorCode::Success as i32); - + // Destroy config dash_spv_ffi_config_destroy(config); - + // Using config after free should fail let result = dash_spv_ffi_config_set_max_peers(config, 10); assert_ne!(result, FFIErrorCode::Success as i32); } } - + #[test] #[serial] fn test_integer_overflow_protection() { unsafe { // Test with maximum values let config = dash_spv_ffi_config_new(FFINetwork::Regtest); - + // Test setting max peers to u32::MAX let result = dash_spv_ffi_config_set_max_peers(config, u32::MAX); assert_eq!(result, FFIErrorCode::Success as i32); - + // Test large array allocation let huge_size = usize::MAX / 2; // Avoid actual overflow let huge_array = FFIArray { @@ -116,14 +116,14 @@ mod tests { len: huge_size, capacity: huge_size, }; - + // Should handle large sizes safely dash_spv_ffi_array_destroy(huge_array); - + dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn test_race_condition_safety() { @@ -132,23 +132,23 @@ mod tests { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); assert!(!client.is_null()); - + let client_ptr = Arc::new(Mutex::new(client)); let stop_flag = Arc::new(Mutex::new(false)); let mut handles = vec![]; - + // Spawn threads that will race for i in 0..10 { let client_clone = client_ptr.clone(); let stop_clone = stop_flag.clone(); - + let handle = thread::spawn(move || { while !*stop_clone.lock().unwrap() { let client = *client_clone.lock().unwrap(); - + // 
Perform operations that might race match i % 3 { 0 => { @@ -169,36 +169,36 @@ mod tests { } _ => {} } - + thread::yield_now(); } }); handles.push(handle); } - + // Let threads race for a bit thread::sleep(std::time::Duration::from_millis(100)); - + // Stop all threads *stop_flag.lock().unwrap() = true; - + for handle in handles { handle.join().unwrap(); } - + let client = *client_ptr.lock().unwrap(); dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn test_input_validation() { unsafe { // Test various invalid inputs let config = dash_spv_ffi_config_new(FFINetwork::Regtest); - + // Invalid IP addresses let invalid_ips = vec![ "999.999.999.999:9999", @@ -209,21 +209,21 @@ mod tests { "", // Empty string ":::::", // Invalid IPv6 ]; - + for ip in invalid_ips { let c_ip = CString::new(ip).unwrap(); let result = dash_spv_ffi_config_add_peer(config, c_ip.as_ptr()); assert_eq!(result, FFIErrorCode::InvalidArgument as i32, "Should reject invalid IP: {}", ip); } - + // Invalid Bitcoin/Dash addresses let temp_dir = TempDir::new().unwrap(); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); - + let invalid_addrs = vec![ "", "notanaddress", @@ -232,31 +232,31 @@ mod tests { "XjSgy6PaVCB3V4KhCiCDkaVbx9ewxe9R1EE", // Too long &"X".repeat(100), // Way too long ]; - + for addr in invalid_addrs { let c_addr = CString::new(addr).unwrap(); let result = dash_spv_ffi_client_watch_address(client, c_addr.as_ptr()); assert_eq!(result, FFIErrorCode::InvalidArgument as i32, "Should reject invalid address: {}", addr); } - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn test_memory_exhaustion_handling() { unsafe { // Test allocation of many small objects let mut strings = Vec::new(); - + // Try to allocate many strings (but not enough to actually exhaust memory) for i in 0..10000 { let s = FFIString::new(&format!("String number {}", i)); strings.push(s); - + // Every 1000 allocations, free half to prevent actual exhaustion if i % 1000 == 999 { let half = strings.len() / 2; @@ -267,23 +267,23 @@ mod tests { } } } - + // Clean up remaining for s in strings { dash_spv_ffi_string_destroy(s); } - + // Test single large allocation let large_size = 100_000_000; // 100MB let large_string = "X".repeat(large_size); let large_ffi = FFIString::new(&large_string); - + // Should handle large allocation assert!(!large_ffi.ptr.is_null()); dash_spv_ffi_string_destroy(large_ffi); } } - + #[test] #[serial] fn test_callback_security() { @@ -297,26 +297,26 @@ mod tests { "../../../etc/passwd", // Path traversal "%00%00%00%00", // URL encoded nulls ]; - + extern "C" fn test_callback(progress: f64, msg: *const c_char, user_data: *mut c_void) { if !msg.is_null() { // Should safely handle any input let _ = CStr::from_ptr(msg); } - + // Validate progress is in expected range assert!(progress >= 0.0 && progress <= 100.0); } - + // Test callbacks with malicious messages for data in malicious_data { let c_str = CString::new(data.replace('\0', "")).unwrap(); test_callback(50.0, c_str.as_ptr(), ptr::null_mut()); } - + // Test callback with null message test_callback(50.0, ptr::null(), ptr::null_mut()); - + // Test callback with invalid progress values test_callback(-1.0, ptr::null(), ptr::null_mut()); test_callback(101.0, ptr::null(), ptr::null_mut()); @@ -324,13 +324,13 @@ mod tests { test_callback(f64::INFINITY, 
ptr::null(), ptr::null_mut()); } } - + #[test] #[serial] fn test_path_traversal_prevention() { unsafe { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); - + // Test potentially dangerous paths let dangerous_paths = vec![ "../../../sensitive/data", @@ -341,24 +341,24 @@ mod tests { "\0/etc/passwd", "data\0../../etc/passwd", ]; - + for path in dangerous_paths { // Remove null bytes for CString let safe_path = path.replace('\0', ""); let c_path = CString::new(safe_path).unwrap(); - + // Should accept the path (validation is up to the implementation) // but should not allow actual traversal let result = dash_spv_ffi_config_set_data_dir(config, c_path.as_ptr()); - + // The implementation should sanitize or validate paths println!("Path '{}' result: {}", path, result); } - + dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn test_cryptographic_material_handling() { @@ -368,21 +368,21 @@ mod tests { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); - + // Test with private key-like hex strings (should be rejected or handled carefully) let private_key_hex = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; let c_key = CString::new(private_key_hex).unwrap(); - + // This should not accept raw private keys let result = dash_spv_ffi_client_watch_script(client, c_key.as_ptr()); - + // Test transaction broadcast doesn't leak sensitive info let tx_hex = "0100000000010000000000000000"; let c_tx = CString::new(tx_hex).unwrap(); let broadcast_result = dash_spv_ffi_client_broadcast_transaction(client, c_tx.as_ptr()); - + // Check error messages don't contain sensitive data if broadcast_result != FFIErrorCode::Success as i32 { let error_ptr = dash_spv_ffi_get_last_error(); @@ -392,12 +392,12 @@ mod tests { assert!(!error_str.contains(tx_hex)); } } - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } } - + #[test] #[serial] fn test_dos_resistance() { @@ -406,14 +406,14 @@ mod tests { let config = dash_spv_ffi_config_new(FFINetwork::Regtest); let path = CString::new(temp_dir.path().to_str().unwrap()).unwrap(); dash_spv_ffi_config_set_data_dir(config, path.as_ptr()); - + let client = dash_spv_ffi_client_new(config); - + // Test rapid repeated operations let start = std::time::Instant::now(); let duration = std::time::Duration::from_millis(100); let mut operation_count = 0; - + while start.elapsed() < duration { // Rapidly request sync progress let progress = dash_spv_ffi_client_get_sync_progress(client); @@ -422,14 +422,14 @@ mod tests { } operation_count += 1; } - + println!("Performed {} operations in {:?}", operation_count, duration); - + // System should still be responsive let final_progress = dash_spv_ffi_client_get_sync_progress(client); assert!(!final_progress.is_null()); dash_spv_ffi_sync_progress_destroy(final_progress); - + dash_spv_ffi_client_destroy(client); dash_spv_ffi_config_destroy(config); } diff --git a/dash-spv/src/chain/chainlock_manager.rs b/dash-spv/src/chain/chainlock_manager.rs index 049c5aeac..977ad95ab 100644 --- a/dash-spv/src/chain/chainlock_manager.rs +++ b/dash-spv/src/chain/chainlock_manager.rs @@ -214,7 +214,7 @@ impl ChainLockManager { let required_height = chain_lock .block_height .saturating_sub(CHAINLOCK_VALIDATION_MASTERNODE_OFFSET); - warn!("⚠️ Masternode engine exists but lacks required masternode lists for height {} 
(needs list at height {} for ChainLock validation), queueing ChainLock for later validation", + warn!("⚠️ Masternode engine exists but lacks required masternode lists for height {} (needs list at height {} for ChainLock validation), queueing ChainLock for later validation", chain_lock.block_height, required_height); self.queue_pending_chainlock(chain_lock.clone()).map_err(|e| { ValidationError::InvalidChainLock(format!( diff --git a/dash-spv/src/client/config.rs b/dash-spv/src/client/config.rs index 1243196ee..5d9e579bf 100644 --- a/dash-spv/src/client/config.rs +++ b/dash-spv/src/client/config.rs @@ -90,7 +90,7 @@ pub struct ClientConfig { /// Interval for checking CFHeader gaps (seconds) pub cfheader_gap_check_interval_secs: u64, - /// Cooldown between CFHeader restart attempts (seconds) + /// Cooldown between CFHeader restart attempts (seconds) pub cfheader_gap_restart_cooldown_secs: u64, /// Maximum CFHeader gap restart attempts diff --git a/dash-spv/src/client/filter_sync.rs b/dash-spv/src/client/filter_sync.rs index f2a90725e..3bfaaab8e 100644 --- a/dash-spv/src/client/filter_sync.rs +++ b/dash-spv/src/client/filter_sync.rs @@ -139,7 +139,7 @@ impl< /// Sync filters in coordination with the monitoring loop using flow control processing async fn sync_filters_coordinated(&mut self, start_height: u32, count: u32) -> Result<()> { - tracing::info!("Starting coordinated filter sync with flow control from height {} to {} ({} filters expected)", + tracing::info!("Starting coordinated filter sync with flow control from height {} to {} ({} filters expected)", start_height, start_height + count - 1, count); // Start tracking filter sync progress @@ -163,7 +163,7 @@ impl< let (pending_count, active_count, flow_enabled) = self.sync_manager.filter_sync().get_flow_control_status(); - tracing::info!("✅ Filter sync with flow control initiated (flow control enabled: {}, {} requests queued, {} active)", + tracing::info!("✅ Filter sync with flow control initiated (flow control enabled: {}, {} requests queued, {} active)", flow_enabled, pending_count, active_count); Ok(()) diff --git a/dash-spv/src/client/message_handler.rs b/dash-spv/src/client/message_handler.rs index 9a6fbd84a..258f42d77 100644 --- a/dash-spv/src/client/message_handler.rs +++ b/dash-spv/src/client/message_handler.rs @@ -94,7 +94,7 @@ impl< }); } NetworkMessage::MnListDiff(ref diff) => { - tracing::info!("📨 Received MnListDiff message: {} new masternodes, {} deleted masternodes, {} quorums", + tracing::info!("📨 Received MnListDiff message: {} new masternodes, {} deleted masternodes, {} quorums", diff.new_masternodes.len(), diff.deleted_masternodes.len(), diff.new_quorums.len()); // Move to sync manager without cloning return self @@ -423,7 +423,7 @@ impl< tracing::error!("Failed to request transaction {}: {}", txid, e); } } else { - tracing::debug!("Not fetching transaction {} (fetch_mempool_transactions={}, should_fetch={})", + tracing::debug!("Not fetching transaction {} (fetch_mempool_transactions={}, should_fetch={})", txid, self.config.fetch_mempool_transactions, filter.should_fetch_transaction(&txid).await diff --git a/dash-spv/src/lib.rs b/dash-spv/src/lib.rs index 7ef611726..60f6d6c19 100644 --- a/dash-spv/src/lib.rs +++ b/dash-spv/src/lib.rs @@ -3,7 +3,7 @@ //! This library provides a complete implementation of a Dash SPV client that can: //! //! - Synchronize block headers from the Dash network -//! - Download and verify BIP157 compact block filters +//! - Download and verify BIP157 compact block filters //! 
- Maintain an up-to-date masternode list //! - Validate ChainLocks and InstantLocks //! - Monitor addresses and scripts for transactions diff --git a/dash-spv/src/network/manager.rs b/dash-spv/src/network/manager.rs index d578115d4..91b994203 100644 --- a/dash-spv/src/network/manager.rs +++ b/dash-spv/src/network/manager.rs @@ -464,7 +464,7 @@ impl PeerNetworkManager { payload, } => { // Log unknown messages with more detail - log::warn!("Received unknown message from {}: command='{}', payload_len={}", + log::warn!("Received unknown message from {}: command='{}', payload_len={}", addr, command, payload.len()); // Still forward to client } @@ -850,7 +850,7 @@ impl PeerNetworkManager { log::debug!("Sending {} to {}", message.cmd(), addr); } NetworkMessage::GetHeaders2(gh2) => { - log::info!("📤 Sending GetHeaders2 to {} - version: {}, locator_count: {}, locator: {:?}, stop: {}", + log::info!("📤 Sending GetHeaders2 to {} - version: {}, locator_count: {}, locator: {:?}, stop: {}", addr, gh2.version, gh2.locator_hashes.len(), diff --git a/dash-spv/src/network/peer.rs b/dash-spv/src/network/peer.rs index 8992d1222..1147a663b 100644 --- a/dash-spv/src/network/peer.rs +++ b/dash-spv/src/network/peer.rs @@ -663,7 +663,7 @@ impl Peer { if let Ok(duration) = now.duration_since(last_pong) { // If no pong in 10 minutes, consider unhealthy if duration > Duration::from_secs(600) { - tracing::warn!("Connection to {} marked unhealthy: no pong received for {} seconds (limit: 600)", + tracing::warn!("Connection to {} marked unhealthy: no pong received for {} seconds (limit: 600)", self.address, duration.as_secs()); return false; } @@ -673,7 +673,7 @@ impl Peer { if let Ok(duration) = now.duration_since(connected_at) { // Give new connections 5 minutes before considering them unhealthy if duration > Duration::from_secs(300) { - tracing::warn!("Connection to {} marked unhealthy: no pong activity after {} seconds (limit: 300, last_ping_sent: {:?})", + tracing::warn!("Connection to {} marked unhealthy: no pong activity after {} seconds (limit: 300, last_ping_sent: {:?})", self.address, duration.as_secs(), self.last_ping_sent.is_some()); return false; } diff --git a/dash-spv/src/sync/headers.rs b/dash-spv/src/sync/headers.rs index 146d13ce3..e1e60c49f 100644 --- a/dash-spv/src/sync/headers.rs +++ b/dash-spv/src/sync/headers.rs @@ -152,7 +152,7 @@ impl HeaderSyncManager { tip_after ); } else { - tracing::warn!("⚠️ Headers validated but tip height unchanged! Validated {} headers but tip remains at {:?}", + tracing::warn!("⚠️ Headers validated but tip height unchanged! 
Validated {} headers but tip remains at {:?}", validated_headers.len(), tip_before); } diff --git a/dash-spv/src/types.rs b/dash-spv/src/types.rs index a4d1e70ed..598bbd8e2 100644 --- a/dash-spv/src/types.rs +++ b/dash-spv/src/types.rs @@ -311,7 +311,7 @@ impl ChainState { // Add genesis header to the chain state state.headers.push(genesis_header); - tracing::debug!("Initialized ChainState with genesis block - network: {:?}, hash: {}, headers_count: {}", + tracing::debug!("Initialized ChainState with genesis block - network: {:?}, hash: {}, headers_count: {}", network, genesis_header.block_hash(), state.headers.len()); // Initialize masternode engine for the network diff --git a/dash-spv/tests/storage_consistency_test.rs b/dash-spv/tests/storage_consistency_test.rs index 6f1463bd2..40c3790dd 100644 --- a/dash-spv/tests/storage_consistency_test.rs +++ b/dash-spv/tests/storage_consistency_test.rs @@ -132,7 +132,7 @@ async fn test_tip_height_header_consistency_large_dataset() { if let Some(height) = tip_height { let header = storage.get_header(height).await.unwrap(); if header.is_none() { - panic!("❌ CONSISTENCY BUG DETECTED: tip_height={} but get_header({}) returned None after batch ending at {}", + panic!("❌ CONSISTENCY BUG DETECTED: tip_height={} but get_header({}) returned None after batch ending at {}", height, height, batch_end - 1); } @@ -211,7 +211,7 @@ async fn test_concurrent_tip_header_access() { if let Some(height) = tip_height { let header = storage.get_header(height).await.unwrap(); if header.is_none() { - panic!("❌ CONCURRENCY BUG DETECTED in task {}, iteration {}: tip_height={} but get_header({}) returned None", + panic!("❌ CONCURRENCY BUG DETECTED in task {}, iteration {}: tip_height={} but get_header({}) returned None", i, iteration, height, height); } @@ -220,7 +220,7 @@ async fn test_concurrent_tip_header_access() { let test_height = height.saturating_sub(offset); let test_header = storage.get_header(test_height).await.unwrap(); if test_header.is_none() { - panic!("❌ CONCURRENCY BUG: Header missing at height {} (tip is {}) in task {}", + panic!("❌ CONCURRENCY BUG: Header missing at height {} (tip is {}) in task {}", test_height, height, i); } } @@ -287,7 +287,7 @@ async fn test_reproduce_filter_sync_bug() { println!("Header at tip height {}: {:?}", tip_height, tip_header.is_some()); if tip_header.is_none() { - println!("🎯 REPRODUCED THE BUG! get_tip_height() returned {} but get_header({}) returned None", + println!("🎯 REPRODUCED THE BUG! get_tip_height() returned {} but get_header({}) returned None", tip_height, tip_height); } @@ -307,7 +307,7 @@ async fn test_reproduce_filter_sync_bug() { ); if tip_header.is_none() { - println!("🔥 EXACT BUG REPRODUCED: Fallback to tip {} also failed - this is the exact error from the logs!", + println!("🔥 EXACT BUG REPRODUCED: Fallback to tip {} also failed - this is the exact error from the logs!", tip_height); panic!("Reproduced the exact filter sync bug scenario"); } @@ -355,7 +355,7 @@ async fn test_reproduce_filter_sync_bug_small() { println!("Header at tip height {}: {:?}", tip_height, tip_header.is_some()); if tip_header.is_none() { - println!("🎯 REPRODUCED THE BUG! get_tip_height() returned {} but get_header({}) returned None", + println!("🎯 REPRODUCED THE BUG! 
get_tip_height() returned {} but get_header({}) returned None", tip_height, tip_height); } @@ -550,7 +550,7 @@ async fn test_concurrent_tip_height_access_with_eviction() { let header_result = storage.get_header(height).await.unwrap(); if header_result.is_none() { - panic!("🎯 CONCURRENT RACE CONDITION REPRODUCED in task {}, iteration {}!\n get_tip_height() = {}\n get_header({}) = None", + panic!("🎯 CONCURRENT RACE CONDITION REPRODUCED in task {}, iteration {}!\n get_tip_height() = {}\n get_header({}) = None", task_id, iteration, height, height); } @@ -615,7 +615,7 @@ async fn test_concurrent_tip_height_access_with_eviction_heavy() { let header_result = storage.get_header(height).await.unwrap(); if header_result.is_none() { - panic!("🎯 CONCURRENT RACE CONDITION REPRODUCED in task {}, iteration {}!\n get_tip_height() = {}\n get_header({}) = None", + panic!("🎯 CONCURRENT RACE CONDITION REPRODUCED in task {}, iteration {}!\n get_tip_height() = {}\n get_header({}) = None", task_id, iteration, height, height); } @@ -684,7 +684,7 @@ async fn test_tip_height_segment_boundary_race() { if let Some(tip) = reported_tip { let header = storage.get_header(tip).await.unwrap(); if header.is_none() { - panic!("🎯 SEGMENT BOUNDARY RACE DETECTED: After storing segment {}, tip_height={} but header is None", + panic!("🎯 SEGMENT BOUNDARY RACE DETECTED: After storing segment {}, tip_height={} but header is None", i, tip); } } diff --git a/dash-spv/tests/transaction_calculation_test.rs b/dash-spv/tests/transaction_calculation_test.rs index 47e224c82..a28b01433 100644 --- a/dash-spv/tests/transaction_calculation_test.rs +++ b/dash-spv/tests/transaction_calculation_test.rs @@ -6,7 +6,7 @@ use std::str::FromStr; /// Transaction 62364518eeb41d01f71f7aff9d1046f188dd6c1b311e84908298b2f82c0b7a1b /// /// This transaction shows wrong net amount calculation where: -/// - Expected: -0.00020527 BTC (fee + small transfer) +/// - Expected: -0.00020527 BTC (fee + small transfer) /// - Actual log showed: +13.88979473 BTC (incorrect) /// /// The bug appears to be in the balance change calculation logic where diff --git a/key-wallet-ffi/cbindgen.toml b/key-wallet-ffi/cbindgen.toml index 379d674a2..a0348dd69 100644 --- a/key-wallet-ffi/cbindgen.toml +++ b/key-wallet-ffi/cbindgen.toml @@ -4,10 +4,10 @@ language = "C" header = """/** * Key Wallet FFI - C Header File - * + * * This header provides C-compatible function declarations for the key-wallet * Rust library FFI bindings. - * + * * AUTO-GENERATED FILE - DO NOT EDIT * Generated using cbindgen */""" diff --git a/key-wallet-ffi/examples/check_transaction.c b/key-wallet-ffi/examples/check_transaction.c index 6a516cb2a..3bc2c2a55 100644 --- a/key-wallet-ffi/examples/check_transaction.c +++ b/key-wallet-ffi/examples/check_transaction.c @@ -58,23 +58,23 @@ extern void wallet_free(void* wallet); int main() { // Example mnemonic (DO NOT USE IN PRODUCTION) const char* mnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"; - + FFIError error = {0}; FFINetwork network = Testnet; - + // Create wallet void* wallet = wallet_create_from_mnemonic(mnemonic, NULL, network, &error); if (!wallet) { printf("Failed to create wallet: %s\n", error.message); return 1; } - + printf("Wallet created successfully\n"); - + // Example transaction bytes (this would be a real transaction in practice) uint8_t tx_bytes[] = { /* ... transaction data ... 
*/ }; size_t tx_len = sizeof(tx_bytes); - + // Check if transaction belongs to wallet FFITransactionCheckResult result = {0}; bool success = wallet_check_transaction( @@ -90,7 +90,7 @@ int main() { &result, &error ); - + if (success) { if (result.is_relevant) { printf("Transaction belongs to wallet!\n"); @@ -103,7 +103,7 @@ int main() { } else { printf("Failed to check transaction: %s\n", error.message); } - + // Check a confirmed transaction uint8_t block_hash[32] = { /* ... block hash ... */ }; success = wallet_check_transaction( @@ -119,13 +119,13 @@ int main() { &result, &error ); - + if (success && result.is_relevant) { printf("Confirmed transaction processed and wallet state updated\n"); } - + // Clean up wallet_free(wallet); - + return 0; } \ No newline at end of file diff --git a/key-wallet-ffi/generate_header.sh b/key-wallet-ffi/generate_header.sh index be823c9ea..8ffaff698 100755 --- a/key-wallet-ffi/generate_header.sh +++ b/key-wallet-ffi/generate_header.sh @@ -36,13 +36,13 @@ cbindgen \ if [ $? -eq 0 ]; then echo -e "${GREEN}✓ Header file generated successfully at include/key_wallet_ffi.h${NC}" - + # Show statistics echo -e "${GREEN}Header file statistics:${NC}" echo " Functions: $(grep -c "^[^/]*(" include/key_wallet_ffi.h 2>/dev/null || echo 0)" echo " Structs: $(grep -c "^typedef struct" include/key_wallet_ffi.h 2>/dev/null || echo 0)" echo " Enums: $(grep -c "^typedef enum" include/key_wallet_ffi.h 2>/dev/null || echo 0)" - + else echo -e "${RED}✗ Failed to generate header file${NC}" exit 1 diff --git a/key-wallet-ffi/include/key_wallet_ffi.h b/key-wallet-ffi/include/key_wallet_ffi.h index f46cf8e61..0e5dba7ef 100644 --- a/key-wallet-ffi/include/key_wallet_ffi.h +++ b/key-wallet-ffi/include/key_wallet_ffi.h @@ -1,9 +1,9 @@ /** * Key Wallet FFI - C Header File - * + * * This header provides C-compatible function declarations for the key-wallet * Rust library FFI bindings. - * + * * AUTO-GENERATED FILE - DO NOT EDIT * Generated using cbindgen */ diff --git a/key-wallet-ffi/include/key_wallet_ffi_test.h b/key-wallet-ffi/include/key_wallet_ffi_test.h index 039d5394e..2f7994c6d 100644 --- a/key-wallet-ffi/include/key_wallet_ffi_test.h +++ b/key-wallet-ffi/include/key_wallet_ffi_test.h @@ -1,9 +1,9 @@ /** * Key Wallet FFI - C Header File - * + * * This header provides C-compatible function declarations for the key-wallet * Rust library FFI bindings. 
- * + * * AUTO-GENERATED FILE - DO NOT EDIT * Generated using cbindgen */ diff --git a/key-wallet-ffi/scripts/generate_ffi_docs.py b/key-wallet-ffi/scripts/generate_ffi_docs.py index db1502f9f..73745e6b7 100755 --- a/key-wallet-ffi/scripts/generate_ffi_docs.py +++ b/key-wallet-ffi/scripts/generate_ffi_docs.py @@ -126,10 +126,10 @@ def categorize_functions(functions: List[FFIFunction]) -> Dict[str, List[FFIFunc 'Mnemonic Operations': [], 'Utility Functions': [], } - + for func in functions: name = func.name.lower() - + if 'initialize' in name or 'version' in name: categories['Initialization'].append(func) elif 'error' in name: @@ -154,15 +154,15 @@ def categorize_functions(functions: List[FFIFunction]) -> Dict[str, List[FFIFunc categories['Mnemonic Operations'].append(func) else: categories['Utility Functions'].append(func) - + # Remove empty categories return {k: v for k, v in categories.items() if v} def generate_markdown(functions: List[FFIFunction]) -> str: """Generate markdown documentation from FFI functions.""" - + categories = categorize_functions(functions) - + md = [] md.append("# Key-Wallet FFI API Documentation") md.append("") @@ -172,7 +172,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("") md.append(f"**Total Functions**: {len(functions)}") md.append("") - + # Table of Contents md.append("## Table of Contents") md.append("") @@ -180,45 +180,45 @@ def generate_markdown(functions: List[FFIFunction]) -> str: anchor = category.lower().replace(' ', '-') md.append(f"- [{category}](#{anchor})") md.append("") - + # Function Reference md.append("## Function Reference") md.append("") - + for category, funcs in categories.items(): if not funcs: continue - + anchor = category.lower().replace(' ', '-') md.append(f"### {category}") md.append("") md.append(f"Functions: {len(funcs)}") md.append("") - + # Create a table for each category md.append("| Function | Description | Module |") md.append("|----------|-------------|--------|") - + for func in sorted(funcs, key=lambda f: f.name): desc = func.doc_comment.split('.')[0] if func.doc_comment else "No description" desc = desc.replace('|', '\\|') # Escape pipes in description if len(desc) > 80: desc = desc[:77] + "..." md.append(f"| `{func.name}` | {desc} | {func.module} |") - + md.append("") - + # Detailed Function Documentation md.append("## Detailed Function Documentation") md.append("") - + for category, funcs in categories.items(): if not funcs: continue - + md.append(f"### {category} - Detailed") md.append("") - + for func in sorted(funcs, key=lambda f: f.name): md.append(f"#### `{func.name}`") md.append("") @@ -226,22 +226,22 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append(func.signature) md.append("```") md.append("") - + if func.doc_comment: md.append("**Description:**") md.append(func.doc_comment) md.append("") - + if func.safety_comment: md.append("**Safety:**") md.append(func.safety_comment) md.append("") - + md.append(f"**Module:** `{func.module}`") md.append("") md.append("---") md.append("") - + # Type Definitions md.append("## Type Definitions") md.append("") @@ -254,7 +254,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("- `FFIUTXO` - Unspent transaction output") md.append("- `FFINetworks` - Network enumeration") md.append("") - + # Memory Management md.append("## Memory Management") md.append("") @@ -265,7 +265,7 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("3. 
**Thread Safety**: Most functions are thread-safe, but check individual function documentation") md.append("4. **Error Handling**: Always check the `FFIError` parameter after function calls") md.append("") - + # Usage Examples md.append("## Usage Examples") md.append("") @@ -286,30 +286,30 @@ def generate_markdown(functions: List[FFIFunction]) -> str: md.append("wallet_manager_free(manager);") md.append("```") md.append("") - + return '\n'.join(md) def main(): # Find all Rust source files src_dir = Path(__file__).parent.parent / "src" - + all_functions = [] - + for rust_file in src_dir.rglob("*.rs"): functions = extract_ffi_functions(rust_file) all_functions.extend(functions) - + # Generate markdown markdown = generate_markdown(all_functions) - + # Write to file output_file = Path(__file__).parent.parent / "FFI_API.md" with open(output_file, 'w') as f: f.write(markdown) - + print(f"Generated FFI documentation with {len(all_functions)} functions") print(f"Output: {output_file}") - + return 0 if __name__ == "__main__": diff --git a/key-wallet-ffi/src/managed_wallet_tests.rs b/key-wallet-ffi/src/managed_wallet_tests.rs index 7c7ba8d83..6aaf87117 100644 --- a/key-wallet-ffi/src/managed_wallet_tests.rs +++ b/key-wallet-ffi/src/managed_wallet_tests.rs @@ -14,11 +14,11 @@ mod tests { #[test] fn test_managed_wallet_create_success() { let mut error = FFIError::success(); - + // Create a wallet first let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -28,16 +28,16 @@ mod tests { ) }; assert!(!wallet.is_null()); - + // Create managed wallet let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; - + // Should succeed assert!(!managed_wallet.is_null()); assert_eq!(error.code, FFIErrorCode::Success); - + // Clean up unsafe { managed_wallet_free(managed_wallet); @@ -48,11 +48,11 @@ mod tests { #[test] fn test_managed_wallet_create_null_wallet() { let mut error = FFIError::success(); - + let managed_wallet = unsafe { managed_wallet_create(ptr::null(), &mut error) }; - + assert!(managed_wallet.is_null()); assert_eq!(error.code, FFIErrorCode::InvalidInput); } @@ -60,11 +60,11 @@ mod tests { #[test] fn test_managed_wallet_mark_address_used_valid() { let mut error = FFIError::success(); - + // Create managed wallet let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -73,11 +73,11 @@ mod tests { &mut error, ) }; - + let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; - + // Test with a valid testnet address let address = CString::new("yXdxAYfK7KGx7gNpVHUfRsQMNpMj5cAadG").unwrap(); let success = unsafe { @@ -88,7 +88,7 @@ mod tests { &mut error, ) }; - + // Should succeed or fail gracefully depending on address validation // The function validates the address format internally if success { @@ -97,7 +97,7 @@ mod tests { // Address validation might fail due to library version differences assert!(error.code == FFIErrorCode::InvalidAddress); } - + // Clean up unsafe { managed_wallet_free(managed_wallet); @@ -108,11 +108,11 @@ mod tests { #[test] fn test_managed_wallet_mark_address_used_invalid() { let mut error = FFIError::success(); - + // Create managed wallet let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { 
wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -121,11 +121,11 @@ mod tests { &mut error, ) }; - + let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; - + // Test with invalid address let address = CString::new("invalid_address").unwrap(); let success = unsafe { @@ -136,10 +136,10 @@ mod tests { &mut error, ) }; - + assert!(!success); assert_eq!(error.code, FFIErrorCode::InvalidAddress); - + // Clean up unsafe { managed_wallet_free(managed_wallet); @@ -150,7 +150,7 @@ mod tests { #[test] fn test_managed_wallet_mark_address_used_null_address() { let mut error = FFIError::success(); - + let success = unsafe { managed_wallet_mark_address_used( ptr::null_mut(), @@ -159,7 +159,7 @@ mod tests { &mut error, ) }; - + assert!(!success); assert_eq!(error.code, FFIErrorCode::InvalidInput); } @@ -167,7 +167,7 @@ mod tests { #[test] fn test_managed_wallet_get_next_receive_address_not_implemented() { let mut error = FFIError::success(); - + let address = unsafe { managed_wallet_get_next_receive_address( ptr::null_mut(), @@ -177,7 +177,7 @@ mod tests { &mut error, ) }; - + assert!(address.is_null()); assert_eq!(error.code, FFIErrorCode::WalletError); } @@ -185,7 +185,7 @@ mod tests { #[test] fn test_managed_wallet_get_next_change_address_not_implemented() { let mut error = FFIError::success(); - + let address = unsafe { managed_wallet_get_next_change_address( ptr::null_mut(), @@ -195,7 +195,7 @@ mod tests { &mut error, ) }; - + assert!(address.is_null()); assert_eq!(error.code, FFIErrorCode::WalletError); } @@ -205,7 +205,7 @@ mod tests { let mut error = FFIError::success(); let mut addresses_out: *mut *mut std::os::raw::c_char = ptr::null_mut(); let mut count_out: usize = 0; - + let success = unsafe { managed_wallet_get_all_addresses( ptr::null(), @@ -216,7 +216,7 @@ mod tests { &mut error, ) }; - + assert!(success); assert_eq!(count_out, 0); assert!(addresses_out.is_null()); @@ -226,7 +226,7 @@ mod tests { #[test] fn test_managed_wallet_get_all_addresses_null_outputs() { let mut error = FFIError::success(); - + // Test with null addresses_out let success = unsafe { managed_wallet_get_all_addresses( @@ -238,10 +238,10 @@ mod tests { &mut error, ) }; - + assert!(!success); assert_eq!(error.code, FFIErrorCode::InvalidInput); - + // Test with null count_out let mut addresses_out: *mut *mut std::os::raw::c_char = ptr::null_mut(); let success = unsafe { @@ -254,7 +254,7 @@ mod tests { &mut error, ) }; - + assert!(!success); assert_eq!(error.code, FFIErrorCode::InvalidInput); } @@ -270,11 +270,11 @@ mod tests { #[test] fn test_managed_wallet_free_valid() { let mut error = FFIError::success(); - + // Create managed wallet let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -283,17 +283,17 @@ mod tests { &mut error, ) }; - + let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; assert!(!managed_wallet.is_null()); - + // Free managed wallet - should not crash unsafe { managed_wallet_free(managed_wallet); } - + // Clean up wallet unsafe { unsafe {wallet::wallet_free(wallet);} @@ -303,11 +303,11 @@ mod tests { #[test] fn test_ffi_managed_wallet_info_methods() { let mut error = FFIError::success(); - + // Create managed wallet let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -316,21 +316,21 @@ mod 
tests { &mut error, ) }; - + let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; assert!(!managed_wallet.is_null()); - + // Test that we can access the inner methods unsafe { let managed_ref = &*managed_wallet; let _inner = managed_ref.inner(); - + let managed_mut = &mut *managed_wallet; let _inner_mut = managed_mut.inner_mut(); } - + // Clean up unsafe { managed_wallet_free(managed_wallet); @@ -341,11 +341,11 @@ mod tests { #[test] fn test_managed_wallet_mark_address_used_utf8_error() { let mut error = FFIError::success(); - + // Create managed wallet let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -354,11 +354,11 @@ mod tests { &mut error, ) }; - + let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; - + // Create invalid UTF-8 string let invalid_utf8 = [0xFF, 0xFE, 0xFD, 0x00]; // Invalid UTF-8 bytes with null terminator let success = unsafe { @@ -369,10 +369,10 @@ mod tests { &mut error, ) }; - + assert!(!success); assert_eq!(error.code, FFIErrorCode::InvalidInput); - + // Clean up unsafe { managed_wallet_free(managed_wallet); @@ -383,11 +383,11 @@ mod tests { #[test] fn test_managed_wallet_address_operations_with_real_wallet() { let mut error = FFIError::success(); - + // Create managed wallet let mnemonic = CString::new(TEST_MNEMONIC).unwrap(); let passphrase = CString::new("").unwrap(); - + let wallet = unsafe { wallet::wallet_create_from_mnemonic( mnemonic.as_ptr(), @@ -396,12 +396,12 @@ mod tests { &mut error, ) }; - + let managed_wallet = unsafe { managed_wallet_create(wallet, &mut error) }; assert!(!managed_wallet.is_null()); - + // Test get_next_receive_address with real wallet (should still fail as not implemented) let address = unsafe { managed_wallet_get_next_receive_address( @@ -412,10 +412,10 @@ mod tests { &mut error, ) }; - + assert!(address.is_null()); assert_eq!(error.code, FFIErrorCode::WalletError); - + // Test get_next_change_address with real wallet (should still fail as not implemented) let address = unsafe { managed_wallet_get_next_change_address( @@ -426,10 +426,10 @@ mod tests { &mut error, ) }; - + assert!(address.is_null()); assert_eq!(error.code, FFIErrorCode::WalletError); - + // Clean up unsafe { managed_wallet_free(managed_wallet); diff --git a/key-wallet/src/account/account_type.rs b/key-wallet/src/account/account_type.rs index 10056de42..08ee92bc4 100644 --- a/key-wallet/src/account/account_type.rs +++ b/key-wallet/src/account/account_type.rs @@ -42,7 +42,7 @@ pub enum AccountType { }, /// Identity registration funding IdentityRegistration, - /// Identity top-up funding + /// Identity top-up funding IdentityTopUp { /// Registration index (which identity this is topping up) registration_index: u32, diff --git a/key-wallet/src/derivation.rs b/key-wallet/src/derivation.rs index 5f933a878..52722b501 100644 --- a/key-wallet/src/derivation.rs +++ b/key-wallet/src/derivation.rs @@ -594,7 +594,7 @@ mod tests { #[test] fn test_special_derivation_paths() { let mnemonic = Mnemonic::from_phrase( - "upper renew that grow pelican pave subway relief describe enforce suit hedgehog blossom dose swallow", + "upper renew that grow pelican pave subway relief describe enforce suit hedgehog blossom dose swallow", crate::mnemonic::Language::English ).unwrap(); diff --git a/key-wallet/src/wallet/initialization.rs b/key-wallet/src/wallet/initialization.rs index 2d215c175..e2310b9f9 100644 --- 
a/key-wallet/src/wallet/initialization.rs +++ b/key-wallet/src/wallet/initialization.rs @@ -62,7 +62,7 @@ pub enum WalletAccountCreationOptions { /// # Arguments /// * First: Set of BIP44 account indices /// * Second: Set of BIP32 account indices - /// * Third: Set of CoinJoin account indices + /// * Third: Set of CoinJoin account indices /// * Fourth: Set of identity top-up registration indices /// * Fifth: Additional special account type to create (e.g., IdentityRegistration) SpecificAccounts( diff --git a/key-wallet/src/wallet/managed_wallet_info/mod.rs b/key-wallet/src/wallet/managed_wallet_info/mod.rs index 4224b26b8..ad93570f2 100644 --- a/key-wallet/src/wallet/managed_wallet_info/mod.rs +++ b/key-wallet/src/wallet/managed_wallet_info/mod.rs @@ -37,7 +37,7 @@ pub struct ManagedWalletInfo { pub wallet_id: [u8; 32], /// Wallet name pub name: Option, - /// Wallet description + /// Wallet description pub description: Option, /// Wallet metadata pub metadata: WalletMetadata, diff --git a/key-wallet/src/wallet/managed_wallet_info/transaction_building.rs b/key-wallet/src/wallet/managed_wallet_info/transaction_building.rs index b37f75d86..cdc10709b 100644 --- a/key-wallet/src/wallet/managed_wallet_info/transaction_building.rs +++ b/key-wallet/src/wallet/managed_wallet_info/transaction_building.rs @@ -13,7 +13,7 @@ use dashcore::Transaction; pub enum AccountTypePreference { /// Use BIP44 account only BIP44, - /// Use BIP32 account only + /// Use BIP32 account only BIP32, /// Prefer BIP44, fallback to BIP32 PreferBIP44, diff --git a/key-wallet/test_bip38.sh b/key-wallet/test_bip38.sh index 3c63a1337..c3e0eaab3 100755 --- a/key-wallet/test_bip38.sh +++ b/key-wallet/test_bip38.sh @@ -1,7 +1,7 @@ #!/bin/bash # BIP38 Test Runner Script -# +# # This script runs all BIP38-related tests that are normally ignored due to their # slow execution time (caused by the computationally intensive scrypt algorithm). # @@ -30,9 +30,9 @@ cd "$(dirname "$0")" run_test_module() { local module=$1 local description=$2 - + echo -e "${YELLOW}Running $description...${NC}" - + if cargo test --lib $module -- --ignored --nocapture "$@" 2>&1; then echo -e "${GREEN}✓ $description passed${NC}" echo "" diff --git a/key-wallet/test_bip38_advanced.sh b/key-wallet/test_bip38_advanced.sh index 1b1b68423..580306c8e 100755 --- a/key-wallet/test_bip38_advanced.sh +++ b/key-wallet/test_bip38_advanced.sh @@ -1,10 +1,10 @@ #!/bin/bash # Advanced BIP38 Test Runner Script -# +# # This script provides more control over running BIP38 tests with various options. 
# -# Usage: +# Usage: # ./test_bip38_advanced.sh # Run all BIP38 tests # ./test_bip38_advanced.sh --quick # Run only quick BIP38 tests (skip performance) # ./test_bip38_advanced.sh --single # Run a specific test @@ -132,21 +132,21 @@ run_test() { local test_pattern=$1 local description=$2 local start_time=$(date +%s) - + echo -e "${YELLOW}Running: $description${NC}" - + # Build the test command local cmd="cargo test $CONFIG --lib $test_pattern -- --ignored" - + if [ "$VERBOSE" = true ]; then cmd="$cmd --nocapture" fi - + # Execute the test if eval $cmd 2>&1; then local end_time=$(date +%s) local duration=$((end_time - start_time)) - + if [ "$SHOW_TIMING" = true ]; then echo -e "${GREEN}✓ $description passed${NC} ($(format_duration $duration))" else @@ -199,12 +199,12 @@ else ["bip38_tests::tests::test_bip38_round_trip"]="Round-trip encryption/decryption" ["bip38_tests::tests::test_bip38_invalid_prefix"]="Invalid prefix handling" ) - + # Add performance test if not in quick mode if [ "$QUICK_MODE" = false ]; then TEST_MODULES["bip38_tests::tests::test_bip38_performance"]="Performance benchmark" fi - + # Run each test module for test in "${!TEST_MODULES[@]}"; do TOTAL_TESTS=$((TOTAL_TESTS + 1)) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift b/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift index acb83f2a0..7c51bc0f6 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift @@ -5,7 +5,7 @@ import SwiftDashCoreSDK @main struct DashHDWalletApp: App { let modelContainer: ModelContainer - + init() { do { let schema = Schema([ @@ -17,14 +17,14 @@ struct DashHDWalletApp: App { Balance.self, SyncState.self ]) - + let modelConfiguration = ModelConfiguration( schema: schema, isStoredInMemoryOnly: false, groupContainer: .automatic, cloudKitDatabase: .none ) - + modelContainer = try ModelContainer( for: schema, configurations: [modelConfiguration] @@ -33,7 +33,7 @@ struct DashHDWalletApp: App { fatalError("Could not create ModelContainer: \(error)") } } - + var body: some Scene { WindowGroup { ContentView() diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift index 313f689d8..ed5beef36 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift @@ -20,12 +20,12 @@ struct Account { let label: String let xpub: String var addresses: [Address] = [] - + var derivationPath: String { let coinType = network == "mainnet" ? 
5 : 1 return "m/44'/\(coinType)'/\(index)'" } - + let network: String } @@ -65,7 +65,7 @@ for i in 0..<3 { xpub: "tpubMockXpub\(i)", network: wallet.network ) - + // Generate addresses for j in 0..<5 { let address = Address( @@ -77,7 +77,7 @@ for i in 0..<3 { ) account.addresses.append(address) } - + wallet.accounts.append(account) print("✅ Created: \(account.label) (\(account.derivationPath))") } diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift index a681f7b43..2e6aa8e68 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift @@ -19,7 +19,7 @@ struct HDAccount { var label: String var addresses: [String] = [] var balance: Double = 0.0 - + var derivationPath: String { "m/44'/5'/\(index)'" } @@ -34,16 +34,16 @@ class MockWalletService: ObservableObject { @Published var syncProgress: Double = 0.0 @Published var currentBlock: Int = 0 @Published var totalBlocks: Int = 1000000 - + func createWallet(name: String, network: String) { let seedPhrase = [ "abandon", "abandon", "abandon", "abandon", - "abandon", "abandon", "abandon", "abandon", + "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "abandon", "about" ] - + var wallet = HDWallet(name: name, network: network, seedPhrase: seedPhrase) - + // Create default account var account = HDAccount(index: 0, label: "Primary Account") account.addresses = [ @@ -52,17 +52,17 @@ class MockWalletService: ObservableObject { ] account.balance = 1.5 wallet.accounts.append(account) - + wallets.append(wallet) currentWallet = wallet } - + func startSync() { guard !isConnected else { return } - + isConnected = true currentBlock = 900000 - + // Simulate sync progress Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { timer in if self.currentBlock < self.totalBlocks { @@ -81,7 +81,7 @@ class MockWalletService: ObservableObject { struct ContentView: View { @StateObject private var walletService = MockWalletService() @State private var showCreateWallet = false - + var body: some View { NavigationView { VStack { @@ -108,19 +108,19 @@ struct ContentView: View { struct EmptyStateView: View { let onCreateWallet: () -> Void - + var body: some View { VStack(spacing: 20) { Image(systemName: "wallet.pass") .font(.system(size: 60)) .foregroundColor(.gray) - + Text("No Wallets") .font(.title2) - + Text("Create a wallet to get started") .foregroundColor(.secondary) - + Button("Create Wallet", action: onCreateWallet) .buttonStyle(.borderedProminent) } @@ -130,22 +130,22 @@ struct EmptyStateView: View { struct CreateWalletView: View { @ObservedObject var walletService: MockWalletService @Binding var isPresented: Bool - + @State private var walletName = "" @State private var selectedNetwork = "testnet" - + var body: some View { NavigationView { Form { Section("Wallet Details") { TextField("Wallet Name", text: $walletName) - + Picker("Network", selection: $selectedNetwork) { Text("Mainnet").tag("mainnet") Text("Testnet").tag("testnet") } } - + Section("Recovery Phrase") { Text("A new recovery phrase will be generated") .foregroundColor(.secondary) @@ -158,7 +158,7 @@ struct CreateWalletView: View { isPresented = false } } - + ToolbarItem(placement: .confirmationAction) { Button("Create") { walletService.createWallet(name: walletName, network: selectedNetwork) @@ -174,7 +174,7 @@ struct 
CreateWalletView: View { struct WalletView: View { let wallet: HDWallet @ObservedObject var walletService: MockWalletService - + var body: some View { VStack(alignment: .leading, spacing: 20) { // Wallet Info @@ -182,11 +182,11 @@ struct WalletView: View { Text(wallet.name) .font(.title) .bold() - + HStack { Label(wallet.network.capitalized, systemImage: "network") Spacer() - Label(walletService.isConnected ? "Connected" : "Disconnected", + Label(walletService.isConnected ? "Connected" : "Disconnected", systemImage: walletService.isConnected ? "circle.fill" : "circle") .foregroundColor(walletService.isConnected ? .green : .red) } @@ -195,15 +195,15 @@ struct WalletView: View { .padding() .background(Color.gray.opacity(0.1)) .cornerRadius(10) - + // Sync Progress if walletService.isConnected && walletService.syncProgress < 1.0 { VStack(alignment: .leading, spacing: 10) { Text("Syncing...") .font(.headline) - + ProgressView(value: walletService.syncProgress) - + HStack { Text("Block \(walletService.currentBlock) of \(walletService.totalBlocks)") Spacer() @@ -216,19 +216,19 @@ struct WalletView: View { .background(Color.blue.opacity(0.1)) .cornerRadius(10) } - + // Accounts VStack(alignment: .leading, spacing: 10) { Text("Accounts") .font(.headline) - + ForEach(wallet.accounts, id: \.id) { account in AccountRow(account: account) } } - + Spacer() - + // Action Button if !walletService.isConnected { Button(action: { @@ -246,7 +246,7 @@ struct WalletView: View { struct AccountRow: View { let account: HDAccount - + var body: some View { VStack(alignment: .leading, spacing: 5) { HStack { @@ -256,11 +256,11 @@ struct AccountRow: View { Text("\(account.balance, specifier: "%.8f") DASH") .font(.system(.body, design: .monospaced)) } - + Text(account.derivationPath) .font(.caption) .foregroundColor(.secondary) - + Text("\(account.addresses.count) addresses") .font(.caption2) .foregroundColor(.secondary) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift index 089207fe6..64abf9b23 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift @@ -8,7 +8,7 @@ import UIKit @main struct DashHDWalletApp: App { let modelContainer: ModelContainer - + init() { // Force cleanup on first launch to handle model changes if !UserDefaults.standard.bool(forKey: "ModelV2Migrated") { @@ -16,14 +16,14 @@ struct DashHDWalletApp: App { ModelContainerHelper.cleanupCorruptStore() UserDefaults.standard.set(true, forKey: "ModelV2Migrated") } - + do { modelContainer = try ModelContainerHelper.createContainer() } catch { fatalError("Could not create ModelContainer: \(error)") } } - + var body: some Scene { WindowGroup { ContentView() diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift index e8644993d..d38624e24 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift @@ -13,9 +13,9 @@ final class HDWallet { var lastSynced: Date? 
var encryptedSeed: Data // Encrypted mnemonic seed var seedHash: String // For duplicate detection - + @Relationship(deleteRule: .cascade) var accounts: [HDAccount] - + init(name: String, network: DashNetwork, encryptedSeed: Data, seedHash: String) { self.id = UUID() self.name = name @@ -25,7 +25,7 @@ final class HDWallet { self.seedHash = seedHash self.accounts = [] } - + var displayNetwork: String { switch network { case .mainnet: @@ -38,7 +38,7 @@ final class HDWallet { return "Devnet" } } - + var totalBalance: Balance { let balance = Balance() for account in accounts { @@ -64,13 +64,13 @@ final class HDAccount { var lastUsedExternalIndex: UInt32 var lastUsedInternalIndex: UInt32 var gapLimit: UInt32 - + @Relationship var wallet: HDWallet? @Relationship(deleteRule: .cascade) var balance: Balance? @Relationship(deleteRule: .cascade) var addresses: [HDWatchedAddress] // Transaction IDs associated with this account (stored as comma-separated string) private var transactionIdsString: String = "" - + var transactionIds: [String] { get { transactionIdsString.isEmpty ? [] : transactionIdsString.split(separator: ",").map(String.init) @@ -79,7 +79,7 @@ final class HDAccount { transactionIdsString = newValue.joined(separator: ",") } } - + init( accountIndex: UInt32, label: String, @@ -96,25 +96,25 @@ final class HDAccount { self.gapLimit = gapLimit self.addresses = [] } - + var displayName: String { return label.isEmpty ? "Account #\(accountIndex)" : label } - + var derivationPath: String { guard let wallet = wallet else { return "" } let coinType: UInt32 = wallet.network == .mainnet ? 5 : 1 return "m/44'/\(coinType)'/\(accountIndex)'" } - + var externalAddresses: [HDWatchedAddress] { addresses.filter { !$0.isChange }.sorted { $0.index < $1.index } } - + var internalAddresses: [HDWatchedAddress] { addresses.filter { $0.isChange }.sorted { $0.index < $1.index } } - + var receiveAddress: HDWatchedAddress? { // Find the first unused address or the next one to generate return externalAddresses.first { $0.transactionIds.isEmpty } @@ -134,7 +134,7 @@ final class HDWatchedAddress { private var transactionIdsString: String = "" // UTXO outpoints associated with this address (stored as comma-separated string) private var utxoOutpointsString: String = "" - + var transactionIds: [String] { get { transactionIdsString.isEmpty ? [] : transactionIdsString.split(separator: ",").map(String.init) @@ -143,7 +143,7 @@ final class HDWatchedAddress { transactionIdsString = newValue.joined(separator: ",") } } - + var utxoOutpoints: [String] { get { utxoOutpointsString.isEmpty ? [] : utxoOutpointsString.split(separator: ",").map(String.init) @@ -152,13 +152,13 @@ final class HDWatchedAddress { utxoOutpointsString = newValue.joined(separator: ",") } } - + // HD specific properties var index: UInt32 var isChange: Bool var derivationPath: String @Relationship(inverse: \HDAccount.addresses) var account: HDAccount? - + init(address: String, index: UInt32, isChange: Bool, derivationPath: String, label: String? = nil) { self.address = address self.index = index @@ -168,7 +168,7 @@ final class HDWatchedAddress { self.createdAt = Date() self.balance = nil } - + var formattedBalance: String { guard let balance = balance else { return "0.00000000 DASH" } return balance.formattedTotal @@ -206,7 +206,7 @@ final class SyncState { var lastError: String? var startTime: Date var estimatedCompletion: Date? 
- + init(walletId: UUID) { self.walletId = walletId self.currentHeight = 0 @@ -215,13 +215,13 @@ final class SyncState { self.status = "idle" self.startTime = Date() } - + func update(from syncProgress: SyncProgress) { self.currentHeight = syncProgress.currentHeight self.totalHeight = syncProgress.totalHeight self.progress = syncProgress.progress self.status = syncProgress.status.rawValue - + if let eta = syncProgress.estimatedTimeRemaining { self.estimatedCompletion = Date().addingTimeInterval(eta) } diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift index 01491cf42..3e78e0423 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift @@ -6,16 +6,16 @@ import KeyWalletFFISwift // MARK: - HD Wallet Service class HDWalletService { - + // MARK: - Mnemonic Generation - + static func generateMnemonic(strength: Int = 128) -> [String] { do { // Use the proper BIP39 implementation from key-wallet-ffi // Word count: 12 words for 128-bit entropy, 24 words for 256-bit entropy let wordCount: UInt8 = strength == 256 ? 24 : 12 let mnemonic = try Mnemonic.generate(language: .english, wordCount: wordCount) - + // Split the phrase into words let words = mnemonic.phrase().split(separator: " ").map { String($0) } return words @@ -25,7 +25,7 @@ class HDWalletService { return generateFallbackMnemonic() } } - + private static func generateFallbackMnemonic() -> [String] { // Generate 12 random words from a small set // This is NOT cryptographically secure but better than hardcoded values @@ -38,53 +38,53 @@ class HDWalletService { "cash", "cast", "cell", "chat", "chip", "city", "clay", "clean", "clip", "club", "coal", "coat", "code", "coin", "cold", "come" ] - + var mnemonic: [String] = [] for _ in 0..<12 { let randomIndex = Int.random(in: 0.. [String] { // Simplified entropy to word mapping // In production, this should use proper BIP39 algorithm with checksum let wordList = getBIP39WordList() var words: [String] = [] - + // Simple mapping: take 11 bits at a time to index into 2048-word list let bits = entropy.flatMap { byte in (0..<8).reversed().map { (byte >> $0) & 1 } } - + // For 128-bit entropy, we need 12 words (132 bits with checksum) // This is simplified - proper BIP39 adds checksum bits for i in 0..<12 { let startBit = i * 11 let endBit = min(startBit + 11, bits.count) - + if endBit <= bits.count { var index = 0 for j in startBit.. [String] { // First 100 words of BIP39 English word list // In production, use the full 2048-word list @@ -104,7 +104,7 @@ class HDWalletService { "army", "around", "arrange", "arrest", "arrive", "arrow", "art", "artefact" ] } - + static func validateMnemonic(_ words: [String]) -> Bool { let phrase = words.joined(separator: " ") do { @@ -115,9 +115,9 @@ class HDWalletService { return false } } - + // MARK: - Seed Operations - + static func mnemonicToSeed(_ mnemonic: [String], passphrase: String = "") -> Data { do { let phrase = mnemonic.joined(separator: " ") @@ -131,28 +131,28 @@ class HDWalletService { return phrase.data(using: .utf8) ?? 
Data() } } - + static func seedHash(_ seed: Data) -> String { let hash = SHA256.hash(data: seed) return hash.compactMap { String(format: "%02x", $0) }.joined() } - + // MARK: - Encryption - + static func encryptSeed(_ seed: Data, password: String) throws -> Data { // In a real app, use proper encryption (e.g., CryptoKit) // This is a placeholder return seed } - + static func decryptSeed(_ encryptedSeed: Data, password: String) throws -> Data { // In a real app, use proper decryption // This is a placeholder return encryptedSeed } - + // MARK: - Key Derivation - + static func deriveExtendedPublicKey( seed: Data, network: DashNetwork, @@ -161,13 +161,13 @@ class HDWalletService { do { // Convert DashNetwork to KeyWalletFFI Network let ffiNetwork = convertToFFINetwork(network) - + // Create HD wallet from seed let hdWallet = try HdWallet.fromSeed(seed: Array(seed), network: ffiNetwork) - + // Get account extended public key let accountXPub = try hdWallet.getAccountXpub(account: account) - + return accountXPub.xpub } catch { print("Failed to derive extended public key: \(error)") @@ -176,7 +176,7 @@ class HDWalletService { return "\(prefix)MockExtendedPublicKey\(account)" } } - + static func deriveAddress( xpub: String, network: DashNetwork, @@ -186,10 +186,10 @@ class HDWalletService { do { // Convert DashNetwork to KeyWalletFFI Network let ffiNetwork = convertToFFINetwork(network) - + // Create address generator let addressGenerator = AddressGenerator(network: ffiNetwork) - + // Create AccountXPub from the extended public key string // The derivation path will be filled in by the FFI when getting account xpub let accountXPub = AccountXPub( @@ -197,14 +197,14 @@ class HDWalletService { xpub: xpub, pubKey: nil ) - + // Generate the address let address = try addressGenerator.generate( accountXpub: accountXPub, external: !change, // external=true for receive addresses, false for change index: index ) - + return address.toString() } catch { print("Failed to derive address: \(error)") @@ -214,7 +214,7 @@ class HDWalletService { return "\(prefix)MockAddress\(changeStr)\(index)" } } - + static func deriveAddresses( xpub: String, network: DashNetwork, @@ -225,17 +225,17 @@ class HDWalletService { do { // Convert DashNetwork to KeyWalletFFI Network let ffiNetwork = convertToFFINetwork(network) - + // Create address generator let addressGenerator = AddressGenerator(network: ffiNetwork) - + // Create AccountXPub from string let accountXPub = AccountXPub( derivationPath: "", // Path is not needed for address generation xpub: xpub, pubKey: nil ) - + // Generate addresses in range let addresses = try addressGenerator.generateRange( accountXpub: accountXPub, @@ -243,7 +243,7 @@ class HDWalletService { start: startIndex, count: count ) - + return addresses.map { $0.toString() } } catch { print("Failed to derive addresses: \(error)") @@ -253,9 +253,9 @@ class HDWalletService { } } } - + // MARK: - Helper Functions - + static func convertToFFINetwork(_ network: DashNetwork) -> KeyWalletFFISwift.Network { switch network { case .mainnet: @@ -275,12 +275,12 @@ class HDWalletService { class AddressDiscoveryService { private let sdk: DashSDK private let walletService: HDWalletService - + init(sdk: DashSDK) { self.sdk = sdk self.walletService = HDWalletService() } - + func discoverAddresses( for account: HDAccount, network: DashNetwork, @@ -288,7 +288,7 @@ class AddressDiscoveryService { ) async throws -> (external: [String], internal: [String]) { var externalAddresses: [String] = [] var internalAddresses: [String] 
= [] - + // Discover external addresses let (lastExternal, discoveredExternal) = try await discoverChain( xpub: account.extendedPublicKey, @@ -299,7 +299,7 @@ class AddressDiscoveryService { ) externalAddresses = discoveredExternal account.lastUsedExternalIndex = lastExternal - + // Discover internal (change) addresses let (lastInternal, discoveredInternal) = try await discoverChain( xpub: account.extendedPublicKey, @@ -310,10 +310,10 @@ class AddressDiscoveryService { ) internalAddresses = discoveredInternal account.lastUsedInternalIndex = lastInternal - + return (externalAddresses, internalAddresses) } - + private func discoverChain( xpub: String, network: DashNetwork, @@ -325,7 +325,7 @@ class AddressDiscoveryService { var lastUsedIndex: UInt32 = 0 var consecutiveUnused: UInt32 = 0 var currentIndex = startIndex - + while consecutiveUnused < gapLimit { // Derive batch of addresses let batchSize: UInt32 = 10 @@ -336,12 +336,12 @@ class AddressDiscoveryService { startIndex: currentIndex, count: batchSize ) - + // Check each address for transactions for (offset, address) in batch.enumerated() { let index = currentIndex + UInt32(offset) addresses.append(address) - + // Check if address has been used let transactions = try await sdk.getTransactions(for: address, limit: 1) if !transactions.isEmpty { @@ -350,15 +350,15 @@ class AddressDiscoveryService { } else { consecutiveUnused += 1 } - + if consecutiveUnused >= gapLimit { break } } - + currentIndex += batchSize } - + return (lastUsedIndex, addresses) } } @@ -366,11 +366,11 @@ class AddressDiscoveryService { // MARK: - Key Wallet FFI Bridge class KeyWalletBridge { - + struct WalletWrapper { let hdWallet: HdWallet let network: DashNetwork - + func deriveAccount(_ index: UInt32) -> AccountWrapper { do { let accountXPub = try hdWallet.getAccountXpub(account: index) @@ -396,12 +396,12 @@ class KeyWalletBridge { } } } - + struct AccountWrapper { let index: UInt32 let xpub: String let network: DashNetwork - + func deriveAddress(change: Bool, index: UInt32) -> String { return HDWalletService.deriveAddress( xpub: xpub, @@ -411,7 +411,7 @@ class KeyWalletBridge { ) } } - + static func createWallet(mnemonic: [String], network: DashNetwork) -> WalletWrapper? { do { let phrase = mnemonic.joined(separator: " ") diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift index 0ff8a1b40..38407ec9c 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift @@ -19,7 +19,7 @@ public enum WatchAddressError: Error, LocalizedError { case networkError(String) case alreadyWatching(String) case unknownError(String) - + public var errorDescription: String? { switch self { case .clientNotConnected: @@ -36,7 +36,7 @@ public enum WatchAddressError: Error, LocalizedError { return "Unknown error: \(reason)" } } - + public var isRecoverable: Bool { switch self { case .clientNotConnected, .networkError, .storageFailure: @@ -50,7 +50,7 @@ public enum WatchAddressError: Error, LocalizedError { @MainActor class WalletService: ObservableObject { static let shared = WalletService() - + @Published var activeWallet: HDWallet? @Published var activeAccount: HDAccount? @Published var syncProgress: SyncProgress? 
@@ -61,17 +61,17 @@ class WalletService: ObservableObject { @Published var pendingWatchCount: Int = 0 @Published var watchVerificationStatus: WatchVerificationStatus = .unknown @Published var mempoolTransactionCount: Int = 0 - + var sdk: DashSDK? private var cancellables = Set() private var syncTask: Task? var modelContext: ModelContext? - + // Watch address error tracking private var pendingWatchAddresses: [String: [(address: String, error: Error)]] = [:] private var watchVerificationTimer: Timer? private let logger = Logger(subsystem: "com.dash.wallet", category: "WalletService") - + // Computed property for sync statistics var syncStatistics: [String: String] { guard let progress = detailedSyncProgress else { @@ -79,15 +79,15 @@ class WalletService: ObservableObject { } return progress.statistics } - + private init() {} - + func configure(modelContext: ModelContext) { self.modelContext = modelContext } - + // MARK: - Wallet Management - + func createWallet( name: String, mnemonic: [String], @@ -97,21 +97,21 @@ class WalletService: ObservableObject { guard let context = modelContext else { throw WalletError.noContext } - + // Generate seed from mnemonic let seed = HDWalletService.mnemonicToSeed(mnemonic) let seedHash = HDWalletService.seedHash(seed) - + // Check for duplicate wallet let descriptor = FetchDescriptor() let allWallets = try context.fetch(descriptor) if allWallets.first(where: { $0.seedHash == seedHash && $0.network == network }) != nil { throw WalletError.duplicateWallet } - + // Encrypt seed let encryptedSeed = try HDWalletService.encryptSeed(seed, password: password) - + // Create wallet let wallet = HDWallet( name: name, @@ -119,9 +119,9 @@ class WalletService: ObservableObject { encryptedSeed: encryptedSeed, seedHash: seedHash ) - + context.insert(wallet) - + // Create default account let account = try createAccount( for: wallet, @@ -130,12 +130,12 @@ class WalletService: ObservableObject { password: password ) wallet.accounts.append(account) - + try context.save() - + return wallet } - + func createAccount( for wallet: HDWallet, index: UInt32, @@ -144,27 +144,27 @@ class WalletService: ObservableObject { ) throws -> HDAccount { // Decrypt seed let seed = try HDWalletService.decryptSeed(wallet.encryptedSeed, password: password) - + // Derive account xpub let xpub = HDWalletService.deriveExtendedPublicKey( seed: seed, network: wallet.network, account: index ) - + // Create account let account = HDAccount( accountIndex: index, label: label, extendedPublicKey: xpub ) - + account.wallet = wallet - + // Generate initial addresses (5 receive, 1 change) let initialReceiveCount = 5 let initialChangeCount = 1 - + // Generate receive addresses for i in 0.. 0 ? TimeInterval(progress.estimatedSecondsRemaining) : nil, message: progress.stageMessage ) - + // Log progress every second to avoid spam if Date().timeIntervalSince(lastLogTime) > 1.0 { print("\(progress.stage.icon) \(progress.statusMessage)") @@ -375,32 +375,32 @@ class WalletService: ObservableObject { print(" Peers: \(progress.connectedPeers) | Headers: \(progress.totalHeadersProcessed)") lastLogTime = Date() } - + // Update sync state in storage if let wallet = activeWallet { await self.updateSyncState(walletId: wallet.id, progress: self.syncProgress!) } - + // Check if sync is complete if progress.isComplete { break } } - + // Sync completed print("✅ Sync completed!") self.isSyncing = false if let wallet = activeWallet { wallet.lastSynced = Date() try? 
modelContext?.save() - + // Update balance after sync if let account = activeAccount { print("💰 Updating balance after sync...") try? await updateAccountBalance(account) } } - + } catch { self.isSyncing = false self.detailedSyncProgress = nil @@ -408,7 +408,7 @@ class WalletService: ObservableObject { } } } - + // Helper to map sync stage to legacy status private func mapSyncStageToStatus(_ stage: SyncStage) -> SyncStatus { switch stage { @@ -424,28 +424,28 @@ class WalletService: ObservableObject { return .error } } - + func stopSync() { syncTask?.cancel() isSyncing = false - + // Note: cancelSync would need to be exposed on DashSDK if we want to cancel at the SPVClient level } - + // Alternative sync method using callbacks for real-time updates func startSyncWithCallbacks() async throws { guard let sdk = sdk, isConnected else { throw WalletError.notConnected } - + print("🔄 Starting callback-based sync for wallet: \(activeWallet?.name ?? "Unknown")") isSyncing = true - + try await sdk.syncToTipWithProgress( progressCallback: { [weak self] progress in Task { @MainActor in self?.detailedSyncProgress = progress - + // Convert to legacy SyncProgress self?.syncProgress = SyncProgress( currentHeight: progress.currentHeight, @@ -455,20 +455,20 @@ class WalletService: ObservableObject { estimatedTimeRemaining: progress.estimatedSecondsRemaining > 0 ? TimeInterval(progress.estimatedSecondsRemaining) : nil, message: progress.stageMessage ) - + print("\(progress.stage.icon) \(progress.statusMessage)") } }, completionCallback: { [weak self] success, error in Task { @MainActor in self?.isSyncing = false - + if success { print("✅ Sync completed successfully!") if let wallet = self?.activeWallet { wallet.lastSynced = Date() try? self?.modelContext?.save() - + // Update balance after sync if let account = self?.activeAccount { print("💰 Updating balance after sync...") @@ -483,21 +483,21 @@ class WalletService: ObservableObject { } ) } - + // MARK: - Address Management - + func discoverAddresses(for account: HDAccount) async throws { guard let sdk = sdk, let wallet = account.wallet else { throw WalletError.invalidState } - + let discoveryService = AddressDiscoveryService(sdk: sdk) let (externalAddresses, internalAddresses) = try await discoveryService.discoverAddresses( for: account, network: wallet.network, gapLimit: account.gapLimit ) - + // Save discovered addresses try await saveDiscoveredAddresses( account: account, @@ -505,28 +505,28 @@ class WalletService: ObservableObject { internalAddresses: internalAddresses ) } - + func generateNewAddress(for account: HDAccount, isChange: Bool = false) throws -> HDWatchedAddress { guard let wallet = account.wallet, let context = modelContext else { throw WalletError.noContext } - + let index = isChange ? account.lastUsedInternalIndex + 1 : account.lastUsedExternalIndex + 1 - + let address = HDWalletService.deriveAddress( xpub: account.extendedPublicKey, network: wallet.network, change: isChange, index: index ) - + let path = BIP44.derivationPath( network: wallet.network, account: account.accountIndex, change: isChange, index: index ) - + let watchedAddress = HDWatchedAddress( address: address, index: index, @@ -535,17 +535,17 @@ class WalletService: ObservableObject { label: isChange ? 
"Change" : "Receive" ) watchedAddress.account = account - + account.addresses.append(watchedAddress) - + if isChange { account.lastUsedInternalIndex = index } else { account.lastUsedExternalIndex = index } - + try context.save() - + // Watch in SDK with proper error handling Task { do { @@ -563,22 +563,22 @@ class WalletService: ObservableObject { } } } - + return watchedAddress } - + // MARK: - Balance & Transactions - + func updateAccountBalance(_ account: HDAccount) async throws { guard let sdk = sdk else { throw WalletError.notConnected } - + var confirmedTotal: UInt64 = 0 var pendingTotal: UInt64 = 0 var instantLockedTotal: UInt64 = 0 var mempoolTotal: UInt64 = 0 - + for address in account.addresses { // Use getBalanceWithMempool to include mempool transactions let balance = try await sdk.getBalanceWithMempool(for: address.address) @@ -587,7 +587,7 @@ class WalletService: ObservableObject { instantLockedTotal += balance.instantLocked mempoolTotal += balance.mempool } - + account.balance = Balance( confirmed: confirmedTotal, pending: pendingTotal, @@ -596,15 +596,15 @@ class WalletService: ObservableObject { ) try? modelContext?.save() } - + func updateTransactions(for account: HDAccount) async throws { guard let sdk = sdk, let context = modelContext else { throw WalletError.notConnected } - + for address in account.addresses { let sdkTransactions = try await sdk.getTransactions(for: address.address) - + for sdkTx in sdkTransactions { // Check if transaction already exists let txidToCheck = sdkTx.txid @@ -614,7 +614,7 @@ class WalletService: ObservableObject { } ) let existingTransactions = try? context.fetch(descriptor) - + if existingTransactions?.isEmpty == false { // Transaction already exists, skip continue @@ -633,7 +633,7 @@ class WalletService: ObservableObject { version: sdkTx.version ) context.insert(newTransaction) - + // Add transaction ID to account and address if !account.transactionIds.contains(sdkTx.txid) { account.transactionIds.append(sdkTx.txid) @@ -644,12 +644,12 @@ class WalletService: ObservableObject { } } } - + try context.save() } - + // MARK: - Private Helpers - + private func setupEventHandling() { sdk?.eventPublisher .receive(on: DispatchQueue.main) @@ -658,7 +658,7 @@ class WalletService: ObservableObject { } .store(in: &cancellables) } - + private func handleSDKEvent(_ event: SPVEvent) { switch event { case .balanceUpdated: @@ -667,7 +667,7 @@ class WalletService: ObservableObject { try? await updateAccountBalance(account) } } - + case .transactionReceived(let txid, let confirmed, let amount, let addresses, let blockHeight): Task { if let account = activeAccount { @@ -675,7 +675,7 @@ class WalletService: ObservableObject { print(" Amount: \(amount) satoshis") print(" Addresses: \(addresses)") print(" Confirmed: \(confirmed), Block: \(blockHeight ?? 
0)") - + // Create and save the transaction await saveTransaction( txid: txid, @@ -687,14 +687,14 @@ class WalletService: ObservableObject { ) } } - + case .mempoolTransactionAdded(let txid, let amount, let addresses): Task { if let account = activeAccount { print("🔄 Mempool transaction added: \(txid)") print(" Amount: \(amount) satoshis") print(" Addresses: \(addresses)") - + // Save as unconfirmed transaction await saveTransaction( txid: txid, @@ -704,54 +704,54 @@ class WalletService: ObservableObject { blockHeight: nil, account: account ) - + // Update mempool count await updateMempoolTransactionCount() } } - + case .mempoolTransactionConfirmed(let txid, let blockHeight, let confirmations): Task { if let account = activeAccount { print("✅ Mempool transaction confirmed: \(txid) at height \(blockHeight) with \(confirmations) confirmations") - + // Update transaction confirmation status await confirmTransaction(txid: txid, blockHeight: blockHeight) - + // Update mempool count await updateMempoolTransactionCount() } } - + case .mempoolTransactionRemoved(let txid, let reason): Task { if let account = activeAccount { print("❌ Mempool transaction removed: \(txid), reason: \(reason)") - + // Remove or mark transaction as dropped await removeTransaction(txid: txid) - + // Update mempool count await updateMempoolTransactionCount() } } - + case .syncProgressUpdated(let progress): self.syncProgress = progress - + default: break } } - + private func watchAccountAddresses(_ account: HDAccount) async { guard let sdk = sdk else { logger.error("Cannot watch addresses: SDK not initialized") return } - + var failedAddresses: [(address: String, error: Error)] = [] - + for address in account.addresses { do { try await sdk.watchAddress(address.address) @@ -761,20 +761,20 @@ class WalletService: ObservableObject { failedAddresses.append((address.address, error)) } } - + // Handle failed addresses if !failedAddresses.isEmpty { await handleFailedWatchAddresses(failedAddresses, account: account) } } - + private func handleFailedWatchAddresses(_ failures: [(address: String, error: Error)], account: HDAccount) async { // Store failed addresses for retry pendingWatchAddresses[account.id.uuidString] = failures - + // Update pending watch count pendingWatchCount = pendingWatchAddresses.values.reduce(0) { $0 + $1.count } - + // Notify UI of partial failure watchAddressErrors = failures.map { _, error in if let watchError = error as? WatchAddressError { @@ -783,7 +783,7 @@ class WalletService: ObservableObject { return WatchAddressError.unknownError(error.localizedDescription) } } - + // Schedule retry for recoverable errors let recoverableFailures = failures.filter { _, error in if let watchError = error as? 
WatchAddressError { @@ -791,12 +791,12 @@ class WalletService: ObservableObject { } return true // Assume unknown errors might be recoverable } - + if !recoverableFailures.isEmpty { scheduleWatchAddressRetry(addresses: recoverableFailures.map { $0.address }, account: account) } } - + private func saveDiscoveredAddresses( account: HDAccount, external: [String], @@ -805,7 +805,7 @@ class WalletService: ObservableObject { guard let wallet = account.wallet, let context = modelContext else { throw WalletError.noContext } - + // Save external addresses for (index, address) in external.enumerated() { let path = BIP44.derivationPath( @@ -814,7 +814,7 @@ class WalletService: ObservableObject { change: false, index: UInt32(index) ) - + let watchedAddress = HDWatchedAddress( address: address, index: UInt32(index), @@ -823,10 +823,10 @@ class WalletService: ObservableObject { label: "Receive" ) watchedAddress.account = account - + account.addresses.append(watchedAddress) } - + // Save internal addresses for (index, address) in internalAddresses.enumerated() { let path = BIP44.derivationPath( @@ -835,7 +835,7 @@ class WalletService: ObservableObject { change: true, index: UInt32(index) ) - + let watchedAddress = HDWatchedAddress( address: address, index: UInt32(index), @@ -844,19 +844,19 @@ class WalletService: ObservableObject { label: "Change" ) watchedAddress.account = account - + account.addresses.append(watchedAddress) } - + try context.save() } - + private func updateSyncState(walletId: UUID, progress: SyncProgress) async { guard let context = modelContext else { return } - + let descriptor = FetchDescriptor() let allStates = try? context.fetch(descriptor) - + if let syncState = allStates?.first(where: { $0.walletId == walletId }) { syncState.update(from: progress) } else { @@ -864,10 +864,10 @@ class WalletService: ObservableObject { syncState.update(from: progress) context.insert(syncState) } - + try? context.save() } - + private func saveTransaction( txid: String, amount: Int64, @@ -877,10 +877,10 @@ class WalletService: ObservableObject { account: HDAccount ) async { guard let context = modelContext else { return } - + // Check if transaction already exists let descriptor = FetchDescriptor() - + let existingTransactions = try? context.fetch(descriptor) if let existingTx = existingTransactions?.first(where: { $0.txid == txid }) { // Update existing transaction @@ -897,12 +897,12 @@ class WalletService: ObservableObject { confirmations: confirmed ? 1 : 0, isInstantLocked: false ) - + // Associate transaction ID with account if !account.transactionIds.contains(txid) { account.transactionIds.append(txid) } - + // Associate transaction ID with addresses for addressString in addresses { if let watchedAddress = account.addresses.first(where: { $0.address == addressString }) { @@ -912,36 +912,36 @@ class WalletService: ObservableObject { print("🔗 Linked transaction to address: \(addressString)") } } - + context.insert(transaction) print("💾 Saved new transaction: \(txid) with amount: \(amount) satoshis") } - + // Save context do { try context.save() print("✅ Transaction saved to database") - + // Update account balance try? await updateAccountBalance(account) } catch { print("❌ Error saving transaction: \(error)") } } - + // MARK: - Mempool Transaction Helpers - + private func confirmTransaction(txid: String, blockHeight: UInt32) async { guard let context = modelContext else { return } - + let descriptor = FetchDescriptor() let existingTransactions = try? 
context.fetch(descriptor) - + if let transaction = existingTransactions?.first(where: { $0.txid == txid }) { transaction.confirmations = 1 transaction.height = blockHeight print("✅ Updated transaction \(txid) as confirmed at height \(blockHeight)") - + do { try context.save() // Update balance after confirmation @@ -953,27 +953,27 @@ class WalletService: ObservableObject { } } } - + private func removeTransaction(txid: String) async { guard let context = modelContext else { return } - + let descriptor = FetchDescriptor() let existingTransactions = try? context.fetch(descriptor) - + if let transaction = existingTransactions?.first(where: { $0.txid == txid }) { // Remove transaction from account and address references if let account = activeAccount { account.transactionIds.removeAll { $0 == txid } - + for address in account.addresses { address.transactionIds.removeAll { $0 == txid } } } - + // Delete the transaction context.delete(transaction) print("🗑️ Removed transaction \(txid) from database") - + do { try context.save() // Update balance after removal @@ -985,35 +985,35 @@ class WalletService: ObservableObject { } } } - + private func updateMempoolTransactionCount() async { guard let context = modelContext, let account = activeAccount else { return } - + let descriptor = FetchDescriptor() let allTransactions = try? context.fetch(descriptor) - + // Count unconfirmed transactions (confirmations == 0) let accountTxIds = Set(account.transactionIds) let mempoolCount = allTransactions?.filter { transaction in accountTxIds.contains(transaction.txid) && transaction.confirmations == 0 }.count ?? 0 - + await MainActor.run { self.mempoolTransactionCount = mempoolCount } } - + // MARK: - Watch Address Retry - + private func scheduleWatchAddressRetry(addresses: [String], account: HDAccount) { Task { // Simple retry after 5 seconds try? await Task.sleep(nanoseconds: 5_000_000_000) - + guard let sdk = sdk else { return } - + var stillFailedAddresses: [(address: String, error: Error)] = [] - + for address in addresses { do { try await sdk.watchAddress(address) @@ -1023,23 +1023,23 @@ class WalletService: ObservableObject { stillFailedAddresses.append((address, error)) } } - + // Update pending addresses if stillFailedAddresses.isEmpty { pendingWatchAddresses.removeValue(forKey: account.id.uuidString) } else { pendingWatchAddresses[account.id.uuidString] = stillFailedAddresses } - + // Update pending count await MainActor.run { self.pendingWatchCount = self.pendingWatchAddresses.values.reduce(0) { $0 + $1.count } } } } - + // MARK: - Watch Address Verification - + private func startWatchVerification() { watchVerificationTimer = Timer.scheduledTimer(withTimeInterval: 60.0, repeats: true) { _ in Task { @@ -1047,21 +1047,21 @@ class WalletService: ObservableObject { } } } - + private func stopWatchVerification() { watchVerificationTimer?.invalidate() watchVerificationTimer = nil } - + private func verifyAllWatchedAddresses() async { guard let sdk = sdk, let account = activeAccount else { return } - + watchVerificationStatus = .verifying - + let addresses = account.addresses.map { $0.address } let totalAddresses = addresses.count var watchedAddresses = 0 - + do { // TODO: verifyWatchedAddresses method needs to be implemented in SPVClient // For now, assume all addresses are watched @@ -1071,12 +1071,12 @@ class WalletService: ObservableObject { let missingAddresses = verificationResults.compactMap { address, isWatched in isWatched ? 
nil : address } - + watchedAddresses = addresses.count - missingAddresses.count - + if !missingAddresses.isEmpty { logger.warning("Found \(missingAddresses.count) addresses not being watched for account \(account.label)") - + // Re-watch missing addresses for address in missingAddresses { do { @@ -1090,7 +1090,7 @@ class WalletService: ObservableObject { } } */ - + watchVerificationStatus = .verified(total: totalAddresses, watching: watchedAddresses) } catch { logger.error("Failed to verify watched addresses for account \(account.label): \(error)") @@ -1108,7 +1108,7 @@ enum WalletError: LocalizedError { case invalidState case invalidMnemonic case decryptionFailed - + var errorDescription: String? { switch self { case .noContext: diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift index 080a0c7e3..d89090997 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift @@ -8,7 +8,7 @@ public enum BIP44 { public static let dashTestnetCoinType: UInt32 = 1 public static let purpose: UInt32 = 44 public static let defaultGapLimit: UInt32 = 20 - + public static func coinType(for network: DashNetwork) -> UInt32 { switch network { case .mainnet: @@ -17,7 +17,7 @@ public enum BIP44 { return dashTestnetCoinType } } - + public static func derivationPath( network: DashNetwork, account: UInt32, diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift index 8236a922d..d843cd9bf 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift @@ -6,11 +6,11 @@ struct TestContentView: View { Text("Dash HD Wallet") .font(.largeTitle) .padding() - + Text("iOS App is running!") .font(.title2) .foregroundColor(.green) - + Spacer() } .padding() diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift index d9e06b106..1f49e7baa 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift @@ -16,7 +16,7 @@ struct Clipboard { pasteboard.setString(string, forType: .string) #endif } - + static func paste() -> String? 
{ #if os(iOS) return UIPasteboard.general.string @@ -30,17 +30,17 @@ struct CopyButton: View { let text: String let label: String @State private var copied = false - + init(_ text: String, label: String = "Copy") { self.text = text self.label = label } - + var body: some View { Button(action: { Clipboard.copy(text) copied = true - + DispatchQueue.main.asyncAfter(deadline: .now() + 2) { copied = false } diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift index f484edcdf..36cd43ca3 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift @@ -4,7 +4,7 @@ import SwiftDashCoreSDK /// Helper for creating and managing SwiftData ModelContainer with migration support struct ModelContainerHelper { - + /// Create a ModelContainer with automatic migration recovery static func createContainer() throws -> ModelContainer { let schema = Schema([ @@ -17,7 +17,7 @@ struct ModelContainerHelper { SwiftDashCoreSDK.WatchedAddress.self, SyncState.self ]) - + // Check if we have migration issues by looking for specific error patterns let shouldCleanup = UserDefaults.standard.bool(forKey: "ForceModelCleanup") if shouldCleanup { @@ -25,16 +25,16 @@ struct ModelContainerHelper { cleanupCorruptStore() UserDefaults.standard.set(false, forKey: "ForceModelCleanup") } - + do { // First attempt: try to create normally return try createContainer(with: schema, inMemory: false) } catch { print("Initial ModelContainer creation failed: \(error)") print("Detailed error: \(error.localizedDescription)") - + // Check if it's a migration error or model error - if error.localizedDescription.contains("migration") || + if error.localizedDescription.contains("migration") || error.localizedDescription.contains("relationship") || error.localizedDescription.contains("to-one") || error.localizedDescription.contains("to-many") || @@ -43,22 +43,22 @@ struct ModelContainerHelper { print("Model/Migration error detected, performing complete cleanup...") UserDefaults.standard.set(true, forKey: "ForceModelCleanup") } - + // Second attempt: clean up and retry cleanupCorruptStore() - + do { return try createContainer(with: schema, inMemory: false) } catch { print("Failed to create persistent store after cleanup: \(error)") - + // Final attempt: in-memory store print("Falling back to in-memory store") return try createContainer(with: schema, inMemory: true) } } } - + private static func createContainer(with schema: Schema, inMemory: Bool) throws -> ModelContainer { let modelConfiguration = ModelConfiguration( schema: schema, @@ -66,26 +66,26 @@ struct ModelContainerHelper { groupContainer: .automatic, cloudKitDatabase: .none ) - + return try ModelContainer( for: schema, configurations: [modelConfiguration] ) } - + static func cleanupCorruptStore() { print("Starting cleanup of corrupt store...") - + guard let appSupportURL = FileManager.default.urls( for: .applicationSupportDirectory, in: .userDomainMask ).first else { return } - + let documentsURL = FileManager.default.urls( for: .documentDirectory, in: .userDomainMask ).first - + // Clean up all SQLite and SwiftData related files let patternsToRemove = [ "default.store", @@ -98,17 +98,17 @@ struct ModelContainerHelper { "ModelContainer", ".db" ] - + // Clean up all files 
in Application Support that could be related to the store if let contents = try? FileManager.default.contentsOfDirectory(at: appSupportURL, includingPropertiesForKeys: nil) { for fileURL in contents { let filename = fileURL.lastPathComponent - + // Check if file matches any of our patterns let shouldRemove = patternsToRemove.contains { pattern in filename.contains(pattern) || filename.hasPrefix("default") } - + if shouldRemove { do { try FileManager.default.removeItem(at: fileURL) @@ -119,18 +119,18 @@ struct ModelContainerHelper { } } } - + // Also clean up Documents directory if let documentsURL = documentsURL, let contents = try? FileManager.default.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil) { for fileURL in contents { let filename = fileURL.lastPathComponent - + // Check if file matches any of our patterns let shouldRemove = patternsToRemove.contains { pattern in filename.contains(pattern) || filename.hasPrefix("default") } - + if shouldRemove { do { try FileManager.default.removeItem(at: fileURL) @@ -141,7 +141,7 @@ struct ModelContainerHelper { } } } - + // Clear any cached SwiftData files let cacheURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first if let cacheURL = cacheURL { @@ -155,22 +155,22 @@ struct ModelContainerHelper { } } } - + print("Store cleanup completed") } - + /// Check if the current store needs migration static func needsMigration(for container: ModelContainer) -> Bool { // This would check the model version or schema changes // For now, return false as we handle migration errors automatically return false } - + /// Export wallet data before migration static func exportDataForMigration(from context: ModelContext) throws -> Data? { do { let wallets = try context.fetch(FetchDescriptor()) - + // Create export structure let exportData = MigrationExportData( wallets: wallets.map { wallet in @@ -184,7 +184,7 @@ struct ModelContainerHelper { ) } ) - + return try JSONEncoder().encode(exportData) } catch { print("Failed to export data for migration: \(error)") diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift index e0f769e2f..7377bf713 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift @@ -14,7 +14,7 @@ struct PlatformColor { return Color(NSColor.controlBackgroundColor) #endif } - + static var textBackground: Color { #if os(iOS) return Color(UIColor.secondarySystemGroupedBackground) @@ -22,7 +22,7 @@ struct PlatformColor { return Color(NSColor.textBackgroundColor) #endif } - + static var secondarySystemBackground: Color { #if os(iOS) return Color(UIColor.secondarySystemBackground) @@ -30,7 +30,7 @@ struct PlatformColor { return Color(NSColor.controlBackgroundColor) #endif } - + static var secondaryLabel: Color { #if os(iOS) return Color(UIColor.secondaryLabel) @@ -38,7 +38,7 @@ struct PlatformColor { return Color(NSColor.secondaryLabelColor) #endif } - + static var tertiaryLabel: Color { #if os(iOS) return Color(UIColor.tertiaryLabel) @@ -46,7 +46,7 @@ struct PlatformColor { return Color(NSColor.tertiaryLabelColor) #endif } - + static var systemRed: Color { #if os(iOS) return Color(UIColor.systemRed) @@ -54,7 +54,7 @@ struct PlatformColor { return Color(NSColor.systemRed) #endif } - + static var systemGreen: 
Color { #if os(iOS) return Color(UIColor.systemGreen) @@ -62,7 +62,7 @@ struct PlatformColor { return Color(NSColor.systemGreen) #endif } - + static var systemBlue: Color { #if os(iOS) return Color(UIColor.systemBlue) @@ -70,7 +70,7 @@ struct PlatformColor { return Color(NSColor.systemBlue) #endif } - + static var systemOrange: Color { #if os(iOS) return Color(UIColor.systemOrange) @@ -78,7 +78,7 @@ struct PlatformColor { return Color(NSColor.systemOrange) #endif } - + static var tertiarySystemBackground: Color { #if os(iOS) return Color(UIColor.tertiarySystemBackground) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift index ee58d1262..8de19f6c6 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift @@ -5,12 +5,12 @@ import SwiftDashCoreSDK struct AccountDetailView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.modelContext) private var modelContext - + let account: HDAccount @State private var selectedTab = 0 @State private var showReceiveAddress = false @State private var showSendTransaction = false - + var body: some View { VStack(spacing: 0) { // Account Header @@ -19,9 +19,9 @@ struct AccountDetailView: View { onReceive: { showReceiveAddress = true }, onSend: { showSendTransaction = true } ) - + Divider() - + // Tab View TabView(selection: $selectedTab) { // Transactions Tab @@ -30,14 +30,14 @@ struct AccountDetailView: View { Label("Transactions", systemImage: "list.bullet") } .tag(0) - + // Addresses Tab AddressesTabView(account: account) .tabItem { Label("Addresses", systemImage: "qrcode") } .tag(1) - + // UTXOs Tab UTXOsTabView(account: account) .tabItem { @@ -62,7 +62,7 @@ struct AccountHeaderView: View { let account: HDAccount let onReceive: () -> Void let onSend: () -> Void - + var body: some View { VStack(spacing: 16) { // Account Info @@ -70,26 +70,26 @@ struct AccountHeaderView: View { Text(account.displayName) .font(.title2) .fontWeight(.semibold) - + Text(account.derivationPath) .font(.caption) .foregroundColor(.secondary) .fontDesign(.monospaced) } - + // Balance if let balance = account.balance { BalanceView(balance: balance) } - + // Mempool Status if walletService.mempoolTransactionCount > 0 { MempoolStatusView(count: walletService.mempoolTransactionCount) } - + // Watch Status WatchStatusView(status: walletService.watchVerificationStatus) - + // Watch Errors if !walletService.watchAddressErrors.isEmpty || walletService.pendingWatchCount > 0 { WatchErrorsView( @@ -97,14 +97,14 @@ struct AccountHeaderView: View { pendingCount: walletService.pendingWatchCount ) } - + // Action Buttons HStack(spacing: 16) { Button(action: onReceive) { Label("Receive", systemImage: "arrow.down.circle.fill") } .buttonStyle(.borderedProminent) - + Button(action: onSend) { Label("Send", systemImage: "arrow.up.circle.fill") } @@ -121,19 +121,19 @@ struct AccountHeaderView: View { struct BalanceView: View { let balance: Balance - + var body: some View { VStack(spacing: 8) { Text(balance.formattedTotal) .font(.system(size: 32, weight: .medium, design: .monospaced)) - + HStack(spacing: 20) { BalanceComponent( label: "Available", amount: formatDash(balance.available), color: .green ) - + if balance.pending > 0 { BalanceComponent( label: 
"Pending", @@ -141,7 +141,7 @@ struct BalanceView: View { color: .orange ) } - + if balance.instantLocked > 0 { BalanceComponent( label: "InstantSend", @@ -149,7 +149,7 @@ struct BalanceView: View { color: .blue ) } - + if balance.mempool > 0 { BalanceComponent( label: "Mempool", @@ -160,7 +160,7 @@ struct BalanceView: View { } } } - + private func formatDash(_ satoshis: UInt64) -> String { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f", dash) @@ -171,13 +171,13 @@ struct BalanceComponent: View { let label: String let amount: String let color: Color - + var body: some View { VStack(spacing: 4) { Text(label) .font(.caption) .foregroundColor(.secondary) - + Text(amount) .font(.system(.body, design: .monospaced)) .foregroundColor(color) @@ -191,7 +191,7 @@ struct TransactionsTabView: View { let account: HDAccount @State private var searchText = "" @Environment(\.modelContext) private var modelContext - + var filteredTransactions: [SwiftDashCoreSDK.Transaction] { // Fetch transactions by IDs let txIds = account.transactionIds @@ -201,9 +201,9 @@ struct TransactionsTabView: View { }, sortBy: [SortDescriptor(\.timestamp, order: .reverse)] ) - + let allTransactions = (try? modelContext.fetch(descriptor)) ?? [] - + if searchText.isEmpty { return allTransactions } else { @@ -212,7 +212,7 @@ struct TransactionsTabView: View { } } } - + var body: some View { VStack { if account.transactionIds.isEmpty { @@ -239,11 +239,11 @@ struct AddressesTabView: View { @EnvironmentObject private var walletService: WalletService let account: HDAccount @State private var showingExternal = true - + var addresses: [HDWatchedAddress] { showingExternal ? account.externalAddresses : account.internalAddresses } - + var body: some View { VStack { // Address Type Picker @@ -253,7 +253,7 @@ struct AddressesTabView: View { } .pickerStyle(SegmentedPickerStyle()) .padding() - + if addresses.isEmpty { EmptyStateView( icon: "qrcode", @@ -267,7 +267,7 @@ struct AddressesTabView: View { } } } - + // Generate New Address Button HStack { Spacer() @@ -278,7 +278,7 @@ struct AddressesTabView: View { } } } - + private func generateNewAddress() { Task { do { @@ -298,25 +298,25 @@ struct AddressesTabView: View { struct UTXOsTabView: View { let account: HDAccount @Environment(\.modelContext) private var modelContext - + var utxos: [UTXO] { // Collect all UTXO outpoints from addresses let allOutpoints = account.addresses.flatMap { $0.utxoOutpoints } - + // Fetch UTXOs by outpoints let descriptor = FetchDescriptor( predicate: #Predicate { utxo in allOutpoints.contains(utxo.outpoint) && !utxo.isSpent } ) - + return (try? modelContext.fetch(descriptor)) ?? 
[] } - + var totalValue: UInt64 { utxos.reduce(0) { $0 + $1.value } } - + var body: some View { VStack { if utxos.isEmpty { @@ -337,7 +337,7 @@ struct UTXOsTabView: View { .monospacedDigit() } .padding() - + // UTXO List List { ForEach(utxos.sorted { $0.value > $1.value }) { utxo in @@ -348,7 +348,7 @@ struct UTXOsTabView: View { } } } - + private func formatDash(_ satoshis: UInt64) -> String { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) @@ -361,17 +361,17 @@ struct EmptyStateView: View { let icon: String let title: String let message: String - + var body: some View { VStack(spacing: 20) { Image(systemName: icon) .font(.system(size: 60)) .foregroundColor(.secondary) - + Text(title) .font(.title3) .fontWeight(.medium) - + Text(message) .font(.body) .foregroundColor(.secondary) @@ -386,14 +386,14 @@ struct EmptyStateView: View { struct TransactionRowView: View { let transaction: SwiftDashCoreSDK.Transaction - + var body: some View { HStack { // Direction Icon Image(systemName: transaction.amount >= 0 ? "arrow.down.circle.fill" : "arrow.up.circle.fill") .foregroundColor(transaction.amount >= 0 ? .green : .red) .font(.title2) - + // Transaction Info VStack(alignment: .leading, spacing: 4) { Text(transaction.txid) @@ -401,20 +401,20 @@ struct TransactionRowView: View { .fontDesign(.monospaced) .lineLimit(1) .truncationMode(.middle) - + Text(transaction.timestamp, style: .relative) .font(.caption) .foregroundColor(.secondary) } - + Spacer() - + // Amount and Status VStack(alignment: .trailing, spacing: 4) { Text(formatAmount(transaction.amount)) .font(.system(.body, design: .monospaced)) .foregroundColor(transaction.amount >= 0 ? .green : .red) - + if transaction.isInstantLocked { Label("InstantSend", systemImage: "bolt.fill") .font(.caption2) @@ -432,7 +432,7 @@ struct TransactionRowView: View { } .padding(.vertical, 4) } - + private func formatAmount(_ satoshis: Int64) -> String { let dash = Double(abs(satoshis)) / 100_000_000.0 let sign = satoshis >= 0 ? "+" : "-" @@ -443,7 +443,7 @@ struct TransactionRowView: View { struct AddressRowView: View { let address: HDWatchedAddress @State private var isCopied = false - + var body: some View { HStack { VStack(alignment: .leading, spacing: 4) { @@ -452,28 +452,28 @@ struct AddressRowView: View { .font(.system(.caption, design: .monospaced)) .lineLimit(1) .truncationMode(.middle) - + if address.transactionIds.count > 0 { Text("(\(address.transactionIds.count) tx)") .font(.caption2) .foregroundColor(.secondary) } } - + Text("Index: \(address.index)") .font(.caption2) .foregroundColor(.secondary) } - + Spacer() - + if let balance = address.balance { Text(balance.formattedTotal) .font(.caption) .monospacedDigit() .foregroundColor(.secondary) } - + Button(action: copyAddress) { Image(systemName: isCopied ? 
"checkmark" : "doc.on.doc") .font(.caption) @@ -482,14 +482,14 @@ struct AddressRowView: View { } .padding(.vertical, 4) } - + private func copyAddress() { Clipboard.copy(address.address) - + withAnimation { isCopied = true } - + DispatchQueue.main.asyncAfter(deadline: .now() + 2) { withAnimation { isCopied = false @@ -500,7 +500,7 @@ struct AddressRowView: View { struct UTXORowView: View { let utxo: UTXO - + var body: some View { HStack { VStack(alignment: .leading, spacing: 4) { @@ -508,7 +508,7 @@ struct UTXORowView: View { .font(.system(.caption, design: .monospaced)) .lineLimit(1) .truncationMode(.middle) - + HStack { Text("Height: \(utxo.height)") Text("•") @@ -517,13 +517,13 @@ struct UTXORowView: View { .font(.caption2) .foregroundColor(.secondary) } - + Spacer() - + VStack(alignment: .trailing, spacing: 4) { Text(utxo.formattedValue) .font(.system(.body, design: .monospaced)) - + if utxo.isInstantLocked { Text("InstantSend") .font(.caption2) @@ -539,12 +539,12 @@ struct UTXORowView: View { struct MempoolStatusView: View { let count: Int - + var body: some View { HStack { Image(systemName: "clock.arrow.circlepath") .foregroundColor(.purple) - + Text("\(count) unconfirmed transaction\(count == 1 ? "" : "s") in mempool") .font(.caption) .foregroundColor(.secondary) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift index 1419ef87f..6072ca5c9 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift @@ -6,11 +6,11 @@ struct ContentView: View { @Environment(\.modelContext) private var modelContext @EnvironmentObject private var walletService: WalletService @Query private var wallets: [HDWallet] - + @State private var showCreateWallet = false @State private var showImportWallet = false @State private var selectedWallet: HDWallet? 
- + var body: some View { #if os(iOS) NavigationStack { @@ -45,12 +45,12 @@ struct ContentView: View { .tag(wallet) } } - + Section { Button(action: { showCreateWallet = true }) { Label("Create New Wallet", systemImage: "plus.circle") } - + Button(action: { showImportWallet = true }) { Label("Import Wallet", systemImage: "square.and.arrow.down") } @@ -86,9 +86,9 @@ struct WalletListView: View { let wallets: [HDWallet] let onCreateWallet: () -> Void let onImportWallet: () -> Void - + @State private var showingSettings = false - + var body: some View { #if os(iOS) List { @@ -98,11 +98,11 @@ struct WalletListView: View { Image(systemName: "wallet.pass") .font(.system(size: 50)) .foregroundColor(.secondary) - + Text("No wallets yet") .font(.headline) .foregroundColor(.secondary) - + Text("Create or import a wallet to get started") .font(.caption) .foregroundColor(.secondary) @@ -121,12 +121,12 @@ struct WalletListView: View { } } } - + Section { Button(action: onCreateWallet) { Label("Create New Wallet", systemImage: "plus.circle") } - + Button(action: onImportWallet) { Label("Import Wallet", systemImage: "square.and.arrow.down") } @@ -154,12 +154,12 @@ struct WalletListView: View { .tag(wallet) } } - + Section { Button(action: onCreateWallet) { Label("Create New Wallet", systemImage: "plus.circle") } - + Button(action: onImportWallet) { Label("Import Wallet", systemImage: "square.and.arrow.down") } @@ -175,25 +175,25 @@ struct WalletListView: View { struct WalletRowView: View { let wallet: HDWallet - + var body: some View { VStack(alignment: .leading, spacing: 4) { HStack { Text(wallet.name) .font(.headline) - + Spacer() - + NetworkBadge(network: wallet.network) } - + HStack { Text("\(wallet.accounts.count) accounts") .font(.caption) .foregroundColor(.secondary) - + Spacer() - + Text(wallet.totalBalance.formattedTotal) .font(.caption) .monospacedDigit() @@ -208,7 +208,7 @@ struct WalletRowView: View { struct NetworkBadge: View { let network: DashNetwork - + var body: some View { Text(network.rawValue.capitalized) .font(.caption2) @@ -219,7 +219,7 @@ struct NetworkBadge: View { .foregroundColor(.white) .cornerRadius(4) } - + private var backgroundColor: Color { switch network { case .mainnet: @@ -242,11 +242,11 @@ struct EmptyWalletView: View { Image(systemName: "wallet.pass") .font(.system(size: 80)) .foregroundColor(.secondary) - + Text("No Wallet Selected") .font(.title2) .foregroundColor(.secondary) - + Text("Create or import a wallet to get started") .font(.body) .foregroundColor(.secondary) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift index d953e523b..c5d20ab6b 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift @@ -3,16 +3,16 @@ import SwiftUI struct CreateAccountView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + let wallet: HDWallet let onComplete: (HDAccount) -> Void - + @State private var accountLabel = "" @State private var accountIndex: UInt32 = 1 @State private var password = "" @State private var isCreating = false @State private var errorMessage = "" - + var nextAvailableIndex: UInt32 { let usedIndices = wallet.accounts.map { $0.accountIndex } var index: UInt32 = 0 @@ -21,42 +21,42 
@@ struct CreateAccountView: View { } return index } - + var isValid: Bool { !password.isEmpty && password.count >= 8 } - + var body: some View { NavigationView { Form { Section("Account Details") { TextField("Account Label (Optional)", text: $accountLabel) .textFieldStyle(.roundedBorder) - + HStack { Text("Account Index") Spacer() Text("\(accountIndex)") .monospacedDigit() } - + Text("Derivation Path: \(derivationPath)") .font(.caption) .foregroundColor(.secondary) .fontDesign(.monospaced) } - + Section("Security") { SecureField("Wallet Password", text: $password) .textFieldStyle(.roundedBorder) - + if !password.isEmpty && password.count < 8 { Text("Password must be at least 8 characters") .font(.caption) .foregroundColor(.red) } } - + if !errorMessage.isEmpty { Section { Text(errorMessage) @@ -71,7 +71,7 @@ struct CreateAccountView: View { dismiss() } } - + ToolbarItem(placement: .confirmationAction) { Button("Create") { createAccount() @@ -87,36 +87,36 @@ struct CreateAccountView: View { accountIndex = nextAvailableIndex } } - + private var derivationPath: String { let coinType = BIP44.coinType(for: wallet.network) return "m/44'/\(coinType)'/\(accountIndex)'" } - + private func createAccount() { isCreating = true errorMessage = "" - + do { let label = accountLabel.isEmpty ? "Account #\(accountIndex)" : accountLabel - + let account = try walletService.createAccount( for: wallet, index: accountIndex, label: label, password: password ) - + wallet.accounts.append(account) - + // Save to storage if let context = walletService.modelContext { try context.save() } - + onComplete(account) dismiss() - + } catch { errorMessage = error.localizedDescription isCreating = false diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift index a0466d39d..f4da70a54 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift @@ -4,7 +4,7 @@ import SwiftDashCoreSDK struct CreateWalletView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + @State private var walletName = "Dev Wallet \(Int.random(in: 1000...9999))" @State private var selectedNetwork: DashNetwork = .testnet @State private var password = "password123" @@ -14,9 +14,9 @@ struct CreateWalletView: View { @State private var mnemonicConfirmed = true @State private var isCreating = false @State private var errorMessage = "" - + let onComplete: (HDWallet) -> Void - + var isValid: Bool { !walletName.isEmpty && !password.isEmpty && @@ -24,7 +24,7 @@ struct CreateWalletView: View { password.count >= 8 && mnemonicConfirmed } - + var body: some View { VStack(spacing: 0) { // Header @@ -36,7 +36,7 @@ struct CreateWalletView: View { } .padding() .background(PlatformColor.controlBackground) - + // Content ScrollView { VStack(alignment: .leading, spacing: 20) { @@ -44,10 +44,10 @@ struct CreateWalletView: View { VStack(alignment: .leading, spacing: 12) { Text("Wallet Details") .font(.headline) - + TextField("Wallet Name", text: $walletName) .textFieldStyle(.roundedBorder) - + HStack { Text("Network:") Picker("", selection: $selectedNetwork) { @@ -64,47 +64,47 @@ struct CreateWalletView: View { Spacer() } } - + Divider() - + // Security VStack(alignment: .leading, spacing: 12) { Text("Security") 
.font(.headline) - + SecureField("Password (min 8 characters)", text: $password) .textFieldStyle(.roundedBorder) - + SecureField("Confirm Password", text: $confirmPassword) .textFieldStyle(.roundedBorder) - + // Password validation warnings if !password.isEmpty && password.count < 8 { Text("Password must be at least 8 characters") .font(.caption) .foregroundColor(.orange) } - + if !password.isEmpty && !confirmPassword.isEmpty && password != confirmPassword { Text("Passwords don't match") .font(.caption) .foregroundColor(.red) } - + if password.isEmpty && confirmPassword.isEmpty && !walletName.isEmpty { Text("Please set a password to protect your wallet") .font(.caption) .foregroundColor(.secondary) } } - + Divider() - + // Recovery Phrase VStack(alignment: .leading, spacing: 12) { Text("Recovery Phrase") .font(.headline) - + if mnemonic.isEmpty { Button("Generate Recovery Phrase") { generateMnemonic() @@ -114,17 +114,17 @@ struct CreateWalletView: View { Text("Write down these words in order. You'll need them to recover your wallet.") .font(.caption) .foregroundColor(.orange) - + MnemonicGridView( words: mnemonic, showWords: showMnemonic ) - + HStack { Toggle("Show words", isOn: $showMnemonic) - + Spacer() - + Button("Copy") { copyMnemonic() } @@ -134,7 +134,7 @@ struct CreateWalletView: View { .buttonStyle(.link) #endif } - + Toggle("I have written down my recovery phrase", isOn: $mnemonicConfirmed) #if os(macOS) .toggleStyle(.checkbox) @@ -143,7 +143,7 @@ struct CreateWalletView: View { #endif } } - + // Error Message if !errorMessage.isEmpty { Text(errorMessage) @@ -153,18 +153,18 @@ struct CreateWalletView: View { } .padding() } - + Divider() - + // Footer buttons HStack { Button("Cancel") { dismiss() } .keyboardShortcut(.escape) - + Spacer() - + // Show what's missing if button is disabled if !isValid && !walletName.isEmpty { VStack(alignment: .trailing, spacing: 4) { @@ -187,7 +187,7 @@ struct CreateWalletView: View { } } } - + Button("Create") { createWallet() } @@ -207,20 +207,20 @@ struct CreateWalletView: View { } } } - + private func generateMnemonic() { mnemonic = HDWalletService.generateMnemonic() } - + private func copyMnemonic() { let phrase = mnemonic.joined(separator: " ") Clipboard.copy(phrase) } - + private func createWallet() { isCreating = true errorMessage = "" - + do { let wallet = try walletService.createWallet( name: walletName, @@ -228,7 +228,7 @@ struct CreateWalletView: View { password: password, network: selectedNetwork ) - + onComplete(wallet) dismiss() } catch { @@ -243,13 +243,13 @@ struct CreateWalletView: View { struct MnemonicGridView: View { let words: [String] let showWords: Bool - + private let columns = [ GridItem(.flexible()), GridItem(.flexible()), GridItem(.flexible()) ] - + var body: some View { LazyVGrid(columns: columns, spacing: 8) { ForEach(Array(words.enumerated()), id: \.offset) { index, word in @@ -258,7 +258,7 @@ struct MnemonicGridView: View { .font(.caption) .foregroundColor(.secondary) .frame(width: 20, alignment: .trailing) - + Text(showWords ? 
word : "•••••") .font(.system(.body, design: .monospaced)) .frame(maxWidth: .infinity, alignment: .leading) @@ -277,7 +277,7 @@ struct MnemonicGridView: View { struct ImportWalletView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + @State private var walletName = "" @State private var mnemonicText = "" @State private var selectedNetwork: DashNetwork = .testnet @@ -285,9 +285,9 @@ struct ImportWalletView: View { @State private var confirmPassword = "" @State private var isImporting = false @State private var errorMessage = "" - + let onComplete: (HDWallet) -> Void - + var isValid: Bool { !walletName.isEmpty && !mnemonicText.isEmpty && @@ -295,7 +295,7 @@ struct ImportWalletView: View { password == confirmPassword && password.count >= 8 } - + var body: some View { VStack(spacing: 0) { // Header @@ -307,7 +307,7 @@ struct ImportWalletView: View { } .padding() .background(PlatformColor.controlBackground) - + // Content ScrollView { VStack(alignment: .leading, spacing: 20) { @@ -315,10 +315,10 @@ struct ImportWalletView: View { VStack(alignment: .leading, spacing: 12) { Text("Wallet Details") .font(.headline) - + TextField("Wallet Name", text: $walletName) .textFieldStyle(.roundedBorder) - + HStack { Text("Network:") Picker("", selection: $selectedNetwork) { @@ -335,18 +335,18 @@ struct ImportWalletView: View { Spacer() } } - + Divider() - + // Recovery Phrase VStack(alignment: .leading, spacing: 12) { Text("Recovery Phrase") .font(.headline) - + Text("Enter your 12 or 24 word recovery phrase") .font(.caption) .foregroundColor(.secondary) - + TextEditor(text: $mnemonicText) .font(.system(.body, design: .monospaced)) .frame(height: 100) @@ -355,40 +355,40 @@ struct ImportWalletView: View { .stroke(Color.secondary.opacity(0.3), lineWidth: 1) ) } - + Divider() - + // Security VStack(alignment: .leading, spacing: 12) { Text("Security") .font(.headline) - + SecureField("Password (min 8 characters)", text: $password) .textFieldStyle(.roundedBorder) - + SecureField("Confirm Password", text: $confirmPassword) .textFieldStyle(.roundedBorder) - + // Password validation warnings if !password.isEmpty && password.count < 8 { Text("Password must be at least 8 characters") .font(.caption) .foregroundColor(.orange) } - + if !password.isEmpty && !confirmPassword.isEmpty && password != confirmPassword { Text("Passwords don't match") .font(.caption) .foregroundColor(.red) } - + if password.isEmpty && confirmPassword.isEmpty && !walletName.isEmpty { Text("Please set a password to protect your wallet") .font(.caption) .foregroundColor(.secondary) } } - + // Error Message if !errorMessage.isEmpty { Text(errorMessage) @@ -398,18 +398,18 @@ struct ImportWalletView: View { } .padding() } - + Divider() - + // Footer buttons HStack { Button("Cancel") { dismiss() } .keyboardShortcut(.escape) - + Spacer() - + Button("Import") { importWallet() } @@ -423,24 +423,24 @@ struct ImportWalletView: View { .frame(width: 600, height: 500) #endif } - + private func importWallet() { isImporting = true errorMessage = "" - + // Parse mnemonic let words = mnemonicText .trimmingCharacters(in: .whitespacesAndNewlines) .split(separator: " ") .map { String($0) } - + // Validate word count guard words.count == 12 || words.count == 24 else { errorMessage = "Recovery phrase must be 12 or 24 words" isImporting = false return } - + do { let wallet = try walletService.createWallet( name: walletName, @@ -448,7 +448,7 @@ struct ImportWalletView: View { password: password, network: 
selectedNetwork ) - + onComplete(wallet) dismiss() } catch { diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift index 135530772..2bcb62a4e 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift @@ -4,11 +4,11 @@ import SwiftDashCoreSDK struct EnhancedSyncProgressView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + @State private var hasStarted = false @State private var showStatistics = false @State private var useCallbackSync = true - + var body: some View { NavigationView { VStack(spacing: 20) { @@ -32,7 +32,7 @@ struct EnhancedSyncProgressView: View { .progressViewStyle(.circular) .scaleEffect(1.5) } - + // Filter Sync Status Warning (if not available) if let syncProgress = walletService.syncProgress, !syncProgress.filterSyncAvailable { @@ -48,7 +48,7 @@ struct EnhancedSyncProgressView: View { .background(Color.orange.opacity(0.1)) .cornerRadius(8) } - + // Statistics Toggle if walletService.detailedSyncProgress != nil { Button(showStatistics ? "Hide Statistics" : "Show Statistics") { @@ -58,7 +58,7 @@ struct EnhancedSyncProgressView: View { } .buttonStyle(.bordered) } - + // Detailed Statistics if showStatistics, !walletService.syncStatistics.isEmpty { DetailedStatisticsView(statistics: walletService.syncStatistics) @@ -80,7 +80,7 @@ struct EnhancedSyncProgressView: View { dismiss() } } - + if walletService.isSyncing { ToolbarItem(placement: .primaryAction) { Menu { @@ -88,7 +88,7 @@ struct EnhancedSyncProgressView: View { // Future: Implement pause functionality } .disabled(true) - + Button("Cancel Sync", systemImage: "xmark.circle") { walletService.stopSync() } @@ -105,7 +105,7 @@ struct EnhancedSyncProgressView: View { .frame(width: 700, height: showStatistics ? 
700 : 600) #endif } - + private func startSync() { hasStarted = true Task { @@ -126,7 +126,7 @@ struct EnhancedSyncProgressView: View { struct DetailedProgressContent: View { let progress: DetailedSyncProgress - + var body: some View { VStack(spacing: 24) { // Stage Icon and Status @@ -134,17 +134,17 @@ struct DetailedProgressContent: View { Text(progress.stage.icon) .font(.system(size: 80)) .symbolEffect(.pulse, isActive: progress.stage.isActive) - + Text(progress.stage.description) .font(.title2) .fontWeight(.semibold) - + Text(progress.stageMessage) .font(.body) .foregroundColor(.secondary) .multilineTextAlignment(.center) } - + // Progress Circle CircularProgressView( progress: progress.percentage / 100.0, @@ -152,7 +152,7 @@ struct DetailedProgressContent: View { speed: progress.formattedSpeed ) .frame(width: 200, height: 200) - + // Block Progress VStack(spacing: 16) { HStack(spacing: 30) { @@ -161,20 +161,20 @@ struct DetailedProgressContent: View { value: "\(progress.currentHeight)", icon: "arrow.up.square" ) - + ProgressStatView( title: "Target Height", value: "\(progress.totalHeight)", icon: "flag.checkered" ) - + ProgressStatView( title: "Connected Peers", value: "\(progress.connectedPeers)", icon: "network" ) } - + // ETA and Duration HStack(spacing: 30) { VStack(spacing: 4) { @@ -185,7 +185,7 @@ struct DetailedProgressContent: View { .font(.headline) .monospacedDigit() } - + VStack(spacing: 4) { Label("Sync Duration", systemImage: "timer") .font(.caption) @@ -209,13 +209,13 @@ struct CircularProgressView: View { let progress: Double let formattedPercentage: String let speed: String - + var body: some View { ZStack { // Background circle Circle() .stroke(Color.gray.opacity(0.2), lineWidth: 20) - + // Progress circle Circle() .trim(from: 0, to: progress) @@ -229,14 +229,14 @@ struct CircularProgressView: View { ) .rotationEffect(.degrees(-90)) .animation(.easeInOut(duration: 0.5), value: progress) - + // Center content VStack(spacing: 8) { Text(formattedPercentage) .font(.largeTitle) .fontWeight(.bold) .monospacedDigit() - + Text(speed) .font(.caption) .foregroundColor(.secondary) @@ -251,17 +251,17 @@ struct ProgressStatView: View { let title: String let value: String let icon: String - + var body: some View { VStack(spacing: 8) { Image(systemName: icon) .font(.title2) .foregroundColor(.accentColor) - + Text(value) .font(.headline) .monospacedDigit() - + Text(title) .font(.caption) .foregroundColor(.secondary) @@ -274,32 +274,32 @@ struct ProgressStatView: View { struct StartSyncContent: View { @Binding var useCallbackSync: Bool let onStart: () -> Void - + var body: some View { VStack(spacing: 30) { Image(systemName: "arrow.triangle.2.circlepath.circle") .font(.system(size: 100)) .foregroundColor(.accentColor) .symbolEffect(.pulse) - + VStack(spacing: 12) { Text("Ready to Sync") .font(.largeTitle) .fontWeight(.bold) - + Text("Synchronize your wallet with the Dash blockchain to see your latest balance and transactions") .font(.body) .foregroundColor(.secondary) .multilineTextAlignment(.center) .frame(maxWidth: 400) } - + // Sync Method Toggle VStack(spacing: 12) { Toggle("Use Callback-based Sync", isOn: $useCallbackSync) .toggleStyle(.switch) .frame(width: 250) - + Text(useCallbackSync ? 
"Real-time updates via callbacks" : "Stream-based async iteration") .font(.caption) .foregroundColor(.secondary) @@ -307,7 +307,7 @@ struct StartSyncContent: View { .padding() .background(Color(PlatformColor.secondarySystemBackground)) .cornerRadius(8) - + Button(action: onStart) { Label("Start Sync", systemImage: "play.circle.fill") .font(.headline) @@ -322,7 +322,7 @@ struct StartSyncContent: View { struct LegacyProgressContent: View { let progress: SyncProgress - + var body: some View { VStack(spacing: 20) { // Status Icon @@ -330,23 +330,23 @@ struct LegacyProgressContent: View { .font(.system(size: 60)) .foregroundColor(statusColor(for: progress.status)) .symbolEffect(.pulse, isActive: progress.status.isActive) - + // Status Text Text(progress.status.description) .font(.title2) .fontWeight(.medium) - + // Progress Bar VStack(alignment: .leading, spacing: 8) { ProgressView(value: progress.progress) .progressViewStyle(.linear) - + HStack { Text("\(progress.percentageComplete)%") .monospacedDigit() - + Spacer() - + if let eta = progress.formattedTimeRemaining { Text("ETA: \(eta)") } @@ -355,7 +355,7 @@ struct LegacyProgressContent: View { .foregroundColor(.secondary) } .frame(maxWidth: 400) - + // Message if let message = progress.message { Text(message) @@ -365,7 +365,7 @@ struct LegacyProgressContent: View { } } } - + private func statusIcon(for status: SyncStatus) -> String { switch status { case .idle: @@ -384,7 +384,7 @@ struct LegacyProgressContent: View { return "exclamationmark.triangle.fill" } } - + private func statusColor(for status: SyncStatus) -> Color { switch status { case .idle: @@ -403,13 +403,13 @@ struct LegacyProgressContent: View { struct DetailedStatisticsView: View { let statistics: [String: String] - + var body: some View { VStack(alignment: .leading, spacing: 12) { Label("Detailed Statistics", systemImage: "chart.line.uptrend.xyaxis") .font(.headline) .padding(.bottom, 8) - + LazyVGrid(columns: [ GridItem(.flexible()), GridItem(.flexible()), @@ -420,7 +420,7 @@ struct DetailedStatisticsView: View { Text(key) .font(.caption) .foregroundColor(.secondary) - + Text(value) .font(.body) .fontWeight(.medium) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift index d9c9d3499..91988f8fc 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift @@ -4,12 +4,12 @@ import CoreImage.CIFilterBuiltins struct ReceiveAddressView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + let account: HDAccount @State private var currentAddress: HDWatchedAddress? @State private var isCopied = false @State private var showNewAddressConfirm = false - + var body: some View { NavigationView { VStack(spacing: 20) { @@ -18,18 +18,18 @@ struct ReceiveAddressView: View { QRCodeView(content: address.address) .frame(width: 200, height: 200) .cornerRadius(12) - + // Address Display VStack(spacing: 12) { Text("Your Dash Address") .font(.headline) .foregroundColor(.secondary) - + HStack { Text(address.address) .font(.system(.body, design: .monospaced)) .textSelection(.enabled) - + Button(action: copyAddress) { Image(systemName: isCopied ? "checkmark.circle.fill" : "doc.on.doc") .foregroundColor(isCopied ? 
.green : .accentColor) @@ -39,14 +39,14 @@ struct ReceiveAddressView: View { .padding() .background(Color.secondary.opacity(0.1)) .cornerRadius(8) - + // Derivation Path Text(address.derivationPath) .font(.caption) .foregroundColor(.secondary) .fontDesign(.monospaced) } - + // Address Info VStack(spacing: 8) { if address.transactionIds.isEmpty { @@ -58,32 +58,32 @@ struct ReceiveAddressView: View { .font(.caption) .foregroundColor(.orange) } - + if let balance = address.balance { Text("Balance: \(balance.formattedTotal)") .font(.caption) .monospacedDigit() } } - + Spacer() - + // Generate New Address Button Button("Generate New Address") { showNewAddressConfirm = true } .disabled(address.transactionIds.isEmpty) - + } else { // No address available VStack(spacing: 20) { Image(systemName: "qrcode") .font(.system(size: 60)) .foregroundColor(.secondary) - + Text("No receive address available") .font(.title3) - + Button("Generate Address") { generateNewAddress() } @@ -113,23 +113,23 @@ struct ReceiveAddressView: View { Text("The current address has been used. Generate a new address for better privacy?") } } - + private func copyAddress() { guard let address = currentAddress ?? account.receiveAddress else { return } - + Clipboard.copy(address.address) - + withAnimation { isCopied = true } - + DispatchQueue.main.asyncAfter(deadline: .now() + 2) { withAnimation { isCopied = false } } } - + private func generateNewAddress() { do { let newAddress = try walletService.generateNewAddress(for: account, isChange: false) @@ -144,13 +144,13 @@ struct ReceiveAddressView: View { struct QRCodeView: View { let content: String - + #if os(iOS) @State private var qrImage: UIImage? #elseif os(macOS) @State private var qrImage: NSImage? #endif - + var body: some View { Group { if let image = qrImage { @@ -173,18 +173,18 @@ struct QRCodeView: View { generateQRCode() } } - + private func generateQRCode() { let context = CIContext() let filter = CIFilter.qrCodeGenerator() - + filter.message = Data(content.utf8) filter.correctionLevel = "M" - + guard let outputImage = filter.outputImage else { return } - + let scaledImage = outputImage.transformed(by: CGAffineTransform(scaleX: 10, y: 10)) - + if let cgImage = context.createCGImage(scaledImage, from: scaledImage.extent) { #if os(iOS) qrImage = UIImage(cgImage: cgImage) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift index 92d34535d..d29c2c2ff 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift @@ -4,9 +4,9 @@ import SwiftDashCoreSDK struct SendTransactionView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + let account: HDAccount - + @State private var recipientAddress = "" @State private var amountString = "" @State private var feeRate: UInt64 = 1000 @@ -14,20 +14,20 @@ struct SendTransactionView: View { @State private var isSending = false @State private var errorMessage = "" @State private var successTxid = "" - + private var amount: UInt64? { guard let dash = Double(amountString) else { return nil } return UInt64(dash * 100_000_000) } - + private var availableBalance: UInt64 { account.balance?.available ?? 0 } - + private var totalAmount: UInt64 { (amount ?? 
0) + estimatedFee } - + private var isValid: Bool { !recipientAddress.isEmpty && amount != nil && @@ -35,7 +35,7 @@ struct SendTransactionView: View { totalAmount <= availableBalance && walletService.sdk?.validateAddress(recipientAddress) ?? false } - + var body: some View { NavigationView { Form { @@ -49,7 +49,7 @@ struct SendTransactionView: View { .fontWeight(.medium) } } - + // Recipient Section Section("Recipient") { TextField("Dash Address", text: $recipientAddress) @@ -58,14 +58,14 @@ struct SendTransactionView: View { .onChange(of: recipientAddress) { _ in validateAddress() } - + if !recipientAddress.isEmpty && !(walletService.sdk?.validateAddress(recipientAddress) ?? false) { Label("Invalid Dash address", systemImage: "exclamationmark.circle") .foregroundColor(.red) .font(.caption) } } - + // Amount Section Section("Amount") { HStack { @@ -74,10 +74,10 @@ struct SendTransactionView: View { .onChange(of: amountString) { _ in updateEstimatedFee() } - + Text("DASH") .foregroundColor(.secondary) - + Button("Max") { setMaxAmount() } @@ -87,7 +87,7 @@ struct SendTransactionView: View { .buttonStyle(.link) #endif } - + if let amount = amount { HStack { Text("Amount in satoshis") @@ -99,7 +99,7 @@ struct SendTransactionView: View { .font(.caption) } } - + // Fee Section Section("Network Fee") { Picker("Fee Rate", selection: $feeRate) { @@ -110,7 +110,7 @@ struct SendTransactionView: View { .onChange(of: feeRate) { _ in updateEstimatedFee() } - + HStack { Text("Estimated Fee") Spacer() @@ -118,7 +118,7 @@ struct SendTransactionView: View { .monospacedDigit() } } - + // Summary Section Section("Summary") { HStack { @@ -129,14 +129,14 @@ struct SendTransactionView: View { .monospacedDigit() .fontWeight(.medium) } - + if totalAmount > availableBalance { Label("Insufficient balance", systemImage: "exclamationmark.triangle") .foregroundColor(.red) .font(.caption) } } - + // Error/Success Messages if !errorMessage.isEmpty { Section { @@ -144,13 +144,13 @@ struct SendTransactionView: View { .foregroundColor(.red) } } - + if !successTxid.isEmpty { Section("Transaction Sent") { VStack(alignment: .leading, spacing: 8) { Label("Transaction broadcast successfully", systemImage: "checkmark.circle.fill") .foregroundColor(.green) - + HStack { Text("Transaction ID:") .font(.caption) @@ -170,7 +170,7 @@ struct SendTransactionView: View { dismiss() } } - + ToolbarItem(placement: .confirmationAction) { Button("Send") { sendTransaction() @@ -183,17 +183,17 @@ struct SendTransactionView: View { .frame(width: 500, height: 600) #endif } - + private func validateAddress() { errorMessage = "" } - + private func updateEstimatedFee() { guard let amount = amount, amount > 0 else { estimatedFee = 0 return } - + Task { do { estimatedFee = try await walletService.sdk?.estimateFee( @@ -207,47 +207,47 @@ struct SendTransactionView: View { } } } - + private func setMaxAmount() { // Calculate max amount (balance - estimated fee) let maxAmount = availableBalance > estimatedFee ? 
availableBalance - estimatedFee : 0 let dash = Double(maxAmount) / 100_000_000.0 amountString = String(format: "%.8f", dash) } - + private func sendTransaction() { guard let amount = amount, isValid else { return } - + isSending = true errorMessage = "" - + Task { do { guard let sdk = walletService.sdk else { throw WalletError.notConnected } - + let txid = try await sdk.sendTransaction( to: recipientAddress, amount: amount, feeRate: feeRate ) - + successTxid = txid - + // Clear form after success DispatchQueue.main.asyncAfter(deadline: .now() + 3) { dismiss() } - + } catch { errorMessage = error.localizedDescription } - + isSending = false } } - + private func formatDash(_ satoshis: UInt64) -> String { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift index ba0c04387..c86944f0a 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift @@ -8,7 +8,7 @@ struct SettingsView: View { @State private var showingResetConfirmation = false @State private var showingResetAlert = false @State private var resetMessage = "" - + var body: some View { NavigationView { Form { @@ -19,7 +19,7 @@ struct SettingsView: View { Label("Reset All Data", systemImage: "trash") } } - + Section("About") { HStack { Text("Version") @@ -27,7 +27,7 @@ struct SettingsView: View { Text("1.0.0") .foregroundColor(.secondary) } - + HStack { Text("Build") Spacer() @@ -67,7 +67,7 @@ struct SettingsView: View { } } } - + private func resetAllData() { do { // Delete all SwiftData models @@ -79,13 +79,13 @@ struct SettingsView: View { try modelContext.delete(model: SwiftDashCoreSDK.Balance.self) try modelContext.delete(model: SwiftDashCoreSDK.WatchedAddress.self) try modelContext.delete(model: SyncState.self) - + // Save the context try modelContext.save() - + // Clean up the persistent store ModelContainerHelper.cleanupCorruptStore() - + resetMessage = "All data has been reset. The app will now restart." 
showingResetAlert = true } catch { diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift index ded504601..313a35e67 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift @@ -4,9 +4,9 @@ import SwiftDashCoreSDK struct SyncProgressView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.dismiss) private var dismiss - + @State private var hasStarted = false - + var body: some View { NavigationView { VStack(spacing: 20) { @@ -18,23 +18,23 @@ struct SyncProgressView: View { .font(.system(size: 60)) .foregroundColor(statusColor(for: progress.status)) .symbolEffect(.pulse, isActive: progress.status.isActive) - + // Status Text Text(progress.status.description) .font(.title2) .fontWeight(.medium) - + // Progress Bar VStack(alignment: .leading, spacing: 8) { ProgressView(value: progress.progress) .progressViewStyle(.linear) - + HStack { Text("\(progress.percentageComplete)%") .monospacedDigit() - + Spacer() - + if let eta = progress.formattedTimeRemaining { Text("ETA: \(eta)") } @@ -43,14 +43,14 @@ struct SyncProgressView: View { .foregroundColor(.secondary) } .frame(maxWidth: 400) - + // Block Progress BlockProgressView( current: progress.currentHeight, total: progress.totalHeight, remaining: progress.blocksRemaining ) - + // Message if let message = progress.message { Text(message) @@ -65,17 +65,17 @@ struct SyncProgressView: View { Image(systemName: "arrow.triangle.2.circlepath.circle") .font(.system(size: 80)) .foregroundColor(.blue) - + Text("Ready to Sync") .font(.title2) .fontWeight(.medium) - + Text("This will synchronize your wallet with the Dash blockchain") .font(.body) .foregroundColor(.secondary) .multilineTextAlignment(.center) .frame(maxWidth: 300) - + Button("Start Sync") { Task { do { @@ -86,7 +86,7 @@ struct SyncProgressView: View { } else { print("⚠️ No stats available") } - + startSync() } catch { print("Failed to test SDK: \(error)") @@ -101,7 +101,7 @@ struct SyncProgressView: View { ProgressView("Starting sync...") .progressViewStyle(.circular) } - + // Network Stats if let stats = walletService.sdk?.stats { NetworkStatsView(stats: stats) @@ -126,14 +126,14 @@ struct SyncProgressView: View { .frame(width: 600, height: 500) #endif } - + private func startSync() { hasStarted = true Task { try? 
await walletService.startSync() } } - + private func statusIcon(for status: SyncStatus) -> String { switch status { case .idle: @@ -152,7 +152,7 @@ struct SyncProgressView: View { return "exclamationmark.triangle.fill" } } - + private func statusColor(for status: SyncStatus) -> Color { switch status { case .idle: @@ -173,7 +173,7 @@ struct BlockProgressView: View { let current: UInt32 let total: UInt32 let remaining: UInt32 - + var body: some View { VStack(spacing: 12) { HStack(spacing: 20) { @@ -182,13 +182,13 @@ struct BlockProgressView: View { value: "\(current)", icon: "cube" ) - + BlockStatView( label: "Total Blocks", value: "\(total)", icon: "cube.fill" ) - + BlockStatView( label: "Remaining", value: "\(remaining)", @@ -206,17 +206,17 @@ struct BlockStatView: View { let label: String let value: String let icon: String - + var body: some View { VStack(spacing: 4) { Image(systemName: icon) .font(.title2) .foregroundColor(.blue) - + Text(value) .font(.headline) .monospacedDigit() - + Text(label) .font(.caption) .foregroundColor(.secondary) @@ -228,29 +228,29 @@ struct BlockStatView: View { struct NetworkStatsView: View { let stats: SPVStats - + var body: some View { VStack(alignment: .leading, spacing: 8) { Text("Network Statistics") .font(.caption) .foregroundColor(.secondary) - + HStack(spacing: 20) { StatItemView( label: "Peers", value: "\(stats.connectedPeers)/\(stats.totalPeers)" ) - + StatItemView( label: "Downloaded", value: stats.formattedBytesReceived ) - + StatItemView( label: "Uploaded", value: stats.formattedBytesSent ) - + StatItemView( label: "Uptime", value: stats.formattedUptime @@ -266,13 +266,13 @@ struct NetworkStatsView: View { struct StatItemView: View { let label: String let value: String - + var body: some View { VStack(alignment: .leading, spacing: 2) { Text(label) .font(.caption2) .foregroundColor(.secondary) - + Text(value) .font(.caption) .fontWeight(.medium) diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift index 5ad01ed35..3d370c7bf 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift @@ -5,7 +5,7 @@ import SwiftDashCoreSDK struct WalletDetailView: View { @EnvironmentObject private var walletService: WalletService @Environment(\.modelContext) private var modelContext - + let wallet: HDWallet @State private var selectedAccount: HDAccount? @State private var showCreateAccount = false @@ -16,7 +16,7 @@ struct WalletDetailView: View { @State private var lastSyncProgress: SyncProgress? 
// Store last sync state @State private var showConnectionError = false @State private var connectionError: String = "" - + var body: some View { #if os(iOS) Group { @@ -43,7 +43,7 @@ struct WalletDetailView: View { isConnected: walletService.isConnected && walletService.activeWallet == wallet, isSyncing: walletService.isSyncing ) - + // Sync and View Results Buttons if walletService.isConnected && walletService.activeWallet == wallet { // View Sync Results Button (shown when sync was completed) @@ -52,11 +52,11 @@ struct WalletDetailView: View { Label("View Last Sync", systemImage: "clock.arrow.circlepath") } } - + // Main Sync Button - Button(action: { + Button(action: { syncWasCompleted = false // Reset on new sync - showSyncProgress = true + showSyncProgress = true }) { Label("Sync", systemImage: "arrow.triangle.2.circlepath") } @@ -109,7 +109,7 @@ struct WalletDetailView: View { onCreateAccount: { showCreateAccount = true } ) .frame(minWidth: 200, idealWidth: 250) - + // Account Detail if let account = selectedAccount { AccountDetailView(account: account) @@ -126,7 +126,7 @@ struct WalletDetailView: View { isConnected: walletService.isConnected && walletService.activeWallet == wallet, isSyncing: walletService.isSyncing ) - + // Sync and View Results Buttons if walletService.isConnected && walletService.activeWallet == wallet { // View Sync Results Button (shown when sync was completed) @@ -135,11 +135,11 @@ struct WalletDetailView: View { Label("View Last Sync", systemImage: "clock.arrow.circlepath") } } - + // Main Sync Button - Button(action: { + Button(action: { syncWasCompleted = false // Reset on new sync - showSyncProgress = true + showSyncProgress = true }) { Label("Sync", systemImage: "arrow.triangle.2.circlepath") } @@ -187,7 +187,7 @@ struct WalletDetailView: View { // Monitor sync completion if let progress = newValue { lastSyncProgress = progress - + // Check if sync just completed if progress.status == .synced && oldValue?.status != .synced { syncWasCompleted = true @@ -204,10 +204,10 @@ struct WalletDetailView: View { } #endif } - + private func connectWallet() { guard let firstAccount = wallet.accounts.first else { return } - + isConnecting = true Task { do { @@ -232,7 +232,7 @@ struct AccountListView: View { let wallet: HDWallet @Binding var selectedAccount: HDAccount? 
let onCreateAccount: () -> Void - + var body: some View { #if os(iOS) List { @@ -243,7 +243,7 @@ struct AccountListView: View { } } } - + Section { Button(action: onCreateAccount) { Label("Add Account", systemImage: "plus.circle") @@ -259,7 +259,7 @@ struct AccountListView: View { .tag(account) } } - + Section { Button(action: onCreateAccount) { Label("Add Account", systemImage: "plus.circle") @@ -275,17 +275,17 @@ struct AccountListView: View { struct AccountRowView: View { let account: HDAccount - + var body: some View { VStack(alignment: .leading, spacing: 4) { Text(account.displayName) .font(.headline) - + Text(account.derivationPath) .font(.caption) .foregroundColor(.secondary) .fontDesign(.monospaced) - + if let balance = account.balance { Text(balance.formattedTotal) .font(.caption) @@ -305,7 +305,7 @@ struct EmptyAccountView: View { Image(systemName: "person.crop.circle.dashed") .font(.system(size: 80)) .foregroundColor(.secondary) - + Text("No Account Selected") .font(.title2) .foregroundColor(.secondary) @@ -319,17 +319,17 @@ struct EmptyAccountView: View { struct ConnectionStatusView: View { let isConnected: Bool let isSyncing: Bool - + var body: some View { HStack(spacing: 8) { Circle() .fill(statusColor) .frame(width: 8, height: 8) - + Text(statusText) .font(.caption) .foregroundColor(.secondary) - + if isSyncing { ProgressView() .scaleEffect(0.7) @@ -340,7 +340,7 @@ struct ConnectionStatusView: View { .background(Color.secondary.opacity(0.1)) .cornerRadius(6) } - + private var statusColor: Color { if isSyncing { return .orange @@ -350,7 +350,7 @@ struct ConnectionStatusView: View { return .red } } - + private var statusText: String { if isSyncing { return "Syncing" diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift index fad2517b6..252c86766 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift @@ -3,7 +3,7 @@ import SwiftDashCoreSDK struct WatchStatusView: View { let status: WatchVerificationStatus - + var body: some View { HStack { switch status { @@ -42,7 +42,7 @@ struct WatchStatusView: View { struct WatchErrorsView: View { let errors: [WatchAddressError] let pendingCount: Int - + var body: some View { if !errors.isEmpty || pendingCount > 0 { VStack(alignment: .leading, spacing: 8) { @@ -54,7 +54,7 @@ struct WatchErrorsView: View { .font(.caption) } } - + ForEach(Array(errors.prefix(3).enumerated()), id: \.offset) { _, error in HStack { Image(systemName: "exclamationmark.circle.fill") @@ -65,7 +65,7 @@ struct WatchErrorsView: View { .lineLimit(2) } } - + if errors.count > 3 { Text("And \(errors.count - 3) more errors...") .font(.caption) @@ -86,7 +86,7 @@ struct WatchErrorsView: View { WatchStatusView(status: .verified(total: 20, watching: 20)) WatchStatusView(status: .verified(total: 20, watching: 15)) WatchStatusView(status: .failed(error: "Network error")) - + WatchErrorsView( errors: [ WatchAddressError.networkError("Connection timeout"), diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh index 0dde8ed1f..f14771aef 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh +++ 
b/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh @@ -13,10 +13,10 @@ if [ -d "$SIMULATOR_DIR" ]; then find "$SIMULATOR_DIR" -name "*.store" -type f -exec rm -f {} \; 2>/dev/null find "$SIMULATOR_DIR" -name "*.store-shm" -type f -exec rm -f {} \; 2>/dev/null find "$SIMULATOR_DIR" -name "*.store-wal" -type f -exec rm -f {} \; 2>/dev/null - + # Remove SwiftData directories find "$SIMULATOR_DIR" -name "SwiftData" -type d -exec rm -rf {} \; 2>/dev/null - + echo "✅ Cleanup completed!" echo "" echo "Please rebuild and run your app in the simulator." diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh index 440dfa25e..18c699444 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh @@ -18,7 +18,7 @@ mkdir -p "$PROJECT_ROOT/target/ios-simulator-universal/release" if [ -f "$SCRIPT_DIR/libdash_spv_ffi.a" ]; then echo "Creating symlink in target/release..." ln -sf "$SCRIPT_DIR/libdash_spv_ffi.a" "$PROJECT_ROOT/target/release/libdash_spv_ffi.a" - + echo "Creating symlink in ios-simulator-universal..." ln -sf "$SCRIPT_DIR/libdash_spv_ffi.a" "$PROJECT_ROOT/target/ios-simulator-universal/release/libdash_spv_ffi.a" fi @@ -27,7 +27,7 @@ fi if [ -f "$SCRIPT_DIR/libdash_spv_ffi_sim.a" ]; then echo "Creating symlink in aarch64-apple-ios-sim..." ln -sf "$SCRIPT_DIR/libdash_spv_ffi_sim.a" "$PROJECT_ROOT/target/aarch64-apple-ios-sim/release/libdash_spv_ffi.a" - + echo "Creating symlink in x86_64-apple-ios..." ln -sf "$SCRIPT_DIR/libdash_spv_ffi_sim.a" "$PROJECT_ROOT/target/x86_64-apple-ios/release/libdash_spv_ffi.a" fi diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift index 9e4b5e7a5..f647e9fd1 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift @@ -7,14 +7,14 @@ import Foundation // Try to load the library dynamically if let handle = dlopen("libdash_spv_ffi.a", RTLD_NOW) { print("✅ Successfully loaded libdash_spv_ffi.a") - + // Try to find a symbol if let symbol = dlsym(handle, "dash_spv_ffi_client_new") { print("✅ Found symbol: dash_spv_ffi_client_new") } else { print("❌ Could not find symbol: dash_spv_ffi_client_new") } - + dlclose(handle) } else { print("❌ Could not load libdash_spv_ffi.a") @@ -30,7 +30,7 @@ let libraryPath = "\(currentPath)/libdash_spv_ffi.a" if fileManager.fileExists(atPath: libraryPath) { print("✅ Library file exists at: \(libraryPath)") - + // Get file attributes if let attrs = try? fileManager.attributesOfItem(atPath: libraryPath) { if let size = attrs[.size] as? 
Int { diff --git a/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift b/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift index 3035c47c9..b9c45582c 100644 --- a/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift +++ b/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift @@ -5,29 +5,29 @@ struct ContentView: View { @StateObject private var viewModel = WalletViewModel() @State private var showAddAddress = false @State private var showSendTransaction = false - + var body: some View { NavigationView { List { // Connection Status ConnectionSection(viewModel: viewModel) - + // Balance Section if viewModel.isConnected { BalanceSection(balance: viewModel.totalBalance) - + // Sync Progress if let progress = viewModel.syncProgress { SyncProgressSection(progress: progress) } - + // Watched Addresses WatchedAddressesSection( addresses: Array(viewModel.watchedAddresses), onAdd: { showAddAddress = true }, onRemove: viewModel.unwatchAddress ) - + // Recent Transactions TransactionsSection(transactions: viewModel.recentTransactions) } @@ -39,19 +39,19 @@ struct ContentView: View { Button("Add Address") { showAddAddress = true } - + Button("Send Transaction") { showSendTransaction = true } - + Button("Refresh") { Task { await viewModel.refreshData() } } - + Divider() - + Button("Export Wallet Data") { Task { await viewModel.exportWallet() @@ -82,7 +82,7 @@ struct ContentView: View { struct ConnectionSection: View { @ObservedObject var viewModel: WalletViewModel - + var body: some View { Section("Connection") { HStack { @@ -96,7 +96,7 @@ struct ConnectionSection: View { .foregroundColor(.red) } } - + if viewModel.isConnected { if let stats = viewModel.stats { HStack { @@ -104,7 +104,7 @@ struct ConnectionSection: View { Spacer() Text("\(stats.connectedPeers)") } - + HStack { Text("Block Height") Spacer() @@ -126,7 +126,7 @@ struct ConnectionSection: View { struct BalanceSection: View { let balance: Balance - + var body: some View { Section("Balance") { VStack(alignment: .leading, spacing: 8) { @@ -138,7 +138,7 @@ struct BalanceSection: View { .font(.headline) .monospacedDigit() } - + HStack { Text("Available") .foregroundColor(.secondary) @@ -147,7 +147,7 @@ struct BalanceSection: View { .foregroundColor(.secondary) .monospacedDigit() } - + if balance.pending > 0 { HStack { Text("Pending") @@ -158,7 +158,7 @@ struct BalanceSection: View { .monospacedDigit() } } - + if balance.instantLocked > 0 { HStack { Text("InstantSend") @@ -173,7 +173,7 @@ struct BalanceSection: View { .padding(.vertical, 4) } } - + private func formatDash(_ satoshis: UInt64) -> String { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) @@ -184,7 +184,7 @@ struct BalanceSection: View { struct SyncProgressSection: View { let progress: SyncProgress - + var body: some View { Section("Sync Progress") { VStack(alignment: .leading, spacing: 8) { @@ -193,16 +193,16 @@ struct SyncProgressSection: View { Spacer() Text("\(progress.percentageComplete)%") } - + ProgressView(value: progress.progress) - + HStack { Text("Block \(progress.currentHeight) of \(progress.totalHeight)") .font(.caption) .foregroundColor(.secondary) - + Spacer() - + if let eta = progress.formattedTimeRemaining { Text("ETA: \(eta)") .font(.caption) @@ -221,7 +221,7 @@ struct WatchedAddressesSection: View { let addresses: [String] let onAdd: () -> Void let onRemove: (String) async -> Void - + var body: some View { Section("Watched Addresses") { if addresses.isEmpty { @@ -247,13 +247,13 @@ 
struct WatchedAddressesSection: View { } } } - + Button(action: onAdd) { Label("Add Address", systemImage: "plus.circle") } } } - + private func shortenAddress(_ address: String) -> String { guard address.count > 12 else { return address } let prefix = address.prefix(8) @@ -266,7 +266,7 @@ struct WatchedAddressesSection: View { struct TransactionsSection: View { let transactions: [Transaction] - + var body: some View { Section("Recent Transactions") { if transactions.isEmpty { @@ -283,38 +283,38 @@ struct TransactionsSection: View { struct TransactionRow: View { let transaction: Transaction - + var body: some View { HStack { VStack(alignment: .leading, spacing: 4) { Text(shortenTxid(transaction.txid)) .font(.system(.caption, design: .monospaced)) - + Text(transaction.timestamp, style: .relative) .font(.caption) .foregroundColor(.secondary) } - + Spacer() - + VStack(alignment: .trailing, spacing: 4) { Text(formatAmount(transaction.amount)) .font(.system(.body, design: .monospaced)) .foregroundColor(transaction.amount >= 0 ? .green : .red) - + StatusBadge(status: transaction.status) } } .padding(.vertical, 2) } - + private func shortenTxid(_ txid: String) -> String { guard txid.count > 12 else { return txid } let prefix = txid.prefix(6) let suffix = txid.suffix(4) return "\(prefix)...\(suffix)" } - + private func formatAmount(_ satoshis: Int64) -> String { let dash = Double(abs(satoshis)) / 100_000_000.0 let sign = satoshis >= 0 ? "+" : "-" @@ -324,7 +324,7 @@ struct TransactionRow: View { struct StatusBadge: View { let status: TransactionStatus - + var body: some View { Text(status.description) .font(.caption2) @@ -334,7 +334,7 @@ struct StatusBadge: View { .foregroundColor(.white) .cornerRadius(4) } - + private var backgroundColor: Color { switch status { case .pending: @@ -354,10 +354,10 @@ struct StatusBadge: View { struct AddAddressView: View { @ObservedObject var viewModel: WalletViewModel @Environment(\.dismiss) var dismiss - + @State private var address = "" @State private var label = "" - + var body: some View { NavigationView { Form { @@ -365,10 +365,10 @@ struct AddAddressView: View { TextField("Dash Address", text: $address) .autocapitalization(.none) .disableAutocorrection(true) - + TextField("Label (Optional)", text: $label) } - + Section { Button("Add Address") { Task { @@ -397,11 +397,11 @@ struct AddAddressView: View { struct SendTransactionView: View { @ObservedObject var viewModel: WalletViewModel @Environment(\.dismiss) var dismiss - + @State private var recipientAddress = "" @State private var amount = "" @State private var estimatedFee: UInt64 = 0 - + var body: some View { NavigationView { Form { @@ -409,14 +409,14 @@ struct SendTransactionView: View { TextField("Recipient Address", text: $recipientAddress) .autocapitalization(.none) .disableAutocorrection(true) - + TextField("Amount (DASH)", text: $amount) .keyboardType(.decimalPad) .onChange(of: amount) { _ in updateEstimatedFee() } } - + Section("Fee") { HStack { Text("Estimated Fee") @@ -424,7 +424,7 @@ struct SendTransactionView: View { Text(formatDash(estimatedFee)) } } - + Section { Button("Send Transaction") { Task { @@ -445,11 +445,11 @@ struct SendTransactionView: View { } } } - + private func updateEstimatedFee() { guard let dashAmount = Double(amount) else { return } let satoshis = UInt64(dashAmount * 100_000_000) - + Task { estimatedFee = await viewModel.estimateFee( to: recipientAddress, @@ -457,19 +457,19 @@ struct SendTransactionView: View { ) } } - + private func sendTransaction() async { guard let 
dashAmount = Double(amount) else { return } let satoshis = UInt64(dashAmount * 100_000_000) - + await viewModel.sendTransaction( to: recipientAddress, amount: satoshis ) - + dismiss() } - + private func formatDash(_ satoshis: UInt64) -> String { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) diff --git a/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift b/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift index f555ebed0..ed3dae406 100644 --- a/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift +++ b/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift @@ -12,27 +12,27 @@ class WalletViewModel: ObservableObject { @Published var recentTransactions: [Transaction] = [] @Published var showError = false @Published var errorMessage = "" - + private var sdk: DashSDK? private var cancellables = Set() private var syncTask: Task? - + init() { setupSDK() } - + deinit { syncTask?.cancel() } - + // MARK: - Setup - + private func setupSDK() { do { // Use testnet for example let config = SPVClientConfiguration.testnet() sdk = try DashSDK(configuration: config) - + // Setup event handling sdk?.eventPublisher .receive(on: DispatchQueue.main) @@ -40,214 +40,214 @@ class WalletViewModel: ObservableObject { self?.handleEvent(event) } .store(in: &cancellables) - + } catch { showError(error) } } - + // MARK: - Connection - + func connect() async { do { guard let sdk = sdk else { return } - + try await sdk.connect() isConnected = true - + // Start monitoring startMonitoring() - + // Load initial data await refreshData() - + } catch { showError(error) } } - + func disconnect() async { do { guard let sdk = sdk else { return } - + stopMonitoring() try await sdk.disconnect() isConnected = false - + // Clear data syncProgress = nil stats = nil - + } catch { showError(error) } } - + // MARK: - Wallet Operations - + func watchAddress(_ address: String, label: String?) async { do { guard let sdk = sdk else { return } - + try await sdk.watchAddress(address, label: label) watchedAddresses.insert(address) - + // Refresh balance await updateBalance() - + } catch { showError(error) } } - + func unwatchAddress(_ address: String) async { do { guard let sdk = sdk else { return } - + try await sdk.unwatchAddress(address) watchedAddresses.remove(address) - + // Refresh balance await updateBalance() - + } catch { showError(error) } } - + func sendTransaction(to address: String, amount: UInt64) async { do { guard let sdk = sdk else { return } - + let txid = try await sdk.sendTransaction( to: address, amount: amount ) - + // Show success errorMessage = "Transaction sent! 
TXID: \(txid)" showError = true - + // Refresh data await refreshData() - + } catch { showError(error) } } - + func estimateFee(to address: String, amount: UInt64) async -> UInt64 { do { guard let sdk = sdk else { return 0 } - + return try await sdk.estimateFee( to: address, amount: amount ) - + } catch { return 0 } } - + // MARK: - Data Management - + func refreshData() async { await updateBalance() await updateTransactions() await updateStats() } - + private func updateBalance() async { do { guard let sdk = sdk else { return } - + totalBalance = try await sdk.getBalance() - + } catch { print("Failed to update balance: \(error)") } } - + private func updateTransactions() async { do { guard let sdk = sdk else { return } - + recentTransactions = try await sdk.getTransactions(limit: 20) - + } catch { print("Failed to update transactions: \(error)") } } - + private func updateStats() async { guard let sdk = sdk else { return } - + stats = sdk.stats syncProgress = sdk.syncProgress } - + func exportWallet() async { do { guard let sdk = sdk else { return } - + let exportData = try sdk.exportWalletData() - + // In a real app, you would save this to a file errorMessage = "Wallet data exported (\(exportData.formattedSize))" showError = true - + } catch { showError(error) } } - + // MARK: - Monitoring - + private func startMonitoring() { syncTask = Task { while !Task.isCancelled { await updateStats() - + try? await Task.sleep(nanoseconds: 1_000_000_000) // 1 second } } } - + private func stopMonitoring() { syncTask?.cancel() syncTask = nil } - + // MARK: - Event Handling - + private func handleEvent(_ event: SPVEvent) { switch event { case .blockReceived(let height, let hash): print("New block: \(height) - \(hash)") - + case .transactionReceived(let txid, let confirmed): print("Transaction: \(txid) - Confirmed: \(confirmed)") Task { await updateTransactions() } - + case .balanceUpdated(let balance): self.totalBalance = balance - + case .syncProgressUpdated(let progress): self.syncProgress = progress - + case .connectionStatusChanged(let connected): self.isConnected = connected - + case .error(let error): showError(error) } } - + // MARK: - Error Handling - + private func showError(_ error: Error) { if let dashError = error as? DashSDKError { errorMessage = dashError.localizedDescription diff --git a/swift-dash-core-sdk/Sources/KeyWalletFFI/include/key_wallet_ffi.h b/swift-dash-core-sdk/Sources/KeyWalletFFI/include/key_wallet_ffi.h index 344fa14f8..e52ab01a9 100644 --- a/swift-dash-core-sdk/Sources/KeyWalletFFI/include/key_wallet_ffi.h +++ b/swift-dash-core-sdk/Sources/KeyWalletFFI/include/key_wallet_ffi.h @@ -1,9 +1,9 @@ /** * Key Wallet FFI - C Header File - * + * * This header provides C-compatible function declarations for the key-wallet * Rust library FFI bindings. 
- * + * * AUTO-GENERATED FILE - DO NOT EDIT * Generated using cbindgen */ diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift index d2ab4c76c..f387fa86e 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift @@ -4,47 +4,47 @@ actor AsyncBridge { private var progressContinuations: [UUID: AsyncThrowingStream.Continuation] = [:] private var completionContinuations: [UUID: CheckedContinuation] = [:] private var dataContinuations: [UUID: CheckedContinuation] = [:] - + // MARK: - Progress Stream - + func syncProgressStream( operation: @escaping (UUID, @escaping (Double, String?) -> Void, @escaping (Bool, String?) -> Void) -> T ) -> (T, AsyncThrowingStream) { let id = UUID() - + let stream = AsyncThrowingStream { continuation in self.addProgressContinuation(id: id, continuation: continuation) } - + let progressCallback: (Double, String?) -> Void = { [weak self] progress, message in Task { [weak self] in await self?.handleProgress(id: id, progress: progress, message: message) } } - + let completionCallback: (Bool, String?) -> Void = { [weak self] success, error in Task { [weak self] in await self?.handleProgressCompletion(id: id, success: success, error: error) } } - + let result = operation(id, progressCallback, completionCallback) - + return (result, stream) } - + // MARK: - Simple Async Operations - + func withAsyncCallback( operation: @escaping (@escaping (Bool, String?) -> Void) -> Void ) async throws { let id = UUID() - + try await withCheckedThrowingContinuation { continuation in Task { await self.addCompletionContinuation(id: id, continuation: continuation) } - + operation { [weak self] success, error in Task { [weak self] in await self?.handleCompletion(id: id, success: success, error: error) @@ -52,17 +52,17 @@ actor AsyncBridge { } } } - + func withDataCallback( operation: @escaping (@escaping (Data?, String?) -> Void) -> Void ) async throws -> Data { let id = UUID() - + return try await withCheckedThrowingContinuation { continuation in Task { await self.addDataContinuation(id: id, continuation: continuation) } - + operation { [weak self] data, error in Task { [weak self] in await self?.handleData(id: id, data: data, error: error) @@ -70,26 +70,26 @@ actor AsyncBridge { } } } - + // MARK: - Private Continuation Management - + private func addProgressContinuation(id: UUID, continuation: AsyncThrowingStream.Continuation) { progressContinuations[id] = continuation } - + private func addCompletionContinuation(id: UUID, continuation: CheckedContinuation) { completionContinuations[id] = continuation } - + private func addDataContinuation(id: UUID, continuation: CheckedContinuation) { dataContinuations[id] = continuation } - + // MARK: - Private Handlers - + private func handleProgress(id: UUID, progress: Double, message: String?) { guard let continuation = progressContinuations[id] else { return } - + let syncProgress = SyncProgress( currentHeight: 0, totalHeight: 0, @@ -97,13 +97,13 @@ actor AsyncBridge { status: .scanning, message: message ) - + continuation.yield(syncProgress) } - + private func handleProgressCompletion(id: UUID, success: Bool, error: String?) 
{ guard let continuation = progressContinuations.removeValue(forKey: id) else { return } - + if success { continuation.finish() } else { @@ -111,10 +111,10 @@ actor AsyncBridge { continuation.finish(throwing: err) } } - + private func handleCompletion(id: UUID, success: Bool, error: String?) { guard let continuation = completionContinuations.removeValue(forKey: id) else { return } - + if success { continuation.resume() } else { @@ -122,10 +122,10 @@ actor AsyncBridge { continuation.resume(throwing: err) } } - + private func handleData(id: UUID, data: Data?, error: String?) { guard let continuation = dataContinuations.removeValue(forKey: id) else { return } - + if let data = data { continuation.resume(returning: data) } else { @@ -133,20 +133,20 @@ actor AsyncBridge { continuation.resume(throwing: err) } } - + // MARK: - Cleanup - + func cancelAll() { for (_, continuation) in progressContinuations { continuation.finish(throwing: CancellationError()) } progressContinuations.removeAll() - + for (_, continuation) in completionContinuations { continuation.resume(throwing: CancellationError()) } completionContinuations.removeAll() - + for (_, continuation) in dataContinuations { continuation.resume(throwing: CancellationError()) } diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift index 22bb0e1a5..3b32da055 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift @@ -18,7 +18,7 @@ public enum DashSDKError: LocalizedError { case invalidArgument(String) case unknownError(String) case notImplemented(String) - + public var errorDescription: String? { switch self { case .invalidConfiguration(let message): @@ -59,7 +59,7 @@ public enum DashSDKError: LocalizedError { return "Not implemented: \(message)" } } - + public var recoverySuggestion: String? { switch self { case .invalidConfiguration: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFIBridge.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFIBridge.swift index 722060862..d7cf9e2f7 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFIBridge.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFIBridge.swift @@ -4,78 +4,78 @@ import DashSPVFFI // FFI types are imported directly from the C header internal enum FFIBridge { - + // MARK: - String Conversions - + static func toString(_ ffiString: FFIString?) -> String? { guard let ffiString = ffiString, let ptr = ffiString.ptr else { return nil } - + return String(cString: ptr) } - + static func fromString(_ string: String) -> UnsafePointer { return (string as NSString).utf8String! } - + // MARK: - Array Conversions - + static func toArray(_ ffiArray: FFIArray?) -> [T]? { guard let ffiArray = ffiArray, let data = ffiArray.data else { return nil } - + let count = Int(ffiArray.len) let buffer = data.bindMemory(to: T.self, capacity: count) let array = Array(UnsafeBufferPointer(start: buffer, count: count)) - + // Note: Caller is responsible for calling dash_spv_ffi_array_destroy return array } - + static func toDataArray(_ ffiArray: FFIArray?) -> [Data]? { guard let ffiArray = ffiArray, let data = ffiArray.data else { return nil } - + let count = Int(ffiArray.len) var result: [Data] = [] - + for i in 0.. String? 
{ guard let errorPtr = dash_spv_ffi_get_last_error() else { return nil } - + let error = String(cString: errorPtr) dash_spv_ffi_clear_error() return error } - + // MARK: - Callback Helpers - + // C callbacks that extract the Swift callback from userData static let progressCallbackWrapper: @convention(c) (Double, UnsafePointer?, UnsafeMutableRawPointer?) -> Void = { progress, message, userData in guard let userData = userData else { return } @@ -83,20 +83,20 @@ internal enum FFIBridge { let msg = message.map { String(cString: $0) } callback(progress, msg) } - + static let completionCallbackWrapper: @convention(c) (Bool, UnsafePointer?, UnsafeMutableRawPointer?) -> Void = { success, error, userData in guard let userData = userData else { return } let callback = Unmanaged.fromOpaque(userData).takeUnretainedValue() as! (Bool, String?) -> Void let err = error.map { String(cString: $0) } callback(success, err) } - + static let blockCallbackWrapper: @convention(c) (UInt32, UnsafePointer?, UnsafeMutableRawPointer?) -> Void = { height, hash, userData in guard let userData = userData, let hash = hash else { return } let callback = Unmanaged.fromOpaque(userData).takeUnretainedValue() as! (UInt32, String) -> Void callback(height, String(cString: hash)) } - + static let transactionCallbackWrapper: @convention(c) (UnsafePointer?, Bool, Int64, UnsafePointer?, UInt32, UnsafeMutableRawPointer?) -> Void = { txid, confirmed, amount, addresses, blockHeight, userData in guard let userData = userData, let txid = txid else { return } let callback = Unmanaged.fromOpaque(userData).takeUnretainedValue() as! (String, Bool, Int64, [String], UInt32) -> Void @@ -110,28 +110,28 @@ internal enum FFIBridge { }() callback(txidString, confirmed, amount, addressArray, blockHeight) } - + static let balanceCallbackWrapper: @convention(c) (UInt64, UInt64, UnsafeMutableRawPointer?) -> Void = { confirmed, unconfirmed, userData in guard let userData = userData else { return } let callback = Unmanaged.fromOpaque(userData).takeUnretainedValue() as! (UInt64, UInt64) -> Void callback(confirmed, unconfirmed) } - + // Helper to create userData from callback static func createUserData(from object: T) -> UnsafeMutableRawPointer { return Unmanaged.passRetained(object).toOpaque() } - + static func releaseUserData(_ userData: UnsafeMutableRawPointer) { Unmanaged.fromOpaque(userData).release() } - + // MARK: - Memory Management - + static func withCString(_ string: String, _ body: (UnsafePointer) throws -> T) rethrows -> T { return try string.withCString(body) } - + static func withOptionalCString(_ string: String?, _ body: (UnsafePointer?) throws -> T) rethrows -> T { if let string = string { return try string.withCString { cString in @@ -141,7 +141,7 @@ internal enum FFIBridge { return try body(nil) } } - + static func withData(_ data: Data, _ body: (UnsafePointer, size_t) throws -> T) rethrows -> T { return try data.withUnsafeBytes { bytes in let ptr = bytes.bindMemory(to: UInt8.self).baseAddress! 
diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift index 7a59811dd..f985c42c4 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift @@ -19,7 +19,7 @@ enum FFIError: Error { case configError case runtimeError case unknown - + init(code: FFIErrorCode) { switch code { case 0: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift index 6e4cd2126..d03acd122 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift @@ -7,11 +7,11 @@ extension SPVClient { // For now, we'll track watched addresses locally since the FFI doesn't expose a way to query them private static var watchedAddresses = Set() private static let watchedAddressesLock = NSLock() - + /// Override addWatchItem to track addresses locally public func addWatchItemWithTracking(type: WatchItemType, data: String) async throws { try await addWatchItem(type: type, data: data) - + // Track addresses locally if type == .address { Self.watchedAddressesLock.lock() @@ -19,11 +19,11 @@ extension SPVClient { Self.watchedAddressesLock.unlock() } } - + /// Override removeWatchItem to update local tracking public func removeWatchItemWithTracking(type: WatchItemType, data: String) async throws { try await removeWatchItem(type: type, data: data) - + // Update local tracking if type == .address { Self.watchedAddressesLock.lock() @@ -31,33 +31,33 @@ extension SPVClient { Self.watchedAddressesLock.unlock() } } - + /// Verifies that an address is being watched (using local tracking) public func isWatchingAddress(_ address: String) async throws -> Bool { Self.watchedAddressesLock.lock() defer { Self.watchedAddressesLock.unlock() } return Self.watchedAddresses.contains(address) } - + /// Verifies all addresses in a list are being watched public func verifyWatchedAddresses(_ addresses: [String]) async throws -> [String: Bool] { Self.watchedAddressesLock.lock() defer { Self.watchedAddressesLock.unlock() } - + var results: [String: Bool] = [:] for address in addresses { results[address] = Self.watchedAddresses.contains(address) } return results } - + /// Gets all watched addresses public func getWatchedAddresses() async throws -> Set { Self.watchedAddressesLock.lock() defer { Self.watchedAddressesLock.unlock() } return Self.watchedAddresses } - + /// Clears the local watch tracking (does not affect actual watch items in SPV) public func clearLocalWatchTracking() { Self.watchedAddressesLock.lock() diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient.swift index 110e6b55f..f6840c685 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient.swift @@ -17,46 +17,46 @@ public struct DetailedSyncProgress: Sendable, Equatable { public let stageMessage: String public let totalHeadersProcessed: UInt64 public let syncStartTimestamp: Date - + public var currentHeight: UInt32 { overview.currentHeight } public var connectedPeers: UInt32 { overview.peerCount } - + /// Calculated properties public var blocksRemaining: UInt32 { guard totalHeight > currentHeight else { return 0 } return 
totalHeight - currentHeight } - + public var isComplete: Bool { return percentage >= 100.0 || stage == .complete } - + public var formattedPercentage: String { return String(format: "%.1f%%", percentage) } - + public var formattedSpeed: String { if headersPerSecond > 0 { return String(format: "%.0f headers/sec", headersPerSecond) } return "Calculating..." } - + public var formattedTimeRemaining: String { guard estimatedSecondsRemaining > 0 else { return stage == .complete ? "Complete" : "Calculating..." } - + let formatter = DateComponentsFormatter() formatter.allowedUnits = [.hour, .minute, .second] formatter.unitsStyle = .abbreviated return formatter.string(from: TimeInterval(estimatedSecondsRemaining)) ?? "Unknown" } - + public var syncDuration: TimeInterval { return Date().timeIntervalSince(syncStartTimestamp) } - + public var formattedSyncDuration: String { let formatter = DateComponentsFormatter() formatter.allowedUnits = [.hour, .minute, .second] @@ -64,7 +64,7 @@ public struct DetailedSyncProgress: Sendable, Equatable { formatter.zeroFormattingBehavior = .pad return formatter.string(from: syncDuration) ?? "00:00:00" } - + /// Public initializer for creating DetailedSyncProgress public init( overview: SyncProgress, @@ -87,7 +87,7 @@ public struct DetailedSyncProgress: Sendable, Equatable { self.totalHeadersProcessed = totalHeadersProcessed self.syncStartTimestamp = syncStartTimestamp } - + /// Initialize from FFI type internal init(ffiProgress: FFIDetailedSyncProgress) { self.overview = SyncProgress(ffiProgress: ffiProgress.overview) @@ -142,7 +142,7 @@ public enum SyncStage: Equatable, Sendable { self = .failed } } - + public var description: String { switch self { case .connecting: @@ -167,7 +167,7 @@ public enum SyncStage: Equatable, Sendable { return "Synchronization failed" } } - + public var isActive: Bool { switch self { case .complete, .failed: @@ -176,7 +176,7 @@ public enum SyncStage: Equatable, Sendable { return true } } - + public var icon: String { switch self { case .connecting: @@ -206,11 +206,11 @@ public enum SyncStage: Equatable, Sendable { /// Sync progress stream for async iteration public struct SyncProgressStream: AsyncSequence { public typealias Element = DetailedSyncProgress - + private let client: SPVClient private let progressCallback: (@Sendable (DetailedSyncProgress) -> Void)? private let completionCallback: (@Sendable (Bool, String?) -> Void)? - + internal init( client: SPVClient, progressCallback: (@Sendable (DetailedSyncProgress) -> Void)? = nil, @@ -220,7 +220,7 @@ public struct SyncProgressStream: AsyncSequence { self.progressCallback = progressCallback self.completionCallback = completionCallback } - + public func makeAsyncIterator() -> AsyncIterator { return AsyncIterator( client: client, @@ -228,7 +228,7 @@ public struct SyncProgressStream: AsyncSequence { completionCallback: completionCallback ) } - + public final class AsyncIterator: AsyncIteratorProtocol, @unchecked Sendable { private let client: SPVClient private let progressCallback: (@Sendable (DetailedSyncProgress) -> Void)? 
@@ -237,7 +237,7 @@ public struct SyncProgressStream: AsyncSequence { private let progressContinuation: AsyncStream.Continuation private var progressStream: AsyncStream private var progressIterator: AsyncStream.AsyncIterator - + init( client: SPVClient, progressCallback: (@Sendable (DetailedSyncProgress) -> Void)?, @@ -246,20 +246,20 @@ public struct SyncProgressStream: AsyncSequence { self.client = client self.progressCallback = progressCallback self.completionCallback = completionCallback - + var continuation: AsyncStream.Continuation! self.progressStream = AsyncStream { cont in continuation = cont } self.progressContinuation = continuation self.progressIterator = progressStream.makeAsyncIterator() - + // Start sync operation Task { await self.startSync() } } - + private func startSync() async { // Start sync with progress tracking using client callbacks do { @@ -267,14 +267,14 @@ public struct SyncProgressStream: AsyncSequence { progressCallback: { progress in // Send to stream self.progressContinuation.yield(progress) - + // Call user callback if provided self.progressCallback?(progress) }, completionCallback: { success, error in // Call user callback if provided self.completionCallback?(success, error) - + // Complete the stream self.progressContinuation.finish() } @@ -285,10 +285,10 @@ public struct SyncProgressStream: AsyncSequence { progressContinuation.finish() } } - + public func next() async -> DetailedSyncProgress? { guard !isComplete else { return nil } - + if let progress = await progressIterator.next() { return progress } else { @@ -306,7 +306,7 @@ extension DetailedSyncProgress { public var hasError: Bool { return stage == .failed } - + /// Get a user-friendly status message public var statusMessage: String { if isComplete { @@ -317,7 +317,7 @@ extension DetailedSyncProgress { return "\(stage.icon) \(stageMessage) - \(formattedPercentage)" } } - + /// Get detailed statistics as a dictionary public var statistics: [String: String] { return [ @@ -342,7 +342,7 @@ extension DetailedSyncProgress { private class CallbackHolder { let progressCallback: ((Double, String?) -> Void)? let completionCallback: ((Bool, String?) -> Void)? - + init(progressCallback: ((Double, String?) -> Void)? = nil, completionCallback: ((Bool, String?) -> Void)? = nil) { self.progressCallback = progressCallback @@ -354,7 +354,7 @@ private class CallbackHolder { private class DetailedCallbackHolder { let progressCallback: (@Sendable (Any) -> Void)? let completionCallback: (@Sendable (Bool, String?) -> Void)? - + init(progressCallback: (@Sendable (Any) -> Void)? = nil, completionCallback: (@Sendable (Bool, String?) -> Void)? = nil) { self.progressCallback = progressCallback @@ -365,7 +365,7 @@ private class DetailedCallbackHolder { // Event callback holder for persistent event callbacks private class EventCallbackHolder { weak var client: SPVClient? - + init(client: SPVClient) { self.client = client } @@ -392,11 +392,11 @@ private let syncCompletionCallback: @convention(c) (Bool, UnsafePointer?, private let detailedSyncProgressCallback: @convention(c) (UnsafePointer?, UnsafeMutableRawPointer?) 
-> Void = { ffiProgress, userData in print("🟢 detailedSyncProgressCallback called from FFI") guard let userData = userData, - let ffiProgress = ffiProgress else { + let ffiProgress = ffiProgress else { print("🟢 userData or ffiProgress is nil") - return + return } - + print("🟢 Getting holder from userData") let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() print("🟢 Calling holder.progressCallback") @@ -417,16 +417,16 @@ private let detailedSyncCompletionCallback: @convention(c) (Bool, UnsafePointer< private let eventBlockCallback: BlockCallback = { height, hashBytes, userData in guard let userData = userData, let hashBytes = hashBytes else { return } - + let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() guard let client = holder.client else { return } - + // Convert byte array to hex string let hashArray = withUnsafeBytes(of: hashBytes.pointee) { bytes in Array(bytes) } let hashHex = hashArray.map { String(format: "%02x", $0) }.joined() - + let event = SPVEvent.blockReceived( height: height, hash: hashHex @@ -437,16 +437,16 @@ private let eventBlockCallback: BlockCallback = { height, hashBytes, userData in private let eventTransactionCallback: TransactionCallback = { txidBytes, confirmed, amount, addresses, blockHeight, userData in guard let userData = userData, let txidBytes = txidBytes else { return } - + let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() guard let client = holder.client else { return } - + // Convert byte array to hex string let txidArray = withUnsafeBytes(of: txidBytes.pointee) { bytes in Array(bytes) } let txidString = txidArray.map { String(format: "%02x", $0) }.joined() - + let addressArray: [String] = { if let addresses = addresses { let addressesString = String(cString: addresses) @@ -454,7 +454,7 @@ private let eventTransactionCallback: TransactionCallback = { txidBytes, confirm } return [] }() - + let event = SPVEvent.transactionReceived( txid: txidString, confirmed: confirmed, @@ -467,10 +467,10 @@ private let eventTransactionCallback: TransactionCallback = { txidBytes, confirm private let eventBalanceCallback: BalanceCallback = { confirmed, unconfirmed, userData in guard let userData = userData else { return } - + let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() guard let client = holder.client else { return } - + let balance = Balance( confirmed: confirmed, pending: unconfirmed, @@ -485,16 +485,16 @@ private let eventBalanceCallback: BalanceCallback = { confirmed, unconfirmed, us private let eventMempoolTransactionAddedCallback: MempoolTransactionCallback = { txidBytes, amount, addresses, isInstantSend, userData in guard let userData = userData, let txidBytes = txidBytes else { return } - + let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() guard let client = holder.client else { return } - + // Convert byte array to hex string let txidArray = withUnsafeBytes(of: txidBytes.pointee) { bytes in Array(bytes) } let txidString = txidArray.map { String(format: "%02x", $0) }.joined() - + let addressArray: [String] = { if let addresses = addresses { let addressesString = String(cString: addresses) @@ -502,7 +502,7 @@ private let eventMempoolTransactionAddedCallback: MempoolTransactionCallback = { } return [] }() - + let event = SPVEvent.mempoolTransactionAdded( txid: txidString, amount: amount, @@ -514,19 +514,19 @@ private let eventMempoolTransactionAddedCallback: MempoolTransactionCallback = { private let eventMempoolTransactionConfirmedCallback: MempoolConfirmedCallback = { txidBytes, 
blockHeight, blockHashBytes, userData in guard let userData = userData, let txidBytes = txidBytes else { return } - + let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() guard let client = holder.client else { return } - + // Convert byte array to hex string let txidArray = withUnsafeBytes(of: txidBytes.pointee) { bytes in Array(bytes) } let txidString = txidArray.map { String(format: "%02x", $0) }.joined() - + // For now, we're using blockHeight as confirmations (1 confirmation when just confirmed) let confirmations: UInt32 = 1 - + let event = SPVEvent.mempoolTransactionConfirmed( txid: txidString, blockHeight: blockHeight, @@ -538,16 +538,16 @@ private let eventMempoolTransactionConfirmedCallback: MempoolConfirmedCallback = private let eventMempoolTransactionRemovedCallback: MempoolRemovedCallback = { txidBytes, reason, userData in guard let userData = userData, let txidBytes = txidBytes else { return } - + let holder = Unmanaged.fromOpaque(userData).takeUnretainedValue() guard let client = holder.client else { return } - + // Convert byte array to hex string let txidArray = withUnsafeBytes(of: txidBytes.pointee) { bytes in Array(bytes) } let txidString = txidArray.map { String(format: "%02x", $0) }.joined() - + let removalReason: MempoolRemovalReason = { switch reason { case 0: return .expired @@ -558,7 +558,7 @@ private let eventMempoolTransactionRemovedCallback: MempoolRemovedCallback = { t default: return .unknown } }() - + let event = SPVEvent.mempoolTransactionRemoved( txid: txidString, reason: removalReason @@ -574,29 +574,29 @@ public final class SPVClient { private let asyncBridge = AsyncBridge() private var eventCallbacksSet = false private var eventCallbackHolder: EventCallbackHolder? - + public private(set) var isConnected: Bool = false public private(set) var syncProgress: SyncProgress? public private(set) var stats: SPVStats? - + internal let eventSubject = PassthroughSubject() public var eventPublisher: AnyPublisher { eventSubject.eraseToAnyPublisher() } - + public init(configuration: SPVClientConfiguration = .default) { self.configuration = configuration - + print("\n🚧 Initializing SPV Client...") print(" - Network: \(configuration.network.rawValue)") print(" - Log level: \(configuration.logLevel)") - + // Initialize Rust logging with configured level print("🔧 Initializing Rust FFI logging...") let logResult = FFIBridge.withCString(configuration.logLevel) { logLevel in dash_spv_ffi_init_logging(logLevel) } - + if logResult != 0 { print("⚠️ Failed to initialize logging with level '\(configuration.logLevel)', defaulting to 'info'") let _ = dash_spv_ffi_init_logging("info") @@ -604,68 +604,68 @@ public final class SPVClient { print("✅ Rust logging initialized with level: \(configuration.logLevel)") } } - + /// Expose FFI client handle for Platform SDK integration /// This is needed for Platform SDK to access Core chain data for proof verification /// Note: This will be nil until start() has been called public var ffiClientHandle: UnsafeMutablePointer? 
{ return client } - + deinit { Task { [asyncBridge] in await asyncBridge.cancelAll() } - + // Clean up event callback holder if needed if eventCallbackHolder != nil { // The userData was retained, so we need to release it // Note: This is only needed if client is destroyed before callbacks complete } - + if let client = client { dash_spv_ffi_client_destroy(client) } } - + // MARK: - Network Information - + public func isFilterSyncAvailable() async -> Bool { guard let client = client else { return false } return dash_spv_ffi_client_is_filter_sync_available(client) } - - + + // MARK: - Lifecycle - + public func start() async throws { guard !isConnected else { throw DashSDKError.alreadyConnected } - + print("🚀 Starting SPV client...") print("📡 Network: \(configuration.network.rawValue)") print("👥 Configured peers: \(configuration.additionalPeers.count)") for (index, peer) in configuration.additionalPeers.enumerated() { print(" \(index + 1). \(peer)") } - + // Log network reachability status if available logNetworkReachability() - + print("\n📋 Creating FFI configuration...") print(" - Max peers: \(configuration.maxPeers)") print(" - Validation mode: \(configuration.validationMode)") print(" - Filter load enabled: \(configuration.enableFilterLoad)") print(" - User agent: \(configuration.userAgent)") print(" - Log level: \(configuration.logLevel)") - + let ffiConfig = try configuration.createFFIConfig() defer { print("🧹 Cleaning up FFI config") dash_spv_ffi_config_destroy(OpaquePointer(ffiConfig)) } - + print("\n🏗️ Creating SPV client with FFI...") guard let newClient = dash_spv_ffi_client_new(OpaquePointer(ffiConfig)) else { let error = FFIBridge.getLastError() ?? "Unknown error" @@ -673,69 +673,69 @@ public final class SPVClient { throw DashSDKError.invalidConfiguration("Failed to create SPV client: \(error)") } print("✅ SPV client created successfully") - + self.client = newClient - + // Always set up event callbacks before starting the client // This is required by the FFI layer to avoid InvalidArgument error print("🎯 Setting up event callbacks...") setupEventCallbacks() - + print("\n🔌 Starting SPV client (calling dash_spv_ffi_client_start)...") let startTime = Date() let result = dash_spv_ffi_client_start(client) let startDuration = Date().timeIntervalSince(startTime) print("⏱️ FFI start call completed in \(String(format: "%.3f", startDuration)) seconds") - + if result != 0 { let error = FFIBridge.getLastError() ?? "Unknown error" print("❌ Failed to start SPV client: \(error) (code: \(result))") throw DashSDKError.ffiError(code: result, message: error) } - + try FFIBridge.checkError(result) - + isConnected = true print("✅ SPV client started successfully") - + // Monitor peer connections with multiple checks print("\n🔍 Monitoring peer connections...") var totalWaitTime = 0 let maxWaitTime = 30 // 30 seconds max var lastPeerCount: UInt32 = 0 - + while totalWaitTime < maxWaitTime { await updateStats() - + if let stats = self.stats { if stats.connectedPeers != lastPeerCount { print(" [\(totalWaitTime)s] Connected peers: \(stats.connectedPeers) (change: +\(Int(stats.connectedPeers) - Int(lastPeerCount)))") lastPeerCount = stats.connectedPeers } - + if stats.connectedPeers > 0 { print("\n🎉 Successfully connected to \(stats.connectedPeers) peer(s)!") break } } - + // Wait 1 second before next check try await Task.sleep(nanoseconds: 1_000_000_000) totalWaitTime += 1 - + // Log every 5 seconds if still no peers if totalWaitTime % 5 == 0 && (stats?.connectedPeers ?? 
0) == 0 { print(" [\(totalWaitTime)s] Still waiting for peer connections...") - + // Try to get more detailed error info if let error = FFIBridge.getLastError() { print(" ⚠️ Last FFI error: \(error)") } } } - + await updateStats() - + if let stats = self.stats { print("\n📊 Final connection stats:") print(" - Connected peers: \(stats.connectedPeers)") @@ -743,7 +743,7 @@ public final class SPVClient { print(" - Filter height: \(stats.filterHeight)") print(" - Total headers: \(stats.totalHeaders)") print(" - Network: \(configuration.network.rawValue)") - + if stats.connectedPeers == 0 { print("\n⚠️ WARNING: No peers connected after \(totalWaitTime) seconds!") print("Possible issues:") @@ -756,109 +756,109 @@ public final class SPVClient { print("\n❌ Failed to retrieve stats after starting") } } - + public func stop() async throws { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let result = dash_spv_ffi_client_stop(client) try FFIBridge.checkError(result) - + isConnected = false syncProgress = nil stats = nil } - + // MARK: - Sync Operations - + public func syncToTip() async throws -> AsyncThrowingStream { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let (_, stream) = await asyncBridge.syncProgressStream { id, progressCallback, completionCallback in // Create a callback holder that wraps the Swift callbacks let callbackHolder = CallbackHolder( progressCallback: progressCallback, completionCallback: completionCallback ) - + let userData = Unmanaged.passRetained(callbackHolder).toOpaque() - + let result = dash_spv_ffi_client_sync_to_tip( client, syncCompletionCallback, userData ) - + if result != 0 { completionCallback(false, "Failed to start sync") Unmanaged.fromOpaque(userData).release() } } - + return stream } - + public func rescanBlockchain(from height: UInt32) async throws { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let result = dash_spv_ffi_client_rescan_blockchain(client, height) try FFIBridge.checkError(result) } - + public func getCurrentSyncProgress() -> SyncProgress? { guard isConnected, let client = client else { return nil } - + guard let ffiProgress = dash_spv_ffi_client_get_sync_progress(client) else { return nil } defer { dash_spv_ffi_sync_progress_destroy(ffiProgress) } - + let progress = SyncProgress(ffiProgress: ffiProgress.pointee) self.syncProgress = progress return progress } - + // MARK: - Enhanced Sync Operations with Detailed Progress - - + + /// Cancel ongoing sync operation public func cancelSync() async throws { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let result = dash_spv_ffi_client_cancel_sync(client) try FFIBridge.checkError(result) } - + // MARK: - Balance Operations - + public func getAddressBalance(_ address: String) async throws -> Balance { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let balancePtr = FFIBridge.withCString(address) { addressCStr in dash_spv_ffi_client_get_address_balance(client, addressCStr) } - + guard let balancePtr = balancePtr else { throw DashSDKError.ffiError(code: -1, message: FFIBridge.getLastError() ?? 
"Failed to get address balance") } - + defer { dash_spv_ffi_balance_destroy(balancePtr) } - + let ffiBalance = balancePtr.pointee return Balance( confirmed: ffiBalance.confirmed, @@ -867,20 +867,20 @@ public final class SPVClient { total: ffiBalance.total ) } - + public func getTotalBalance() async throws -> Balance { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + guard let balancePtr = dash_spv_ffi_client_get_total_balance(client) else { throw DashSDKError.ffiError(code: -1, message: FFIBridge.getLastError() ?? "Failed to get total balance") } - + defer { dash_spv_ffi_balance_destroy(balancePtr) } - + let ffiBalance = balancePtr.pointee return Balance( confirmed: ffiBalance.confirmed, @@ -889,31 +889,31 @@ public final class SPVClient { total: ffiBalance.total ) } - + // MARK: - Mempool Operations - + public func enableMempoolTracking(strategy: MempoolStrategy) async throws { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let result = dash_spv_ffi_client_enable_mempool_tracking(client, strategy.ffiValue) try FFIBridge.checkError(result) } - + public func getBalanceWithMempool() async throws -> Balance { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + guard let balancePtr = dash_spv_ffi_client_get_balance_with_mempool(client) else { throw DashSDKError.ffiError(code: -1, message: FFIBridge.getLastError() ?? "Failed to get balance with mempool") } - + defer { dash_spv_ffi_balance_destroy(balancePtr) } - + let ffiBalance = balancePtr.pointee return Balance( confirmed: ffiBalance.confirmed, @@ -922,77 +922,77 @@ public final class SPVClient { total: ffiBalance.total ) } - + public func getMempoolBalance(for address: String) async throws -> MempoolBalance { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let balancePtr = FFIBridge.withCString(address) { addressCStr in dash_spv_ffi_client_get_mempool_balance(client, addressCStr) } - + guard let balancePtr = balancePtr else { throw DashSDKError.ffiError(code: -1, message: FFIBridge.getLastError() ?? "Failed to get mempool balance") } - + defer { dash_spv_ffi_balance_destroy(balancePtr) } - + let ffiBalance = balancePtr.pointee return MempoolBalance( pending: ffiBalance.mempool, pendingInstant: ffiBalance.mempool_instant ) } - + public func getMempoolTransactionCount() async throws -> Int { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let count = dash_spv_ffi_client_get_mempool_transaction_count(client) if count < 0 { throw DashSDKError.ffiError(code: -1, message: FFIBridge.getLastError() ?? 
"Failed to get mempool transaction count") } - + return Int(count) } - + public func recordSend(txid: String) async throws { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let result = FFIBridge.withCString(txid) { txidCStr in dash_spv_ffi_client_record_send(client, txidCStr) } - + try FFIBridge.checkError(result) } - + // MARK: - Network Operations - + public func broadcastTransaction(_ transactionHex: String) async throws { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + let result = FFIBridge.withCString(transactionHex) { txHex in dash_spv_ffi_client_broadcast_transaction(client, txHex) } - + try FFIBridge.checkError(result) } - + // MARK: - Stats - + /// Debug method to print detailed connection information public func debugConnectionState() async { print("\n🔍 SPV Client Debug Information:") print("================================") - + print("\n📋 Configuration:") print(" - Network: \(configuration.network.rawValue)") print(" - Max peers: \(configuration.maxPeers)") @@ -1003,15 +1003,15 @@ public final class SPVClient { print(" - Data directory: \(configuration.dataDirectory?.path ?? "None")") print(" - Validation mode: \(configuration.validationMode)") print(" - Filter load enabled: \(configuration.enableFilterLoad)") - + print("\n🔌 Connection State:") print(" - Is connected: \(isConnected)") print(" - Client pointer: \(client != nil ? "Valid" : "Nil")") print(" - Event callbacks set: \(eventCallbacksSet)") - + if isConnected { await updateStats() - + if let stats = self.stats { print("\n📊 Current Stats:") print(" - Connected peers: \(stats.connectedPeers)") @@ -1022,24 +1022,24 @@ public final class SPVClient { } else { print("\n⚠️ Unable to retrieve stats") } - + // Check FFI error state if let error = FFIBridge.getLastError() { print("\n❌ Last FFI Error: \(error)") } } - + // Network reachability check logNetworkReachability() - + print("\n================================") } - + public func updateStats() async { guard isConnected, let client = client else { return } - + guard let ffiStats = dash_spv_ffi_client_get_stats(client) else { let error = FFIBridge.getLastError() if let error = error { @@ -1050,19 +1050,19 @@ public final class SPVClient { defer { dash_spv_ffi_spv_stats_destroy(ffiStats) } - + let previousPeerCount = self.stats?.connectedPeers ?? 0 let ffiStatsValue = ffiStats.pointee - + // Debug log the raw FFI values print("🔍 FFI Stats Debug:") print(" - connected_peers: \(ffiStatsValue.connected_peers)") print(" - total_peers: \(ffiStatsValue.total_peers)") print(" - header_height: \(ffiStatsValue.header_height)") print(" - filter_height: \(ffiStatsValue.filter_height)") - + self.stats = SPVStats(ffiStats: ffiStatsValue) - + // Log significant changes if let stats = self.stats { if stats.connectedPeers != previousPeerCount { @@ -1070,26 +1070,26 @@ public final class SPVClient { } } } - + // MARK: - Private - + private func logNetworkReachability() { let monitor = NWPathMonitor() let queue = DispatchQueue(label: "NetworkMonitor") - + monitor.pathUpdateHandler = { path in print("\n🌐 Network Status:") print(" - Status: \(path.status == .satisfied ? "✅ Connected" : "❌ Disconnected")") - + if path.status == .satisfied { print(" - Is expensive: \(path.isExpensive ? "Yes" : "No")") print(" - Is constrained: \(path.isConstrained ? 
"Yes" : "No")") - + print(" - Available interfaces:") for interface in path.availableInterfaces { print(" • \(interface.name) (\(interface.type))") } - + if path.usesInterfaceType(.wifi) { print(" - Using: WiFi") } else if path.usesInterfaceType(.cellular) { @@ -1102,30 +1102,30 @@ public final class SPVClient { } else { print(" ⚠️ No network connection available!") } - + // Stop monitoring after first check monitor.cancel() } - + monitor.start(queue: queue) - + // Give it a moment to report Thread.sleep(forTimeInterval: 0.1) } - + private func setupEventCallbacks() { - guard let client = client else { + guard let client = client else { print("❌ Cannot setup event callbacks - client is nil") - return + return } - + print("📢 Setting up event callbacks...") - + // Create event callback holder with weak reference to self let eventHolder = EventCallbackHolder(client: self) self.eventCallbackHolder = eventHolder let userData = Unmanaged.passRetained(eventHolder).toOpaque() - + let callbacks = FFIEventCallbacks( on_block: eventBlockCallback, on_transaction: eventTransactionCallback, @@ -1135,12 +1135,12 @@ public final class SPVClient { on_mempool_transaction_removed: eventMempoolTransactionRemovedCallback, user_data: userData ) - + print(" - Block callback: ✅") print(" - Transaction callback: ✅") print(" - Balance callback: ✅") print(" - Mempool callbacks: ✅") - + let result = dash_spv_ffi_client_set_event_callbacks(client, callbacks) if result != 0 { let error = FFIBridge.getLastError() ?? "Unknown error" @@ -1182,20 +1182,20 @@ extension SPVClient { guard isConnected, let client = client else { throw DashSDKError.notConnected } - + // Check if we have peers before starting sync await updateStats() if let stats = self.stats, stats.connectedPeers == 0 { print("⚠️ Warning: No peers connected. Waiting for peer connections...") print(" Current network: \(configuration.network.rawValue)") print(" Total headers: \(stats.totalHeaders)") - + // Wait up to 10 seconds for peers to connect var waitTime = 0 while waitTime < 10 { try await Task.sleep(nanoseconds: 1_000_000_000) // 1 second waitTime += 1 - + await updateStats() if let updatedStats = self.stats { print(" [\(waitTime)s] Peers: \(updatedStats.connectedPeers), Headers: \(updatedStats.headerHeight)") @@ -1205,7 +1205,7 @@ extension SPVClient { } } } - + // Final check if let finalStats = self.stats, finalStats.connectedPeers == 0 { let error = "No peers connected after 10 seconds. Check network connectivity and peer configuration." @@ -1215,12 +1215,12 @@ extension SPVClient { throw DashSDKError.networkError(error) } } - + print("\n📡 Starting blockchain sync...") print(" - Connected peers: \(stats?.connectedPeers ?? 0)") print(" - Current height: \(stats?.headerHeight ?? 0)") print(" - Filter height: \(stats?.filterHeight ?? 0)") - + // Create a callback holder with type-erased callbacks let wrappedProgressCallback: (@Sendable (Any) -> Void)? = progressCallback.map { callback in { progress in @@ -1233,21 +1233,21 @@ extension SPVClient { } } } - + let callbackHolder = DetailedCallbackHolder( progressCallback: wrappedProgressCallback, completionCallback: completionCallback ) - + let userData = Unmanaged.passRetained(callbackHolder).toOpaque() - + let result = dash_spv_ffi_client_sync_to_tip_with_progress( client, detailedSyncProgressCallback, detailedSyncCompletionCallback, userData ) - + if result != 0 { let error = FFIBridge.getLastError() ?? 
"Failed to start sync" print("❌ Sync failed: \(error)") @@ -1258,7 +1258,7 @@ extension SPVClient { print("✅ Sync started successfully") } } - + /// Create a sync progress stream with detailed progress information public func syncProgressStream() -> SyncProgressStream { return SyncProgressStream(client: self) diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClientConfiguration.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClientConfiguration.swift index df677d179..90c1acbb6 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClientConfiguration.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClientConfiguration.swift @@ -16,41 +16,41 @@ public final class SPVClientConfiguration { public var logLevel: String = "info" // Options: "error", "warn", "info", "debug", "trace" public var startFromHeight: UInt32? = nil // Start syncing from a specific block height (uses nearest checkpoint) public var walletCreationTime: UInt32? = nil // Wallet creation time as Unix timestamp (for checkpoint selection) - + public init() { setupDefaultDataDirectory() } - + public static var `default`: SPVClientConfiguration { return SPVClientConfiguration() } - + private func setupDefaultDataDirectory() { if let documentsPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first { self.dataDirectory = documentsPath.appendingPathComponent("DashSPV").appendingPathComponent(network.rawValue) print("📁 SPV data directory set to: \(self.dataDirectory?.path ?? "nil")") } } - + public func validate() throws { if let dataDir = dataDirectory { if !FileManager.default.fileExists(atPath: dataDir.path) { try FileManager.default.createDirectory(at: dataDir, withIntermediateDirectories: true) } } - + for peer in additionalPeers { guard peer.contains(":") else { throw DashSDKError.invalidConfiguration("Invalid peer address format: \(peer)") } } } - + internal func createFFIConfig() throws -> FFIClientConfig { try validate() - + print("Creating FFI config for network: \(network.name) (value: \(network.ffiValue))") - + guard let config = dash_spv_ffi_config_new(network.ffiValue) else { // Check for error if let errorMsg = dash_spv_ffi_get_last_error() { @@ -61,14 +61,14 @@ public final class SPVClientConfiguration { } throw DashSDKError.invalidConfiguration("Failed to create FFI config") } - + if let dataDir = dataDirectory { print("📂 Setting SPV data directory for persistence: \(dataDir.path)") let result = FFIBridge.withCString(dataDir.path) { path in dash_spv_ffi_config_set_data_dir(config, path) } try FFIBridge.checkError(result) - + // Check if sync state already exists let syncStateFile = dataDir.appendingPathComponent("sync_state.json") if FileManager.default.fileExists(atPath: syncStateFile.path) { @@ -77,61 +77,61 @@ public final class SPVClientConfiguration { print("📝 No existing sync state found, will start fresh sync") } } - + var result = dash_spv_ffi_config_set_validation_mode(config, validationMode.ffiValue) try FFIBridge.checkError(result) - + result = dash_spv_ffi_config_set_max_peers(config, maxPeers) try FFIBridge.checkError(result) - + // Configure user agent advertised during P2P handshake result = FFIBridge.withCString(userAgent) { agent in dash_spv_ffi_config_set_user_agent(config, agent) } try FFIBridge.checkError(result) - + result = dash_spv_ffi_config_set_filter_load(config, enableFilterLoad) try FFIBridge.checkError(result) - + for peer in additionalPeers { result = FFIBridge.withCString(peer) { peerStr in 
dash_spv_ffi_config_add_peer(config, peerStr) } try FFIBridge.checkError(result) } - + // Configure mempool settings result = dash_spv_ffi_config_set_mempool_tracking(config, mempoolConfig.enabled) try FFIBridge.checkError(result) - + if mempoolConfig.enabled { result = dash_spv_ffi_config_set_mempool_strategy(config, FFIMempoolStrategy(rawValue: mempoolConfig.strategy.rawValue)) try FFIBridge.checkError(result) - + result = dash_spv_ffi_config_set_max_mempool_transactions(config, mempoolConfig.maxTransactions) try FFIBridge.checkError(result) - + result = dash_spv_ffi_config_set_mempool_timeout(config, mempoolConfig.timeoutSeconds) try FFIBridge.checkError(result) - + result = dash_spv_ffi_config_set_fetch_mempool_transactions(config, mempoolConfig.fetchTransactions) try FFIBridge.checkError(result) - + result = dash_spv_ffi_config_set_persist_mempool(config, mempoolConfig.persistMempool) try FFIBridge.checkError(result) } - + // Configure checkpoint sync if specified if let height = startFromHeight { result = dash_spv_ffi_config_set_start_from_height(config, height) try FFIBridge.checkError(result) } - + if let timestamp = walletCreationTime { result = dash_spv_ffi_config_set_wallet_creation_time(config, timestamp) try FFIBridge.checkError(result) } - + return UnsafeMutableRawPointer(config) } } @@ -142,26 +142,26 @@ extension SPVClientConfiguration { config.network = .mainnet return config } - + public static func testnet() -> SPVClientConfiguration { let config = SPVClientConfiguration() config.network = .testnet return config } - + public static func regtest() -> SPVClientConfiguration { let config = SPVClientConfiguration() config.network = .regtest config.validationMode = .none return config } - + public static func devnet() -> SPVClientConfiguration { let config = SPVClientConfiguration() config.network = .devnet return config } - + /// Configure the SPV client to use checkpoint sync for faster initial synchronization. /// For testnet, this will sync from the latest checkpoint at height 1088640 instead of genesis. /// For mainnet, this will sync from the latest checkpoint at height 1100000 instead of genesis. @@ -176,13 +176,13 @@ extension SPVClientConfiguration { break } } - + /// Configure checkpoint sync for a specific wallet creation time. /// The client will automatically select the appropriate checkpoint. public func setWalletCreationTime(_ timestamp: UInt32) { walletCreationTime = timestamp } - + /// Configure checkpoint sync to start from a specific height. /// The client will use the nearest checkpoint at or before this height. public func setStartFromHeight(_ height: UInt32) { diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift index d698aa545..4050a772d 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift @@ -6,66 +6,66 @@ public final class DashSDK { private let client: SPVClient private let wallet: PersistentWalletManager private let storage: StorageManager - + public var isConnected: Bool { client.isConnected } - + public var syncProgress: SyncProgress? { client.syncProgress } - + public var stats: SPVStats? 
{ client.stats } - + public var watchedAddresses: Set { wallet.watchedAddresses } - + public var totalBalance: Balance { wallet.totalBalance } - + public var eventPublisher: AnyPublisher { client.eventPublisher } - + @MainActor public init(configuration: SPVClientConfiguration = .default) throws { self.storage = try StorageManager() self.client = SPVClient(configuration: configuration) self.wallet = PersistentWalletManager(client: client, storage: storage) } - + // MARK: - Connection Management - + public func connect() async throws { try await client.start() - + // Re-sync persisted addresses with SPV client await syncPersistedAddresses() - + wallet.startPeriodicSync() } - + public func disconnect() async throws { wallet.stopPeriodicSync() try await client.stop() } - + // MARK: - Synchronization - + public func syncToTip() async throws -> AsyncThrowingStream { return try await client.syncToTip() } - + public func rescanBlockchain(from height: UInt32 = 0) async throws { try await client.rescanBlockchain(from: height) } - + // MARK: - Enhanced Sync Operations - + public func syncToTipWithProgress( progressCallback: (@Sendable (DetailedSyncProgress) -> Void)? = nil, completionCallback: (@Sendable (Bool, String?) -> Void)? = nil @@ -75,73 +75,73 @@ public final class DashSDK { completionCallback: completionCallback ) } - + public func syncProgressStream() -> SyncProgressStream { return client.syncProgressStream() } - + // MARK: - Wallet Operations - + public func watchAddress(_ address: String, label: String? = nil) async throws { try await wallet.watchAddress(address, label: label) } - + public func watchAddresses(_ addresses: [String]) async throws { for address in addresses { try await wallet.watchAddress(address) } } - + public func unwatchAddress(_ address: String) async throws { try await wallet.unwatchAddress(address) } - + public func getBalance() async throws -> Balance { return try await wallet.getTotalBalance() } - + public func getBalance(for address: String) async throws -> Balance { return try await wallet.getBalance(for: address) } - + public func getBalanceWithMempool() async throws -> Balance { return try await client.getBalanceWithMempool() } - + public func getBalanceWithMempool(for address: String) async throws -> Balance { // For now, get regular balance as mempool tracking may not be enabled // TODO: Implement address-specific mempool balance return try await wallet.getBalance(for: address) } - + public func getTransactions(limit: Int = 100) async throws -> [Transaction] { return try await wallet.getTransactions(limit: limit) } - + public func getTransactions(for address: String, limit: Int = 100) async throws -> [Transaction] { return try await wallet.getTransactions(for: address, limit: limit) } - + public func getUTXOs() async throws -> [UTXO] { return try await wallet.getUTXOs() } - + // MARK: - Mempool Operations - + public func enableMempoolTracking(strategy: MempoolStrategy) async throws { try await client.enableMempoolTracking(strategy: strategy) } - + public func getMempoolBalance(for address: String) async throws -> MempoolBalance { return try await client.getMempoolBalance(for: address) } - + public func getMempoolTransactionCount() async throws -> Int { return try await client.getMempoolTransactionCount() } - + // MARK: - Transaction Management - + public func sendTransaction( to address: String, amount: UInt64, @@ -153,15 +153,15 @@ public final class DashSDK { amount: amount, feeRate: feeRate ) - + // Broadcast transaction let txHex = txData.map { 
String(format: "%02x", $0) }.joined() try await client.broadcastTransaction(txHex) - + // For now, return a placeholder - the actual txid should come from parsing the transaction return "transaction_sent" } - + public func estimateFee( to address: String, amount: UInt64, @@ -169,63 +169,63 @@ public final class DashSDK { ) async throws -> UInt64 { let utxos = try await wallet.getSpendableUTXOs() let builder = TransactionBuilder() - + // Estimate inputs needed var inputCount = 0 var totalInput: UInt64 = 0 - + for utxo in utxos.sorted(by: { $0.value > $1.value }) { inputCount += 1 totalInput += utxo.value - + if totalInput >= amount { break } } - + // 1 output for recipient, 1 for change let outputCount = 2 - + return builder.estimateFee( inputs: inputCount, outputs: outputCount, feeRate: feeRate ) } - + // MARK: - Data Management - + public func refreshData() async { await wallet.syncAllData() } - + public func getStorageStatistics() throws -> StorageStatistics { return try wallet.getStorageStatistics() } - + public func clearAllData() throws { try wallet.clearAllData() } - + public func exportWalletData() throws -> WalletExportData { return try wallet.exportWalletData() } - + public func importWalletData(_ data: WalletExportData) async throws { try await wallet.importWalletData(data) } - + // MARK: - Network Information - + public func isFilterSyncAvailable() async -> Bool { return await client.isFilterSyncAvailable() } - + public func validateAddress(_ address: String) -> Bool { // Basic validation - would call FFI function return address.starts(with: "X") || address.starts(with: "y") } - + public func getNetworkInfo() -> NetworkInfo { return NetworkInfo( network: client.configuration.network, @@ -234,9 +234,9 @@ public final class DashSDK { blockHeight: client.stats?.headerHeight ?? 0 ) } - + // MARK: - Private Helpers - + private func syncPersistedAddresses() async { // This triggers the PersistentWalletManager to reload addresses // and re-watch them in the SPV client @@ -251,7 +251,7 @@ public struct NetworkInfo { public let isConnected: Bool public let connectedPeers: UInt32 public let blockHeight: UInt32 - + public var description: String { """ Network: \(network.name) @@ -269,17 +269,17 @@ extension DashSDK { public static func mainnet() throws -> DashSDK { return try DashSDK(configuration: .mainnet()) } - + @MainActor public static func testnet() throws -> DashSDK { return try DashSDK(configuration: .testnet()) } - + @MainActor public static func regtest() throws -> DashSDK { return try DashSDK(configuration: .regtest()) } - + @MainActor public static func devnet() throws -> DashSDK { return try DashSDK(configuration: .devnet()) diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift index 1e2428bdf..81ea5e41c 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift @@ -7,7 +7,7 @@ public enum WatchAddressError: Error, LocalizedError { case networkError(String) case alreadyWatching(String) case unknownError(String) - + public var errorDescription: String? 
{ switch self { case .clientNotConnected: @@ -24,7 +24,7 @@ public enum WatchAddressError: Error, LocalizedError { return "Unknown error: \(reason)" } } - + public var isRecoverable: Bool { switch self { case .clientNotConnected, .networkError, .storageFailure: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift index 456b9e51a..94b608bc7 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift @@ -13,7 +13,7 @@ public final class Balance { public var mempoolInstant: UInt64 public var total: UInt64 public var lastUpdated: Date - + public init( confirmed: UInt64 = 0, pending: UInt64 = 0, @@ -31,7 +31,7 @@ public final class Balance { self.total = total self.lastUpdated = lastUpdated } - + internal convenience init(ffiBalance: FFIBalance) { self.init( confirmed: ffiBalance.confirmed, @@ -43,15 +43,15 @@ public final class Balance { lastUpdated: .now ) } - + public var available: UInt64 { return confirmed + instantLocked + mempoolInstant } - + public var unconfirmed: UInt64 { return pending } - + public func update(from other: Balance) { self.confirmed = other.confirmed self.pending = other.pending @@ -67,27 +67,27 @@ extension Balance { public var formattedConfirmed: String { return formatDash(confirmed) } - + public var formattedPending: String { return formatDash(pending) } - + public var formattedInstantLocked: String { return formatDash(instantLocked) } - + public var formattedTotal: String { return formatDash(total) } - + public var formattedMempool: String { return formatDash(mempool) } - + public var formattedMempoolInstant: String { return formatDash(mempoolInstant) } - + private func formatDash(_ satoshis: UInt64) -> String { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Network.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Network.swift index 4a23dfbc9..4a65c3f8b 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Network.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Network.swift @@ -6,7 +6,7 @@ public enum DashNetwork: String, Codable, CaseIterable, Sendable { case testnet = "testnet" case regtest = "regtest" case devnet = "devnet" - + public var defaultPort: UInt16 { switch self { case .mainnet: @@ -19,15 +19,15 @@ public enum DashNetwork: String, Codable, CaseIterable, Sendable { return 29999 } } - + public var protocolVersion: UInt32 { return 70230 } - + public var name: String { return self.rawValue } - + internal var ffiValue: FFINetwork { switch self { case .mainnet: @@ -40,7 +40,7 @@ public enum DashNetwork: String, Codable, CaseIterable, Sendable { return FFINetwork(3) } } - + internal init?(ffiNetwork: FFINetwork) { switch ffiNetwork { case FFINetwork(0): diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift index 6b42fe8a0..a973f8e90 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift @@ -15,7 +15,7 @@ public struct SPVStats: Sendable { public let startTime: Date public let bytesReceived: UInt64 public let bytesSent: UInt64 - + public init( connectedPeers: UInt32 = 0, totalPeers: UInt32 = 0, @@ -41,7 +41,7 @@ public struct SPVStats: Sendable { 
self.bytesReceived = bytesReceived self.bytesSent = bytesSent } - + internal init(ffiStats: FFISpvStats) { self.connectedPeers = ffiStats.connected_peers self.totalPeers = ffiStats.total_peers @@ -55,38 +55,38 @@ public struct SPVStats: Sendable { self.bytesReceived = ffiStats.bytes_received self.bytesSent = ffiStats.bytes_sent } - + public var uptime: TimeInterval { return Date.now.timeIntervalSince(startTime) } - + public var formattedUptime: String { let formatter = DateComponentsFormatter() formatter.allowedUnits = [.day, .hour, .minute, .second] formatter.unitsStyle = .abbreviated return formatter.string(from: uptime) ?? "0s" } - + public var totalBytesTransferred: UInt64 { return bytesReceived + bytesSent } - + public var formattedBytesReceived: String { return ByteCountFormatter.string(fromByteCount: Int64(bytesReceived), countStyle: .binary) } - + public var formattedBytesSent: String { return ByteCountFormatter.string(fromByteCount: Int64(bytesSent), countStyle: .binary) } - + public var formattedTotalBytes: String { return ByteCountFormatter.string(fromByteCount: Int64(totalBytesTransferred), countStyle: .binary) } - + public var isConnected: Bool { return connectedPeers > 0 } - + public var connectionStatus: String { if connectedPeers == 0 { return "Disconnected" diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SyncProgress.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SyncProgress.swift index e4795c1da..7db7d46df 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SyncProgress.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SyncProgress.swift @@ -16,7 +16,7 @@ public struct SyncProgress: Sendable, Equatable { public let peerCount: UInt32 public let filtersDownloaded: UInt32 public let lastSyncedFilterHeight: UInt32 - + public init( currentHeight: UInt32, totalHeight: UInt32, @@ -44,7 +44,7 @@ public struct SyncProgress: Sendable, Equatable { self.filtersDownloaded = filtersDownloaded self.lastSyncedFilterHeight = lastSyncedFilterHeight } - + internal init(ffiProgress: FFISyncProgress) { self.currentHeight = ffiProgress.header_height self.totalHeight = 0 // FFISyncProgress doesn't provide total height @@ -59,23 +59,23 @@ public struct SyncProgress: Sendable, Equatable { self.filtersDownloaded = ffiProgress.filters_downloaded self.lastSyncedFilterHeight = ffiProgress.last_synced_filter_height } - + public var blocksRemaining: UInt32 { guard totalHeight > currentHeight else { return 0 } return totalHeight - currentHeight } - + public var isComplete: Bool { return currentHeight >= totalHeight || progress >= 1.0 } - + public var percentageComplete: Int { return Int(progress * 100) } - + public var formattedTimeRemaining: String? 
{ guard let eta = estimatedTimeRemaining else { return nil } - + let formatter = DateComponentsFormatter() formatter.allowedUnits = [.hour, .minute, .second] formatter.unitsStyle = .abbreviated @@ -91,7 +91,7 @@ public enum SyncStatus: String, Codable, Sendable { case scanning = "scanning" case synced = "synced" case error = "error" - + internal init?(ffiStatus: UInt32) { switch ffiStatus { case 0: @@ -112,7 +112,7 @@ public enum SyncStatus: String, Codable, Sendable { return nil } } - + public var description: String { switch self { case .idle: @@ -131,7 +131,7 @@ public enum SyncStatus: String, Codable, Sendable { return "Sync error" } } - + public var isActive: Bool { switch self { case .idle, .synced, .error: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift index eb4353270..91c8d008d 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift @@ -16,10 +16,10 @@ public final class Transaction { public var raw: Data public var size: UInt32 public var version: UInt32 - + // Inverse relationship to WatchedAddress @Relationship(inverse: \WatchedAddress.transactions) public var watchedAddress: WatchedAddress? - + public init( txid: String, height: UInt32? = nil, @@ -45,7 +45,7 @@ public final class Transaction { self.version = version self.watchedAddress = watchedAddress } - + internal convenience init(ffiTransaction: FFITransaction) { self.init( txid: String(cString: ffiTransaction.txid.ptr), @@ -60,15 +60,15 @@ public final class Transaction { version: UInt32(ffiTransaction.version) ) } - + public var isConfirmed: Bool { return confirmations > 0 } - + public var isPending: Bool { return confirmations == 0 && !isInstantLocked } - + public var status: TransactionStatus { if isInstantLocked { return .instantLocked @@ -87,7 +87,7 @@ public enum TransactionStatus: Equatable { case confirming(UInt32) case confirmed case instantLocked - + public var description: String { switch self { case .pending: @@ -100,7 +100,7 @@ public enum TransactionStatus: Equatable { return "InstantSend" } } - + public var isSettled: Bool { switch self { case .confirmed, .instantLocked: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift index 6f80254ac..dbb83f4ca 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift @@ -16,7 +16,7 @@ public final class UTXO { public var isSpent: Bool public var confirmations: UInt32 public var isInstantLocked: Bool - + public init( outpoint: String, txid: String, @@ -40,12 +40,12 @@ public final class UTXO { self.confirmations = confirmations self.isInstantLocked = isInstantLocked } - + internal convenience init(ffiUtxo: FFIUtxo) { let txidStr = String(cString: ffiUtxo.txid.ptr) let outpoint = "\(txidStr):\(ffiUtxo.vout)" let scriptData = Data(bytes: ffiUtxo.script_pubkey.ptr, count: strlen(ffiUtxo.script_pubkey.ptr)) - + self.init( outpoint: outpoint, txid: txidStr, @@ -59,11 +59,11 @@ public final class UTXO { isInstantLocked: ffiUtxo.is_instantlocked ) } - + public var isSpendable: Bool { return !isSpent && (confirmations > 0 || isInstantLocked) } - + public var formattedValue: String { let dash = Double(value) / 100_000_000.0 return String(format: "%.8f DASH", dash) @@ -74,7 +74,7 @@ extension UTXO { 
public static func createOutpoint(txid: String, vout: UInt32) -> String { return "\(txid):\(vout)" } - + public func parseOutpoint() -> (txid: String, vout: UInt32)? { let components = outpoint.split(separator: ":") guard components.count == 2, diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift index 274077f0e..08577f5f3 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift @@ -7,7 +7,7 @@ public enum ValidationMode: String, Codable, CaseIterable, Sendable { case none = "none" case basic = "basic" case full = "full" - + public var description: String { switch self { case .none: @@ -18,7 +18,7 @@ public enum ValidationMode: String, Codable, CaseIterable, Sendable { return "Full validation - verify everything including ChainLocks" } } - + internal var ffiValue: FFIValidationMode { switch self { case .none: @@ -29,7 +29,7 @@ public enum ValidationMode: String, Codable, CaseIterable, Sendable { return FFIValidationMode(rawValue: 2) } } - + internal init?(ffiMode: FFIValidationMode) { switch ffiMode.rawValue { case 0: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift index c7761d088..cfea95852 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift @@ -8,11 +8,11 @@ public final class WatchedAddress { public var createdAt: Date public var lastActivity: Date? public var isActive: Bool - + @Relationship(deleteRule: .cascade) public var balance: Balance? @Relationship(deleteRule: .cascade) public var transactions: [Transaction] @Relationship(deleteRule: .cascade) public var utxos: [UTXO] - + public init( address: String, label: String? = nil, @@ -26,38 +26,38 @@ public final class WatchedAddress { self.transactions = [] self.utxos = [] } - + public var displayName: String { return label ?? 
address } - + public var shortAddress: String { guard address.count > 12 else { return address } let prefix = address.prefix(6) let suffix = address.suffix(4) return "\(prefix)...\(suffix)" } - + public var totalReceived: UInt64 { return transactions .filter { $0.amount > 0 } .reduce(0) { $0 + UInt64($1.amount) } } - + public var totalSent: UInt64 { return transactions .filter { $0.amount < 0 } .reduce(0) { $0 + UInt64(abs($1.amount)) } } - + public var spendableUTXOs: [UTXO] { return utxos.filter { $0.isSpendable } } - + public var pendingTransactions: [Transaction] { return transactions.filter { $0.isPending } } - + public func updateActivity() { self.lastActivity = .now } @@ -70,7 +70,7 @@ extension WatchedAddress { case balance = "balance" case activity = "activity" case created = "created" - + public var description: String { switch self { case .label: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift index 430700a56..9e94359fd 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift @@ -8,43 +8,43 @@ public final class PersistentWalletManager: WalletManager { private let storage: StorageManager private var syncTask: Task? private let logger = Logger(subsystem: "com.dash.sdk", category: "PersistentWalletManager") - + public init(client: SPVClient, storage: StorageManager) { self.storage = storage - + super.init(client: client) - + Task { await loadPersistedData() } } - + deinit { syncTask?.cancel() } - + // MARK: - Overrides - + public override func watchAddress(_ address: String, label: String? = nil) async throws { try await super.watchAddress(address, label: label) - + // Persist to storage let watchedAddress = WatchedAddress(address: address, label: label) try storage.saveWatchedAddress(watchedAddress) - + // Start syncing data for this address await syncAddressData(address) } - + public override func unwatchAddress(_ address: String) async throws { try await super.unwatchAddress(address) - + // Remove from storage if let watchedAddress = try storage.fetchWatchedAddress(by: address) { try storage.deleteWatchedAddress(watchedAddress) } } - + public override func getBalance(for address: String) async throws -> Balance { // Try to get from storage first if let cachedBalance = try storage.fetchBalance(for: address) { @@ -53,48 +53,48 @@ public final class PersistentWalletManager: WalletManager { return cachedBalance } } - + // Fetch fresh balance let balance = try await super.getBalance(for: address) - + // Save to storage try storage.saveBalance(balance, for: address) - + return balance } - + public override func getUTXOs(for address: String? = nil) async throws -> [UTXO] { // Get from storage let cachedUTXOs = try storage.fetchUTXOs(for: address) - + // If we have recent data, return it if !cachedUTXOs.isEmpty { return cachedUTXOs } - + // Otherwise fetch fresh data let utxos = try await super.getUTXOs(for: address) - + // Save to storage try await storage.saveUTXOs(utxos) - + return utxos } - + public override func getTransactions(for address: String? 
= nil, limit: Int = 100) async throws -> [Transaction] { // First get from parent's in-memory storage (which has real-time data) let currentTransactions = try await super.getTransactions(for: address, limit: limit) - + // Save any new transactions to storage for transaction in currentTransactions { if try storage.fetchTransaction(by: transaction.txid) == nil { try storage.saveTransaction(transaction) } } - + // Also get from storage to include any historical transactions let cachedTransactions = try storage.fetchTransactions(for: address, limit: limit) - + // Merge and deduplicate var allTransactions = currentTransactions for cached in cachedTransactions { @@ -102,29 +102,29 @@ public final class PersistentWalletManager: WalletManager { allTransactions.append(cached) } } - + // Sort and limit allTransactions.sort { $0.timestamp > $1.timestamp } if allTransactions.count > limit { allTransactions = Array(allTransactions.prefix(limit)) } - + return allTransactions } - + // MARK: - Persistence Methods - + private func loadPersistedData() async { do { // Load watched addresses let addresses = try storage.fetchWatchedAddresses() - + watchedAddresses = Set(addresses.map { $0.address }) - + // Re-watch addresses in SPV client if connected if client.isConnected { var watchErrors: [Error] = [] - + for address in addresses { do { try await client.addWatchItem(type: .address, data: address.address) @@ -134,18 +134,18 @@ public final class PersistentWalletManager: WalletManager { watchErrors.append(error) } } - + // If any addresses failed to watch, throw aggregate error if !watchErrors.isEmpty { throw WalletManagerError.partialWatchFailure(addresses: addresses.count, failures: watchErrors.count) } } - + // Load total balance var totalConfirmed: UInt64 = 0 var totalPending: UInt64 = 0 var totalInstantLocked: UInt64 = 0 - + for address in addresses { if let balance = address.balance { totalConfirmed += balance.confirmed @@ -153,7 +153,7 @@ public final class PersistentWalletManager: WalletManager { totalInstantLocked += balance.instantLocked } } - + totalBalance = Balance( confirmed: totalConfirmed, pending: totalPending, @@ -164,21 +164,21 @@ public final class PersistentWalletManager: WalletManager { print("Failed to load persisted data: \(error)") } } - + private func syncAddressData(_ address: String) async { do { // Sync balance let balance = try await getBalance(for: address) try storage.saveBalance(balance, for: address) - + // Sync UTXOs let utxos = try await getUTXOs(for: address) try await storage.saveUTXOs(utxos) - + // Sync transactions let transactions = try await getTransactions(for: address) try await storage.saveTransactions(transactions) - + // Update activity timestamp if let watchedAddress = try storage.fetchWatchedAddress(by: address) { watchedAddress.updateActivity() @@ -188,11 +188,11 @@ public final class PersistentWalletManager: WalletManager { print("Failed to sync address data: \(error)") } } - + private func syncTransactions(for address: String?) 
async { do { let transactions = try await super.getTransactions(for: address) - + // Update or insert transactions for transaction in transactions { if let existing = try storage.fetchTransaction(by: transaction.txid) { @@ -207,7 +207,7 @@ public final class PersistentWalletManager: WalletManager { try storage.saveTransaction(transaction) } } - + // Also save address-transaction associations if we have them if let address = address { // Store which transactions belong to which addresses @@ -217,49 +217,49 @@ public final class PersistentWalletManager: WalletManager { print("Failed to sync transactions: \(error)") } } - + // MARK: - Public Persistence Methods - + public func startPeriodicSync(interval: TimeInterval = 30) { syncTask?.cancel() - + syncTask = Task { while !Task.isCancelled { await syncAllData() - + try? await Task.sleep(nanoseconds: UInt64(interval * 1_000_000_000)) } } } - + public func stopPeriodicSync() { syncTask?.cancel() syncTask = nil } - + public func syncAllData() async { for address in watchedAddresses { await syncAddressData(address) } - + await updateTotalBalance() } - + public func getStorageStatistics() throws -> StorageStatistics { return try storage.getStorageStatistics() } - + public func clearAllData() throws { try storage.deleteAllData() watchedAddresses.removeAll() totalBalance = Balance() } - + public func exportWalletData() throws -> WalletExportData { let addresses = try storage.fetchWatchedAddresses() let transactions = try storage.fetchTransactions() let utxos = try storage.fetchUTXOs() - + // Convert SwiftData models to Codable types let exportedAddresses = addresses.map { address in WalletExportData.ExportedAddress( @@ -277,7 +277,7 @@ public final class PersistentWalletManager: WalletManager { } ) } - + let exportedTransactions = transactions.map { tx in WalletExportData.ExportedTransaction( txid: tx.txid, @@ -291,7 +291,7 @@ public final class PersistentWalletManager: WalletManager { version: tx.version ) } - + let exportedUTXOs = utxos.map { utxo in WalletExportData.ExportedUTXO( txid: utxo.txid, @@ -303,7 +303,7 @@ public final class PersistentWalletManager: WalletManager { isInstantLocked: utxo.isInstantLocked ) } - + return WalletExportData( addresses: exportedAddresses, transactions: exportedTransactions, @@ -311,11 +311,11 @@ public final class PersistentWalletManager: WalletManager { exportDate: .now ) } - + public func importWalletData(_ data: WalletExportData) async throws { // Clear existing data try clearAllData() - + // Import addresses for exportedAddress in data.addresses { let address = WatchedAddress( @@ -324,7 +324,7 @@ public final class PersistentWalletManager: WalletManager { createdAt: exportedAddress.createdAt, isActive: exportedAddress.isActive ) - + // Create balance if present if let exportedBalance = exportedAddress.balance { let balance = Balance( @@ -334,11 +334,11 @@ public final class PersistentWalletManager: WalletManager { ) address.balance = balance } - + try storage.saveWatchedAddress(address) watchedAddresses.insert(address.address) } - + // Import transactions let transactions = data.transactions.map { exportedTx in Transaction( @@ -354,7 +354,7 @@ public final class PersistentWalletManager: WalletManager { ) } try await storage.saveTransactions(transactions) - + // Import UTXOs let utxos = data.utxos.map { exportedUTXO in let outpoint = "\(exportedUTXO.txid):\(exportedUTXO.vout)" @@ -371,7 +371,7 @@ public final class PersistentWalletManager: WalletManager { ) } try await storage.saveUTXOs(utxos) - + // Update 
balances await updateTotalBalance() } @@ -387,14 +387,14 @@ public struct WalletExportData: Codable { public let isActive: Bool public let balance: ExportedBalance? } - + public struct ExportedBalance: Codable { public let confirmed: UInt64 public let pending: UInt64 public let instantLocked: UInt64 public let total: UInt64 } - + public struct ExportedTransaction: Codable { public let txid: String public let height: UInt32? @@ -406,7 +406,7 @@ public struct WalletExportData: Codable { public let size: UInt32 public let version: UInt32 } - + public struct ExportedUTXO: Codable { public let txid: String public let vout: UInt32 @@ -416,20 +416,20 @@ public struct WalletExportData: Codable { public let confirmations: UInt32 public let isInstantLocked: Bool } - + public let addresses: [ExportedAddress] public let transactions: [ExportedTransaction] public let utxos: [ExportedUTXO] public let exportDate: Date - + public var formattedSize: String { let encoder = JSONEncoder() encoder.outputFormatting = .prettyPrinted - + if let data = try? encoder.encode(self) { return ByteCountFormatter.string(fromByteCount: Int64(data.count), countStyle: .binary) } - + return "Unknown" } } @@ -438,7 +438,7 @@ public struct WalletExportData: Codable { public enum WalletManagerError: LocalizedError { case partialWatchFailure(addresses: Int, failures: Int) - + public var errorDescription: String? { switch self { case .partialWatchFailure(let addresses, let failures): diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift index 35b946b2d..972b41f6c 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift @@ -6,7 +6,7 @@ public final class StorageManager { private let modelContainer: ModelContainer private let modelContext: ModelContext private let backgroundContext: ModelContext - + @MainActor public init() throws { let schema = Schema([ @@ -15,69 +15,69 @@ public final class StorageManager { UTXO.self, Balance.self ]) - + let configuration = ModelConfiguration( schema: schema, isStoredInMemoryOnly: false, groupContainer: .automatic, cloudKitDatabase: .none ) - + self.modelContainer = try ModelContainer( for: schema, configurations: [configuration] ) - + self.modelContext = modelContainer.mainContext self.backgroundContext = ModelContext(modelContainer) - + // Configure contexts modelContext.autosaveEnabled = true backgroundContext.autosaveEnabled = false } - + // MARK: - Watched Addresses - + public func saveWatchedAddress(_ address: WatchedAddress) throws { modelContext.insert(address) try modelContext.save() } - + public func fetchWatchedAddresses() throws -> [WatchedAddress] { let descriptor = FetchDescriptor( sortBy: [SortDescriptor(\.createdAt, order: .reverse)] ) return try modelContext.fetch(descriptor) } - + public func fetchWatchedAddress(by address: String) throws -> WatchedAddress? 
{ let predicate = #Predicate { watchedAddress in watchedAddress.address == address } - + let descriptor = FetchDescriptor(predicate: predicate) return try modelContext.fetch(descriptor).first } - + public func deleteWatchedAddress(_ address: WatchedAddress) throws { modelContext.delete(address) try modelContext.save() } - + // MARK: - Transactions - + public func saveTransaction(_ transaction: Transaction) throws { modelContext.insert(transaction) try modelContext.save() } - + public func saveTransactions(_ transactions: [Transaction]) async throws { for transaction in transactions { backgroundContext.insert(transaction) } try backgroundContext.save() } - + public func fetchTransactions( for address: String? = nil, limit: Int = 100, @@ -86,51 +86,51 @@ public final class StorageManager { var descriptor = FetchDescriptor( sortBy: [SortDescriptor(\.timestamp, order: .reverse)] ) - + if let address = address { // This would need a relationship or additional field to filter by address // For now, fetch all transactions } - + descriptor.fetchLimit = limit descriptor.fetchOffset = offset - + return try modelContext.fetch(descriptor) } - + public func fetchTransaction(by txid: String) throws -> Transaction? { let predicate = #Predicate { transaction in transaction.txid == txid } - + let descriptor = FetchDescriptor(predicate: predicate) return try modelContext.fetch(descriptor).first } - + public func updateTransaction(_ transaction: Transaction) throws { try modelContext.save() } - + // MARK: - UTXOs - + public func saveUTXO(_ utxo: UTXO) throws { modelContext.insert(utxo) try modelContext.save() } - + public func saveUTXOs(_ utxos: [UTXO]) async throws { for utxo in utxos { backgroundContext.insert(utxo) } try backgroundContext.save() } - + public func fetchUTXOs( for address: String? = nil, includeSpent: Bool = false ) throws -> [UTXO] { var predicate: Predicate? - + if let address = address { if includeSpent { predicate = #Predicate { utxo in @@ -146,43 +146,43 @@ public final class StorageManager { !utxo.isSpent } } - + let descriptor = FetchDescriptor( predicate: predicate, sortBy: [SortDescriptor(\.value, order: .reverse)] ) - + return try modelContext.fetch(descriptor) } - + public func markUTXOAsSpent(outpoint: String) throws { let predicate = #Predicate { utxo in utxo.outpoint == outpoint } - + let descriptor = FetchDescriptor(predicate: predicate) if let utxo = try modelContext.fetch(descriptor).first { utxo.isSpent = true try modelContext.save() } } - + // MARK: - Balance - + public func saveBalance(_ balance: Balance, for address: String) throws { if let watchedAddress = try fetchWatchedAddress(by: address) { watchedAddress.balance = balance try modelContext.save() } } - + public func fetchBalance(for address: String) throws -> Balance? 
{ let watchedAddress = try fetchWatchedAddress(by: address) return watchedAddress?.balance } - + // MARK: - Batch Operations - + public func performBatchUpdate( _ updates: @escaping () throws -> T ) async throws -> T { @@ -190,9 +190,9 @@ public final class StorageManager { try backgroundContext.save() return result } - + // MARK: - Cleanup - + public func deleteAllData() throws { try modelContext.delete(model: WatchedAddress.self) try modelContext.delete(model: Transaction.self) @@ -200,28 +200,28 @@ public final class StorageManager { try modelContext.delete(model: Balance.self) try modelContext.save() } - + public func pruneOldTransactions(olderThan date: Date) throws { let predicate = #Predicate { transaction in transaction.timestamp < date } - + try modelContext.delete(model: Transaction.self, where: predicate) try modelContext.save() } - + // MARK: - Statistics - + public func getStorageStatistics() throws -> StorageStatistics { let addressCount = try modelContext.fetchCount(FetchDescriptor()) let transactionCount = try modelContext.fetchCount(FetchDescriptor()) let utxoCount = try modelContext.fetchCount(FetchDescriptor()) - + let spentUTXOPredicate = #Predicate { $0.isSpent } let spentUTXOCount = try modelContext.fetchCount( FetchDescriptor(predicate: spentUTXOPredicate) ) - + return StorageStatistics( watchedAddressCount: addressCount, transactionCount: transactionCount, @@ -240,7 +240,7 @@ public struct StorageStatistics { public let totalUTXOCount: Int public let spentUTXOCount: Int public let unspentUTXOCount: Int - + public var description: String { """ Storage Statistics: diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift index 2dcd97f50..7ab000751 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift @@ -9,7 +9,7 @@ public enum MempoolStrategy: UInt32, CaseIterable, Sendable { case bloomFilter = 1 /// Only fetch when recently sent or from known addresses (good privacy) case selective = 2 - + internal var ffiValue: FFIMempoolStrategy { return FFIMempoolStrategy(rawValue: self.rawValue) } @@ -19,22 +19,22 @@ public enum MempoolStrategy: UInt32, CaseIterable, Sendable { public struct MempoolConfig { /// Whether mempool tracking is enabled public let enabled: Bool - + /// Strategy for handling mempool transactions public let strategy: MempoolStrategy - + /// Maximum number of transactions to track public let maxTransactions: UInt32 - + /// Time after which unconfirmed transactions are pruned (in seconds) public let timeoutSeconds: UInt64 - + /// Whether to fetch transaction data from INV messages public let fetchTransactions: Bool - + /// Whether to persist mempool transactions across restarts public let persistMempool: Bool - + /// Initialize with custom configuration public init( enabled: Bool, @@ -51,7 +51,7 @@ public struct MempoolConfig { self.fetchTransactions = fetchTransactions self.persistMempool = persistMempool } - + /// Create a FetchAll configuration public static func fetchAll(maxTransactions: UInt32 = 5000) -> MempoolConfig { return MempoolConfig( @@ -63,7 +63,7 @@ public struct MempoolConfig { persistMempool: false ) } - + /// Create a Selective configuration (recommended) public static func selective(maxTransactions: UInt32 = 1000) -> MempoolConfig { return MempoolConfig( @@ -75,7 +75,7 @@ public struct MempoolConfig { persistMempool: false ) } - + /// 
Create a disabled configuration public static var disabled: MempoolConfig { return MempoolConfig(enabled: false) @@ -86,31 +86,31 @@ public struct MempoolConfig { public struct MempoolTransaction { /// Transaction ID public let txid: String - + /// Raw transaction data public let rawTransaction: Data - + /// Time when first seen public let firstSeen: Date - + /// Transaction fee in satoshis public let fee: UInt64 - + /// Whether this is an InstantSend transaction public let isInstantSend: Bool - + /// Whether this is an outgoing transaction public let isOutgoing: Bool - + /// Addresses affected by this transaction public let affectedAddresses: [String] - + /// Net amount change (positive for incoming, negative for outgoing) public let netAmount: Int64 - + /// Size of the transaction in bytes public let size: UInt32 - + /// Fee rate in satoshis per byte public var feeRate: Double { guard size > 0 else { return 0 } @@ -122,10 +122,10 @@ public struct MempoolTransaction { public struct MempoolBalance { /// Pending balance from regular mempool transactions public let pending: UInt64 - + /// Pending balance from InstantSend transactions public let pendingInstant: UInt64 - + /// Total pending balance public var total: UInt64 { return pending + pendingInstant @@ -152,10 +152,10 @@ public enum MempoolRemovalReason: UInt8, Equatable, Sendable { public enum MempoolEvent { /// New transaction added to mempool case transactionAdded(MempoolTransaction) - + /// Transaction confirmed in a block case transactionConfirmed(txid: String, blockHeight: UInt32, blockHash: String) - + /// Transaction removed from mempool case transactionRemoved(txid: String, reason: MempoolRemovalReason) } diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift index 46ad2a1df..35593d87e 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift @@ -6,7 +6,7 @@ public struct WatchAddressResult { public let error: WatchAddressError? public let timestamp: Date public let retryCount: Int - + public init(address: String, success: Bool, error: WatchAddressError? 
= nil, timestamp: Date = Date(), retryCount: Int = 0) { self.address = address self.success = success diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift index 6582b171f..87079b8af 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift @@ -7,13 +7,13 @@ extension Data { var hexString: String { return map { String(format: "%02x", $0) }.joined() } - + /// Create data from hex string init?(hexString: String) { let len = hexString.count / 2 var data = Data(capacity: len) var index = hexString.startIndex - + for _ in 0..= 26 && count <= 35 else { return false } - + let firstChar = String(prefix(1)) return mainnetPrefixes.contains(firstChar) || testnetPrefixes.contains(firstChar) } - + /// Shorten string for display (e.g., addresses, txids) func shortened(prefix: Int = 6, suffix: Int = 4) -> String { guard count > prefix + suffix + 3 else { return self } - + let prefixStr = self.prefix(prefix) let suffixStr = self.suffix(suffix) return "\(prefixStr)...\(suffixStr)" @@ -59,7 +59,7 @@ extension UInt64 { var dashValue: Double { return Double(self) / 100_000_000.0 } - + /// Format as Dash string var formattedDash: String { return String(format: "%.8f DASH", dashValue) diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift index 5e7c32ea3..eec7ee3fa 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift @@ -8,22 +8,22 @@ public class WatchAddressRetryManager { private let retryDelay: TimeInterval = 5.0 private let logger = Logger(subsystem: "com.dash.sdk", category: "WatchAddressRetryManager") private weak var client: SPVClient? 
- + struct WatchRetryItem { let address: String let accountId: String var retryCount: Int let firstAttempt: Date } - + public init(client: SPVClient) { self.client = client } - + deinit { retryTimer?.invalidate() } - + public func scheduleRetry(address: String, accountId: String) { let item = WatchRetryItem( address: address, @@ -31,17 +31,17 @@ public class WatchAddressRetryManager { retryCount: 0, firstAttempt: Date() ) - + retryQueue.append(item) startRetryTimer() } - + private func startRetryTimer() { guard retryTimer == nil else { return } - + DispatchQueue.main.async { [weak self] in guard let self = self else { return } - + self.retryTimer = Timer.scheduledTimer(withTimeInterval: self.retryDelay, repeats: true) { _ in Task { await self.processRetryQueue() @@ -49,21 +49,21 @@ public class WatchAddressRetryManager { } } } - + private func processRetryQueue() async { guard let client = client else { logger.error("Client is nil, cannot process retry queue") return } - + var remainingItems: [WatchRetryItem] = [] - + for var item in retryQueue { if item.retryCount >= maxRetries { logger.error("Max retries exceeded for address: \(item.address)") continue } - + do { try await client.addWatchItem(type: .address, data: item.address) logger.info("Successfully watched address on retry: \(item.address)") @@ -73,9 +73,9 @@ public class WatchAddressRetryManager { logger.warning("Retry \(item.retryCount) failed for address: \(item.address)") } } - + retryQueue = remainingItems - + if retryQueue.isEmpty { DispatchQueue.main.async { [weak self] in self?.retryTimer?.invalidate() @@ -83,20 +83,20 @@ public class WatchAddressRetryManager { } } } - + public func getPendingRetries() -> [String] { return retryQueue.map { $0.address } } - + public func clearRetryQueue() { retryQueue.removeAll() retryTimer?.invalidate() retryTimer = nil } - + public func removeAddress(_ address: String) { retryQueue.removeAll { $0.address == address } - + if retryQueue.isEmpty { retryTimer?.invalidate() retryTimer = nil diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift index 85028842a..7a9fdeb1e 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift @@ -5,184 +5,184 @@ import DashSPVFFI @Observable public class WalletManager { internal let client: SPVClient - + public internal(set) var watchedAddresses: Set = [] public internal(set) var totalBalance: Balance = Balance() public internal(set) var totalMempoolBalance: MempoolBalance = MempoolBalance(pending: 0, pendingInstant: 0) public internal(set) var transactions: [String: Transaction] = [:] // txid -> Transaction public internal(set) var addressTransactions: [String: Set] = [:] // address -> Set of txids public internal(set) var mempoolTransactions: Set = [] // txids of mempool transactions - + private var cancellables = Set() - + public init(client: SPVClient) { self.client = client setupEventHandlers() } - + // MARK: - Address Management - + public func watchAddress(_ address: String, label: String? 
= nil) async throws { guard client.isConnected else { throw DashSDKError.notConnected } - + try validateAddress(address) - + // Add address to SPV client watch list try await client.addWatchItem(type: .address, data: address) - + watchedAddresses.insert(address) - + // Update balance for new address try await updateBalance(for: address) } - + public func unwatchAddress(_ address: String) async throws { guard client.isConnected else { throw DashSDKError.notConnected } - + // Remove address from SPV client watch list try await client.removeWatchItem(type: .address, data: address) - + watchedAddresses.remove(address) await updateTotalBalance() } - + public func watchScript(_ script: Data) async throws { guard client.isConnected else { throw DashSDKError.notConnected } - + // Convert script data to hex string let scriptHex = script.map { String(format: "%02x", $0) }.joined() - + // Add script to SPV client watch list try await client.addWatchItem(type: .script, data: scriptHex) } - + // MARK: - Balance Queries - + public func getBalance(for address: String) async throws -> Balance { guard client.isConnected else { throw DashSDKError.notConnected } - + return try await client.getAddressBalance(address) } - + public func getTotalBalance() async throws -> Balance { guard client.isConnected else { throw DashSDKError.notConnected } - + return try await client.getTotalBalance() } - + public func getBalanceWithMempool() async throws -> Balance { guard client.isConnected else { throw DashSDKError.notConnected } - + return try await client.getBalanceWithMempool() } - + public func getMempoolBalance(for address: String) async throws -> MempoolBalance { guard client.isConnected else { throw DashSDKError.notConnected } - + return try await client.getMempoolBalance(for: address) } - + public func getTotalMempoolBalance() async throws -> MempoolBalance { guard client.isConnected else { throw DashSDKError.notConnected } - + var totalPending: UInt64 = 0 var totalPendingInstant: UInt64 = 0 - + for address in watchedAddresses { let mempoolBalance = try await getMempoolBalance(for: address) totalPending += mempoolBalance.pending totalPendingInstant += mempoolBalance.pendingInstant } - + return MempoolBalance(pending: totalPending, pendingInstant: totalPendingInstant) } - + /// Combined balance including confirmed and mempool public func getCombinedBalance() async throws -> (confirmed: Balance, mempool: MempoolBalance, total: UInt64) { let confirmedBalance = try await getTotalBalance() let mempoolBalance = try await getTotalMempoolBalance() let total = confirmedBalance.total + mempoolBalance.total - + return (confirmed: confirmedBalance, mempool: mempoolBalance, total: total) } - + // MARK: - UTXO Management - + public func getUTXOs(for address: String? = nil) async throws -> [UTXO] { guard client.isConnected else { throw DashSDKError.notConnected } - + // This would call the FFI function to get UTXOs return [] } - + public func getSpendableUTXOs(minConfirmations: UInt32 = 1) async throws -> [UTXO] { let allUTXOs = try await getUTXOs() return allUTXOs.filter { utxo in utxo.confirmations >= minConfirmations || utxo.isInstantLocked } } - + // MARK: - Transaction History - + public func getTransactions(for address: String? 
= nil, limit: Int = 100) async throws -> [Transaction] { guard client.isConnected else { throw DashSDKError.notConnected } - + var result: [Transaction] - + // Filter by address if provided if let address = address { // Get transaction IDs for this address let txids = addressTransactions[address] ?? Set() - + // Get the actual transaction objects result = txids.compactMap { transactions[$0] } } else { // Return all transactions result = Array(transactions.values) } - + // Sort by timestamp, newest first result.sort { $0.timestamp > $1.timestamp } - + // Apply limit if result.count > limit { result = Array(result.prefix(limit)) } - + return result } - + public func getTransaction(txid: String) async throws -> Transaction? { guard client.isConnected else { throw DashSDKError.notConnected } - + // Return from local storage return transactions[txid] } - + // MARK: - Transaction Building - + public func createTransaction( to address: String, amount: UInt64, @@ -192,19 +192,19 @@ public class WalletManager { guard client.isConnected else { throw DashSDKError.notConnected } - + try validateAddress(address) - + let utxos = try await getSpendableUTXOs() let totalAvailable = utxos.reduce(0) { $0 + $1.value } - + guard totalAvailable >= amount else { throw DashSDKError.insufficientFunds(required: amount, available: totalAvailable) } - + // Select UTXOs for the transaction let selectedUTXOs = selectUTXOs(from: utxos, targetAmount: amount, feeRate: feeRate) - + // Build transaction let builder = TransactionBuilder() return try builder.buildTransaction( @@ -214,9 +214,9 @@ public class WalletManager { feeRate: feeRate ) } - + // MARK: - Private - + private func setupEventHandlers() { client.eventPublisher .sink { [weak self] event in @@ -226,7 +226,7 @@ public class WalletManager { } .store(in: &cancellables) } - + private func handleEvent(_ event: SPVEvent) async { switch event { case .balanceUpdated(let balance): @@ -247,13 +247,13 @@ public class WalletManager { break } } - + private func updateBalance(for address: String) async throws { _ = try await getBalance(for: address) // Update total balance after adding new address await updateTotalBalance() } - + internal func updateTotalBalance() async { do { totalBalance = try await getTotalBalance() @@ -261,7 +261,7 @@ public class WalletManager { print("Failed to update total balance: \(error)") } } - + private func handleTransactionDetected(txid: String, confirmed: Bool, amount: Int64, addresses: [String], blockHeight: UInt32?) async { // Check if we already have this transaction if var existingTx = transactions[txid] { @@ -273,7 +273,7 @@ public class WalletManager { } return } - + // Create transaction with real data let transaction = Transaction( txid: txid, @@ -284,10 +284,10 @@ public class WalletManager { confirmations: confirmed ? 1 : 0, isInstantLocked: false // Could be determined from confirmation speed ) - + // Store the transaction transactions[txid] = transaction - + // Associate transaction with addresses for address in addresses { // Add to address-transaction mapping @@ -296,10 +296,10 @@ public class WalletManager { } addressTransactions[address]?.insert(txid) } - + // Update balance await updateTotalBalance() - + // Log for debugging print("💸 New transaction detected: \(txid)") print(" Amount: \(amount) satoshis (\(Double(amount) / 100_000_000) DASH)") @@ -307,11 +307,11 @@ public class WalletManager { print(" Confirmed: \(confirmed), Height: \(blockHeight ?? 
0)") print("📊 Total transactions stored: \(transactions.count)") } - + private func handleMempoolTransactionAdded(txid: String, amount: Int64, addresses: [String]) async { // Add to mempool transactions set mempoolTransactions.insert(txid) - + // Create unconfirmed transaction let transaction = Transaction( txid: txid, @@ -322,10 +322,10 @@ public class WalletManager { confirmations: 0, isInstantLocked: false ) - + // Store the transaction transactions[txid] = transaction - + // Associate with addresses for address in addresses { if addressTransactions[address] == nil { @@ -333,41 +333,41 @@ public class WalletManager { } addressTransactions[address]?.insert(txid) } - + // Update mempool balance await updateMempoolBalance() - + print("🔄 New mempool transaction: \(txid)") print(" Amount: \(amount) satoshis") print(" Addresses: \(addresses.joined(separator: ", "))") } - + private func handleMempoolTransactionConfirmed(txid: String, blockHeight: UInt32, confirmations: UInt32) async { // Remove from mempool set mempoolTransactions.remove(txid) - + // Update transaction status if var transaction = transactions[txid] { transaction.height = blockHeight transaction.confirmations = confirmations transactions[txid] = transaction - + print("✅ Mempool transaction confirmed: \(txid) at height \(blockHeight)") } - + // Update balances await updateTotalBalance() await updateMempoolBalance() } - + private func handleMempoolTransactionRemoved(txid: String, reason: MempoolRemovalReason) async { // Remove from mempool set mempoolTransactions.remove(txid) - + // Remove transaction if it wasn't confirmed if reason != MempoolRemovalReason.confirmed { transactions.removeValue(forKey: txid) - + // Remove from address mappings for (address, var txids) in addressTransactions { if txids.remove(txid) != nil { @@ -375,13 +375,13 @@ public class WalletManager { } } } - + // Update mempool balance await updateMempoolBalance() - + print("❌ Mempool transaction removed: \(txid), reason: \(reason)") } - + private func updateMempoolBalance() async { do { totalMempoolBalance = try await getTotalMempoolBalance() @@ -389,34 +389,34 @@ public class WalletManager { print("Failed to update mempool balance: \(error)") } } - + private func validateAddress(_ address: String) throws { // This would call the FFI validation function guard address.starts(with: "X") || address.starts(with: "y") else { throw DashSDKError.invalidAddress(address) } } - + private func selectUTXOs(from utxos: [UTXO], targetAmount: UInt64, feeRate: UInt64) -> [UTXO] { // Simple UTXO selection algorithm var selected: [UTXO] = [] var totalSelected: UInt64 = 0 - + // Sort by value descending let sorted = utxos.sorted { $0.value > $1.value } - + for utxo in sorted { selected.append(utxo) totalSelected += utxo.value - + // Estimate fee based on transaction size let estimatedFee = UInt64(selected.count * 148 + 2 * 34 + 10) * feeRate / 1000 - + if totalSelected >= targetAmount + estimatedFee { break } } - + return selected } } @@ -425,7 +425,7 @@ public class WalletManager { public struct TransactionBuilder { public init() {} - + public func buildTransaction( inputs: [UTXO], outputs: [(address: String, amount: UInt64)], @@ -436,7 +436,7 @@ public struct TransactionBuilder { // For now, return empty data as placeholder return Data() } - + public func estimateFee( inputs: Int, outputs: Int, diff --git a/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/DashSDKTests.swift b/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/DashSDKTests.swift index 434814e58..d75a7fa46 100644 
--- a/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/DashSDKTests.swift +++ b/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/DashSDKTests.swift @@ -2,27 +2,27 @@ import XCTest @testable import SwiftDashCoreSDK final class DashSDKTests: XCTestCase { - + var sdk: DashSDK! - + override func setUp() async throws { // Create test configuration let config = SPVClientConfiguration() config.network = .testnet config.validationMode = .basic - + sdk = try await DashSDK(configuration: config) } - + override func tearDown() async throws { if sdk.isConnected { try await sdk.disconnect() } sdk = nil } - + // MARK: - Configuration Tests - + func testDefaultConfiguration() throws { let config = SPVClientConfiguration.default XCTAssertEqual(config.network, .mainnet) @@ -30,28 +30,28 @@ final class DashSDKTests: XCTestCase { XCTAssertEqual(config.maxPeers, 12) XCTAssertTrue(config.enableFilterLoad) } - + func testNetworkSpecificConfigurations() throws { let mainnet = SPVClientConfiguration.mainnet() XCTAssertEqual(mainnet.network, .mainnet) - + let testnet = SPVClientConfiguration.testnet() XCTAssertEqual(testnet.network, .testnet) - + let regtest = SPVClientConfiguration.regtest() XCTAssertEqual(regtest.network, .regtest) XCTAssertEqual(regtest.validationMode, .none) } - + // MARK: - Model Tests - + func testNetworkProperties() { XCTAssertEqual(DashNetwork.mainnet.defaultPort, 9999) XCTAssertEqual(DashNetwork.testnet.defaultPort, 19999) XCTAssertEqual(DashNetwork.regtest.defaultPort, 19899) XCTAssertEqual(DashNetwork.devnet.defaultPort, 29999) } - + func testBalanceCalculations() { let balance = Balance( confirmed: 100_000_000, @@ -59,32 +59,32 @@ final class DashSDKTests: XCTestCase { instantLocked: 25_000_000, total: 150_000_000 ) - + XCTAssertEqual(balance.available, 125_000_000) XCTAssertEqual(balance.unconfirmed, 50_000_000) XCTAssertEqual(balance.formattedConfirmed, "1.00000000 DASH") XCTAssertEqual(balance.formattedPending, "0.50000000 DASH") } - + func testTransactionStatus() { let pendingTx = Transaction(txid: "test1", confirmations: 0) XCTAssertEqual(pendingTx.status, .pending) XCTAssertTrue(pendingTx.isPending) XCTAssertFalse(pendingTx.isConfirmed) - + let confirmingTx = Transaction(txid: "test2", confirmations: 3) XCTAssertEqual(confirmingTx.status, .confirming(3)) XCTAssertFalse(confirmingTx.isPending) XCTAssertTrue(confirmingTx.isConfirmed) - + let confirmedTx = Transaction(txid: "test3", confirmations: 6) XCTAssertEqual(confirmedTx.status, .confirmed) - + let instantTx = Transaction(txid: "test4", confirmations: 0, isInstantLocked: true) XCTAssertEqual(instantTx.status, .instantLocked) XCTAssertFalse(instantTx.isPending) } - + func testUTXOSpendability() { let unconfirmedUTXO = UTXO( outpoint: "txid:0", @@ -96,7 +96,7 @@ final class DashSDKTests: XCTestCase { confirmations: 0 ) XCTAssertFalse(unconfirmedUTXO.isSpendable) - + let confirmedUTXO = UTXO( outpoint: "txid:1", txid: "txid", @@ -107,7 +107,7 @@ final class DashSDKTests: XCTestCase { confirmations: 1 ) XCTAssertTrue(confirmedUTXO.isSpendable) - + let instantUTXO = UTXO( outpoint: "txid:2", txid: "txid", @@ -119,7 +119,7 @@ final class DashSDKTests: XCTestCase { isInstantLocked: true ) XCTAssertTrue(instantUTXO.isSpendable) - + let spentUTXO = UTXO( outpoint: "txid:3", txid: "txid", @@ -132,28 +132,28 @@ final class DashSDKTests: XCTestCase { ) XCTAssertFalse(spentUTXO.isSpendable) } - + // MARK: - Address Validation Tests - + func testAddressValidation() { // Mainnet addresses start with 'X' 
XCTAssertTrue(sdk.validateAddress("Xtesttesttest")) - + // Testnet addresses start with 'y' XCTAssertTrue(sdk.validateAddress("ytesttesttest")) - + // Invalid addresses XCTAssertFalse(sdk.validateAddress("1testtesttest")) XCTAssertFalse(sdk.validateAddress("btesttesttest")) } - + // MARK: - Error Tests - + func testErrorDescriptions() { let networkError = DashSDKError.networkError("Connection failed") XCTAssertEqual(networkError.errorDescription, "Network error: Connection failed") XCTAssertNotNil(networkError.recoverySuggestion) - + let insufficientFunds = DashSDKError.insufficientFunds( required: 200_000_000, available: 100_000_000 @@ -161,22 +161,22 @@ final class DashSDKTests: XCTestCase { XCTAssertTrue(insufficientFunds.errorDescription?.contains("2.0 DASH") ?? false) XCTAssertTrue(insufficientFunds.errorDescription?.contains("1.0 DASH") ?? false) } - + // MARK: - Async Tests - + func testConnectionLifecycle() async throws { XCTAssertFalse(sdk.isConnected) - + // Note: This would require a mock or test network // try await sdk.connect() // XCTAssertTrue(sdk.isConnected) - + // try await sdk.disconnect() // XCTAssertFalse(sdk.isConnected) } - + // MARK: - Storage Tests - + func testStorageStatistics() async throws { let stats = try sdk.getStorageStatistics() XCTAssertEqual(stats.watchedAddressCount, 0) @@ -188,17 +188,17 @@ final class DashSDKTests: XCTestCase { // MARK: - Mock Tests final class MockFFIBridgeTests: XCTestCase { - + func testStringConversion() { let testString = "Hello, Dash!" let cString = FFIBridge.fromString(testString) XCTAssertEqual(String(cString: cString), testString) } - + func testErrorConversion() { let error = FFIError(code: 3) XCTAssertEqual(error, .networkError) - + let unknownError = FFIError(code: 999) XCTAssertEqual(unknownError, .unknown) } @@ -208,32 +208,32 @@ final class MockFFIBridgeTests: XCTestCase { @available(iOS 17.0, *) final class StorageIntegrationTests: XCTestCase { - + var storage: StorageManager! 
- + override func setUp() async throws { storage = try await StorageManager() } - + override func tearDown() async throws { try storage.deleteAllData() storage = nil } - + func testWatchedAddressPersistence() async throws { let address = WatchedAddress( address: "XtestAddress123", label: "Test Wallet" ) - + try storage.saveWatchedAddress(address) - + let fetched = try storage.fetchWatchedAddresses() XCTAssertEqual(fetched.count, 1) XCTAssertEqual(fetched.first?.address, "XtestAddress123") XCTAssertEqual(fetched.first?.label, "Test Wallet") } - + func testTransactionPersistence() async throws { let tx = Transaction( txid: "abc123", @@ -241,15 +241,15 @@ final class StorageIntegrationTests: XCTestCase { amount: 100_000_000, confirmations: 6 ) - + try storage.saveTransaction(tx) - + let fetched = try storage.fetchTransaction(by: "abc123") XCTAssertNotNil(fetched) XCTAssertEqual(fetched?.amount, 100_000_000) XCTAssertEqual(fetched?.confirmations, 6) } - + func testUTXOManagement() async throws { let utxo1 = UTXO( outpoint: "tx1:0", @@ -259,7 +259,7 @@ final class StorageIntegrationTests: XCTestCase { script: Data(), value: 50_000_000 ) - + let utxo2 = UTXO( outpoint: "tx2:0", txid: "tx2", @@ -269,13 +269,13 @@ final class StorageIntegrationTests: XCTestCase { value: 75_000_000, isSpent: true ) - + try await storage.saveUTXOs([utxo1, utxo2]) - + let unspent = try storage.fetchUTXOs(includeSpent: false) XCTAssertEqual(unspent.count, 1) XCTAssertEqual(unspent.first?.value, 50_000_000) - + let all = try storage.fetchUTXOs(includeSpent: true) XCTAssertEqual(all.count, 2) } diff --git a/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift b/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift index de7b4e3df..a92c822f5 100644 --- a/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift +++ b/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift @@ -3,25 +3,25 @@ import XCTest import DashSPVFFI final class MempoolTests: XCTestCase { - + func testMempoolConfigCreation() { // Test disabled configuration let disabled = MempoolConfig.disabled XCTAssertFalse(disabled.enabled) - + // Test selective configuration let selective = MempoolConfig.selective(maxTransactions: 1000) XCTAssertTrue(selective.enabled) XCTAssertEqual(selective.strategy, .selective) XCTAssertEqual(selective.maxTransactions, 1000) XCTAssertEqual(selective.timeoutSeconds, 3600) - + // Test fetchAll configuration let fetchAll = MempoolConfig.fetchAll(maxTransactions: 5000) XCTAssertTrue(fetchAll.enabled) XCTAssertEqual(fetchAll.strategy, .fetchAll) XCTAssertEqual(fetchAll.maxTransactions, 5000) - + // Test custom configuration let custom = MempoolConfig( enabled: true, @@ -38,14 +38,14 @@ final class MempoolTests: XCTestCase { XCTAssertFalse(custom.fetchTransactions) XCTAssertTrue(custom.persistMempool) } - + func testMempoolBalanceCalculations() { let balance = MempoolBalance(pending: 1000000, pendingInstant: 500000) XCTAssertEqual(balance.pending, 1000000) XCTAssertEqual(balance.pendingInstant, 500000) XCTAssertEqual(balance.total, 1500000) } - + func testMempoolTransactionProperties() { let tx = MempoolTransaction( txid: "abc123", @@ -58,7 +58,7 @@ final class MempoolTests: XCTestCase { netAmount: -50000, size: 250 ) - + XCTAssertEqual(tx.txid, "abc123") XCTAssertEqual(tx.fee, 1000) XCTAssertEqual(tx.size, 250) @@ -67,10 +67,10 @@ final class MempoolTests: XCTestCase { XCTAssertTrue(tx.isOutgoing) XCTAssertFalse(tx.isInstantSend) } - + func testMempoolRemovalReasons() { let reasons: 
[MempoolRemovalReason] = [.expired, .replaced, .doubleSpent, .confirmed, .manual, .unknown] - + XCTAssertEqual(reasons[0].rawValue, 0) XCTAssertEqual(reasons[1].rawValue, 1) XCTAssertEqual(reasons[2].rawValue, 2) @@ -78,30 +78,30 @@ final class MempoolTests: XCTestCase { XCTAssertEqual(reasons[4].rawValue, 4) XCTAssertEqual(reasons[5].rawValue, 255) } - + func testSPVClientConfigurationWithMempool() async throws { let config = SPVClientConfiguration() config.network = .testnet config.mempoolConfig = .fetchAll(maxTransactions: 1000) - + XCTAssertEqual(config.network, .testnet) XCTAssertTrue(config.mempoolConfig.enabled) XCTAssertEqual(config.mempoolConfig.strategy, .fetchAll) XCTAssertEqual(config.mempoolConfig.maxTransactions, 1000) - + // Test FFI config creation includes mempool settings let ffiConfig = try config.createFFIConfig() defer { dash_spv_ffi_config_destroy(OpaquePointer(ffiConfig)) } - + XCTAssertTrue(dash_spv_ffi_config_get_mempool_tracking(OpaquePointer(ffiConfig))) XCTAssertEqual( dash_spv_ffi_config_get_mempool_strategy(OpaquePointer(ffiConfig)), FFIMempoolStrategy(rawValue: 0) // FetchAll ) } - + func testMempoolEventTypes() { // Test transaction added event let addedTx = MempoolTransaction( @@ -116,21 +116,21 @@ final class MempoolTests: XCTestCase { size: 200 ) let addedEvent = MempoolEvent.transactionAdded(addedTx) - + if case .transactionAdded(let tx) = addedEvent { XCTAssertEqual(tx.txid, "tx1") XCTAssertTrue(tx.isInstantSend) } else { XCTFail("Expected transactionAdded event") } - + // Test transaction confirmed event let confirmedEvent = MempoolEvent.transactionConfirmed( txid: "tx2", blockHeight: 12345, blockHash: "blockhash123" ) - + if case .transactionConfirmed(let txid, let height, let hash) = confirmedEvent { XCTAssertEqual(txid, "tx2") XCTAssertEqual(height, 12345) @@ -138,13 +138,13 @@ final class MempoolTests: XCTestCase { } else { XCTFail("Expected transactionConfirmed event") } - + // Test transaction removed event let removedEvent = MempoolEvent.transactionRemoved( txid: "tx3", reason: .expired ) - + if case .transactionRemoved(let txid, let reason) = removedEvent { XCTAssertEqual(txid, "tx3") XCTAssertEqual(reason, .expired) diff --git a/swift-dash-core-sdk/build-ios.sh b/swift-dash-core-sdk/build-ios.sh index 406c51374..6687cd88a 100755 --- a/swift-dash-core-sdk/build-ios.sh +++ b/swift-dash-core-sdk/build-ios.sh @@ -25,9 +25,9 @@ run_cargo_build() { local target=$1 local package=$2 local description=$3 - + echo -e "${GREEN}Building $description...${NC}" - + # Capture output and error local output if output=$(cargo build --release --target "$target" -p "$package" 2>&1); then diff --git a/swift-dash-core-sdk/build.sh b/swift-dash-core-sdk/build.sh index 87f8f5388..747330d1d 100755 --- a/swift-dash-core-sdk/build.sh +++ b/swift-dash-core-sdk/build.sh @@ -12,7 +12,7 @@ else echo "Building with Swift command line..." echo "Note: SwiftData models require Xcode for full functionality." echo "Command line builds will have limited SwiftData support." - + # First build the Rust FFI library if needed if [ ! -f "../target/release/libdash_spv_ffi.a" ]; then echo "Building Rust FFI library first..." 
@@ -20,7 +20,7 @@ else cargo build --release -p dash-spv-ffi cd swift-dash-core-sdk fi - + # Build the Swift package swift build fi diff --git a/test-utils/src/fixtures.rs b/test-utils/src/fixtures.rs index 512d8f7eb..03f16a7f9 100644 --- a/test-utils/src/fixtures.rs +++ b/test-utils/src/fixtures.rs @@ -29,7 +29,7 @@ pub fn mainnet_genesis_hash() -> BlockHash { BlockHash::from_slice(&reversed).unwrap() } -/// Get testnet genesis block hash +/// Get testnet genesis block hash pub fn testnet_genesis_hash() -> BlockHash { let bytes = decode(TESTNET_GENESIS_HASH).unwrap(); let mut reversed = [0u8; 32]; From 599e696554d36ea9d887ccd3f63dec7b17f595ad Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 01:03:53 +1000 Subject: [PATCH 04/14] Fix `end-of-file-fixer` checks --- CLAUDE.md | 2 +- LICENSE | 1 - TEST_SUMMARY.md | 2 +- TODOS.md | 2 +- UNIFIED_SDK.md | 2 +- contrib/test-rpc.sh | 1 - dash-network-ffi/Cargo.toml | 1 - dash-network-ffi/README.md | 2 +- dash-network-ffi/src/dash_network_ffiFFI.modulemap | 2 +- dash-network/Cargo.toml | 2 +- dash-network/README.md | 2 +- dash-spv-ffi/CLAUDE.md | 2 +- dash-spv-ffi/FFI_DOCS_README.md | 2 +- dash-spv-ffi/Makefile | 2 +- dash-spv-ffi/examples/basic_usage.c | 2 +- dash-spv-ffi/tests/README.md | 2 +- dash-spv-ffi/tests/c_tests/Makefile | 2 +- dash-spv-ffi/tests/c_tests/test_advanced.c | 2 +- dash-spv-ffi/tests/c_tests/test_basic.c | 2 +- dash-spv-ffi/tests/c_tests/test_configuration.c | 2 +- dash-spv-ffi/tests/c_tests/test_event_draining.c | 2 +- dash-spv-ffi/tests/c_tests/test_integration.c | 2 +- dash-spv-ffi/tests/integration/mod.rs | 2 +- dash-spv-ffi/tests/integration/test_cross_language.rs | 2 +- dash-spv-ffi/tests/performance/mod.rs | 2 +- dash-spv-ffi/tests/security/mod.rs | 2 +- dash-spv-ffi/tests/security/test_security.rs | 2 +- dash-spv/CLAUDE.md | 2 +- dash-spv/README.md | 2 +- dash-spv/docs/utxo_rollback.md | 2 +- dash-spv/run_integration_tests.md | 2 +- dash-spv/src/validation/test_summary.md | 2 +- dash-spv/tests/test_plan.md | 2 +- dash/src/sml/mod.rs | 1 - dash/src/sml/order_option.rs | 1 - docs/implementation-notes/BLOOM_FILTER_SPEC.md | 2 +- docs/implementation-notes/CHAINLOCK_IMPLEMENTATION.md | 2 +- docs/implementation-notes/CHECKPOINT_IMPLEMENTATION.md | 2 +- docs/implementation-notes/IMPLEMENTATION_STATUS.md | 2 +- docs/implementation-notes/MEMPOOL_IMPLEMENTATION_SUMMARY.md | 2 +- docs/implementation-notes/PEER_REPUTATION_SYSTEM.md | 2 +- docs/implementation-notes/REORG_INTEGRATION_STATUS.md | 2 +- docs/implementation-notes/SEQUENTIAL_SYNC_DESIGN.md | 2 +- docs/implementation-notes/SEQUENTIAL_SYNC_SUMMARY.md | 2 +- docs/implementation-notes/WALLET_SPV_INTEGRATION.md | 2 +- fuzz/README.md | 1 - fuzz/contrib/test.sh | 1 - fuzz/cycle.sh | 1 - fuzz/generate-files.sh | 1 - key-wallet-ffi/Cargo.toml | 2 +- key-wallet-ffi/IMPORT_WALLET_FFI.md | 2 +- key-wallet-ffi/Makefile | 2 +- key-wallet-ffi/README.md | 2 +- key-wallet-ffi/build-ios.sh | 2 +- key-wallet-ffi/examples/check_transaction.c | 2 +- key-wallet-ffi/generate_header.sh | 2 +- key-wallet-ffi/src/managed_wallet_tests.rs | 2 +- key-wallet-manager/Cargo.toml | 2 +- key-wallet-manager/README.md | 2 +- key-wallet-manager/SPV_WALLET_GUIDE.md | 2 +- key-wallet-manager/TODO.md | 2 +- key-wallet-manager/missing_tests.md | 2 +- key-wallet/BIP38_TESTS.md | 2 +- key-wallet/CI_TESTING.md | 2 +- key-wallet/CLAUDE.md | 2 +- key-wallet/IMPLEMENTATION_SUMMARY.md | 2 +- key-wallet/README.md | 2 +- key-wallet/src/missing_tests.md | 2 +- key-wallet/src/utxo_integration_summary.md | 
2 +- key-wallet/test_bip38.sh | 2 +- key-wallet/test_bip38_advanced.sh | 2 +- rpc-client/LICENSE | 1 - rpc-json/LICENSE | 1 - rpc-json/README.md | 2 +- swift-dash-core-sdk/.gitignore | 2 +- swift-dash-core-sdk/BUILD.md | 2 +- swift-dash-core-sdk/CLAUDE.md | 2 +- swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/CLAUDE.md | 2 +- .../Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift | 2 +- .../DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift | 2 +- .../Examples/DashHDWalletExample/DEMO_SUMMARY.md | 2 +- .../project.xcworkspace/xcshareddata/swiftpm/Package.resolved | 2 +- .../Assets.xcassets/AccentColor.colorset/Contents.json | 2 +- .../Assets.xcassets/AppIcon.appiconset/Contents.json | 2 +- .../DashHDWalletExample/Assets.xcassets/Contents.json | 2 +- .../DashHDWalletExample/DashHDWalletApp.swift | 2 +- .../DashHDWalletExample/Models/HDWalletModels.swift | 2 +- .../DashHDWalletExample/Services/HDWalletService.swift | 2 +- .../DashHDWalletExample/Services/WalletService.swift | 2 +- .../DashHDWalletExample/StandaloneModels.swift | 2 +- .../DashHDWalletExample/TestContentView.swift | 2 +- .../DashHDWalletExample/Utils/Clipboard.swift | 2 +- .../DashHDWalletExample/Utils/ModelContainerHelper.swift | 2 +- .../DashHDWalletExample/Utils/PlatformColor.swift | 2 +- .../DashHDWalletExample/Views/AccountDetailView.swift | 2 +- .../DashHDWalletExample/Views/ContentView.swift | 2 +- .../DashHDWalletExample/Views/CreateAccountView.swift | 2 +- .../DashHDWalletExample/Views/CreateWalletView.swift | 2 +- .../DashHDWalletExample/Views/EnhancedSyncProgressView.swift | 2 +- .../DashHDWalletExample/Views/ReceiveAddressView.swift | 2 +- .../DashHDWalletExample/Views/SendTransactionView.swift | 2 +- .../DashHDWalletExample/Views/SettingsView.swift | 2 +- .../DashHDWalletExample/Views/SyncProgressView.swift | 2 +- .../DashHDWalletExample/Views/WalletDetailView.swift | 2 +- .../DashHDWalletExample/Views/WatchStatusView.swift | 2 +- .../DashHDWalletExampleTests/DashHDWalletExampleTests.swift | 2 +- .../DashHDWalletExampleUITests/DashHDWalletExampleUITests.swift | 2 +- .../Examples/DashHDWalletExample/IOS_APP_SETUP_GUIDE.md | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/Local.xcconfig | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/README.md | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/XCODE_SETUP.md | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/build-phase.sh | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/build-spm.sh | 2 +- .../Examples/DashHDWalletExample/clean-simulator-data.sh | 2 +- .../Examples/DashHDWalletExample/dash_spv_ffi.pc | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh | 2 +- .../Examples/DashHDWalletExample/fix-spm-linking.sh | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/run-spm.sh | 2 +- .../Examples/DashHDWalletExample/select-library.sh | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/setup-env.sh | 2 +- swift-dash-core-sdk/Examples/DashHDWalletExample/setup-spm.sh | 2 +- .../Examples/DashHDWalletExample/test-link.swift | 2 +- .../Examples/DashWalletExample/ContentView.swift | 2 +- .../Examples/DashWalletExample/DashWalletApp.swift | 2 +- .../Examples/DashWalletExample/WalletViewModel.swift | 2 +- swift-dash-core-sdk/IMPLEMENTATION_PLAN.md | 2 +- swift-dash-core-sdk/INTEGRATION_NOTES.md | 2 +- swift-dash-core-sdk/Package.swift | 2 +- swift-dash-core-sdk/README.md | 2 +- swift-dash-core-sdk/Sources/DashSPVFFI/DashSPVFFI.swift | 2 +- 
swift-dash-core-sdk/Sources/DashSPVFFI/dummy.c | 2 +- .../Sources/DashSPVFFI/include/DashSPVFFIC.modulemap | 2 +- swift-dash-core-sdk/Sources/DashSPVFFI/include/module.modulemap | 2 +- .../Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift | 2 +- .../Sources/SwiftDashCoreSDK/Core/DashSDKError.swift | 2 +- .../Sources/SwiftDashCoreSDK/Core/FFITypes.swift | 2 +- .../Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift | 2 +- swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift | 2 +- .../Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift | 2 +- .../Sources/SwiftDashCoreSDK/Models/Balance.swift | 2 +- .../Sources/SwiftDashCoreSDK/Models/SPVStats.swift | 2 +- .../Sources/SwiftDashCoreSDK/Models/Transaction.swift | 2 +- swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift | 2 +- .../Sources/SwiftDashCoreSDK/Models/ValidationMode.swift | 2 +- .../Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift | 2 +- .../SwiftDashCoreSDK/Storage/PersistentWalletManager.swift | 2 +- .../Sources/SwiftDashCoreSDK/Storage/StorageManager.swift | 2 +- .../Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift | 2 +- .../Sources/SwiftDashCoreSDK/Types/WatchResult.swift | 2 +- .../Sources/SwiftDashCoreSDK/Utils/Extensions.swift | 2 +- .../SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift | 2 +- .../Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift | 2 +- .../Tests/SwiftDashCoreSDKTests/MempoolTests.swift | 2 +- swift-dash-core-sdk/sync-headers.sh | 2 +- test-utils/Cargo.toml | 2 +- test_checksum.rs | 2 +- test_smart_algo.sh | 2 +- 158 files changed, 147 insertions(+), 158 deletions(-) delete mode 100644 dash/src/sml/order_option.rs diff --git a/CLAUDE.md b/CLAUDE.md index c16f0806c..f28640825 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -201,4 +201,4 @@ The API is currently unstable (version 0.x.x). Breaking changes may occur in min - Cannot replicate exact consensus behavior of Dash Core - Not suitable for mining or consensus validation - FFI bindings have limited error propagation -- Some Dash Core RPC methods not yet implemented \ No newline at end of file +- Some Dash Core RPC methods not yet implemented diff --git a/LICENSE b/LICENSE index 6ca207ef0..0e259d42c 100644 --- a/LICENSE +++ b/LICENSE @@ -119,4 +119,3 @@ express Statement of Purpose. d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. - diff --git a/TEST_SUMMARY.md b/TEST_SUMMARY.md index 976f0331b..49b4289f1 100644 --- a/TEST_SUMMARY.md +++ b/TEST_SUMMARY.md @@ -109,4 +109,4 @@ I have successfully implemented comprehensive unit tests for several critical da ## Conclusion -The test enhancement effort has significantly improved test coverage for dash-spv, with 163 tests currently passing in critical modules. The bloom filter, validation, and chain modules now have comprehensive test suites that verify functionality, handle edge cases, and ensure thread safety. The remaining work involves fixing API mismatches in client and wallet tests and resolving integration test compilation issues. \ No newline at end of file +The test enhancement effort has significantly improved test coverage for dash-spv, with 163 tests currently passing in critical modules. The bloom filter, validation, and chain modules now have comprehensive test suites that verify functionality, handle edge cases, and ensure thread safety. 
The remaining work involves fixing API mismatches in client and wallet tests and resolving integration test compilation issues. diff --git a/TODOS.md b/TODOS.md index 574938a7e..187910662 100644 --- a/TODOS.md +++ b/TODOS.md @@ -58,4 +58,4 @@ The architecture assumes dashcore will eventually compile. If dashcore continues - Clean separation of concerns - No circular dependencies - Proper use of existing dashcore types -- Extensible design for future features \ No newline at end of file +- Extensible design for future features diff --git a/UNIFIED_SDK.md b/UNIFIED_SDK.md index 73227eb60..f66ab5ea4 100644 --- a/UNIFIED_SDK.md +++ b/UNIFIED_SDK.md @@ -87,4 +87,4 @@ For detailed technical information about the Unified SDK architecture: - iOS 17.0+ deployment target - Rust 1.70+ - Swift 5.9+ -- Xcode 15.0+ \ No newline at end of file +- Xcode 15.0+ diff --git a/contrib/test-rpc.sh b/contrib/test-rpc.sh index a5a27c6f9..58e93d540 100755 --- a/contrib/test-rpc.sh +++ b/contrib/test-rpc.sh @@ -81,4 +81,3 @@ else cargo test --verbose cargo build --verbose --examples fi - diff --git a/dash-network-ffi/Cargo.toml b/dash-network-ffi/Cargo.toml index 4cab02429..55140ed40 100644 --- a/dash-network-ffi/Cargo.toml +++ b/dash-network-ffi/Cargo.toml @@ -19,4 +19,3 @@ hex = "0.4" [lib] crate-type = ["cdylib", "staticlib"] name = "dash_network_ffi" - diff --git a/dash-network-ffi/README.md b/dash-network-ffi/README.md index c0a4adcca..d948bfc39 100644 --- a/dash-network-ffi/README.md +++ b/dash-network-ffi/README.md @@ -103,4 +103,4 @@ except dash_network_ffi.NetworkError.InvalidMagic: ## License -This project is licensed under the CC0 1.0 Universal license. \ No newline at end of file +This project is licensed under the CC0 1.0 Universal license. diff --git a/dash-network-ffi/src/dash_network_ffiFFI.modulemap b/dash-network-ffi/src/dash_network_ffiFFI.modulemap index 5cb09df73..3af2a90ee 100644 --- a/dash-network-ffi/src/dash_network_ffiFFI.modulemap +++ b/dash-network-ffi/src/dash_network_ffiFFI.modulemap @@ -4,4 +4,4 @@ module dash_network_ffiFFI { use "Darwin" use "_Builtin_stdbool" use "_Builtin_stdint" -} \ No newline at end of file +} diff --git a/dash-network/Cargo.toml b/dash-network/Cargo.toml index 800db300a..e295de185 100644 --- a/dash-network/Cargo.toml +++ b/dash-network/Cargo.toml @@ -27,4 +27,4 @@ bincode = ["dep:bincode", "dep:bincode_derive"] [lib] name = "dash_network" -path = "src/lib.rs" \ No newline at end of file +path = "src/lib.rs" diff --git a/dash-network/README.md b/dash-network/README.md index dc945abd7..ee2eb9979 100644 --- a/dash-network/README.md +++ b/dash-network/README.md @@ -68,4 +68,4 @@ Each network has unique magic bytes used in message headers: ## License -This project is licensed under the CC0 1.0 Universal license. \ No newline at end of file +This project is licensed under the CC0 1.0 Universal license. 
diff --git a/dash-spv-ffi/CLAUDE.md b/dash-spv-ffi/CLAUDE.md index 8427df5a7..979fa9507 100644 --- a/dash-spv-ffi/CLAUDE.md +++ b/dash-spv-ffi/CLAUDE.md @@ -142,4 +142,4 @@ Key dependencies from Cargo.toml: - `dash-spv` - Core SPV implementation (local path) - `dashcore` - Dash protocol types (local path) - `tokio` - Async runtime -- `cbindgen` - C header generation (build dependency) \ No newline at end of file +- `cbindgen` - C header generation (build dependency) diff --git a/dash-spv-ffi/FFI_DOCS_README.md b/dash-spv-ffi/FFI_DOCS_README.md index 058561b88..c7915f2c1 100644 --- a/dash-spv-ffi/FFI_DOCS_README.md +++ b/dash-spv-ffi/FFI_DOCS_README.md @@ -106,4 +106,4 @@ The documentation verification is integrated into the CI pipeline: - `scripts/generate_ffi_docs.py` - Python script that parses Rust files and generates documentation - `scripts/check_ffi_docs.sh` - Bash script to verify documentation is current - `.github/workflows/verify-ffi-docs.yml` - GitHub Action for CI verification -- `Makefile` - Convenient commands for documentation tasks \ No newline at end of file +- `Makefile` - Convenient commands for documentation tasks diff --git a/dash-spv-ffi/Makefile b/dash-spv-ffi/Makefile index 3c3fb4915..ed855d91e 100644 --- a/dash-spv-ffi/Makefile +++ b/dash-spv-ffi/Makefile @@ -57,4 +57,4 @@ help: @echo " make docs - Generate all documentation" @echo " make ios - Build for iOS platforms" @echo " make full - Full build with documentation" - @echo " make help - Show this help message" \ No newline at end of file + @echo " make help - Show this help message" diff --git a/dash-spv-ffi/examples/basic_usage.c b/dash-spv-ffi/examples/basic_usage.c index bc3d6e0fb..eab322c2e 100644 --- a/dash-spv-ffi/examples/basic_usage.c +++ b/dash-spv-ffi/examples/basic_usage.c @@ -39,4 +39,4 @@ int main() { dash_spv_ffi_config_destroy(config); return 0; -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/README.md b/dash-spv-ffi/tests/README.md index 0a153d89c..54bc12481 100644 --- a/dash-spv-ffi/tests/README.md +++ b/dash-spv-ffi/tests/README.md @@ -103,4 +103,4 @@ When adding new functionality to dash-spv-ffi: ## Known Limitations -Some tests may fail in environments without network access or when dash-spv services are unavailable. These tests are designed to handle such failures gracefully. \ No newline at end of file +Some tests may fail in environments without network access or when dash-spv services are unavailable. These tests are designed to handle such failures gracefully. 
diff --git a/dash-spv-ffi/tests/c_tests/Makefile b/dash-spv-ffi/tests/c_tests/Makefile index 1afadf309..eeafd0da5 100644 --- a/dash-spv-ffi/tests/c_tests/Makefile +++ b/dash-spv-ffi/tests/c_tests/Makefile @@ -68,4 +68,4 @@ endif # Full build: Rust library, header, then tests full: rust-lib header all -.PHONY: all test clean header rust-lib full \ No newline at end of file +.PHONY: all test clean header rust-lib full diff --git a/dash-spv-ffi/tests/c_tests/test_advanced.c b/dash-spv-ffi/tests/c_tests/test_advanced.c index 8d0162e78..1813c4c20 100644 --- a/dash-spv-ffi/tests/c_tests/test_advanced.c +++ b/dash-spv-ffi/tests/c_tests/test_advanced.c @@ -340,4 +340,4 @@ int main() { printf("All advanced tests passed!\n"); return 0; -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/c_tests/test_basic.c b/dash-spv-ffi/tests/c_tests/test_basic.c index a5a285224..ad451e8d4 100644 --- a/dash-spv-ffi/tests/c_tests/test_basic.c +++ b/dash-spv-ffi/tests/c_tests/test_basic.c @@ -301,4 +301,4 @@ int main() { printf("All tests passed!\n"); return 0; -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/c_tests/test_configuration.c b/dash-spv-ffi/tests/c_tests/test_configuration.c index 54c78a857..5535fd7f5 100644 --- a/dash-spv-ffi/tests/c_tests/test_configuration.c +++ b/dash-spv-ffi/tests/c_tests/test_configuration.c @@ -270,4 +270,4 @@ int main() { printf("\n=== All worker thread configuration tests passed! ===\n"); return 0; -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/c_tests/test_event_draining.c b/dash-spv-ffi/tests/c_tests/test_event_draining.c index 398610e8c..48e404fe3 100644 --- a/dash-spv-ffi/tests/c_tests/test_event_draining.c +++ b/dash-spv-ffi/tests/c_tests/test_event_draining.c @@ -150,4 +150,4 @@ int main() { printf("\n=== All event draining tests passed! 
===\n"); return 0; -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/c_tests/test_integration.c b/dash-spv-ffi/tests/c_tests/test_integration.c index f42c404f9..7b33cb8e1 100644 --- a/dash-spv-ffi/tests/c_tests/test_integration.c +++ b/dash-spv-ffi/tests/c_tests/test_integration.c @@ -297,4 +297,4 @@ int main() { printf("All integration tests completed!\n"); return 0; -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/integration/mod.rs b/dash-spv-ffi/tests/integration/mod.rs index 71e7ebef4..9b7e281f1 100644 --- a/dash-spv-ffi/tests/integration/mod.rs +++ b/dash-spv-ffi/tests/integration/mod.rs @@ -1,2 +1,2 @@ mod test_full_workflow; -mod test_cross_language; \ No newline at end of file +mod test_cross_language; diff --git a/dash-spv-ffi/tests/integration/test_cross_language.rs b/dash-spv-ffi/tests/integration/test_cross_language.rs index a6a21cf4d..d071fde8c 100644 --- a/dash-spv-ffi/tests/integration/test_cross_language.rs +++ b/dash-spv-ffi/tests/integration/test_cross_language.rs @@ -266,4 +266,4 @@ mod tests { // Verify usize matches pointer size (important for FFI) assert_eq!(std::mem::size_of::(), ptr_size); } -} \ No newline at end of file +} diff --git a/dash-spv-ffi/tests/performance/mod.rs b/dash-spv-ffi/tests/performance/mod.rs index 7b6a4db09..ff2272d79 100644 --- a/dash-spv-ffi/tests/performance/mod.rs +++ b/dash-spv-ffi/tests/performance/mod.rs @@ -1 +1 @@ -mod test_benchmarks; \ No newline at end of file +mod test_benchmarks; diff --git a/dash-spv-ffi/tests/security/mod.rs b/dash-spv-ffi/tests/security/mod.rs index 132aa139f..cac2b1ba0 100644 --- a/dash-spv-ffi/tests/security/mod.rs +++ b/dash-spv-ffi/tests/security/mod.rs @@ -1 +1 @@ -mod test_security; \ No newline at end of file +mod test_security; diff --git a/dash-spv-ffi/tests/security/test_security.rs b/dash-spv-ffi/tests/security/test_security.rs index db0755a8c..824a97c87 100644 --- a/dash-spv-ffi/tests/security/test_security.rs +++ b/dash-spv-ffi/tests/security/test_security.rs @@ -434,4 +434,4 @@ mod tests { dash_spv_ffi_config_destroy(config); } } -} \ No newline at end of file +} diff --git a/dash-spv/CLAUDE.md b/dash-spv/CLAUDE.md index 5a3c112e2..490ea7cae 100644 --- a/dash-spv/CLAUDE.md +++ b/dash-spv/CLAUDE.md @@ -231,4 +231,4 @@ This is a refactored SPV client extracted from a monolithic example: - ⚠️ Some wallet functionality still in development (see `PLAN.md`) - ⚠️ ChainLock/InstantLock signature validation has TODO items -The project transforms a 1,143-line monolithic example into a production-ready, testable library suitable for integration into wallets and other Dash applications. \ No newline at end of file +The project transforms a 1,143-line monolithic example into a production-ready, testable library suitable for integration into wallets and other Dash applications. diff --git a/dash-spv/README.md b/dash-spv/README.md index 2a59f5545..9c7f0f50f 100644 --- a/dash-spv/README.md +++ b/dash-spv/README.md @@ -136,4 +136,4 @@ See [docs/PEER_REPUTATION_SYSTEM.md](docs/PEER_REPUTATION_SYSTEM.md) for detaile 5. **Documentation**: Self-documenting API with comprehensive examples 6. **Performance**: Async design for better resource utilization -This refactoring transforms an example script into a production-ready library suitable for integration into wallets, explorers, and other Dash applications requiring SPV functionality. 
\ No newline at end of file +This refactoring transforms an example script into a production-ready library suitable for integration into wallets, explorers, and other Dash applications requiring SPV functionality. diff --git a/dash-spv/docs/utxo_rollback.md b/dash-spv/docs/utxo_rollback.md index fb8f964af..de8672f59 100644 --- a/dash-spv/docs/utxo_rollback.md +++ b/dash-spv/docs/utxo_rollback.md @@ -197,4 +197,4 @@ match wallet_state.rollback_to_height(height, &mut storage).await { 1. **State Validation**: Regular consistency checks prevent corruption 2. **Atomic Operations**: All state changes are atomic 3. **Rollback Limits**: Maximum reorg depth prevents deep rollbacks -4. **Chain Locks**: Integration with Dash chain locks for finality \ No newline at end of file +4. **Chain Locks**: Integration with Dash chain locks for finality diff --git a/dash-spv/run_integration_tests.md b/dash-spv/run_integration_tests.md index fc56c798d..0bbfd8a6a 100644 --- a/dash-spv/run_integration_tests.md +++ b/dash-spv/run_integration_tests.md @@ -189,4 +189,4 @@ The integration tests provide comprehensive coverage of: ✅ **Error Handling**: Network timeouts and connection recovery ✅ **Chain Continuity**: Real blockchain linkage and consistency checks -These tests prove the SPV client works correctly with the actual Dash network and can handle real-world data loads and network conditions. \ No newline at end of file +These tests prove the SPV client works correctly with the actual Dash network and can handle real-world data loads and network conditions. diff --git a/dash-spv/src/validation/test_summary.md b/dash-spv/src/validation/test_summary.md index 95bba92e6..172dcf930 100644 --- a/dash-spv/src/validation/test_summary.md +++ b/dash-spv/src/validation/test_summary.md @@ -72,4 +72,4 @@ Run specific test suites: cargo test -p dash-spv --lib headers_test cargo test -p dash-spv --lib headers_edge_test cargo test -p dash-spv --lib manager_test -``` \ No newline at end of file +``` diff --git a/dash-spv/tests/test_plan.md b/dash-spv/tests/test_plan.md index f15563787..a0a73e8a0 100644 --- a/dash-spv/tests/test_plan.md +++ b/dash-spv/tests/test_plan.md @@ -278,4 +278,4 @@ Each test category should achieve: - Some tests may require specific network conditions or test data - Performance tests should be run in isolation to get accurate measurements - Integration tests may take longer to execute due to network operations -- Consider using test containers or mock servers for more controlled testing \ No newline at end of file +- Consider using test containers or mock servers for more controlled testing diff --git a/dash/src/sml/mod.rs b/dash/src/sml/mod.rs index 2374fa944..1486a64f4 100644 --- a/dash/src/sml/mod.rs +++ b/dash/src/sml/mod.rs @@ -6,6 +6,5 @@ pub mod masternode_list; pub mod masternode_list_engine; pub mod masternode_list_entry; mod message_verification_error; -mod order_option; pub mod quorum_entry; pub mod quorum_validation_error; diff --git a/dash/src/sml/order_option.rs b/dash/src/sml/order_option.rs deleted file mode 100644 index 8b1378917..000000000 --- a/dash/src/sml/order_option.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/docs/implementation-notes/BLOOM_FILTER_SPEC.md b/docs/implementation-notes/BLOOM_FILTER_SPEC.md index f239551e4..5eae0fd0e 100644 --- a/docs/implementation-notes/BLOOM_FILTER_SPEC.md +++ b/docs/implementation-notes/BLOOM_FILTER_SPEC.md @@ -723,4 +723,4 @@ This specification provides a complete blueprint for implementing BIP37 bloom fi - Flexibility in privacy/efficiency 
trade-offs - Robust error handling and security -The modular design allows gradual rollout and easy testing of each component independently. \ No newline at end of file +The modular design allows gradual rollout and easy testing of each component independently. diff --git a/docs/implementation-notes/CHAINLOCK_IMPLEMENTATION.md b/docs/implementation-notes/CHAINLOCK_IMPLEMENTATION.md index 079c149d7..8513fa076 100644 --- a/docs/implementation-notes/CHAINLOCK_IMPLEMENTATION.md +++ b/docs/implementation-notes/CHAINLOCK_IMPLEMENTATION.md @@ -104,4 +104,4 @@ Chain lock enforcement can be configured when creating the ChainLockManager: - [DIP8: ChainLocks](https://github.com/dashpay/dips/blob/master/dip-0008.md) - [Dash Core Implementation](https://github.com/dashpay/dash/pull/2643) -- [Long Living Masternode Quorums](https://www.dash.org/blog/long-living-masternode-quorums/) \ No newline at end of file +- [Long Living Masternode Quorums](https://www.dash.org/blog/long-living-masternode-quorums/) diff --git a/docs/implementation-notes/CHECKPOINT_IMPLEMENTATION.md b/docs/implementation-notes/CHECKPOINT_IMPLEMENTATION.md index 98d8a045b..b13bd553e 100644 --- a/docs/implementation-notes/CHECKPOINT_IMPLEMENTATION.md +++ b/docs/implementation-notes/CHECKPOINT_IMPLEMENTATION.md @@ -69,4 +69,4 @@ let sync_manager = HeaderSyncManagerWithReorg::new(&config, reorg_config); - Add more checkpoints for recent blocks - Implement checkpoint-based fast sync - Add checkpoint consensus rules for different protocol versions -- Support for downloading checkpoint data from trusted sources \ No newline at end of file +- Support for downloading checkpoint data from trusted sources diff --git a/docs/implementation-notes/IMPLEMENTATION_STATUS.md b/docs/implementation-notes/IMPLEMENTATION_STATUS.md index e61918cad..6eadac036 100644 --- a/docs/implementation-notes/IMPLEMENTATION_STATUS.md +++ b/docs/implementation-notes/IMPLEMENTATION_STATUS.md @@ -138,4 +138,4 @@ pub struct SyncManager { ## Conclusion -While significant progress has been made with reorg handling and checkpoints, **dash-spv is NOT production-ready**. The implemented features are not fully integrated, and critical security features like chain locks are missing. The library remains vulnerable to several attack vectors that the iOS implementation protects against. \ No newline at end of file +While significant progress has been made with reorg handling and checkpoints, **dash-spv is NOT production-ready**. The implemented features are not fully integrated, and critical security features like chain locks are missing. The library remains vulnerable to several attack vectors that the iOS implementation protects against. diff --git a/docs/implementation-notes/MEMPOOL_IMPLEMENTATION_SUMMARY.md b/docs/implementation-notes/MEMPOOL_IMPLEMENTATION_SUMMARY.md index 05e45e782..77daadc75 100644 --- a/docs/implementation-notes/MEMPOOL_IMPLEMENTATION_SUMMARY.md +++ b/docs/implementation-notes/MEMPOOL_IMPLEMENTATION_SUMMARY.md @@ -146,4 +146,4 @@ Existing users need no changes - mempool tracking is opt-in. 
To enable: - Selective strategy reveals minimal information - Bloom filters have known privacy weaknesses - FetchAll strategy reveals interest in all transactions -- No private keys or sensitive data in mempool storage \ No newline at end of file +- No private keys or sensitive data in mempool storage diff --git a/docs/implementation-notes/PEER_REPUTATION_SYSTEM.md b/docs/implementation-notes/PEER_REPUTATION_SYSTEM.md index dd41a4a02..c149a8c6d 100644 --- a/docs/implementation-notes/PEER_REPUTATION_SYSTEM.md +++ b/docs/implementation-notes/PEER_REPUTATION_SYSTEM.md @@ -242,4 +242,4 @@ The reputation system logs important events: Enable detailed logging with: ```bash RUST_LOG=dash_spv::network::reputation=debug cargo run -``` \ No newline at end of file +``` diff --git a/docs/implementation-notes/REORG_INTEGRATION_STATUS.md b/docs/implementation-notes/REORG_INTEGRATION_STATUS.md index 4004029f5..0ce3b6c81 100644 --- a/docs/implementation-notes/REORG_INTEGRATION_STATUS.md +++ b/docs/implementation-notes/REORG_INTEGRATION_STATUS.md @@ -62,4 +62,4 @@ While reorg handling and checkpoints are now fully integrated, several critical 3. **Peer Reputation** - No protection against malicious peers 4. **UTXO Rollback** - Wallet state not updated during reorgs -The library is now significantly more secure against reorganization attacks, but still requires the remaining features for production use. \ No newline at end of file +The library is now significantly more secure against reorganization attacks, but still requires the remaining features for production use. diff --git a/docs/implementation-notes/SEQUENTIAL_SYNC_DESIGN.md b/docs/implementation-notes/SEQUENTIAL_SYNC_DESIGN.md index 38acb9e6c..deb39fb35 100644 --- a/docs/implementation-notes/SEQUENTIAL_SYNC_DESIGN.md +++ b/docs/implementation-notes/SEQUENTIAL_SYNC_DESIGN.md @@ -437,4 +437,4 @@ async fn test_headers_must_complete_before_cfheaders() { 2. **Reliability**: No race conditions or dependency issues 3. **Debuggability**: Phase transitions clearly logged 4. **Performance**: Better request batching within phases -5. **Maintainability**: Easier to reason about and extend \ No newline at end of file +5. **Maintainability**: Easier to reason about and extend diff --git a/docs/implementation-notes/SEQUENTIAL_SYNC_SUMMARY.md b/docs/implementation-notes/SEQUENTIAL_SYNC_SUMMARY.md index cbd11a1b2..fb76634c4 100644 --- a/docs/implementation-notes/SEQUENTIAL_SYNC_SUMMARY.md +++ b/docs/implementation-notes/SEQUENTIAL_SYNC_SUMMARY.md @@ -177,4 +177,4 @@ println!("Current phase: {}", progress.current_phase); 4. **Optimization**: Fine-tune batch sizes and timeouts per phase 5. **Documentation**: Update API docs and examples -The sequential sync implementation provides a solid foundation for reliable, predictable synchronization in dash-spv. \ No newline at end of file +The sequential sync implementation provides a solid foundation for reliable, predictable synchronization in dash-spv. diff --git a/docs/implementation-notes/WALLET_SPV_INTEGRATION.md b/docs/implementation-notes/WALLET_SPV_INTEGRATION.md index b1b10b4a6..14f85cb2a 100644 --- a/docs/implementation-notes/WALLET_SPV_INTEGRATION.md +++ b/docs/implementation-notes/WALLET_SPV_INTEGRATION.md @@ -75,4 +75,4 @@ To test the integration: 2. Verify it's watched via SPV client logs 3. Send funds to the address 4. Verify balance updates are received -5. Restart app and verify addresses are re-watched \ No newline at end of file +5. 
Restart app and verify addresses are re-watched diff --git a/fuzz/README.md b/fuzz/README.md index 0da44365d..47d4d2581 100644 --- a/fuzz/README.md +++ b/fuzz/README.md @@ -90,4 +90,3 @@ hex output into the call to `extend_vec_from_hex`. Then run the test with It is important to add the `cfg=fuzzing` flag, which tells rustc to compile the library as though it were running a fuzztest. In particular, this will disable or weaken all the cryptography. - diff --git a/fuzz/contrib/test.sh b/fuzz/contrib/test.sh index fd34b23cb..edce7845b 100755 --- a/fuzz/contrib/test.sh +++ b/fuzz/contrib/test.sh @@ -41,4 +41,3 @@ if [ "$DO_ASAN" = true ]; then RUSTFLAGS='-Zsanitizer=memory -Zsanitizer-memory-track-origins -Cforce-frame-pointers=yes' \ cargo test --lib --no-default-features --features="$FEATURES" -Zbuild-std --target x86_64-unknown-linux-gnu fi - diff --git a/fuzz/cycle.sh b/fuzz/cycle.sh index 0b59827a1..294f32b0d 100755 --- a/fuzz/cycle.sh +++ b/fuzz/cycle.sh @@ -22,4 +22,3 @@ do HFUZZ_RUN_ARGS="-i hfuzz_workspace/$targetName/input/ -P -M" chrt -i 0 cargo hfuzz run "$targetName" done done - diff --git a/fuzz/generate-files.sh b/fuzz/generate-files.sh index dbd2498dc..f67b565fd 100755 --- a/fuzz/generate-files.sh +++ b/fuzz/generate-files.sh @@ -97,4 +97,3 @@ $(for name in $(listTargetNames); do echo "$name,"; done) - run: find executed_* -type f -exec cat {} + | sort > executed - run: source ./fuzz/fuzz-util.sh && listTargetNames | sort | diff - executed EOF - diff --git a/key-wallet-ffi/Cargo.toml b/key-wallet-ffi/Cargo.toml index 9a99bdf38..5148da999 100644 --- a/key-wallet-ffi/Cargo.toml +++ b/key-wallet-ffi/Cargo.toml @@ -34,4 +34,4 @@ cbindgen = "0.29" [dev-dependencies] tempfile = "3.0" -hex = "0.4" \ No newline at end of file +hex = "0.4" diff --git a/key-wallet-ffi/IMPORT_WALLET_FFI.md b/key-wallet-ffi/IMPORT_WALLET_FFI.md index c3fa16b11..5f472080c 100644 --- a/key-wallet-ffi/IMPORT_WALLET_FFI.md +++ b/key-wallet-ffi/IMPORT_WALLET_FFI.md @@ -101,4 +101,4 @@ The wallet bytes must be in bincode format (version 2.0.0-rc.3). The serializati ## Thread Safety -The wallet manager uses internal locking, so this function is thread-safe with respect to other wallet manager operations on the same instance. \ No newline at end of file +The wallet manager uses internal locking, so this function is thread-safe with respect to other wallet manager operations on the same instance. diff --git a/key-wallet-ffi/Makefile b/key-wallet-ffi/Makefile index 112b4c298..c756b8171 100644 --- a/key-wallet-ffi/Makefile +++ b/key-wallet-ffi/Makefile @@ -55,4 +55,4 @@ help: @echo " make docs - Generate all documentation" @echo " make ios - Build for iOS platforms" @echo " make full - Full build with documentation" - @echo " make help - Show this help message" \ No newline at end of file + @echo " make help - Show this help message" diff --git a/key-wallet-ffi/README.md b/key-wallet-ffi/README.md index 45c90230a..f75251245 100644 --- a/key-wallet-ffi/README.md +++ b/key-wallet-ffi/README.md @@ -143,4 +143,4 @@ All exposed types are `Send + Sync` and wrapped in `Arc` for thread-safe referen ## License -This project is licensed under the CC0 1.0 Universal license. \ No newline at end of file +This project is licensed under the CC0 1.0 Universal license. 
diff --git a/key-wallet-ffi/build-ios.sh b/key-wallet-ffi/build-ios.sh index d61556714..c296a5c75 100755 --- a/key-wallet-ffi/build-ios.sh +++ b/key-wallet-ffi/build-ios.sh @@ -30,4 +30,4 @@ cp target/aarch64-apple-ios/release/libkey_wallet_ffi.a target/universal/release echo "Build complete!" echo "Libraries available at:" echo " - Device: target/universal/release/libkey_wallet_ffi_device.a" -echo " - Simulator: target/universal/release/libkey_wallet_ffi_sim.a" \ No newline at end of file +echo " - Simulator: target/universal/release/libkey_wallet_ffi_sim.a" diff --git a/key-wallet-ffi/examples/check_transaction.c b/key-wallet-ffi/examples/check_transaction.c index 3bc2c2a55..44b77a4f6 100644 --- a/key-wallet-ffi/examples/check_transaction.c +++ b/key-wallet-ffi/examples/check_transaction.c @@ -128,4 +128,4 @@ int main() { wallet_free(wallet); return 0; -} \ No newline at end of file +} diff --git a/key-wallet-ffi/generate_header.sh b/key-wallet-ffi/generate_header.sh index 8ffaff698..a3ea6a365 100755 --- a/key-wallet-ffi/generate_header.sh +++ b/key-wallet-ffi/generate_header.sh @@ -64,4 +64,4 @@ else rm -f /tmp/test_header.c fi -echo -e "${GREEN}Done!${NC}" \ No newline at end of file +echo -e "${GREEN}Done!${NC}" diff --git a/key-wallet-ffi/src/managed_wallet_tests.rs b/key-wallet-ffi/src/managed_wallet_tests.rs index 6aaf87117..d027f15ac 100644 --- a/key-wallet-ffi/src/managed_wallet_tests.rs +++ b/key-wallet-ffi/src/managed_wallet_tests.rs @@ -436,4 +436,4 @@ mod tests { unsafe {wallet::wallet_free(wallet);} } } -} \ No newline at end of file +} diff --git a/key-wallet-manager/Cargo.toml b/key-wallet-manager/Cargo.toml index 1293fed1f..6c47aacb2 100644 --- a/key-wallet-manager/Cargo.toml +++ b/key-wallet-manager/Cargo.toml @@ -31,4 +31,4 @@ serde_json = "1.0" tokio = { version = "1.32", features = ["full"] } [lints.rust] -unexpected_cfgs = { level = "allow", check-cfg = ['cfg(bench)', 'cfg(fuzzing)'] } \ No newline at end of file +unexpected_cfgs = { level = "allow", check-cfg = ['cfg(bench)', 'cfg(fuzzing)'] } diff --git a/key-wallet-manager/README.md b/key-wallet-manager/README.md index 431c58afe..94f5a3e67 100644 --- a/key-wallet-manager/README.md +++ b/key-wallet-manager/README.md @@ -475,4 +475,4 @@ This project is licensed under CC0-1.0 - see the [LICENSE](../LICENSE) file for --- -Built with ❤️ for the Dash ecosystem \ No newline at end of file +Built with ❤️ for the Dash ecosystem diff --git a/key-wallet-manager/SPV_WALLET_GUIDE.md b/key-wallet-manager/SPV_WALLET_GUIDE.md index 3165bd859..ebe5eddce 100644 --- a/key-wallet-manager/SPV_WALLET_GUIDE.md +++ b/key-wallet-manager/SPV_WALLET_GUIDE.md @@ -228,4 +228,4 @@ cargo test -p key-wallet-manager - [BIP 157: Client Side Block Filtering](https://github.com/bitcoin/bips/blob/master/bip-0157.mediawiki) - [BIP 158: Compact Block Filters](https://github.com/bitcoin/bips/blob/master/bip-0158.mediawiki) -- [Neutrino Protocol](https://github.com/lightninglabs/neutrino) \ No newline at end of file +- [Neutrino Protocol](https://github.com/lightninglabs/neutrino) diff --git a/key-wallet-manager/TODO.md b/key-wallet-manager/TODO.md index 95dd96287..ad96c2d0b 100644 --- a/key-wallet-manager/TODO.md +++ b/key-wallet-manager/TODO.md @@ -185,4 +185,4 @@ struct AccountManager { - The enhanced_wallet_manager partially reimplements functionality to work around the ManagedAccount issues - The filter_client is complete but needs network integration -- Consider whether to maintain both wallet_manager and enhanced_wallet_manager or merge them \ No 
newline at end of file +- Consider whether to maintain both wallet_manager and enhanced_wallet_manager or merge them diff --git a/key-wallet-manager/missing_tests.md b/key-wallet-manager/missing_tests.md index 171157a2c..0228d1db8 100644 --- a/key-wallet-manager/missing_tests.md +++ b/key-wallet-manager/missing_tests.md @@ -148,4 +148,4 @@ 1. **High Priority**: Multi-wallet management, transaction building, UTXO management 2. **Medium Priority**: Coin selection, fee calculation, watch-only wallets -3. **Low Priority**: Performance tests, edge cases, persistence tests \ No newline at end of file +3. **Low Priority**: Performance tests, edge cases, persistence tests diff --git a/key-wallet/BIP38_TESTS.md b/key-wallet/BIP38_TESTS.md index 545087879..81710afe0 100644 --- a/key-wallet/BIP38_TESTS.md +++ b/key-wallet/BIP38_TESTS.md @@ -126,4 +126,4 @@ fn test_new_bip38_feature() { } ``` -This ensures they don't slow down regular test runs while remaining available for comprehensive testing. \ No newline at end of file +This ensures they don't slow down regular test runs while remaining available for comprehensive testing. diff --git a/key-wallet/CI_TESTING.md b/key-wallet/CI_TESTING.md index adb3eabf3..745b0a392 100644 --- a/key-wallet/CI_TESTING.md +++ b/key-wallet/CI_TESTING.md @@ -42,4 +42,4 @@ cargo test -p key-wallet To simulate CI and skip BIP38 tests locally: ```bash RUSTFLAGS="--cfg ci" cargo test -p key-wallet -``` \ No newline at end of file +``` diff --git a/key-wallet/CLAUDE.md b/key-wallet/CLAUDE.md index 627b17a9a..b46171466 100644 --- a/key-wallet/CLAUDE.md +++ b/key-wallet/CLAUDE.md @@ -432,4 +432,4 @@ The crate uses a custom `Error` type with specific variants: - Compatible with Dash Core: 0.18.0 - 0.21.0 - Follows semantic versioning (currently 0.x.x = unstable API) -Remember: This crate is security-critical. Always prioritize correctness over performance, and never compromise on key material safety. \ No newline at end of file +Remember: This crate is security-critical. Always prioritize correctness over performance, and never compromise on key material safety. diff --git a/key-wallet/IMPLEMENTATION_SUMMARY.md b/key-wallet/IMPLEMENTATION_SUMMARY.md index 14907b391..5f7d86278 100644 --- a/key-wallet/IMPLEMENTATION_SUMMARY.md +++ b/key-wallet/IMPLEMENTATION_SUMMARY.md @@ -201,4 +201,4 @@ This implementation follows Dash Core licensing (CC0-1.0). ## Status -The key-wallet library is now feature-complete for basic HD wallet functionality with comprehensive account management, address generation, gap limit tracking, and transaction creation. All modules compile successfully and include unit tests. \ No newline at end of file +The key-wallet library is now feature-complete for basic HD wallet functionality with comprehensive account management, address generation, gap limit tracking, and transaction creation. All modules compile successfully and include unit tests. diff --git a/key-wallet/README.md b/key-wallet/README.md index c07545567..dd774d383 100644 --- a/key-wallet/README.md +++ b/key-wallet/README.md @@ -265,4 +265,4 @@ Contributions are welcome! Please ensure: ## License -This project is licensed under the CC0 1.0 Universal license. \ No newline at end of file +This project is licensed under the CC0 1.0 Universal license. 
diff --git a/key-wallet/src/missing_tests.md b/key-wallet/src/missing_tests.md index f71d5d043..402922b67 100644 --- a/key-wallet/src/missing_tests.md +++ b/key-wallet/src/missing_tests.md @@ -134,4 +134,4 @@ High-level tests involving: - Multi-wallet operations - Balance tracking -Have been moved to `key-wallet-manager/missing_tests.md` \ No newline at end of file +Have been moved to `key-wallet-manager/missing_tests.md` diff --git a/key-wallet/src/utxo_integration_summary.md b/key-wallet/src/utxo_integration_summary.md index b6947117e..702079213 100644 --- a/key-wallet/src/utxo_integration_summary.md +++ b/key-wallet/src/utxo_integration_summary.md @@ -149,4 +149,4 @@ use dash_spv::wallet::UTXORollbackManager; 2. Fee calculation accuracy 3. Change output generation 4. Transaction signing -5. Edge cases (dust outputs, insufficient funds) \ No newline at end of file +5. Edge cases (dust outputs, insufficient funds) diff --git a/key-wallet/test_bip38.sh b/key-wallet/test_bip38.sh index c3e0eaab3..597e808e7 100755 --- a/key-wallet/test_bip38.sh +++ b/key-wallet/test_bip38.sh @@ -74,4 +74,4 @@ if [ "$ALL_PASSED" = true ]; then else echo -e "${RED}Some BIP38 tests failed. Please review the output above.${NC}" exit 1 -fi \ No newline at end of file +fi diff --git a/key-wallet/test_bip38_advanced.sh b/key-wallet/test_bip38_advanced.sh index 580306c8e..c62fc10d9 100755 --- a/key-wallet/test_bip38_advanced.sh +++ b/key-wallet/test_bip38_advanced.sh @@ -252,4 +252,4 @@ if [ $FAILED_TESTS -eq 0 ]; then else echo -e "${RED}Some tests failed. Please review the output above.${NC}" exit 1 -fi \ No newline at end of file +fi diff --git a/rpc-client/LICENSE b/rpc-client/LICENSE index 6ca207ef0..0e259d42c 100644 --- a/rpc-client/LICENSE +++ b/rpc-client/LICENSE @@ -119,4 +119,3 @@ express Statement of Purpose. d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. - diff --git a/rpc-json/LICENSE b/rpc-json/LICENSE index 6ca207ef0..0e259d42c 100644 --- a/rpc-json/LICENSE +++ b/rpc-json/LICENSE @@ -119,4 +119,3 @@ express Statement of Purpose. d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. - diff --git a/rpc-json/README.md b/rpc-json/README.md index 1a4cbd3da..81222b064 100644 --- a/rpc-json/README.md +++ b/rpc-json/README.md @@ -5,4 +5,4 @@ A collection of JSON-enabled data types used in the `dashcore-rpc` crate. # License -All code is licensed using the CC0 license, as per the LICENSE file. \ No newline at end of file +All code is licensed using the CC0 license, as per the LICENSE file. 
diff --git a/swift-dash-core-sdk/.gitignore b/swift-dash-core-sdk/.gitignore index 1ffbcfc41..0babbeade 100644 --- a/swift-dash-core-sdk/.gitignore +++ b/swift-dash-core-sdk/.gitignore @@ -97,4 +97,4 @@ iOSInjectionProject/ *.so # Generated headers (if not checked in) -# dash_spv_ffi.h \ No newline at end of file +# dash_spv_ffi.h diff --git a/swift-dash-core-sdk/BUILD.md b/swift-dash-core-sdk/BUILD.md index 11d1199a7..1a5d43738 100644 --- a/swift-dash-core-sdk/BUILD.md +++ b/swift-dash-core-sdk/BUILD.md @@ -224,4 +224,4 @@ For automated builds: cd swift-dash-core-sdk swift build swift test -``` \ No newline at end of file +``` diff --git a/swift-dash-core-sdk/CLAUDE.md b/swift-dash-core-sdk/CLAUDE.md index bf8cf7b64..4113528f8 100644 --- a/swift-dash-core-sdk/CLAUDE.md +++ b/swift-dash-core-sdk/CLAUDE.md @@ -185,4 +185,4 @@ This SDK is part of the larger rust-dashcore project: - Depends on `dash-spv-ffi` for core functionality - Uses `key-wallet-ffi` for HD wallet features - Follows same versioning scheme -- Shares git history and CI/CD pipeline \ No newline at end of file +- Shares git history and CI/CD pipeline diff --git a/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift b/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift index 7c51bc0f6..60da3e8cb 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletApp_Template.swift @@ -44,4 +44,4 @@ struct DashHDWalletApp: App { } } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLAUDE.md b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLAUDE.md index fc590e01e..a9bd2c6ea 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLAUDE.md +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLAUDE.md @@ -171,4 +171,4 @@ Default networks configured in `SPVClientConfiguration`: - Testnet: For development (default) - Devnet/Regtest: Local testing -Peers are hardcoded in configuration - no DNS seeds in example app. \ No newline at end of file +Peers are hardcoded in configuration - no DNS seeds in example app. 
diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift index ed5beef36..878bce79e 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/CLIDemo.swift @@ -156,4 +156,4 @@ extension String { static func * (left: String, right: Int) -> String { return String(repeating: left, count: right) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift index 2e6aa8e68..855ec46f1 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/CLIDemos/SimpleHDWalletDemo.swift @@ -282,4 +282,4 @@ struct DashHDWalletDemoApp: App { } // Run the app -DashHDWalletDemoApp.main() \ No newline at end of file +DashHDWalletDemoApp.main() diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DEMO_SUMMARY.md b/swift-dash-core-sdk/Examples/DashHDWalletExample/DEMO_SUMMARY.md index d0ca0b7f9..6eae6f766 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DEMO_SUMMARY.md +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DEMO_SUMMARY.md @@ -126,4 +126,4 @@ The full app requires: 3. **User Experience**: Intuitive flow for wallet creation, sync, and transactions 4. **Extensibility**: Easy to add features like hardware wallet support, multi-sig, etc. -The example provides a solid foundation for building a production Dash wallet application with HD wallet support, demonstrating all core features requested including multiple wallets, BIP44 accounts, sync progress tracking, and a complete user interface. \ No newline at end of file +The example provides a solid foundation for building a production Dash wallet application with HD wallet support, demonstrating all core features requested including multiple wallets, BIP44 accounts, sync progress tracking, and a complete user interface. 
diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index d8a170f16..25d93f220 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -2,4 +2,4 @@ "pins" : [ ], "version" : 2 -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AccentColor.colorset/Contents.json b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AccentColor.colorset/Contents.json index ee7e3ca03..eb8789700 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AccentColor.colorset/Contents.json +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AccentColor.colorset/Contents.json @@ -8,4 +8,4 @@ "author" : "xcode", "version" : 1 } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AppIcon.appiconset/Contents.json b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AppIcon.appiconset/Contents.json index dc70b5401..13613e3ee 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AppIcon.appiconset/Contents.json +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -10,4 +10,4 @@ "author" : "xcode", "version" : 1 } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/Contents.json b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/Contents.json index 4aa7c5350..73c00596a 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/Contents.json +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Assets.xcassets/Contents.json @@ -3,4 +3,4 @@ "author" : "xcode", "version" : 1 } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift index 64abf9b23..6f58b514f 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/DashHDWalletApp.swift @@ -38,4 +38,4 @@ struct DashHDWalletApp: App { .windowResizability(.contentSize) #endif } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift index d38624e24..313b42e16 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Models/HDWalletModels.swift @@ -226,4 +226,4 @@ final class SyncState { self.estimatedCompletion = 
Date().addingTimeInterval(eta) } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift index 3e78e0423..bdcca09ac 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/HDWalletService.swift @@ -428,4 +428,4 @@ class KeyWalletBridge { return nil } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift index 38407ec9c..76b8569c6 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Services/WalletService.swift @@ -1125,4 +1125,4 @@ enum WalletError: LocalizedError { return "Failed to decrypt wallet" } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift index d89090997..f3b74185a 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/StandaloneModels.swift @@ -31,4 +31,4 @@ public enum BIP44 { } // Note: This helper requires DashNetwork from SwiftDashCoreSDK -// Make sure to import SwiftDashCoreSDK where this is used \ No newline at end of file +// Make sure to import SwiftDashCoreSDK where this is used diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift index d843cd9bf..efa8f375d 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/TestContentView.swift @@ -15,4 +15,4 @@ struct TestContentView: View { } .padding() } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift index 1f49e7baa..a9e1fe077 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/Clipboard.swift @@ -52,4 +52,4 @@ struct CopyButton: View { .buttonStyle(.bordered) #endif } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift index 36cd43ca3..06ef7d79a 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/ModelContainerHelper.swift @@ -206,4 +206,4 @@ private struct MigrationWallet: Codable { let encryptedSeed: Data let seedHash: String let createdAt: Date -} \ No 
newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift index 7377bf713..c9e6ebaa3 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Utils/PlatformColor.swift @@ -86,4 +86,4 @@ struct PlatformColor { return Color(NSColor.windowBackgroundColor) #endif } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift index 8de19f6c6..70f34b687 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/AccountDetailView.swift @@ -554,4 +554,4 @@ struct MempoolStatusView: View { .background(Color.purple.opacity(0.1)) .cornerRadius(8) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift index 6072ca5c9..cd837cf3c 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ContentView.swift @@ -253,4 +253,4 @@ struct EmptyWalletView: View { } .frame(maxWidth: .infinity, maxHeight: .infinity) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift index c5d20ab6b..2e581ee5d 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateAccountView.swift @@ -122,4 +122,4 @@ struct CreateAccountView: View { isCreating = false } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift index f4da70a54..9bed04d02 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/CreateWalletView.swift @@ -456,4 +456,4 @@ struct ImportWalletView: View { isImporting = false } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift index 2bcb62a4e..727017a4e 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/EnhancedSyncProgressView.swift @@ -446,4 +446,4 @@ struct EnhancedSyncProgressView_Previews: PreviewProvider { EnhancedSyncProgressView() .environmentObject(WalletService.shared) } -} \ No newline at 
end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift index 91988f8fc..9d66ea8e2 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/ReceiveAddressView.swift @@ -193,4 +193,4 @@ struct QRCodeView: View { #endif } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift index d29c2c2ff..c8f1a13d7 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SendTransactionView.swift @@ -252,4 +252,4 @@ struct SendTransactionView: View { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift index c86944f0a..3c05a3577 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SettingsView.swift @@ -107,4 +107,4 @@ struct SettingsView: View { SwiftDashCoreSDK.WatchedAddress.self, SyncState.self ]) -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift index 313a35e67..ad1f26030 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/SyncProgressView.swift @@ -278,4 +278,4 @@ struct StatItemView: View { .fontWeight(.medium) } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift index 3d370c7bf..053e23bd3 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WalletDetailView.swift @@ -360,4 +360,4 @@ struct ConnectionStatusView: View { return "Disconnected" } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift index 252c86766..e42053d76 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExample/Views/WatchStatusView.swift @@ -96,4 +96,4 @@ struct WatchErrorsView: View { ) } .padding() -} \ No newline at end of file +} diff --git 
a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleTests/DashHDWalletExampleTests.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleTests/DashHDWalletExampleTests.swift index dc31f00f9..9d7731244 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleTests/DashHDWalletExampleTests.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleTests/DashHDWalletExampleTests.swift @@ -6,4 +6,4 @@ final class DashHDWalletExampleTests: XCTestCase { // This is an example of a functional test case. XCTAssertTrue(true) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleUITests/DashHDWalletExampleUITests.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleUITests/DashHDWalletExampleUITests.swift index 480425720..bf2408409 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleUITests/DashHDWalletExampleUITests.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/DashHDWalletExampleUITests/DashHDWalletExampleUITests.swift @@ -9,4 +9,4 @@ final class DashHDWalletExampleUITests: XCTestCase { } } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/IOS_APP_SETUP_GUIDE.md b/swift-dash-core-sdk/Examples/DashHDWalletExample/IOS_APP_SETUP_GUIDE.md index 1141f45bc..d0f8c0987 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/IOS_APP_SETUP_GUIDE.md +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/IOS_APP_SETUP_GUIDE.md @@ -333,4 +333,4 @@ If you encounter issues not covered here: 1. Check the build logs in Xcode's Report Navigator 2. Verify all prerequisites are installed correctly 3. Ensure FFI libraries are built for the correct target -4. Check the main project's CLAUDE.md for additional context \ No newline at end of file +4. Check the main project's CLAUDE.md for additional context diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/Local.xcconfig b/swift-dash-core-sdk/Examples/DashHDWalletExample/Local.xcconfig index bb7c7b216..3f0d3d8f0 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/Local.xcconfig +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/Local.xcconfig @@ -1,4 +1,4 @@ // Local.xcconfig - Local development configuration LIBRARY_SEARCH_PATHS = $(inherited) $(PROJECT_DIR)/DashHDWalletExample OTHER_LDFLAGS = $(inherited) -L$(PROJECT_DIR)/DashHDWalletExample -ldash_spv_ffi -SWIFT_INCLUDE_PATHS = $(inherited) /Users/quantum/src/rust-dashcore/swift-dash-core-sdk/Sources/DashSPVFFI/include \ No newline at end of file +SWIFT_INCLUDE_PATHS = $(inherited) /Users/quantum/src/rust-dashcore/swift-dash-core-sdk/Sources/DashSPVFFI/include diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/README.md b/swift-dash-core-sdk/Examples/DashHDWalletExample/README.md index 508332ce3..4b1ab208e 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/README.md +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/README.md @@ -205,4 +205,4 @@ swift build --product DashHDWalletExample -Xlinker -L$(pwd)/Examples/DashHDWalle ``` ### Building for macOS -The example app builds for both platforms by default. The UI automatically adapts based on the target platform using Swift's conditional compilation. \ No newline at end of file +The example app builds for both platforms by default. The UI automatically adapts based on the target platform using Swift's conditional compilation. 
diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/XCODE_SETUP.md b/swift-dash-core-sdk/Examples/DashHDWalletExample/XCODE_SETUP.md index 2ca76b79b..06b541e78 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/XCODE_SETUP.md +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/XCODE_SETUP.md @@ -55,4 +55,4 @@ DashHDWalletExample/ │ └── Assets.xcassets/ # App resources ├── DashHDWalletExampleTests/ # Unit tests └── DashHDWalletExampleUITests/ # UI tests -``` \ No newline at end of file +``` diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/build-phase.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/build-phase.sh index dfb6a0456..e24ea6376 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/build-phase.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/build-phase.sh @@ -47,4 +47,4 @@ cd "$SRCROOT/../.." echo "Running build-ios.sh..." ./build-ios.sh -echo "Build phase completed successfully" \ No newline at end of file +echo "Build phase completed successfully" diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/build-spm.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/build-spm.sh index ea0d759b7..bbfd5baa2 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/build-spm.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/build-spm.sh @@ -19,4 +19,4 @@ if [ $? -eq 0 ]; then else echo "❌ Build failed!" exit 1 -fi \ No newline at end of file +fi diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh index f14771aef..24c730c2f 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/clean-simulator-data.sh @@ -22,4 +22,4 @@ if [ -d "$SIMULATOR_DIR" ]; then echo "Please rebuild and run your app in the simulator." else echo "❌ Simulator directory not found at: $SIMULATOR_DIR" -fi \ No newline at end of file +fi diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/dash_spv_ffi.pc b/swift-dash-core-sdk/Examples/DashHDWalletExample/dash_spv_ffi.pc index a8b3a6cc7..9e2e7ab3e 100644 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/dash_spv_ffi.pc +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/dash_spv_ffi.pc @@ -6,4 +6,4 @@ Name: dash_spv_ffi Description: Dash SPV FFI library Version: 0.1.0 Libs: -L${libdir} -ldash_spv_ffi -Cflags: -I${includedir} \ No newline at end of file +Cflags: -I${includedir} diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh index 18c699444..e4108d636 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-linking.sh @@ -47,4 +47,4 @@ echo "2. 
In Xcode: Product → Build (⌘B)" echo "" echo "If you still have issues, try:" echo "- File → Packages → Reset Package Caches" -echo "- Delete DerivedData: rm -rf ~/Library/Developer/Xcode/DerivedData" \ No newline at end of file +echo "- Delete DerivedData: rm -rf ~/Library/Developer/Xcode/DerivedData" diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-spm-linking.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-spm-linking.sh index c721cf001..64796c893 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-spm-linking.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/fix-spm-linking.sh @@ -49,4 +49,4 @@ rm -rf ~/Library/Developer/Xcode/DerivedData/DashHDWalletExample* rm -rf ~/Library/Caches/com.apple.dt.Xcode* rm -rf ~/Library/Caches/org.swift.swiftpm -echo "Done! Now clean and rebuild in Xcode." \ No newline at end of file +echo "Done! Now clean and rebuild in Xcode." diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/run-spm.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/run-spm.sh index 13ef64ded..c3c5785ec 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/run-spm.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/run-spm.sh @@ -12,4 +12,4 @@ echo "Library path: ${SCRIPT_DIR}" swift run \ -Xlinker -L${SCRIPT_DIR} \ -Xlinker -ldash_spv_ffi \ - "$@" \ No newline at end of file + "$@" diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/select-library.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/select-library.sh index eb457b490..ff4c7cb80 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/select-library.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/select-library.sh @@ -32,4 +32,4 @@ if [ -f "libdash_spv_ffi.a" ]; then else echo "ERROR: Failed to create libdash_spv_ffi.a" exit 1 -fi \ No newline at end of file +fi diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-env.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-env.sh index 0839ae6a9..8979078e8 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-env.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-env.sh @@ -22,4 +22,4 @@ echo "" echo "To build with Swift PM, use:" echo " swift build \$SWIFT_BUILD_FLAGS" echo "Or in Xcode, add to 'Other Linker Flags':" -echo " -L${SCRIPT_DIR}" \ No newline at end of file +echo " -L${SCRIPT_DIR}" diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-spm.sh b/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-spm.sh index 5431b8eac..2119c96b8 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-spm.sh +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/setup-spm.sh @@ -10,4 +10,4 @@ sudo mkdir -p /usr/local/lib sudo ln -sf "${SCRIPT_DIR}/libdash_spv_ffi.a" /usr/local/lib/libdash_spv_ffi.a echo "Library symlink created at /usr/local/lib/libdash_spv_ffi.a" -echo "You may need to run 'swift package clean' and rebuild" \ No newline at end of file +echo "You may need to run 'swift package clean' and rebuild" diff --git a/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift b/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift index f647e9fd1..fa44c20b3 100755 --- a/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift +++ b/swift-dash-core-sdk/Examples/DashHDWalletExample/test-link.swift @@ -39,4 +39,4 @@ if fileManager.fileExists(atPath: libraryPath) { } } else { print("❌ Library file not found at: \(libraryPath)") -} \ No newline at end of file +} diff 
--git a/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift b/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift index b9c45582c..0a1300e55 100644 --- a/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift +++ b/swift-dash-core-sdk/Examples/DashWalletExample/ContentView.swift @@ -474,4 +474,4 @@ struct SendTransactionView: View { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashWalletExample/DashWalletApp.swift b/swift-dash-core-sdk/Examples/DashWalletExample/DashWalletApp.swift index d8f83e3d5..e3187ad05 100644 --- a/swift-dash-core-sdk/Examples/DashWalletExample/DashWalletApp.swift +++ b/swift-dash-core-sdk/Examples/DashWalletExample/DashWalletApp.swift @@ -7,4 +7,4 @@ struct DashWalletApp: App { ContentView() } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift b/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift index ed3dae406..2a0b59085 100644 --- a/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift +++ b/swift-dash-core-sdk/Examples/DashWalletExample/WalletViewModel.swift @@ -256,4 +256,4 @@ class WalletViewModel: ObservableObject { } showError = true } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/IMPLEMENTATION_PLAN.md b/swift-dash-core-sdk/IMPLEMENTATION_PLAN.md index 667ec11c9..f9edf1fd1 100644 --- a/swift-dash-core-sdk/IMPLEMENTATION_PLAN.md +++ b/swift-dash-core-sdk/IMPLEMENTATION_PLAN.md @@ -384,4 +384,4 @@ struct ContentView: View { 2. **DashPay**: Support for blockchain user identities 3. **Platform Integration**: Dash Platform SDK integration 4. **Advanced Features**: CoinJoin, governance participation -5. **Cross-Platform**: Kotlin Multiplatform Mobile support \ No newline at end of file +5. **Cross-Platform**: Kotlin Multiplatform Mobile support diff --git a/swift-dash-core-sdk/INTEGRATION_NOTES.md b/swift-dash-core-sdk/INTEGRATION_NOTES.md index a1e393808..2d39f31ef 100644 --- a/swift-dash-core-sdk/INTEGRATION_NOTES.md +++ b/swift-dash-core-sdk/INTEGRATION_NOTES.md @@ -230,4 +230,4 @@ cp ../dash-spv-ffi/target/release/libdash_spv_ffi.a swift-dash-core-sdk/Librarie 1. **Hardware Wallet Support**: Add interface for external signers 2. **Multi-Sig**: Support for multi-signature accounts 3. **Custom Derivation**: Support for non-BIP44 paths -4. **Key Rotation**: Support for key rotation and migration \ No newline at end of file +4. 
**Key Rotation**: Support for key rotation and migration diff --git a/swift-dash-core-sdk/Package.swift b/swift-dash-core-sdk/Package.swift index 0896b3973..214bb02cc 100644 --- a/swift-dash-core-sdk/Package.swift +++ b/swift-dash-core-sdk/Package.swift @@ -70,4 +70,4 @@ let package = Package( path: "Tests/SwiftDashCoreSDKTests" ), ] -) \ No newline at end of file +) diff --git a/swift-dash-core-sdk/README.md b/swift-dash-core-sdk/README.md index c66f9c3f3..ebba52755 100644 --- a/swift-dash-core-sdk/README.md +++ b/swift-dash-core-sdk/README.md @@ -260,4 +260,4 @@ This project is licensed under the MIT License - see the LICENSE file for detail - Built on top of [rust-dashcore](https://github.com/dashpay/rust-dashcore) - Uses dash-spv-ffi for Rust-Swift interoperability -- SwiftData for persistence \ No newline at end of file +- SwiftData for persistence diff --git a/swift-dash-core-sdk/Sources/DashSPVFFI/DashSPVFFI.swift b/swift-dash-core-sdk/Sources/DashSPVFFI/DashSPVFFI.swift index a56c0b189..a65eef997 100644 --- a/swift-dash-core-sdk/Sources/DashSPVFFI/DashSPVFFI.swift +++ b/swift-dash-core-sdk/Sources/DashSPVFFI/DashSPVFFI.swift @@ -1,4 +1,4 @@ // This file exists to satisfy Swift Package Manager's requirement for at least one Swift source file. // The actual FFI implementation is provided by the linked Rust library (libdash_spv_ffi.a). -import Foundation \ No newline at end of file +import Foundation diff --git a/swift-dash-core-sdk/Sources/DashSPVFFI/dummy.c b/swift-dash-core-sdk/Sources/DashSPVFFI/dummy.c index 884319dbd..be353e12c 100644 --- a/swift-dash-core-sdk/Sources/DashSPVFFI/dummy.c +++ b/swift-dash-core-sdk/Sources/DashSPVFFI/dummy.c @@ -1 +1 @@ -// Empty file - actual implementations come from libdash_spv_ffi.a \ No newline at end of file +// Empty file - actual implementations come from libdash_spv_ffi.a diff --git a/swift-dash-core-sdk/Sources/DashSPVFFI/include/DashSPVFFIC.modulemap b/swift-dash-core-sdk/Sources/DashSPVFFI/include/DashSPVFFIC.modulemap index 361937d1e..e987554e1 100644 --- a/swift-dash-core-sdk/Sources/DashSPVFFI/include/DashSPVFFIC.modulemap +++ b/swift-dash-core-sdk/Sources/DashSPVFFI/include/DashSPVFFIC.modulemap @@ -2,4 +2,4 @@ module DashSPVFFIC { header "dash_spv_ffi.h" export * } -EOF < /dev/null \ No newline at end of file +EOF < /dev/null diff --git a/swift-dash-core-sdk/Sources/DashSPVFFI/include/module.modulemap b/swift-dash-core-sdk/Sources/DashSPVFFI/include/module.modulemap index 036fdaac9..e4fb9d2e5 100644 --- a/swift-dash-core-sdk/Sources/DashSPVFFI/include/module.modulemap +++ b/swift-dash-core-sdk/Sources/DashSPVFFI/include/module.modulemap @@ -1,4 +1,4 @@ module DashSPVFFI { header "dash_spv_ffi.h" export * -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift index f387fa86e..e5c8e7ad5 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/AsyncBridge.swift @@ -152,4 +152,4 @@ actor AsyncBridge { } dataContinuations.removeAll() } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift index 3b32da055..81b5327ca 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/DashSDKError.swift @@ -98,4 +98,4 @@ public enum 
DashSDKError: LocalizedError { return "This feature is temporarily unavailable" } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift index f985c42c4..9739d861d 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/FFITypes.swift @@ -46,4 +46,4 @@ enum FFIError: Error { self = .unknown } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift index d03acd122..a7e03d1c7 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Core/SPVClient+Verification.swift @@ -64,4 +64,4 @@ extension SPVClient { defer { Self.watchedAddressesLock.unlock() } Self.watchedAddresses.removeAll() } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift index 4050a772d..3a9ac3834 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/DashSDK.swift @@ -284,4 +284,4 @@ extension DashSDK { public static func devnet() throws -> DashSDK { return try DashSDK(configuration: .devnet()) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift index 81ea5e41c..201ffe035 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Errors/WatchAddressError.swift @@ -33,4 +33,4 @@ public enum WatchAddressError: Error, LocalizedError { return false } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift index 94b608bc7..bd9e3bc22 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Balance.swift @@ -92,4 +92,4 @@ extension Balance { let dash = Double(satoshis) / 100_000_000.0 return String(format: "%.8f DASH", dash) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift index a973f8e90..73710f3b6 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/SPVStats.swift @@ -96,4 +96,4 @@ public struct SPVStats: Sendable { return "\(connectedPeers) peers connected" } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift index 91c8d008d..9c707f4ee 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/Transaction.swift @@ -109,4 +109,4 @@ public enum TransactionStatus: Equatable { return false } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift 
b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift index dbb83f4ca..38e93e0cd 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/UTXO.swift @@ -83,4 +83,4 @@ extension UTXO { } return (String(components[0]), vout) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift index 08577f5f3..a0d22402f 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/ValidationMode.swift @@ -42,4 +42,4 @@ public enum ValidationMode: String, Codable, CaseIterable, Sendable { return nil } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift index cfea95852..30e473fcf 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Models/WatchedAddress.swift @@ -86,4 +86,4 @@ extension WatchedAddress { } } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift index 9e94359fd..4cf18fb3d 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/PersistentWalletManager.swift @@ -445,4 +445,4 @@ public enum WalletManagerError: LocalizedError { return "Failed to watch \(failures) out of \(addresses) addresses" } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift index 972b41f6c..f40b58b8c 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Storage/StorageManager.swift @@ -251,4 +251,4 @@ public struct StorageStatistics { - Unspent UTXOs: \(unspentUTXOCount) """ } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift index 7ab000751..e28752ca2 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/MempoolTypes.swift @@ -164,4 +164,4 @@ public enum MempoolEvent { public protocol MempoolObserver: AnyObject { /// Called when a mempool event occurs func mempoolEvent(_ event: MempoolEvent) -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift index 35593d87e..77031f396 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Types/WatchResult.swift @@ -14,4 +14,4 @@ public struct WatchAddressResult { self.timestamp = timestamp self.retryCount = retryCount } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift index 87079b8af..d0f459eee 100644 --- 
a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/Extensions.swift @@ -116,4 +116,4 @@ extension Task where Success == Never, Failure == Never { let nanoseconds = UInt64(seconds * 1_000_000_000) try await Task.sleep(nanoseconds: nanoseconds) } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift index eec7ee3fa..b72875e54 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Utils/WatchAddressRetryManager.swift @@ -102,4 +102,4 @@ public class WatchAddressRetryManager { retryTimer = nil } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift index 7a9fdeb1e..45cc9b9b8 100644 --- a/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift +++ b/swift-dash-core-sdk/Sources/SwiftDashCoreSDK/Wallet/WalletManager.swift @@ -446,4 +446,4 @@ public struct TransactionBuilder { let estimatedSize = UInt64(inputs * 148 + outputs * 34 + 10) return estimatedSize * feeRate / 1000 } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift b/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift index a92c822f5..72f917999 100644 --- a/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift +++ b/swift-dash-core-sdk/Tests/SwiftDashCoreSDKTests/MempoolTests.swift @@ -152,4 +152,4 @@ final class MempoolTests: XCTestCase { XCTFail("Expected transactionRemoved event") } } -} \ No newline at end of file +} diff --git a/swift-dash-core-sdk/sync-headers.sh b/swift-dash-core-sdk/sync-headers.sh index 7abdfbdd3..001b29200 100755 --- a/swift-dash-core-sdk/sync-headers.sh +++ b/swift-dash-core-sdk/sync-headers.sh @@ -44,4 +44,4 @@ else echo -e "${YELLOW}⚠ key_wallet_ffi.h not found, skipping${NC}" fi -echo -e "${GREEN}Header sync complete!${NC}" \ No newline at end of file +echo -e "${GREEN}Header sync complete!${NC}" diff --git a/test-utils/Cargo.toml b/test-utils/Cargo.toml index d93abf5c9..fb31a604e 100644 --- a/test-utils/Cargo.toml +++ b/test-utils/Cargo.toml @@ -22,4 +22,4 @@ async = ["tokio"] [dev-dependencies] serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" \ No newline at end of file +serde_json = "1.0" diff --git a/test_checksum.rs b/test_checksum.rs index 3f43edbc4..b6c2f3c19 100644 --- a/test_checksum.rs +++ b/test_checksum.rs @@ -10,4 +10,4 @@ fn main() { let checksum = sha2_checksum(empty_data); println\!("SHA256D checksum for empty data: {:02x?}", checksum); } -EOF < /dev/null \ No newline at end of file +EOF < /dev/null diff --git a/test_smart_algo.sh b/test_smart_algo.sh index 7f5f8d27d..e58cb4f92 100644 --- a/test_smart_algo.sh +++ b/test_smart_algo.sh @@ -11,4 +11,4 @@ export RUST_LOG=dash_spv::sync::masternodes=debug,dash_spv::sync::sequential=deb --start-height 1100000 \ 2>&1 | tee smart_algo_debug.log -echo "Debug log saved to smart_algo_debug.log" \ No newline at end of file +echo "Debug log saved to smart_algo_debug.log" From a10b97d32c5d2d3d04f8e668778143d19b5a88e2 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 01:03:12 +1000 Subject: [PATCH 05/14] Fix `check-executables-have-shebangs` checks --- 
contrib/test-rpc.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/contrib/test-rpc.sh b/contrib/test-rpc.sh index 58e93d540..e67f65d66 100755 --- a/contrib/test-rpc.sh +++ b/contrib/test-rpc.sh @@ -1,3 +1,4 @@ +#!/usr/bin/env bash set -xe set -o pipefail From 1a53568e7d9749cf74b8f55a988fdea57d21c4e0 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 01:02:45 +1000 Subject: [PATCH 06/14] Fix `check-shebang-scripts-are-executable` checks --- dash-spv-ffi/scripts/generate_ffi_docs.py | 0 test_smart_algo.sh | 0 2 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 dash-spv-ffi/scripts/generate_ffi_docs.py mode change 100644 => 100755 test_smart_algo.sh diff --git a/dash-spv-ffi/scripts/generate_ffi_docs.py b/dash-spv-ffi/scripts/generate_ffi_docs.py old mode 100644 new mode 100755 diff --git a/test_smart_algo.sh b/test_smart_algo.sh old mode 100644 new mode 100755 From 296e313730109b7b61d649239c0893c8d39cfb6e Mon Sep 17 00:00:00 2001 From: xdustinface Date: Fri, 21 Nov 2025 00:15:00 +1000 Subject: [PATCH 07/14] Fix actionlint --- .github/workflows/rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 9b9892c2c..25474d909 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -73,7 +73,7 @@ jobs: # Run test in isolation, single-threaded to improve determinism cargo test -p dash-spv --all-features -- --nocapture --test-threads=1 "$t" 2>&1 | tee -a segv_scan.log rc=${PIPESTATUS[0]} - if [ $rc -ne 0 ]; then + if [ "$rc" -ne 0 ]; then if tail -n 200 segv_scan.log | grep -qE 'SIGSEGV|signal: 11'; then FOUND_THIS_ATTEMPT+=("$t") SEGV_TESTS+=("$t") From 1c8d9d19a4be5229419b75b6f66bb7e933e26d29 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 01:06:55 +1000 Subject: [PATCH 08/14] Fix `typos` checks --- dash-spv-ffi/FFI_API.md | 2 +- dash/examples/ecdsa-psbt.rs | 4 ++-- dash/examples/handshake.rs | 2 +- dash/src/address.rs | 6 +++--- dash/src/amount.rs | 2 +- dash/src/bip152.rs | 4 ++-- dash/src/blockdata/block.rs | 2 +- dash/src/blockdata/opcodes.rs | 2 +- dash/src/blockdata/script/borrowed.rs | 2 +- dash/src/blockdata/script/owned.rs | 4 ++-- dash/src/blockdata/script/push_bytes.rs | 2 +- dash/src/blockdata/script/tests.rs | 16 ++++++++-------- dash/src/blockdata/transaction/mod.rs | 4 ++-- dash/src/blockdata/weight.rs | 2 +- dash/src/consensus/encode.rs | 2 +- dash/src/crypto/sighash.rs | 2 +- dash/src/ephemerealdata/mod.rs | 2 +- dash/src/network/message.rs | 2 +- dash/src/parse.rs | 2 +- docs/implementation-notes/BLOOM_FILTER_SPEC.md | 2 +- fuzz/cycle.sh | 2 +- hashes/README.md | 4 ++-- hashes/src/hmac.rs | 2 +- hashes/src/lib.rs | 2 +- hashes/src/sha256.rs | 6 +++--- hashes/src/sha256t.rs | 2 +- hashes/src/sha512_256.rs | 6 +++--- internals/src/hex/display.rs | 2 +- key-wallet-ffi/FFI_API.md | 14 +++++++------- key-wallet/src/bip32.rs | 2 +- key-wallet/tests/psbt.rs | 8 ++++---- rpc-json/src/lib.rs | 4 ++-- 32 files changed, 60 insertions(+), 60 deletions(-) diff --git a/dash-spv-ffi/FFI_API.md b/dash-spv-ffi/FFI_API.md index fc76d4e22..254559dfb 100644 --- a/dash-spv-ffi/FFI_API.md +++ b/dash-spv-ffi/FFI_API.md @@ -39,7 +39,7 @@ Functions: 27 | Function | Description | Module | |----------|-------------|--------| | `dash_spv_ffi_client_update_config` | Update the running client's configuration | client | -| `dash_spv_ffi_config_add_peer` | Adds a peer address to the configuration Accepts either a full socket addres... 
| config | +| `dash_spv_ffi_config_add_peer` | Adds a peer address to the configuration Accepts either a full socket address... | config | | `dash_spv_ffi_config_destroy` | Destroys an FFIClientConfig and frees its memory # Safety - `config` must be... | config | | `dash_spv_ffi_config_get_data_dir` | Gets the data directory path from the configuration # Safety - `config` must... | config | | `dash_spv_ffi_config_get_mempool_strategy` | Gets the mempool synchronization strategy # Safety - `config` must be a vali... | config | diff --git a/dash/examples/ecdsa-psbt.rs b/dash/examples/ecdsa-psbt.rs index abeef5e44..64ae668b6 100644 --- a/dash/examples/ecdsa-psbt.rs +++ b/dash/examples/ecdsa-psbt.rs @@ -53,7 +53,7 @@ const INPUT_UTXO_TXID: &str = "295f06639cde6039bf0c3dbf4827f0e3f2b2c2b476408e2f9 const INPUT_UTXO_VOUT: u32 = 0; const INPUT_UTXO_SCRIPT_PUBKEY: &str = "00149891eeb8891b3e80a2a1ade180f143add23bf5de"; const INPUT_UTXO_VALUE: &str = "50 BTC"; -// Get this from the desciptor, +// Get this from the descriptor, // "wpkh([97f17dca/0'/0'/0']02749483607dafb30c66bd93ece4474be65745ce538c2d70e8e246f17e7a4e0c0c)#m9n56cx0". const INPUT_UTXO_DERIVATION_PATH: &str = "m/0h/0h/0h"; @@ -181,7 +181,7 @@ impl WatchOnly { } } - /// Creates the PSBT, in BIP174 parlance this is the 'Creater'. + /// Creates the PSBT, in BIP174 parlance this is the 'Creator'. fn create_psbt(&self, secp: &Secp256k1) -> Result { let to_address = Address::from_str(RECEIVE_ADDRESS)?.require_network(Network::Regtest)?; let to_amount = Amount::from_str(OUTPUT_AMOUNT_BTC)?; diff --git a/dash/examples/handshake.rs b/dash/examples/handshake.rs index baf53d3ff..b6cec7ccc 100644 --- a/dash/examples/handshake.rs +++ b/dash/examples/handshake.rs @@ -12,7 +12,7 @@ use dashcore::{Network, secp256k1}; use secp256k1::rand; fn main() { - // This example establishes a connection to a Bitcoin node, sends the intial + // This example establishes a connection to a Bitcoin node, sends the initial // "version" message, waits for the reply, and finally closes the connection. let args: Vec = env::args().collect(); if args.len() < 2 { diff --git a/dash/src/address.rs b/dash/src/address.rs index eed967313..02e4ba5f0 100644 --- a/dash/src/address.rs +++ b/dash/src/address.rs @@ -628,7 +628,7 @@ pub struct AddressEncoding<'a> { pub p2pkh_prefix: u8, /// base58 version byte for p2sh payloads (e.g. 0x05 for "3..." addresses). pub p2sh_prefix: u8, - /// hrp used in bech32 addresss (e.g. "bc" for "bc1..." addresses). + /// hrp used in bech32 address (e.g. "bc" for "bc1..." addresses). pub bech32_hrp: &'a str, } @@ -1019,7 +1019,7 @@ impl Address { encoding.fmt(fmt) } - /// Create new address from given components, infering the network validation + /// Create new address from given components, inferring the network validation /// marker type of the address. #[inline] pub fn new(network: Network, payload: Payload) -> Self { @@ -1222,7 +1222,7 @@ impl Address { unsafe { &*(self as *const Address as *const Address) } } /// Parsed addresses do not always have *one* network. The problem is that legacy testnet, - /// regtest and devnet addresse use the same prefix instead of multiple different ones. When + /// regtest and devnet addresses use the same prefix instead of multiple different ones. When /// parsing, such addresses are always assumed to be testnet addresses (the same is true for /// bech32 devnet addresses). So if one wants to check if an address belongs to a certain /// network a simple comparison is not enough anymore. 
Instead this function can be used. diff --git a/dash/src/amount.rs b/dash/src/amount.rs index 9d41c79de..e5f6df2d6 100644 --- a/dash/src/amount.rs +++ b/dash/src/amount.rs @@ -103,7 +103,7 @@ impl Denomination { } } -/// These form are ambigous and could have many meanings. For example, M could denote Mega or Milli. +/// These form are ambiguous and could have many meanings. For example, M could denote Mega or Milli. /// If any of these forms are used, an error type PossiblyConfusingDenomination is returned. const CONFUSING_FORMS: [&str; 9] = ["Msat", "Msats", "MSAT", "MSATS", "MSat", "MSats", "MBTC", "Mbtc", "PBTC"]; diff --git a/dash/src/bip152.rs b/dash/src/bip152.rs index ffb4949fe..a88343c63 100644 --- a/dash/src/bip152.rs +++ b/dash/src/bip152.rs @@ -353,7 +353,7 @@ impl_consensus_encoding!(BlockTransactions, block_hash, transactions); impl BlockTransactions { /// Construct a [BlockTransactions] from a [BlockTransactionsRequest] and - /// the corresponsing full [Block] by providing all requested transactions. + /// the corresponding full [Block] by providing all requested transactions. pub fn from_request( request: &BlockTransactionsRequest, block: &Block, @@ -459,7 +459,7 @@ mod test { #[test] fn test_getblocktx_differential_encoding_de_and_serialization() { let testcases = vec![ - // differentially encoded VarInts, indicies + // differentially encoded VarInts, indices (vec![4, 0, 5, 1, 10], vec![0, 6, 8, 19]), (vec![1, 0], vec![0]), (vec![5, 0, 0, 0, 0, 0], vec![0, 1, 2, 3, 4]), diff --git a/dash/src/blockdata/block.rs b/dash/src/blockdata/block.rs index f4a38cdae..2036ada35 100644 --- a/dash/src/blockdata/block.rs +++ b/dash/src/blockdata/block.rs @@ -28,7 +28,7 @@ use crate::{VarInt, io, merkle_tree}; /// Bitcoin block header. /// /// Contains all the block's information except the actual transactions, but -/// including a root of a [merkle tree] commiting to all transactions in the block. +/// including a root of a [merkle tree] committing to all transactions in the block. /// /// [merkle tree]: https://en.wikipedia.org/wiki/Merkle_tree /// diff --git a/dash/src/blockdata/opcodes.rs b/dash/src/blockdata/opcodes.rs index ccc40df71..ae5f191c2 100644 --- a/dash/src/blockdata/opcodes.rs +++ b/dash/src/blockdata/opcodes.rs @@ -336,7 +336,7 @@ all_opcodes! { /// Classification context for the opcode. /// /// Some opcodes like [`OP_RESERVED`] abort the script in `ClassifyContext::Legacy` context, -/// but will act as `OP_SUCCESSx` in `ClassifyContext::TapScript` (see BIP342 for full list). +/// but will act as `OP_SUCCESSSx` in `ClassifyContext::TapScript` (see BIP342 for full list). #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ClassifyContext { /// Opcode used in tapscript context. diff --git a/dash/src/blockdata/script/borrowed.rs b/dash/src/blockdata/script/borrowed.rs index d1b0a50b0..190ada4fe 100644 --- a/dash/src/blockdata/script/borrowed.rs +++ b/dash/src/blockdata/script/borrowed.rs @@ -59,7 +59,7 @@ use crate::taproot::{LeafVersion, TapLeafHash, TapNodeHash}; /// ## Memory safety /// /// The type is `#[repr(transparent)]` for internal purposes only! -/// No consumer crate may rely on the represenation of the struct! +/// No consumer crate may rely on the representation of the struct! 
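// Illustrative sketch (not part of the patch): how the differentially encoded
// index vectors in the bip152 test cases above expand. The first element is the
// count; every following element stores `index[i] - index[i-1] - 1`, so
// `[4, 0, 5, 1, 10]` decodes to the absolute indices `[0, 6, 8, 19]`.
fn decode_differential_indices(encoded: &[u64]) -> Vec<u64> {
    let mut indices = Vec::new();
    let mut next = 0u64; // smallest index the next entry could refer to
    for &delta in encoded.iter().skip(1) {
        // skip the leading element count
        let index = next + delta;
        indices.push(index);
        next = index + 1;
    }
    indices
}
// e.g. decode_differential_indices(&[4, 0, 5, 1, 10]) == vec![0, 6, 8, 19]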
/// /// ## References /// diff --git a/dash/src/blockdata/script/owned.rs b/dash/src/blockdata/script/owned.rs index f7276ccf4..da9230b34 100644 --- a/dash/src/blockdata/script/owned.rs +++ b/dash/src/blockdata/script/owned.rs @@ -257,7 +257,7 @@ impl ScriptBuf { self.0.extend_from_slice(data.as_bytes()); } - /// Computes the sum of `len` and the lenght of an appropriate push opcode. + /// Computes the sum of `len` and the length of an appropriate push opcode. pub fn reserved_len_for_slice(len: usize) -> usize { len + match len { 0..=0x4b => 1, @@ -323,7 +323,7 @@ impl ScriptBuf { /// Converts this `ScriptBuf` into a [boxed](Box) [`Script`]. /// - /// This method reallocates if the capacity is greater than lenght of the script but should not + /// This method reallocates if the capacity is greater than length of the script but should not /// when they are equal. If you know beforehand that you need to create a script of exact size /// use [`reserve_exact`](Self::reserve_exact) before adding data to the script so that the /// reallocation can be avoided. diff --git a/dash/src/blockdata/script/push_bytes.rs b/dash/src/blockdata/script/push_bytes.rs index adb2fbe00..d3fa7cf62 100644 --- a/dash/src/blockdata/script/push_bytes.rs +++ b/dash/src/blockdata/script/push_bytes.rs @@ -75,7 +75,7 @@ mod primitive { &self.0 } - /// Returns the underlying mutbale bytes. + /// Returns the underlying mutable bytes. pub fn as_mut_bytes(&mut self) -> &mut [u8] { &mut self.0 } diff --git a/dash/src/blockdata/script/tests.rs b/dash/src/blockdata/script/tests.rs index f6b9e3c54..69d0cd92e 100644 --- a/dash/src/blockdata/script/tests.rs +++ b/dash/src/blockdata/script/tests.rs @@ -64,7 +64,7 @@ fn p2pk_pubkey_bytes_no_checksig_returns_none() { } #[test] -fn p2pk_pubkey_bytes_emptry_script_returns_none() { +fn p2pk_pubkey_bytes_empty_script_returns_none() { let empty_script = Script::builder().into_script(); assert!(empty_script.p2pk_pubkey_bytes().is_none()); } @@ -174,7 +174,7 @@ fn p2pk_public_key_compressed_key_returns_some() { #[test] fn script_x_only_key() { - // Notice the "20" which prepends the keystr. That 20 is hexidecimal for "32". The Builder automatically adds the 32 opcode + // Notice the "20" which prepends the keystr. That 20 is hexadecimal for "32". The Builder automatically adds the 32 opcode // to our script in order to give a heads up to the script compiler that it should add the next 32 bytes to the stack. 
// From: https://github.com/bitcoin-core/btcdeb/blob/e8c2750c4a4702768c52d15640ed03bf744d2601/doc/tapscript-example.md?plain=1#L43 const KEYSTR: &str = "209997a497d964fc1a62885b05a51166a65a90df00492c8d7cf61d6accf54803be"; @@ -410,7 +410,7 @@ fn script_asm() { assert_eq!(ScriptBuf::from_hex("0047304402202457e78cc1b7f50d0543863c27de75d07982bde8359b9e3316adec0aec165f2f02200203fd331c4e4a4a02f48cf1c291e2c0d6b2f7078a784b5b3649fca41f8794d401004cf1552103244e602b46755f24327142a0517288cebd159eccb6ccf41ea6edf1f601e9af952103bbbacc302d19d29dbfa62d23f37944ae19853cf260c745c2bea739c95328fcb721039227e83246bd51140fe93538b2301c9048be82ef2fb3c7fc5d78426ed6f609ad210229bf310c379b90033e2ecb07f77ecf9b8d59acb623ab7be25a0caed539e2e6472103703e2ed676936f10b3ce9149fa2d4a32060fb86fa9a70a4efe3f21d7ab90611921031e9b7c6022400a6bb0424bbcde14cff6c016b91ee3803926f3440abf5c146d05210334667f975f55a8455d515a2ef1c94fdfa3315f12319a14515d2a13d82831f62f57ae").unwrap().to_asm_string(), "OP_0 OP_PUSHBYTES_71 304402202457e78cc1b7f50d0543863c27de75d07982bde8359b9e3316adec0aec165f2f02200203fd331c4e4a4a02f48cf1c291e2c0d6b2f7078a784b5b3649fca41f8794d401 OP_0 OP_PUSHDATA1 552103244e602b46755f24327142a0517288cebd159eccb6ccf41ea6edf1f601e9af952103bbbacc302d19d29dbfa62d23f37944ae19853cf260c745c2bea739c95328fcb721039227e83246bd51140fe93538b2301c9048be82ef2fb3c7fc5d78426ed6f609ad210229bf310c379b90033e2ecb07f77ecf9b8d59acb623ab7be25a0caed539e2e6472103703e2ed676936f10b3ce9149fa2d4a32060fb86fa9a70a4efe3f21d7ab90611921031e9b7c6022400a6bb0424bbcde14cff6c016b91ee3803926f3440abf5c146d05210334667f975f55a8455d515a2ef1c94fdfa3315f12319a14515d2a13d82831f62f57ae"); // Various weird scripts found in transaction 6d7ed9914625c73c0288694a6819196a27ef6c08f98e1270d975a8e65a3dc09a - // which triggerred overflow bugs on 32-bit machines in script formatting in the past. + // which triggered overflow bugs on 32-bit machines in script formatting in the past. 
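// Illustrative sketch (not part of the patch): the automatic push-length prefix
// described in the x-only key comment above. Assuming the `Builder` and
// `to_asm_string` APIs already in scope in this test module, pushing a 32-byte
// slice makes the builder emit OP_PUSHBYTES_32 (0x20, decimal 32) before the data.
#[test]
fn push_of_32_bytes_gets_length_prefix() {
    let script = Builder::new().push_slice([0x99u8; 32]).into_script();
    assert!(script.to_asm_string().starts_with("OP_PUSHBYTES_32"));
}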
assert_eq!( ScriptBuf::from_hex("01").unwrap().to_asm_string(), "OP_PUSHBYTES_1 " @@ -489,11 +489,11 @@ fn p2sh_p2wsh_conversion() { // Test vectors taken from Core tests/data/script_tests.json // bare p2wsh let redeem_script = ScriptBuf::from_hex("410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ac").unwrap(); - let expected_witout = + let expected_without = ScriptBuf::from_hex("0020b95237b48faaa69eb078e1170be3b5cbb3fddf16d0a991e14ad274f7b33a4f64") .unwrap(); assert!(redeem_script.to_v0_p2wsh().is_v0_p2wsh()); - assert_eq!(redeem_script.to_v0_p2wsh(), expected_witout); + assert_eq!(redeem_script.to_v0_p2wsh(), expected_without); // p2sh let redeem_script = ScriptBuf::from_hex("0479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8").unwrap(); @@ -504,14 +504,14 @@ fn p2sh_p2wsh_conversion() { // p2sh-p2wsh let redeem_script = ScriptBuf::from_hex("410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ac").unwrap(); - let expected_witout = + let expected_without = ScriptBuf::from_hex("0020b95237b48faaa69eb078e1170be3b5cbb3fddf16d0a991e14ad274f7b33a4f64") .unwrap(); let expected_out = ScriptBuf::from_hex("a914f386c2ba255cc56d20cfa6ea8b062f8b5994551887").unwrap(); assert!(redeem_script.to_p2sh().is_p2sh()); assert!(redeem_script.to_p2sh().to_v0_p2wsh().is_v0_p2wsh()); - assert_eq!(redeem_script.to_v0_p2wsh(), expected_witout); + assert_eq!(redeem_script.to_v0_p2wsh(), expected_without); assert_eq!(redeem_script.to_v0_p2wsh().to_p2sh(), expected_out); } @@ -613,7 +613,7 @@ fn test_bitcoinconsensus() { } #[test] -fn defult_dust_value_tests() { +fn default_dust_value_tests() { // Check that our dust_value() calculator correctly calculates the dust limit on common // well-known scriptPubKey types. let script_p2wpkh = Builder::new().push_int(0).push_slice([42; 20]).into_script(); diff --git a/dash/src/blockdata/transaction/mod.rs b/dash/src/blockdata/transaction/mod.rs index 088790f90..a40824bc4 100644 --- a/dash/src/blockdata/transaction/mod.rs +++ b/dash/src/blockdata/transaction/mod.rs @@ -700,7 +700,7 @@ impl InputWeightPrediction { /// /// # Panics /// - /// The funcion panics in const context and debug builds if `bytes_to_grind` is higher than 62. + /// The function panics in const context and debug builds if `bytes_to_grind` is higher than 62. /// /// [signature grinding]: https://bitcoin.stackexchange.com/questions/111660/what-is-signature-grinding pub const fn ground_p2wpkh(bytes_to_grind: usize) -> Self { @@ -787,7 +787,7 @@ impl InputWeightPrediction { /// of the to-be-constructed transaction. /// /// Note that lengths of the scripts and witness elements must be non-serialized, IOW *without* the -/// preceding compact size. The lenght of preceding compact size is computed and added inside the +/// preceding compact size. The length of preceding compact size is computed and added inside the /// function for convenience. /// /// # Usage diff --git a/dash/src/blockdata/weight.rs b/dash/src/blockdata/weight.rs index de319718b..d8c3c2d77 100644 --- a/dash/src/blockdata/weight.rs +++ b/dash/src/blockdata/weight.rs @@ -7,7 +7,7 @@ use crate::prelude::*; /// Represents block weight - the weight of a transaction or block. /// -/// This is an integer newtype representing weigth in `wu`. 
It provides protection against mixing +/// This is an integer newtype representing weight in `wu`. It provides protection against mixing /// up the types as well as basic formatting features. #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] diff --git a/dash/src/consensus/encode.rs b/dash/src/consensus/encode.rs index 6adfc8f56..7e1b6e1f9 100644 --- a/dash/src/consensus/encode.rs +++ b/dash/src/consensus/encode.rs @@ -712,7 +712,7 @@ macro_rules! impl_vec { r: &mut R, ) -> Result { let len = VarInt::consensus_decode_from_finite_reader(r)?.0; - // Do not allocate upfront more items than if the sequnce of type + // Do not allocate upfront more items than if the sequence of type // occupied roughly quarter a block. This should never be the case // for normal data, but even if that's not true - `push` will just // reallocate. diff --git a/dash/src/crypto/sighash.rs b/dash/src/crypto/sighash.rs index a7a8761b0..86ed9c0d3 100644 --- a/dash/src/crypto/sighash.rs +++ b/dash/src/crypto/sighash.rs @@ -1737,7 +1737,7 @@ mod tests { "SIGHASH_SINGLE| SIGHASH_ANYONECANPAY", "SIGHASH_ALL SIGHASH_ANYONECANPAY", "SIGHASH_NONE |", - "SIGHASH_SIGNLE", + "SIGHASH_SINGLE", "DEFAULT", "ALL", "sighash_none", diff --git a/dash/src/ephemerealdata/mod.rs b/dash/src/ephemerealdata/mod.rs index ee2c6e45f..14692c695 100644 --- a/dash/src/ephemerealdata/mod.rs +++ b/dash/src/ephemerealdata/mod.rs @@ -1,4 +1,4 @@ -//! Ephemereal data that is not stored on the Dash blockchain, but +//! Ephemeral data that is not stored on the Dash blockchain, but //! is important for Dash consensus to function, such as instant send locks, for example pub mod chain_lock; diff --git a/dash/src/network/message.rs b/dash/src/network/message.rs index 99bc5e99a..786b664fa 100644 --- a/dash/src/network/message.rs +++ b/dash/src/network/message.rs @@ -41,7 +41,7 @@ use crate::{ChainLock, InstantLock}; pub const MAX_INV_SIZE: usize = 50_000; /// Maximum size, in bytes, of an encoded message -/// This by neccessity should be larger tham `MAX_VEC_SIZE` +/// This by necessity should be larger tham `MAX_VEC_SIZE` pub const MAX_MSG_SIZE: usize = 5_000_000; /// Serializer for command string diff --git a/dash/src/parse.rs b/dash/src/parse.rs index a675eb3b7..79c747210 100644 --- a/dash/src/parse.rs +++ b/dash/src/parse.rs @@ -63,7 +63,7 @@ impl fmt::Display for ParseIntError { } } -/// Not strictly neccessary but serves as a lint - avoids weird behavior if someone accidentally +/// Not strictly necessary but serves as a lint - avoids weird behavior if someone accidentally /// passes non-integer to the `parse()` function. 
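// Illustrative sketch (not part of the patch): the defensive-allocation idea from
// the consensus decoding hunk above. The length prefix comes from untrusted input,
// so reserve only a bounded capacity up front and let `push` reallocate if an
// honest sequence really is longer. `BYTE_BUDGET` is a hypothetical value chosen
// for this sketch, not the constant the crate actually uses.
fn capped_capacity<T>(claimed_len: usize) -> usize {
    const BYTE_BUDGET: usize = 1_000_000; // e.g. roughly a quarter of a large message
    claimed_len.min(BYTE_BUDGET / core::mem::size_of::<T>().max(1))
}
// usage: Vec::<u8>::with_capacity(capped_capacity::<u8>(claimed_len))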
pub(crate) trait Integer: FromStr + TryFrom + Sized diff --git a/docs/implementation-notes/BLOOM_FILTER_SPEC.md b/docs/implementation-notes/BLOOM_FILTER_SPEC.md index 5eae0fd0e..1021392a7 100644 --- a/docs/implementation-notes/BLOOM_FILTER_SPEC.md +++ b/docs/implementation-notes/BLOOM_FILTER_SPEC.md @@ -700,7 +700,7 @@ peer.send_filter_add(new_address.to_script_pubkey().as_bytes()).await?; - Filter creation: < 1ms for 1000 elements - Insert operation: O(k) where k = number of hash functions - Contains check: O(k) -- Memory usage: ~4.5KB for 0.1% FPR with 1000 elements +- Memory usage: ~4.5KB for 0.1% false positive rate with 1000 elements ### Bandwidth Savings - Full blocks: ~1-2MB per block diff --git a/fuzz/cycle.sh b/fuzz/cycle.sh index 294f32b0d..b91c7f277 100755 --- a/fuzz/cycle.sh +++ b/fuzz/cycle.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -# Continuosly cycle over fuzz targets running each for 1 hour. +# Continuously cycle over fuzz targets running each for 1 hour. # It uses chrt SCHED_IDLE so that other process takes priority. # # For hfuzz options see https://github.com/google/honggfuzz/blob/master/docs/USAGE.md diff --git a/hashes/README.md b/hashes/README.md index 888a1f735..9e72067c1 100644 --- a/hashes/README.md +++ b/hashes/README.md @@ -4,8 +4,8 @@ This is a simple, no-dependency library which implements the hash functions needed by Bitcoin. These are SHA1, SHA256, SHA256d, SHA512, and RIPEMD160. As an -ancilliary thing, it exposes hexadecimal serialization and deserialization, -since these are needed to display hashes anway. +ancillary thing, it exposes hexadecimal serialization and deserialization, +since these are needed to display hashes anyway. [Documentation](https://docs.rs/dashcore_hashes/) diff --git a/hashes/src/hmac.rs b/hashes/src/hmac.rs index 20262ee40..46b374de9 100644 --- a/hashes/src/hmac.rs +++ b/hashes/src/hmac.rs @@ -49,7 +49,7 @@ pub struct HmacMidState { pub outer: ::MidState, } -/// Pair of underyling hash engines, used for the inner and outer hash of HMAC. +/// Pair of underlying hash engines, used for the inner and outer hash of HMAC. #[derive(Clone)] pub struct HmacEngine { iengine: T::Engine, diff --git a/hashes/src/lib.rs b/hashes/src/lib.rs index 0631e765b..233175eb2 100644 --- a/hashes/src/lib.rs +++ b/hashes/src/lib.rs @@ -17,7 +17,7 @@ //! This is a simple, no-dependency library which implements the hash functions //! needed by Bitcoin. These are SHA256, SHA256d, and RIPEMD160. As an ancillary //! thing, it exposes hexadecimal serialization and deserialization, since these -//! are needed to display hashes anway. +//! are needed to display hashes anyway. //! //! ## Commonly used operations //! diff --git a/hashes/src/sha256.rs b/hashes/src/sha256.rs index 40680022c..7e9e83704 100644 --- a/hashes/src/sha256.rs +++ b/hashes/src/sha256.rs @@ -230,7 +230,7 @@ macro_rules! 
round( ); impl Midstate { - #[allow(clippy::identity_op)] // more readble + #[allow(clippy::identity_op)] // more readable const fn read_u32(bytes: &[u8], index: usize) -> u32 { ((bytes[index + 0] as u32) << 24) | ((bytes[index + 1] as u32) << 16) @@ -280,7 +280,7 @@ impl Midstate { if (bytes.len() % 64 <= 64 - 9) || (chunk + 2 == num_chunks) { buf[i] = 0x80; } - #[allow(clippy::identity_op)] // more readble + #[allow(clippy::identity_op)] // more readable #[allow(clippy::erasing_op)] if chunk + 1 == num_chunks { let bit_len = bytes.len() as u64 * 8; @@ -385,7 +385,7 @@ impl Midstate { } let mut output = [0u8; 32]; let mut i = 0; - #[allow(clippy::identity_op)] // more readble + #[allow(clippy::identity_op)] // more readable while i < 8 { output[i * 4 + 0] = (state[i + 0] >> 24) as u8; output[i * 4 + 1] = (state[i + 0] >> 16) as u8; diff --git a/hashes/src/sha256t.rs b/hashes/src/sha256t.rs index 0ac29e6b6..e5a0dba69 100644 --- a/hashes/src/sha256t.rs +++ b/hashes/src/sha256t.rs @@ -120,7 +120,7 @@ fn from_engine(e: sha256::HashEngine) -> Hash { /// says the midstate should be generated by hashing the supplied string in a way described in /// BIP-341. Alternatively, you can supply `hash_bytes` to hash raw bytes. If you have the midstate /// already pre-computed and prefer **compiler** performance to readability you may use -/// `raw(MIDSTATE_BYTES, HASHED_BYTES_LENGHT)` instead. +/// `raw(MIDSTATE_BYTES, HASHED_BYTES_LENGTH)` instead. /// /// Both visibility modifiers and attributes are optional and passed to inner structs (excluding /// `#[hash_newtype(...)]`). The attributes suffer same compiler performance limitations as in diff --git a/hashes/src/sha512_256.rs b/hashes/src/sha512_256.rs index 01774e804..2d24ffbb9 100644 --- a/hashes/src/sha512_256.rs +++ b/hashes/src/sha512_256.rs @@ -19,7 +19,7 @@ //! SHA512_256 implementation. //! -//! SHA512/256 is a hash function that uses the sha512 alogrithm but it truncates +//! SHA512/256 is a hash function that uses the sha512 algorithm but it truncates //! the output to 256 bits. It has different initial constants than sha512 so it //! produces an entirely different hash compared to sha512. More information at //! . @@ -33,7 +33,7 @@ use crate::{sha512, Error}; /// Engine to compute SHA512/256 hash function. /// -/// SHA512/256 is a hash function that uses the sha512 alogrithm but it truncates +/// SHA512/256 is a hash function that uses the sha512 algorithm but it truncates /// the output to 256 bits. It has different initial constants than sha512 so it /// produces an entirely different hash compared to sha512. More information at /// . @@ -75,7 +75,7 @@ impl crate::HashEngine for HashEngine { crate::internal_macros::hash_type! { 256, false, - "Output of the SHA512/256 hash function.\n\nSHA512/256 is a hash function that uses the sha512 alogrithm but it truncates the output to 256 bits. It has different initial constants than sha512 so it produces an entirely different hash compared to sha512. More information at . ", + "Output of the SHA512/256 hash function.\n\nSHA512/256 is a hash function that uses the sha512 algorithm but it truncates the output to 256 bits. It has different initial constants than sha512 so it produces an entirely different hash compared to sha512. More information at . 
", "crate::util::json_hex_string::len_32" } diff --git a/internals/src/hex/display.rs b/internals/src/hex/display.rs index dc541812f..239debb80 100644 --- a/internals/src/hex/display.rs +++ b/internals/src/hex/display.rs @@ -62,7 +62,7 @@ pub trait DisplayHex: Copy + sealed::IsRef { /// Appends hex-encoded content to an existing `String`. /// /// This may be faster than `write!(string, "{:x}", self.display_hex())` because it uses - /// `reserve_sugggestion`. + /// `reserve_suggestion`. #[cfg(feature = "alloc")] fn append_hex_to_string(self, case: Case, string: &mut String) { use fmt::Write; diff --git a/key-wallet-ffi/FFI_API.md b/key-wallet-ffi/FFI_API.md index c42372067..ebb9d62b8 100644 --- a/key-wallet-ffi/FFI_API.md +++ b/key-wallet-ffi/FFI_API.md @@ -63,7 +63,7 @@ Functions: 19 | `wallet_manager_get_wallet_ids` | Get wallet IDs # Safety - `manager` must be a valid pointer to an FFIWallet... | wallet_manager | | `wallet_manager_import_wallet_from_bytes` | No description | wallet_manager | | `wallet_manager_process_transaction` | Process a transaction through all wallets Checks a transaction against all w... | wallet_manager | -| `wallet_manager_update_height` | Update block height for a network # Safety - `manager` must be a valid poin... | wallet_manager | +| `wallet_manager_update_height` | Update block height for a network # Safety - `manager` must be a valid point... | wallet_manager | | `wallet_manager_wallet_count` | Get wallet count # Safety - `manager` must be a valid pointer to an FFIWall... | wallet_manager | ### Wallet Operations @@ -82,7 +82,7 @@ Functions: 62 | `managed_account_get_parent_wallet_id` | Get the parent wallet ID of a managed account Note: ManagedAccount doesn't s... | managed_account | | `managed_wallet_check_transaction` | Check if a transaction belongs to the wallet This function checks a transact... | transaction_checking | | `managed_wallet_free` | Free managed wallet info # Safety - `managed_wallet` must be a valid pointe... | managed_wallet | -| `managed_wallet_generate_addresses_to_index` | Generate addresses up to a specific index in a pool This ensures that addres... | address_pool | +| `managed_wallet_generate_addresses_to_index` | Generate addresses up to a specific index in a pool This ensures that address... | address_pool | | `managed_wallet_get_account` | Get a managed account from a managed wallet This function gets a ManagedAcco... | managed_account | | `managed_wallet_get_account_collection` | Get managed account collection for a specific network from wallet manager # ... | managed_account_collection | | `managed_wallet_get_account_count` | Get number of accounts in a managed wallet # Safety - `manager` must be a v... | managed_account | @@ -93,11 +93,11 @@ Functions: 62 | `managed_wallet_get_dashpay_external_account` | Get a managed DashPay external account by composite key # Safety - Pointers ... | managed_account | | `managed_wallet_get_dashpay_receiving_account` | Get a managed DashPay receiving funds account by composite key # Safety - `m... | managed_account | | `managed_wallet_get_next_bip44_change_address` | Get the next unused change address Generates the next unused change address ... | managed_wallet | -| `managed_wallet_get_next_bip44_receive_address` | Get the next unused receive address Generates the next unused receive addres... | managed_wallet | +| `managed_wallet_get_next_bip44_receive_address` | Get the next unused receive address Generates the next unused receive address... 
| managed_wallet | | `managed_wallet_get_top_up_account_with_registration_index` | Get a managed IdentityTopUp account with a specific registration index This ... | managed_account | | `managed_wallet_get_utxos` | Get all UTXOs from managed wallet info # Safety - `managed_info` must be a ... | utxo | | `managed_wallet_info_free` | Free managed wallet info returned by wallet_manager_get_managed_wallet_info ... | managed_wallet | -| `managed_wallet_mark_address_used` | Mark an address as used in the pool This updates the pool's tracking of whic... | address_pool | +| `managed_wallet_mark_address_used` | Mark an address as used in the pool This updates the pool's tracking of which... | address_pool | | `managed_wallet_set_gap_limit` | Set the gap limit for an address pool The gap limit determines how many unus... | address_pool | | `wallet_add_account` | Add an account to the wallet without xpub # Safety This function dereferenc... | wallet | | `wallet_add_account_with_string_xpub` | Add an account to the wallet with xpub as string # Safety This function der... | wallet | @@ -172,13 +172,13 @@ Functions: 94 | `account_derive_extended_private_key_from_mnemonic` | Derive an extended private key from a mnemonic + optional passphrase at the g... | account_derivation | | `account_derive_extended_private_key_from_seed` | Derive an extended private key from a raw seed buffer at the given index | account_derivation | | `account_derive_private_key_as_wif_at` | Derive a private key from an account at a given chain/index and return as WIF... | account_derivation | -| `account_derive_private_key_at` | Derive a private key (secp256k1) from an account at a given chain/index, usin... | account_derivation | +| `account_derive_private_key_at` | Derive a private key (secp256k1) from an account at a given chain/index, using... | account_derivation | | `account_derive_private_key_from_mnemonic` | Derive a private key from a mnemonic + optional passphrase at the given index | account_derivation | | `account_derive_private_key_from_seed` | Derive a private key from a raw seed buffer at the given index | account_derivation | | `account_free` | Free an account handle # Safety - `account` must be a valid pointer to an F... | account | | `account_get_account_type` | Get the account type of an account # Safety - `account` must be a valid poi... | account | | `account_get_extended_public_key_as_string` | Get the extended public key of an account as a string # Safety - `account` ... | account | -| `account_get_is_watch_only` | Check if an account is watch-only # Safety - `account` must be a valid poin... | account | +| `account_get_is_watch_only` | Check if an account is watch-only # Safety - `account` must be a valid point... | account | | `account_get_network` | Get the network of an account # Safety - `account` must be a valid pointer ... | account | | `bls_account_derive_private_key_from_mnemonic` | No description | account_derivation | | `bls_account_derive_private_key_from_seed` | No description | account_derivation | @@ -219,7 +219,7 @@ Functions: 94 | `managed_account_collection_has_provider_owner_keys` | Check if provider owner keys account exists in managed collection # Safety ... | managed_account_collection | | `managed_account_collection_has_provider_platform_keys` | Check if provider platform keys account exists in managed collection # Safet... 
| managed_account_collection | | `managed_account_collection_has_provider_voting_keys` | Check if provider voting keys account exists in managed collection # Safety ... | managed_account_collection | -| `managed_account_collection_summary` | Get a human-readable summary of all accounts in the managed collection Retur... | managed_account_collection | +| `managed_account_collection_summary` | Get a human-readable summary of all accounts in the managed collection Return... | managed_account_collection | | `managed_account_collection_summary_data` | Get structured account collection summary data for managed collection Return... | managed_account_collection | | `managed_account_collection_summary_free` | Free a managed account collection summary and all its allocated memory # Saf... | managed_account_collection | | `managed_account_free` | Free a managed account handle # Safety - `account` must be a valid pointer ... | managed_account | diff --git a/key-wallet/src/bip32.rs b/key-wallet/src/bip32.rs index ab13df77b..506288d0c 100644 --- a/key-wallet/src/bip32.rs +++ b/key-wallet/src/bip32.rs @@ -937,7 +937,7 @@ impl serde::Serialize for ChildNumber { /// Trait that allows possibly failable conversion from a type into a /// derivation path pub trait IntoDerivationPath { - /// Convers a given type into a [`DerivationPath`] with possible error + /// Converts a given type into a [`DerivationPath`] with possible error fn into_derivation_path(self) -> Result; } diff --git a/key-wallet/tests/psbt.rs b/key-wallet/tests/psbt.rs index 6262e235a..089a196f2 100644 --- a/key-wallet/tests/psbt.rs +++ b/key-wallet/tests/psbt.rs @@ -71,7 +71,7 @@ fn bip174_psbt_workflow() { // Strings from BIP 174 test vector. let test_vector = vec![ ("cQREycwKkUqJrgkYqcSLpv5Ab1ytgVkRk5e7dENJ5pQjUd83qwFd", "m/0h/0h/0h"), // from_priv, into_derivation_path? - ("cUQLHVPngMorTZfKhb74quVxtiUuHtt5CnbEgnZTUFk8Vhid9HCh", "m/0h/0h/2h"), + ("cUQLHVPngMoreTZfKhb74quVxtiUuHtt5CnbEgnZTUFk8Vhid9HCh", "m/0h/0h/2h"), ]; // We pass the keys to the signer after doing verification to make explicit @@ -276,15 +276,15 @@ fn update_psbt(mut psbt: Psbt, fingerprint: Fingerprint) -> Psbt { psbt } -/// `pk_path` holds tuples of `(public_key, derivation_path)`. `indecies` is used to access the +/// `pk_path` holds tuples of `(public_key, derivation_path)`. `indices` is used to access the /// `pk_path` vector. `fingerprint` is from the parent extended public key. fn bip32_derivation( fingerprint: Fingerprint, pk_path: &[(&str, &str)], - indecies: Vec, + indices: Vec, ) -> BTreeMap { let mut tree = BTreeMap::new(); - for i in indecies { + for i in indices { let pk = pk_path[i].0; let path = pk_path[i].1; diff --git a/rpc-json/src/lib.rs b/rpc-json/src/lib.rs index 066348594..454b2f639 100644 --- a/rpc-json/src/lib.rs +++ b/rpc-json/src/lib.rs @@ -1490,7 +1490,7 @@ pub struct GetBlockTemplateResult { pub min_time: u64, /// List of things that may be changed by the client before submitting a /// block - pub mutable: Vec, + pub mutable: Vec, // TODO figure out what is the data is represented to value // pub value: /// A range of valid nonces @@ -1573,7 +1573,7 @@ pub enum GetBlockTemplateResultRules { /// but not implemented in the getblocktemplate implementation of Bitcoin Core. 
#[derive(Copy, Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] #[serde(rename_all = "lowercase")] -pub enum GetBlockTemplateResulMutations { +pub enum GetBlockTemplateResultMutations { /// The client is allowed to modify the time in the header of the block Time, /// The client is allowed to add transactions to the block From 27c7a3de3fcb9d3fb848cec3aa6fa3e44955fd97 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 06:38:58 +1000 Subject: [PATCH 09/14] Fix `clippy-workspace` checks --- dash/examples/ecdsa-psbt.rs | 2 +- dash/examples/taproot-psbt.rs | 7 +++--- hashes/benches/hashes.rs | 9 +++---- .../examples/wallet_creation.rs | 1 - rpc-client/examples/connect_to_masternode.rs | 4 +-- rpc-client/src/client.rs | 9 ++++--- rpc-integration-test/src/main.rs | 25 +++++++++++++++++-- rpc-json/src/lib.rs | 8 +++--- 8 files changed, 43 insertions(+), 22 deletions(-) diff --git a/dash/examples/ecdsa-psbt.rs b/dash/examples/ecdsa-psbt.rs index 64ae668b6..313675537 100644 --- a/dash/examples/ecdsa-psbt.rs +++ b/dash/examples/ecdsa-psbt.rs @@ -255,7 +255,7 @@ impl WatchOnly { let sigs: Vec<_> = psbt.inputs[0].partial_sigs.values().collect(); let mut script_witness: Witness = Witness::new(); - script_witness.push(&sigs[0].to_vec()); + script_witness.push(sigs[0].to_vec()); script_witness.push(self.input_xpub.to_pub().to_bytes()); psbt.inputs[0].final_script_witness = Some(script_witness); diff --git a/dash/examples/taproot-psbt.rs b/dash/examples/taproot-psbt.rs index 1611106bf..f94c5ac83 100644 --- a/dash/examples/taproot-psbt.rs +++ b/dash/examples/taproot-psbt.rs @@ -398,10 +398,10 @@ impl BenefactorWallet { if let ChildNumber::Normal { index, } = self.next + && index > 0 + && self.current_spend_info.is_some() { - if index > 0 && self.current_spend_info.is_some() { - return Err("Transaction already exists, use refresh_inheritance_timelock to refresh the timelock".into()); - } + return Err("Transaction already exists, use refresh_inheritance_timelock to refresh the timelock".into()); } // We use some other derivation path in this example for our inheritance protocol. The important thing is to ensure // that we use an unhardened path so we can make use of xpubs. @@ -473,7 +473,6 @@ impl BenefactorWallet { let input = Input { witness_utxo: { - let script_pubkey = script_pubkey; let amount = Amount::from_sat(value); Some(TxOut { diff --git a/hashes/benches/hashes.rs b/hashes/benches/hashes.rs index 1a315c328..2e4c16da9 100644 --- a/hashes/benches/hashes.rs +++ b/hashes/benches/hashes.rs @@ -128,7 +128,6 @@ fn bench_siphash24(c: &mut Criterion) { for (size, label) in [(1024, "1k"), (65536, "64k"), (1048576, "1m")] { group.throughput(Throughput::Bytes(size as u64)); group.bench_with_input(BenchmarkId::from_parameter(label), &size, |b, &size| { - let key = [0u8; 16]; let bytes = vec![1u8; size]; b.iter(|| { let hash = @@ -217,26 +216,26 @@ fn bench_slice_comparisons(c: &mut Criterion) { group.bench_function("32b_ne", |b| { let hash_a = sha256::Hash::hash(&[0; 1]); let hash_b = sha256::Hash::hash(&[1; 1]); - b.iter(|| &hash_a[..] == &hash_b[..]); + b.iter(|| hash_a[..] == hash_b[..]); }); group.bench_function("32b_eq", |b| { let hash_a = sha256::Hash::hash(&[0; 1]); let hash_b = sha256::Hash::hash(&[0; 1]); - b.iter(|| &hash_a[..] == &hash_b[..]); + b.iter(|| hash_a[..] == hash_b[..]); }); // 64-byte comparisons (SHA512) group.bench_function("64b_ne", |b| { let hash_a = sha512::Hash::hash(&[0; 1]); let hash_b = sha512::Hash::hash(&[1; 1]); - b.iter(|| &hash_a[..] 
== &hash_b[..]); + b.iter(|| hash_a[..] == hash_b[..]); }); group.bench_function("64b_eq", |b| { let hash_a = sha512::Hash::hash(&[0; 1]); let hash_b = sha512::Hash::hash(&[0; 1]); - b.iter(|| &hash_a[..] == &hash_b[..]); + b.iter(|| hash_a[..] == hash_b[..]); }); group.finish(); diff --git a/key-wallet-manager/examples/wallet_creation.rs b/key-wallet-manager/examples/wallet_creation.rs index 241b90a19..2f4decf2b 100644 --- a/key-wallet-manager/examples/wallet_creation.rs +++ b/key-wallet-manager/examples/wallet_creation.rs @@ -5,7 +5,6 @@ //! - Creating wallets from mnemonics //! - Managing wallet accounts and addresses -use hex; use key_wallet::account::StandardAccountType; use key_wallet::wallet::initialization::WalletAccountCreationOptions; use key_wallet::wallet::managed_wallet_info::transaction_building::AccountTypePreference; diff --git a/rpc-client/examples/connect_to_masternode.rs b/rpc-client/examples/connect_to_masternode.rs index 0c108b497..01e9d6ff0 100644 --- a/rpc-client/examples/connect_to_masternode.rs +++ b/rpc-client/examples/connect_to_masternode.rs @@ -73,7 +73,7 @@ fn main() { println!("\nQuorum list: \n{:?}", quorum_list); let quorum_hashes = quorum_list.quorums_by_type.get(&QuorumType::LlmqTest).unwrap(); - let quorum_hash = quorum_hashes.get(0); + let quorum_hash = quorum_hashes.first(); // Get Quorum info let quorum_info = @@ -83,7 +83,7 @@ fn main() { let quorum_listextended = rpc.get_quorum_listextended(Some(quorum_info.height)).unwrap(); println!("\n\nQuorum list extended \n{:?}", quorum_listextended); - let mn0 = quorum_info.members.get(0).unwrap(); + let mn0 = quorum_info.members.first().unwrap(); let mn0_pro_tx_hash = mn0.to_owned().pro_tx_hash; // Get Quorum DKG status diff --git a/rpc-client/src/client.rs b/rpc-client/src/client.rs index 772b3bc59..f37d87486 100644 --- a/rpc-client/src/client.rs +++ b/rpc-client/src/client.rs @@ -841,6 +841,7 @@ pub trait RpcApi: Sized { self.call("getchaintips", &[]) } + #[allow(clippy::too_many_arguments)] fn send_to_address( &self, address: &Address, @@ -995,7 +996,7 @@ pub trait RpcApi: Sized { /// # Arguments /// /// 1. `timeout`: Time in milliseconds to wait for a response. 0 - /// indicates no timeout. + /// indicates no timeout. fn wait_for_new_block(&self, timeout: u64) -> Result { self.call("waitfornewblock", &[into_json(timeout)?]) } @@ -1007,7 +1008,7 @@ pub trait RpcApi: Sized { /// /// 1. `blockhash`: Block hash to wait for. /// 2. `timeout`: Time in milliseconds to wait for a response. 0 - /// indicates no timeout. + /// indicates no timeout. 
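// Illustrative sketch (not part of the patch): the shapes clippy nudges the code
// toward in this commit, shown on a hypothetical `Peers` struct rather than the
// real PSBT/bench types. Let-chains (`if let ... && cond`) need a recent toolchain;
// `.first()` replaces `.get(0)`; slices are compared without borrowing both sides.
struct Peers {
    names: Vec<String>,
    preferred: Option<usize>,
}

fn clippy_preferred_forms(peers: &Peers, a: &[u8], b: &[u8]) -> bool {
    // `.first()` instead of `.get(0)`, as in connect_to_masternode.rs above.
    let _first = peers.names.first();

    // Nested `if` collapsed into a let-chain, mirroring the taproot-psbt change.
    if let Some(index) = peers.preferred
        && index > 0
    {
        return true;
    }

    // Direct slice comparison instead of `&a[..] == &b[..]`, as in the hash benches.
    a[..] == b[..]
}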
fn wait_for_block( &self, blockhash: &dashcore::BlockHash, @@ -1452,6 +1453,7 @@ pub trait RpcApi: Sized { } /// Creates a ProRegTx referencing an existing collateral and and sends it to the network + #[allow(clippy::too_many_arguments)] fn get_protx_register( &self, collateral_hash: &str, @@ -1482,6 +1484,7 @@ pub trait RpcApi: Sized { } /// Creates and funds a ProRegTx with the 1,000 DASH necessary for a masternode and then sends it to the network + #[allow(clippy::too_many_arguments)] fn get_protx_register_fund( &self, collateral_address: &str, @@ -1510,6 +1513,7 @@ pub trait RpcApi: Sized { } /// Creates an unsigned ProTx and a message that must be signed externally + #[allow(clippy::too_many_arguments)] fn get_protx_register_prepare( &self, collateral_hash: &str, @@ -1730,7 +1734,6 @@ fn log_response(cmd: &str, resp: &Result) { #[cfg(test)] mod tests { use super::*; - use serde_json; #[test] fn test_raw_tx() { diff --git a/rpc-integration-test/src/main.rs b/rpc-integration-test/src/main.rs index eb86b7fbd..31ebe19db 100644 --- a/rpc-integration-test/src/main.rs +++ b/rpc-integration-test/src/main.rs @@ -115,6 +115,7 @@ fn btc>(btc: F) -> Amount { } /// Quickly create a signed BTC amount. +#[allow(dead_code)] fn sbtc>(btc: F) -> SignedAmount { SignedAmount::from_btc(btc.into()).unwrap() } @@ -422,6 +423,7 @@ fn test_dump_private_key(cl: &Client) { assert_eq!(addr.to_string(), Address::p2pkh(&sk.public_key(&SECP), *NET).to_string()); } +#[allow(dead_code)] fn test_get_balance_generate_to_address(cl: &Client) { let initial = cl.get_balance(None, None).unwrap(); @@ -464,6 +466,7 @@ fn test_get_block_hash(cl: &Client) { assert_eq!(cl.get_block_hash(h).unwrap(), cl.get_best_block_hash().unwrap()); } +#[allow(dead_code)] fn test_get_block(cl: &Client) { let tip = cl.get_best_block_hash().unwrap(); let block = cl.get_block(&tip).unwrap(); @@ -515,7 +518,7 @@ fn test_get_address_info(cl: &Client) { fn test_set_label(cl: &Client) { let addr = cl.get_new_address(Some("label")).unwrap().require_network(*NET).unwrap(); let info = cl.get_address_info(&addr).unwrap(); - if wallet_node_version() >= 0_20_00_00 { + if wallet_node_version() >= 20_00_00 { assert!(info.label.is_none()); assert_eq!(info.labels[0], json::GetAddressInfoResultLabel::Simple("label".into())); } else { @@ -531,7 +534,7 @@ fn test_set_label(cl: &Client) { cl.set_label(&addr, "other").unwrap(); let info = cl.get_address_info(&addr).unwrap(); - if wallet_node_version() >= 0_20_00_00 { + if wallet_node_version() >= 20_00_00 { assert!(info.label.is_none()); assert_eq!(info.labels[0], json::GetAddressInfoResultLabel::Simple("other".into())); } else { @@ -677,6 +680,7 @@ fn test_get_tx_out(cl: &Client) { let _ = cl.get_tx_out(&txid, 0, None).unwrap(); } +#[allow(dead_code)] fn test_get_tx_out_proof(cl: &Client) { let txid1 = cl .send_to_address(&RANDOM_ADDRESS, btc(1), None, None, None, None, None, None, None, None) @@ -713,6 +717,7 @@ fn test_lock_unspent_unlock_unspent(cl: &Client) { assert!(cl.unlock_unspent_all().unwrap()); } +#[allow(dead_code)] fn test_get_block_filter(cl: &Client) { let addr = &cl.get_new_address(None).unwrap().require_network(*NET).unwrap(); let blocks = cl.generate_to_address(7, addr).unwrap(); @@ -1340,6 +1345,7 @@ fn test_getblocktemplate(cl: &Client) { cl.generate_to_address(2, &RANDOM_ADDRESS).unwrap(); } +#[allow(dead_code)] fn test_stop(cl: &Client) { println!("Stopping: '{}'", cl.stop().unwrap()); } @@ -1357,6 +1363,7 @@ fn test_get_masternode_list(cl: &Client) { let _masternode_list = 
cl.get_masternode_list(Some("json"), None).unwrap(); } +#[allow(dead_code)] fn test_get_masternode_outputs(cl: &Client) { let _masternode_outputs = cl.get_masternode_outputs().unwrap(); } @@ -1425,6 +1432,7 @@ fn test_get_quorum_sign(cl: &Client, _wallet_client: &Client) { .unwrap(); } +#[allow(dead_code)] fn test_get_quorum_getrecsig(cl: &Client) { let list = cl.get_quorum_list(Some(1)).unwrap(); let quorum_type = list.quorums_by_type.keys().next().unwrap().to_owned(); @@ -1458,6 +1466,7 @@ fn test_get_quorum_isconflicting(cl: &Client) { .unwrap(); } +#[allow(dead_code)] fn test_get_quorum_memberof(cl: &Client) { let pro_tx_hash = ProTxHash::from_str("39c07d2c9c6d0ead56f52726b63c15e295cb5c3ecf7fe1fefcfb23b2e3cfed1f") @@ -1466,6 +1475,7 @@ fn test_get_quorum_memberof(cl: &Client) { assert!(quorum_memberof.0[0].height > 0); } +#[allow(dead_code)] fn test_get_quorum_rotationinfo(cl: &Client) { let block_hash = BlockHash::from_str("0000012197b7ca6360af3756c6a49c217dbbdf8b595fd55e0fcef7ffcd546044") @@ -1482,6 +1492,7 @@ fn test_get_quorum_selectquorum(cl: &Client) { .unwrap(); } +#[allow(dead_code)] fn test_get_quorum_verify(cl: &Client) { let _quorum_verify = cl.get_quorum_verify( LlmqTest, @@ -1513,6 +1524,7 @@ fn test_get_protx_diff(cl: &Client) { let _protx_diff = cl.get_protx_diff(1000, 1000).unwrap(); } +#[allow(dead_code)] fn test_get_protx_info(cl: &Client) { let pro_tx_hash = ProTxHash::from_str("000000000c9eddd5d2a707281b7e30d5aac974dac600ff10f01937e1ca36066f") @@ -1538,6 +1550,7 @@ fn test_get_protx_list(cl: &Client) { cl.get_protx_list(Some(ProTxListType::Valid), Some(true), Some(1000)).unwrap(); } +#[allow(dead_code)] fn test_get_protx_register(cl: &Client) { let _protx_register = cl.get_protx_register( "8b2eab3413abb6e04d17d1defe2b71039ba6b6f72ea1e5dab29bb10e7b745948", @@ -1553,11 +1566,13 @@ fn test_get_protx_register(cl: &Client) { ).unwrap(); } +#[allow(dead_code)] fn test_get_protx_register_fund(cl: &Client) { let _protx_register_fund = cl.get_protx_register_fund("yakx4mMRptKhgfjedNzX5FGQq7kSSBF2e7", "3.4.5.6:3456", "yURczr3qY31xkQZfFu8eZvKz19eAEPQxsd", "0e02146e9c34cfbcb3f3037574a1abb35525e2ca0c3c6901dbf82ac591e30218d1711223b7ca956edf39f3d984d06d51", "yURczr3qY31xkQZfFu8eZvKz19eAEPQxsd", 5.0, "yUYTxqjpCfAAK4vgxXtBPywRBtZqsxN7Vy", Some("yRMFHxcJ2aS2vfo5whhE2Gg73dfQVm8LAF"), Some(false)).unwrap(); } +#[allow(dead_code)] fn test_get_protx_register_prepare(cl: &Client) { let owner_address = Address::::from_str("yemjhGQ99V5ayJMjoyGGPtxteahii6G1Jz") .unwrap() @@ -1589,6 +1604,7 @@ fn test_get_protx_register_prepare(cl: &Client) { ).unwrap(); } +#[allow(dead_code)] fn test_get_protx_register_submit(cl: &Client) { let _protx_register_submit = cl.get_protx_register_submit( "03000100012d988526d5d1efd32320023c92eff09c2963dcb021b0de9761", @@ -1596,6 +1612,7 @@ fn test_get_protx_register_submit(cl: &Client) { ).unwrap(); } +#[allow(dead_code)] fn test_get_protx_revoke(cl: &Client) { let _protx_revoke = cl .get_protx_revoke( @@ -1607,6 +1624,7 @@ fn test_get_protx_revoke(cl: &Client) { .unwrap(); } +#[allow(dead_code)] fn test_get_protx_update_registrar(cl: &Client) { let voting_address = Address::::from_str("yX2cDS4kcJ4LK4uq9Hd4TG7kURV3sGLZrw") @@ -1623,6 +1641,7 @@ fn test_get_protx_update_registrar(cl: &Client) { ).unwrap(); } +#[allow(dead_code)] fn test_get_protx_update_service(cl: &Client) { let _protx_update_service = cl .get_protx_update_service( @@ -1635,6 +1654,7 @@ fn test_get_protx_update_service(cl: &Client) { .unwrap(); } +#[allow(dead_code)] fn test_get_verifychainlock(cl: 
&Client) { let _verifychainlock = cl.get_verifychainlock("00000036d5c520be6e9a32d3829efc983a7b5e88052bf138f80a2b3988689a24", "97ec34efd1615b84af62495e54024880752f57790cf450ae974b80002440963592d96826e24f109e6c149411b70bb9a0035443752368590adae60365cf4251464e0423c1263e9c56a33eae9be9e9c79a117151b2173bcee93497008cace8d793", None).unwrap(); } @@ -1645,6 +1665,7 @@ fn test_get_asset_unlock_statuses(cl: &Client) { let _statuses = cl.get_asset_unlock_statuses(&indices, height); } +#[allow(dead_code)] fn test_get_verifyislock(cl: &Client) { let _verifychainlock = cl.get_verifyislock("d0b1a9c70fdfff6bf7f6cbe3d1fe33a4ca44ceb17059b6381a4ac25d9c9b6495", "8b5174d0e95b5642ebec23c3fe8f0bbf8f6993502f4210322871bba0e818ff3b", "97ec34efd1615b84af62495e54024880752f57790cf450ae974b80002440963592d96826e24f109e6c149411b70bb9a0035443752368590adae60365cf4251464e0423c1263e9c56a33eae9be9e9c79a117151b2173bcee93497008cace8d793", None).unwrap(); } diff --git a/rpc-json/src/lib.rs b/rpc-json/src/lib.rs index 454b2f639..4ff72665d 100644 --- a/rpc-json/src/lib.rs +++ b/rpc-json/src/lib.rs @@ -3270,10 +3270,10 @@ mod tests { } let json = r#"{"field": 1}"#; - let result: Test = serde_json::from_str(&json).unwrap(); + let result: Test = serde_json::from_str(json).unwrap(); assert_eq!(result.field, Some(1)); let json = r#"{"field": -1}"#; - let result: Test = serde_json::from_str(&json).unwrap(); + let result: Test = serde_json::from_str(json).unwrap(); assert_eq!(result.field, None); } @@ -3294,7 +3294,7 @@ mod tests { let result: ExtendedQuorumListResult = serde_json::from_str(json_list).expect("expected to deserialize json"); let first_type = result.quorums_by_type.get(&QuorumType::Llmq50_60).unwrap(); - let first_quorum = first_type.into_iter().nth(0).unwrap(); + let first_quorum = first_type.iter().next().unwrap(); assert_eq!( first_quorum.0.to_byte_array(), @@ -3424,7 +3424,7 @@ mod tests { ] }"#; let result: MasternodeListDiff = - serde_json::from_str(&json).expect("expected to deserialize json"); + serde_json::from_str(json).expect("expected to deserialize json"); println!("{:#?}", result); assert_eq!(32, result.added_mns[0].pro_tx_hash.as_byte_array().len()); From 3438a984ecc2a46d3d6efaf28511da60ed33a62c Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 20 Nov 2025 22:43:14 +1000 Subject: [PATCH 10/14] Fix `verify-ffi-docs` --- dash-spv-ffi/FFI_API.md | 62 +-- dash-spv-ffi/scripts/check_ffi_docs.sh | 32 -- dash-spv-ffi/scripts/generate_ffi_docs.py | 7 +- key-wallet-ffi/FFI_API.md | 414 ++++++++++---------- key-wallet-ffi/scripts/check_ffi_docs.sh | 32 -- key-wallet-ffi/scripts/generate_ffi_docs.py | 7 +- 6 files changed, 250 insertions(+), 304 deletions(-) delete mode 100755 dash-spv-ffi/scripts/check_ffi_docs.sh delete mode 100755 key-wallet-ffi/scripts/check_ffi_docs.sh diff --git a/dash-spv-ffi/FFI_API.md b/dash-spv-ffi/FFI_API.md index 254559dfb..868f72639 100644 --- a/dash-spv-ffi/FFI_API.md +++ b/dash-spv-ffi/FFI_API.md @@ -39,31 +39,31 @@ Functions: 27 | Function | Description | Module | |----------|-------------|--------| | `dash_spv_ffi_client_update_config` | Update the running client's configuration | client | -| `dash_spv_ffi_config_add_peer` | Adds a peer address to the configuration Accepts either a full socket address... | config | -| `dash_spv_ffi_config_destroy` | Destroys an FFIClientConfig and frees its memory # Safety - `config` must be... | config | -| `dash_spv_ffi_config_get_data_dir` | Gets the data directory path from the configuration # Safety - `config` must... 
| config | -| `dash_spv_ffi_config_get_mempool_strategy` | Gets the mempool synchronization strategy # Safety - `config` must be a vali... | config | -| `dash_spv_ffi_config_get_mempool_tracking` | Gets whether mempool tracking is enabled # Safety - `config` must be a valid... | config | -| `dash_spv_ffi_config_get_network` | Gets the network type from the configuration # Safety - `config` must be a v... | config | +| `dash_spv_ffi_config_add_peer` | Adds a peer address to the configuration Accepts either a full socket... | config | +| `dash_spv_ffi_config_destroy` | Destroys an FFIClientConfig and frees its memory # Safety - `config` must... | config | +| `dash_spv_ffi_config_get_data_dir` | Gets the data directory path from the configuration # Safety - `config`... | config | +| `dash_spv_ffi_config_get_mempool_strategy` | Gets the mempool synchronization strategy # Safety - `config` must be a... | config | +| `dash_spv_ffi_config_get_mempool_tracking` | Gets whether mempool tracking is enabled # Safety - `config` must be a... | config | +| `dash_spv_ffi_config_get_network` | Gets the network type from the configuration # Safety - `config` must be a... | config | | `dash_spv_ffi_config_mainnet` | No description | config | | `dash_spv_ffi_config_new` | No description | config | -| `dash_spv_ffi_config_set_data_dir` | Sets the data directory for storing blockchain data # Safety - `config` must... | config | -| `dash_spv_ffi_config_set_fetch_mempool_transactions` | Sets whether to fetch full mempool transaction data # Safety - `config` must... | config | -| `dash_spv_ffi_config_set_filter_load` | Sets whether to load bloom filters # Safety - `config` must be a valid point... | config | -| `dash_spv_ffi_config_set_masternode_sync_enabled` | Enables or disables masternode synchronization # Safety - `config` must be a... | config | -| `dash_spv_ffi_config_set_max_mempool_transactions` | Sets the maximum number of mempool transactions to track # Safety - `config`... | config | -| `dash_spv_ffi_config_set_max_peers` | Sets the maximum number of peers to connect to # Safety - `config` must be a... | config | -| `dash_spv_ffi_config_set_mempool_strategy` | Sets the mempool synchronization strategy # Safety - `config` must be a vali... | config | -| `dash_spv_ffi_config_set_mempool_timeout` | Sets the mempool transaction timeout in seconds # Safety - `config` must be ... | config | -| `dash_spv_ffi_config_set_mempool_tracking` | Enables or disables mempool tracking # Safety - `config` must be a valid poi... | config | -| `dash_spv_ffi_config_set_persist_mempool` | Sets whether to persist mempool state to disk # Safety - `config` must be a ... | config | -| `dash_spv_ffi_config_set_relay_transactions` | Sets whether to relay transactions (currently a no-op) # Safety - `config` m... | config | -| `dash_spv_ffi_config_set_restrict_to_configured_peers` | Restrict connections strictly to configured peers (disable DNS discovery and ... | config | -| `dash_spv_ffi_config_set_start_from_height` | Sets the starting block height for synchronization # Safety - `config` must ... | config | -| `dash_spv_ffi_config_set_user_agent` | Sets the user agent string to advertise in the P2P handshake # Safety - `con... | config | -| `dash_spv_ffi_config_set_validation_mode` | Sets the validation mode for the SPV client # Safety - `config` must be a va... | config | -| `dash_spv_ffi_config_set_wallet_creation_time` | Sets the wallet creation timestamp for synchronization optimization # Safety... 
| config | -| `dash_spv_ffi_config_set_worker_threads` | Sets the number of Tokio worker threads for the FFI runtime (0 = auto) # Saf... | config | +| `dash_spv_ffi_config_set_data_dir` | Sets the data directory for storing blockchain data # Safety - `config`... | config | +| `dash_spv_ffi_config_set_fetch_mempool_transactions` | Sets whether to fetch full mempool transaction data # Safety - `config`... | config | +| `dash_spv_ffi_config_set_filter_load` | Sets whether to load bloom filters # Safety - `config` must be a valid... | config | +| `dash_spv_ffi_config_set_masternode_sync_enabled` | Enables or disables masternode synchronization # Safety - `config` must be... | config | +| `dash_spv_ffi_config_set_max_mempool_transactions` | Sets the maximum number of mempool transactions to track # Safety -... | config | +| `dash_spv_ffi_config_set_max_peers` | Sets the maximum number of peers to connect to # Safety - `config` must be... | config | +| `dash_spv_ffi_config_set_mempool_strategy` | Sets the mempool synchronization strategy # Safety - `config` must be a... | config | +| `dash_spv_ffi_config_set_mempool_timeout` | Sets the mempool transaction timeout in seconds # Safety - `config` must be... | config | +| `dash_spv_ffi_config_set_mempool_tracking` | Enables or disables mempool tracking # Safety - `config` must be a valid... | config | +| `dash_spv_ffi_config_set_persist_mempool` | Sets whether to persist mempool state to disk # Safety - `config` must be a... | config | +| `dash_spv_ffi_config_set_relay_transactions` | Sets whether to relay transactions (currently a no-op) # Safety - `config`... | config | +| `dash_spv_ffi_config_set_restrict_to_configured_peers` | Restrict connections strictly to configured peers (disable DNS discovery and... | config | +| `dash_spv_ffi_config_set_start_from_height` | Sets the starting block height for synchronization # Safety - `config` must... | config | +| `dash_spv_ffi_config_set_user_agent` | Sets the user agent string to advertise in the P2P handshake # Safety -... | config | +| `dash_spv_ffi_config_set_validation_mode` | Sets the validation mode for the SPV client # Safety - `config` must be a... | config | +| `dash_spv_ffi_config_set_wallet_creation_time` | Sets the wallet creation timestamp for synchronization optimization #... | config | +| `dash_spv_ffi_config_set_worker_threads` | Sets the number of Tokio worker threads for the FFI runtime (0 = auto) #... | config | | `dash_spv_ffi_config_testnet` | No description | config | ### Synchronization @@ -78,7 +78,7 @@ Functions: 8 | `dash_spv_ffi_client_is_filter_sync_available` | Check if compact filter sync is currently available | client | | `dash_spv_ffi_client_sync_to_tip` | Sync the SPV client to the chain tip | client | | `dash_spv_ffi_client_sync_to_tip_with_progress` | Sync the SPV client to the chain tip with detailed progress updates | client | -| `dash_spv_ffi_client_test_sync` | Performs a test synchronization of the SPV client # Parameters - `client`: P... | client | +| `dash_spv_ffi_client_test_sync` | Performs a test synchronization of the SPV client # Parameters - `client`:... | client | | `dash_spv_ffi_sync_progress_destroy` | Destroy a `FFISyncProgress` object returned by this crate | client | ### Address Monitoring @@ -87,7 +87,7 @@ Functions: 1 | Function | Description | Module | |----------|-------------|--------| -| `dash_spv_ffi_unconfirmed_transaction_destroy_addresses` | Destroys the addresses array allocated for an FFIUnconfirmedTransaction # Sa... 
| types | +| `dash_spv_ffi_unconfirmed_transaction_destroy_addresses` | Destroys the addresses array allocated for an FFIUnconfirmedTransaction #... | types | ### Transaction Management @@ -96,7 +96,7 @@ Functions: 3 | Function | Description | Module | |----------|-------------|--------| | `dash_spv_ffi_client_broadcast_transaction` | No description | broadcast | -| `dash_spv_ffi_unconfirmed_transaction_destroy` | Destroys an FFIUnconfirmedTransaction and all its associated resources # Saf... | types | +| `dash_spv_ffi_unconfirmed_transaction_destroy` | Destroys an FFIUnconfirmedTransaction and all its associated resources #... | types | | `dash_spv_ffi_unconfirmed_transaction_destroy_raw_tx` | Destroys the raw transaction bytes allocated for an FFIUnconfirmedTransaction... | types | ### Mempool Operations @@ -113,10 +113,10 @@ Functions: 4 | Function | Description | Module | |----------|-------------|--------| -| `ffi_dash_spv_get_core_handle` | Creates a CoreSDKHandle from an FFIDashSpvClient # Safety This function is ... | platform_integration | -| `ffi_dash_spv_get_platform_activation_height` | Gets the platform activation height from the Core chain # Safety This funct... | platform_integration | -| `ffi_dash_spv_get_quorum_public_key` | Gets a quorum public key from the Core chain # Safety This function is unsa... | platform_integration | -| `ffi_dash_spv_release_core_handle` | Releases a CoreSDKHandle # Safety This function is unsafe because: - The ca... | platform_integration | +| `ffi_dash_spv_get_core_handle` | Creates a CoreSDKHandle from an FFIDashSpvClient # Safety This function is... | platform_integration | +| `ffi_dash_spv_get_platform_activation_height` | Gets the platform activation height from the Core chain # Safety This... | platform_integration | +| `ffi_dash_spv_get_quorum_public_key` | Gets a quorum public key from the Core chain # Safety This function is... | platform_integration | +| `ffi_dash_spv_release_core_handle` | Releases a CoreSDKHandle # Safety This function is unsafe because: - The... | platform_integration | ### Event Callbacks @@ -151,7 +151,7 @@ Functions: 19 | `dash_spv_ffi_client_get_stats` | Get current runtime statistics for the SPV client | client | | `dash_spv_ffi_client_get_tip_hash` | Get the current chain tip hash (32 bytes) if available | client | | `dash_spv_ffi_client_get_tip_height` | Get the current chain tip height (absolute) | client | -| `dash_spv_ffi_client_get_wallet_manager` | Get the wallet manager from the SPV client Returns a pointer to an `FFIWalle... | client | +| `dash_spv_ffi_client_get_wallet_manager` | Get the wallet manager from the SPV client Returns a pointer to an... | client | | `dash_spv_ffi_client_record_send` | Record that we attempted to send a transaction by its txid | client | | `dash_spv_ffi_client_rescan_blockchain` | Request a rescan of the blockchain from a given height (not yet implemented) | client | | `dash_spv_ffi_enable_test_mode` | No description | utils | diff --git a/dash-spv-ffi/scripts/check_ffi_docs.sh b/dash-spv-ffi/scripts/check_ffi_docs.sh deleted file mode 100755 index 6dede2442..000000000 --- a/dash-spv-ffi/scripts/check_ffi_docs.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -# Check if FFI documentation is up to date - -set -e - -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -PROJECT_DIR="$(dirname "$SCRIPT_DIR")" - -echo "Checking FFI documentation..." 
- -cd "$PROJECT_DIR" - -# Generate new documentation -python3 scripts/generate_ffi_docs.py > /dev/null 2>&1 - -# Check if there are any changes (tracked, staged, or untracked) -if ! git diff --quiet --exit-code -- FFI_API.md \ - || ! git diff --quiet --cached -- FFI_API.md \ - || [ -n "$(git ls-files --others --exclude-standard -- FFI_API.md)" ]; then - echo "❌ FFI documentation is out of date!" - echo "" - echo "Please regenerate the documentation by running:" - echo " cd dash-spv-ffi && python3 scripts/generate_ffi_docs.py" - echo "" - echo "Or use the make command:" - echo " make update-docs" - echo "" - exit 1 -else - echo "✅ FFI documentation is up to date" -fi diff --git a/dash-spv-ffi/scripts/generate_ffi_docs.py b/dash-spv-ffi/scripts/generate_ffi_docs.py index 43ef6aab9..c0b65210a 100755 --- a/dash-spv-ffi/scripts/generate_ffi_docs.py +++ b/dash-spv-ffi/scripts/generate_ffi_docs.py @@ -215,7 +215,12 @@ def generate_markdown(functions: List[FFIFunction]) -> str: desc = func.doc_comment.split('.')[0] if func.doc_comment else "No description" desc = desc.replace('|', '\\|') # Escape pipes in description if len(desc) > 80: - desc = desc[:77] + "..." + # Truncate at last complete word before 77 chars to avoid mid-word breaks + truncate_pos = desc.rfind(' ', 0, 77) + if truncate_pos > 60: # Only if we find a space reasonably close + desc = desc[:truncate_pos] + "..." + else: + desc = desc[:77] + "..." md.append(f"| `{func.name}` | {desc} | {func.module} |") md.append("") diff --git a/key-wallet-ffi/FFI_API.md b/key-wallet-ffi/FFI_API.md index ebb9d62b8..ecaed4921 100644 --- a/key-wallet-ffi/FFI_API.md +++ b/key-wallet-ffi/FFI_API.md @@ -28,7 +28,7 @@ Functions: 2 | Function | Description | Module | |----------|-------------|--------| | `key_wallet_ffi_initialize` | Initialize the library | lib | -| `key_wallet_ffi_version` | Get library version Returns a static string that should NOT be freed by the ... | lib | +| `key_wallet_ffi_version` | Get library version Returns a static string that should NOT be freed by the... | lib | ### Error Handling @@ -36,9 +36,9 @@ Functions: 3 | Function | Description | Module | |----------|-------------|--------| -| `account_result_free_error` | Free an account result's error message (if any) Note: This does NOT free the ... | account | -| `error_message_free` | Free an error message # Safety - `message` must be a valid pointer to a C s... | error | -| `managed_account_result_free_error` | Free a managed account result's error message (if any) Note: This does NOT fr... | managed_account | +| `account_result_free_error` | Free an account result's error message (if any) Note: This does NOT free the... | account | +| `error_message_free` | Free an error message # Safety - `message` must be a valid pointer to a C... | error | +| `managed_account_result_free_error` | Free a managed account result's error message (if any) Note: This does NOT... | managed_account | ### Wallet Manager @@ -46,25 +46,25 @@ Functions: 19 | Function | Description | Module | |----------|-------------|--------| -| `wallet_manager_add_wallet_from_mnemonic` | Add a wallet from mnemonic to the manager (backward compatibility) # Safety ... | wallet_manager | +| `wallet_manager_add_wallet_from_mnemonic` | Add a wallet from mnemonic to the manager (backward compatibility) # Safety... 
| wallet_manager | | `wallet_manager_add_wallet_from_mnemonic_return_serialized_bytes` | No description | wallet_manager | -| `wallet_manager_add_wallet_from_mnemonic_with_options` | Add a wallet from mnemonic to the manager with options # Safety - `manager`... | wallet_manager | +| `wallet_manager_add_wallet_from_mnemonic_with_options` | Add a wallet from mnemonic to the manager with options # Safety -... | wallet_manager | | `wallet_manager_create` | Create a new wallet manager | wallet_manager | -| `wallet_manager_current_height` | Get current height for a network # Safety - `manager` must be a valid point... | wallet_manager | -| `wallet_manager_describe` | Describe the wallet manager for a given network and return a newly allocated ... | wallet_manager | -| `wallet_manager_free` | Free wallet manager # Safety - `manager` must be a valid pointer to an FFIW... | wallet_manager | -| `wallet_manager_free_addresses` | Free address array # Safety - `addresses` must be a valid pointer to an arr... | wallet_manager | +| `wallet_manager_current_height` | Get current height for a network # Safety - `manager` must be a valid... | wallet_manager | +| `wallet_manager_describe` | Describe the wallet manager for a given network and return a newly allocated... | wallet_manager | +| `wallet_manager_free` | Free wallet manager # Safety - `manager` must be a valid pointer to an... | wallet_manager | +| `wallet_manager_free_addresses` | Free address array # Safety - `addresses` must be a valid pointer to an... | wallet_manager | | `wallet_manager_free_string` | Free a string previously returned by wallet manager APIs | wallet_manager | | `wallet_manager_free_wallet_bytes` | No description | wallet_manager | -| `wallet_manager_free_wallet_ids` | Free wallet IDs buffer # Safety - `wallet_ids` must be a valid pointer to a... | wallet_manager | -| `wallet_manager_get_managed_wallet_info` | Get managed wallet info from the manager Returns a reference to the managed ... | wallet_manager | -| `wallet_manager_get_wallet` | Get a wallet from the manager Returns a reference to the wallet if found # ... | wallet_manager | -| `wallet_manager_get_wallet_balance` | Get wallet balance Returns the confirmed and unconfirmed balance for a speci... | wallet_manager | -| `wallet_manager_get_wallet_ids` | Get wallet IDs # Safety - `manager` must be a valid pointer to an FFIWallet... | wallet_manager | +| `wallet_manager_free_wallet_ids` | Free wallet IDs buffer # Safety - `wallet_ids` must be a valid pointer to... | wallet_manager | +| `wallet_manager_get_managed_wallet_info` | Get managed wallet info from the manager Returns a reference to the managed... | wallet_manager | +| `wallet_manager_get_wallet` | Get a wallet from the manager Returns a reference to the wallet if found #... | wallet_manager | +| `wallet_manager_get_wallet_balance` | Get wallet balance Returns the confirmed and unconfirmed balance for a... | wallet_manager | +| `wallet_manager_get_wallet_ids` | Get wallet IDs # Safety - `manager` must be a valid pointer to an... | wallet_manager | | `wallet_manager_import_wallet_from_bytes` | No description | wallet_manager | -| `wallet_manager_process_transaction` | Process a transaction through all wallets Checks a transaction against all w... | wallet_manager | -| `wallet_manager_update_height` | Update block height for a network # Safety - `manager` must be a valid point... | wallet_manager | -| `wallet_manager_wallet_count` | Get wallet count # Safety - `manager` must be a valid pointer to an FFIWall... 
| wallet_manager | +| `wallet_manager_process_transaction` | Process a transaction through all wallets Checks a transaction against all... | wallet_manager | +| `wallet_manager_update_height` | Update block height for a network # Safety - `manager` must be a valid... | wallet_manager | +| `wallet_manager_wallet_count` | Get wallet count # Safety - `manager` must be a valid pointer to an... | wallet_manager | ### Wallet Operations @@ -72,68 +72,68 @@ Functions: 62 | Function | Description | Module | |----------|-------------|--------| -| `account_get_parent_wallet_id` | Get the parent wallet ID of an account # Safety - `account` must be a valid... | account | +| `account_get_parent_wallet_id` | Get the parent wallet ID of an account # Safety - `account` must be a... | account | | `bls_account_get_parent_wallet_id` | No description | account | | `eddsa_account_get_parent_wallet_id` | No description | account | -| `ffi_managed_wallet_free` | Free a managed wallet (FFIManagedWalletInfo type) # Safety - `managed_walle... | transaction_checking | -| `key_wallet_derive_address_from_key` | Derive an address from a private key # Safety - `private_key` must be a vali... | derivation | -| `key_wallet_derive_address_from_seed` | Derive an address from a seed at a specific derivation path # Safety - `seed... | derivation | -| `key_wallet_derive_private_key_from_seed` | Derive a private key from a seed at a specific derivation path # Safety - `s... | derivation | -| `managed_account_get_parent_wallet_id` | Get the parent wallet ID of a managed account Note: ManagedAccount doesn't s... | managed_account | -| `managed_wallet_check_transaction` | Check if a transaction belongs to the wallet This function checks a transact... | transaction_checking | -| `managed_wallet_free` | Free managed wallet info # Safety - `managed_wallet` must be a valid pointe... | managed_wallet | -| `managed_wallet_generate_addresses_to_index` | Generate addresses up to a specific index in a pool This ensures that address... | address_pool | -| `managed_wallet_get_account` | Get a managed account from a managed wallet This function gets a ManagedAcco... | managed_account | -| `managed_wallet_get_account_collection` | Get managed account collection for a specific network from wallet manager # ... | managed_account_collection | -| `managed_wallet_get_account_count` | Get number of accounts in a managed wallet # Safety - `manager` must be a v... | managed_account | -| `managed_wallet_get_address_pool_info` | Get address pool information for an account # Safety - `managed_wallet` mus... | address_pool | -| `managed_wallet_get_balance` | Get wallet balance from managed wallet info Returns the balance breakdown in... | managed_wallet | -| `managed_wallet_get_bip_44_external_address_range` | Get BIP44 external (receive) addresses in the specified range Returns extern... | managed_wallet | -| `managed_wallet_get_bip_44_internal_address_range` | Get BIP44 internal (change) addresses in the specified range Returns interna... | managed_wallet | -| `managed_wallet_get_dashpay_external_account` | Get a managed DashPay external account by composite key # Safety - Pointers ... | managed_account | -| `managed_wallet_get_dashpay_receiving_account` | Get a managed DashPay receiving funds account by composite key # Safety - `m... | managed_account | -| `managed_wallet_get_next_bip44_change_address` | Get the next unused change address Generates the next unused change address ... 
| managed_wallet | -| `managed_wallet_get_next_bip44_receive_address` | Get the next unused receive address Generates the next unused receive address... | managed_wallet | -| `managed_wallet_get_top_up_account_with_registration_index` | Get a managed IdentityTopUp account with a specific registration index This ... | managed_account | -| `managed_wallet_get_utxos` | Get all UTXOs from managed wallet info # Safety - `managed_info` must be a ... | utxo | -| `managed_wallet_info_free` | Free managed wallet info returned by wallet_manager_get_managed_wallet_info ... | managed_wallet | -| `managed_wallet_mark_address_used` | Mark an address as used in the pool This updates the pool's tracking of which... | address_pool | -| `managed_wallet_set_gap_limit` | Set the gap limit for an address pool The gap limit determines how many unus... | address_pool | -| `wallet_add_account` | Add an account to the wallet without xpub # Safety This function dereferenc... | wallet | -| `wallet_add_account_with_string_xpub` | Add an account to the wallet with xpub as string # Safety This function der... | wallet | -| `wallet_add_account_with_xpub_bytes` | Add an account to the wallet with xpub as byte array # Safety This function... | wallet | -| `wallet_add_dashpay_external_account_with_xpub_bytes` | Add a DashPay external (watch-only) account with xpub bytes # Safety - `wall... | wallet | -| `wallet_add_dashpay_receiving_account` | Add a DashPay receiving funds account # Safety - `wallet` must be a valid po... | wallet | -| `wallet_build_and_sign_transaction` | Build and sign a transaction using the wallet's managed info This is the rec... | transaction | +| `ffi_managed_wallet_free` | Free a managed wallet (FFIManagedWalletInfo type) # Safety -... | transaction_checking | +| `key_wallet_derive_address_from_key` | Derive an address from a private key # Safety - `private_key` must be a... | derivation | +| `key_wallet_derive_address_from_seed` | Derive an address from a seed at a specific derivation path # Safety -... | derivation | +| `key_wallet_derive_private_key_from_seed` | Derive a private key from a seed at a specific derivation path # Safety -... | derivation | +| `managed_account_get_parent_wallet_id` | Get the parent wallet ID of a managed account Note: ManagedAccount doesn't... | managed_account | +| `managed_wallet_check_transaction` | Check if a transaction belongs to the wallet This function checks a... | transaction_checking | +| `managed_wallet_free` | Free managed wallet info # Safety - `managed_wallet` must be a valid... | managed_wallet | +| `managed_wallet_generate_addresses_to_index` | Generate addresses up to a specific index in a pool This ensures that... | address_pool | +| `managed_wallet_get_account` | Get a managed account from a managed wallet This function gets a... | managed_account | +| `managed_wallet_get_account_collection` | Get managed account collection for a specific network from wallet manager #... | managed_account_collection | +| `managed_wallet_get_account_count` | Get number of accounts in a managed wallet # Safety - `manager` must be a... | managed_account | +| `managed_wallet_get_address_pool_info` | Get address pool information for an account # Safety - `managed_wallet`... | address_pool | +| `managed_wallet_get_balance` | Get wallet balance from managed wallet info Returns the balance breakdown... | managed_wallet | +| `managed_wallet_get_bip_44_external_address_range` | Get BIP44 external (receive) addresses in the specified range Returns... 
| managed_wallet | +| `managed_wallet_get_bip_44_internal_address_range` | Get BIP44 internal (change) addresses in the specified range Returns... | managed_wallet | +| `managed_wallet_get_dashpay_external_account` | Get a managed DashPay external account by composite key # Safety - Pointers... | managed_account | +| `managed_wallet_get_dashpay_receiving_account` | Get a managed DashPay receiving funds account by composite key # Safety -... | managed_account | +| `managed_wallet_get_next_bip44_change_address` | Get the next unused change address Generates the next unused change address... | managed_wallet | +| `managed_wallet_get_next_bip44_receive_address` | Get the next unused receive address Generates the next unused receive... | managed_wallet | +| `managed_wallet_get_top_up_account_with_registration_index` | Get a managed IdentityTopUp account with a specific registration index This... | managed_account | +| `managed_wallet_get_utxos` | Get all UTXOs from managed wallet info # Safety - `managed_info` must be a... | utxo | +| `managed_wallet_info_free` | Free managed wallet info returned by wallet_manager_get_managed_wallet_info ... | managed_wallet | +| `managed_wallet_mark_address_used` | Mark an address as used in the pool This updates the pool's tracking of... | address_pool | +| `managed_wallet_set_gap_limit` | Set the gap limit for an address pool The gap limit determines how many... | address_pool | +| `wallet_add_account` | Add an account to the wallet without xpub # Safety This function... | wallet | +| `wallet_add_account_with_string_xpub` | Add an account to the wallet with xpub as string # Safety This function... | wallet | +| `wallet_add_account_with_xpub_bytes` | Add an account to the wallet with xpub as byte array # Safety This... | wallet | +| `wallet_add_dashpay_external_account_with_xpub_bytes` | Add a DashPay external (watch-only) account with xpub bytes # Safety -... | wallet | +| `wallet_add_dashpay_receiving_account` | Add a DashPay receiving funds account # Safety - `wallet` must be a valid... | wallet | +| `wallet_build_and_sign_transaction` | Build and sign a transaction using the wallet's managed info This is the... | transaction | | `wallet_build_transaction` | Build a transaction (unsigned) This creates an unsigned transaction | transaction | -| `wallet_check_transaction` | Check if a transaction belongs to the wallet using ManagedWalletInfo # Safet... | transaction | -| `wallet_create_from_mnemonic` | Create a new wallet from mnemonic (backward compatibility - single network) ... | wallet | -| `wallet_create_from_mnemonic_with_options` | Create a new wallet from mnemonic with options # Safety - `mnemonic` must b... | wallet | -| `wallet_create_from_seed` | Create a new wallet from seed (backward compatibility) # Safety - `seed` mu... | wallet | -| `wallet_create_from_seed_with_options` | Create a new wallet from seed with options # Safety - `seed` must be a vali... | wallet | -| `wallet_create_managed_wallet` | Create a managed wallet from a regular wallet This creates a ManagedWalletIn... | transaction_checking | -| `wallet_create_random` | Create a new random wallet (backward compatibility) # Safety - `error` must... | wallet | -| `wallet_create_random_with_options` | Create a new random wallet with options # Safety - `account_options` must b... | wallet | -| `wallet_derive_extended_private_key` | Derive extended private key at a specific path Returns an opaque FFIExtendedP... 
| keys | -| `wallet_derive_extended_public_key` | Derive extended public key at a specific path Returns an opaque FFIExtendedPu... | keys | -| `wallet_derive_private_key` | Derive private key at a specific path Returns an opaque FFIPrivateKey pointer... | keys | -| `wallet_derive_private_key_as_wif` | Derive private key at a specific path and return as WIF string # Safety - `... | keys | -| `wallet_derive_public_key` | Derive public key at a specific path Returns an opaque FFIPublicKey pointer t... | keys | -| `wallet_derive_public_key_as_hex` | Derive public key at a specific path and return as hex string # Safety - `w... | keys | -| `wallet_free` | Free a wallet # Safety - `wallet` must be a valid pointer to an FFIWallet t... | wallet | -| `wallet_free_const` | Free a const wallet handle This is a const-safe wrapper for wallet_free() th... | wallet | -| `wallet_get_account` | Get an account handle for a specific account type Returns a result containing... | account | -| `wallet_get_account_collection` | Get account collection for a specific network from wallet # Safety - `walle... | account_collection | -| `wallet_get_account_count` | Get number of accounts # Safety - `wallet` must be a valid pointer to an FF... | account | -| `wallet_get_account_xpriv` | Get extended private key for account # Safety - `wallet` must be a valid po... | keys | -| `wallet_get_account_xpub` | Get extended public key for account # Safety - `wallet` must be a valid poi... | keys | -| `wallet_get_id` | Get wallet ID (32-byte hash) # Safety - `wallet` must be a valid pointer to... | wallet | -| `wallet_get_top_up_account_with_registration_index` | Get an IdentityTopUp account handle with a specific registration index This i... | account | -| `wallet_get_utxos` | Get all UTXOs (deprecated - use managed_wallet_get_utxos instead) # Safety ... | utxo | -| `wallet_get_xpub` | Get extended public key for account # Safety - `wallet` must be a valid poi... | wallet | -| `wallet_has_mnemonic` | Check if wallet has mnemonic # Safety - `wallet` must be a valid pointer to... | wallet | -| `wallet_is_watch_only` | Check if wallet is watch-only # Safety - `wallet` must be a valid pointer t... | wallet | -| `wallet_sign_transaction` | Sign a transaction # Safety - `wallet` must be a valid pointer to an FFIWal... | transaction | +| `wallet_check_transaction` | Check if a transaction belongs to the wallet using ManagedWalletInfo #... | transaction | +| `wallet_create_from_mnemonic` | Create a new wallet from mnemonic (backward compatibility - single network) ... | wallet | +| `wallet_create_from_mnemonic_with_options` | Create a new wallet from mnemonic with options # Safety - `mnemonic` must... | wallet | +| `wallet_create_from_seed` | Create a new wallet from seed (backward compatibility) # Safety - `seed`... | wallet | +| `wallet_create_from_seed_with_options` | Create a new wallet from seed with options # Safety - `seed` must be a... | wallet | +| `wallet_create_managed_wallet` | Create a managed wallet from a regular wallet This creates a... | transaction_checking | +| `wallet_create_random` | Create a new random wallet (backward compatibility) # Safety - `error`... | wallet | +| `wallet_create_random_with_options` | Create a new random wallet with options # Safety - `account_options` must... | wallet | +| `wallet_derive_extended_private_key` | Derive extended private key at a specific path Returns an opaque... 
| keys | +| `wallet_derive_extended_public_key` | Derive extended public key at a specific path Returns an opaque... | keys | +| `wallet_derive_private_key` | Derive private key at a specific path Returns an opaque FFIPrivateKey... | keys | +| `wallet_derive_private_key_as_wif` | Derive private key at a specific path and return as WIF string # Safety -... | keys | +| `wallet_derive_public_key` | Derive public key at a specific path Returns an opaque FFIPublicKey pointer... | keys | +| `wallet_derive_public_key_as_hex` | Derive public key at a specific path and return as hex string # Safety -... | keys | +| `wallet_free` | Free a wallet # Safety - `wallet` must be a valid pointer to an FFIWallet... | wallet | +| `wallet_free_const` | Free a const wallet handle This is a const-safe wrapper for wallet_free()... | wallet | +| `wallet_get_account` | Get an account handle for a specific account type Returns a result... | account | +| `wallet_get_account_collection` | Get account collection for a specific network from wallet # Safety -... | account_collection | +| `wallet_get_account_count` | Get number of accounts # Safety - `wallet` must be a valid pointer to an... | account | +| `wallet_get_account_xpriv` | Get extended private key for account # Safety - `wallet` must be a valid... | keys | +| `wallet_get_account_xpub` | Get extended public key for account # Safety - `wallet` must be a valid... | keys | +| `wallet_get_id` | Get wallet ID (32-byte hash) # Safety - `wallet` must be a valid pointer... | wallet | +| `wallet_get_top_up_account_with_registration_index` | Get an IdentityTopUp account handle with a specific registration index This... | account | +| `wallet_get_utxos` | Get all UTXOs (deprecated - use managed_wallet_get_utxos instead) # Safety ... | utxo | +| `wallet_get_xpub` | Get extended public key for account # Safety - `wallet` must be a valid... | wallet | +| `wallet_has_mnemonic` | Check if wallet has mnemonic # Safety - `wallet` must be a valid pointer... | wallet | +| `wallet_is_watch_only` | Check if wallet is watch-only # Safety - `wallet` must be a valid pointer... | wallet | +| `wallet_sign_transaction` | Sign a transaction # Safety - `wallet` must be a valid pointer to an... | transaction | ### Account Management @@ -141,45 +141,45 @@ Functions: 94 | Function | Description | Module | |----------|-------------|--------| -| `account_collection_count` | Get the total number of accounts in the collection # Safety - `collection` ... | account_collection | -| `account_collection_free` | Free an account collection handle # Safety - `collection` must be a valid p... | account_collection | -| `account_collection_get_bip32_account` | Get a BIP32 account by index from the collection # Safety - `collection` mu... | account_collection | -| `account_collection_get_bip32_indices` | Get all BIP32 account indices # Safety - `collection` must be a valid point... | account_collection | -| `account_collection_get_bip44_account` | Get a BIP44 account by index from the collection # Safety - `collection` mu... | account_collection | -| `account_collection_get_bip44_indices` | Get all BIP44 account indices # Safety - `collection` must be a valid point... | account_collection | -| `account_collection_get_coinjoin_account` | Get a CoinJoin account by index from the collection # Safety - `collection`... | account_collection | -| `account_collection_get_coinjoin_indices` | Get all CoinJoin account indices # Safety - `collection` must be a valid po... 
| account_collection | -| `account_collection_get_identity_invitation` | Get the identity invitation account if it exists # Safety - `collection` mu... | account_collection | -| `account_collection_get_identity_registration` | Get the identity registration account if it exists # Safety - `collection` ... | account_collection | -| `account_collection_get_identity_topup` | Get an identity topup account by registration index # Safety - `collection`... | account_collection | -| `account_collection_get_identity_topup_indices` | Get all identity topup registration indices # Safety - `collection` must be... | account_collection | -| `account_collection_get_identity_topup_not_bound` | Get the identity topup not bound account if it exists # Safety - `collectio... | account_collection | -| `account_collection_get_provider_operator_keys` | Get the provider operator keys account if it exists Note: Returns null if the... | account_collection | -| `account_collection_get_provider_owner_keys` | Get the provider owner keys account if it exists # Safety - `collection` mu... | account_collection | -| `account_collection_get_provider_platform_keys` | Get the provider platform keys account if it exists Note: Returns null if the... | account_collection | -| `account_collection_get_provider_voting_keys` | Get the provider voting keys account if it exists # Safety - `collection` m... | account_collection | -| `account_collection_has_identity_invitation` | Check if identity invitation account exists # Safety - `collection` must be... | account_collection | -| `account_collection_has_identity_registration` | Check if identity registration account exists # Safety - `collection` must ... | account_collection | -| `account_collection_has_identity_topup_not_bound` | Check if identity topup not bound account exists # Safety - `collection` mu... | account_collection | -| `account_collection_has_provider_operator_keys` | Check if provider operator keys account exists # Safety - `collection` must... | account_collection | -| `account_collection_has_provider_owner_keys` | Check if provider owner keys account exists # Safety - `collection` must be... | account_collection | -| `account_collection_has_provider_platform_keys` | Check if provider platform keys account exists # Safety - `collection` must... | account_collection | -| `account_collection_has_provider_voting_keys` | Check if provider voting keys account exists # Safety - `collection` must b... | account_collection | -| `account_collection_summary` | Get a human-readable summary of all accounts in the collection Returns a for... | account_collection | -| `account_collection_summary_data` | Get structured account collection summary data Returns a struct containing a... | account_collection | -| `account_collection_summary_free` | Free an account collection summary and all its allocated memory # Safety - ... | account_collection | -| `account_derive_extended_private_key_at` | Derive an extended private key from an account at a given index, using the pr... | account_derivation | -| `account_derive_extended_private_key_from_mnemonic` | Derive an extended private key from a mnemonic + optional passphrase at the g... | account_derivation | +| `account_collection_count` | Get the total number of accounts in the collection # Safety - `collection`... | account_collection | +| `account_collection_free` | Free an account collection handle # Safety - `collection` must be a valid... 
| account_collection | +| `account_collection_get_bip32_account` | Get a BIP32 account by index from the collection # Safety - `collection`... | account_collection | +| `account_collection_get_bip32_indices` | Get all BIP32 account indices # Safety - `collection` must be a valid... | account_collection | +| `account_collection_get_bip44_account` | Get a BIP44 account by index from the collection # Safety - `collection`... | account_collection | +| `account_collection_get_bip44_indices` | Get all BIP44 account indices # Safety - `collection` must be a valid... | account_collection | +| `account_collection_get_coinjoin_account` | Get a CoinJoin account by index from the collection # Safety -... | account_collection | +| `account_collection_get_coinjoin_indices` | Get all CoinJoin account indices # Safety - `collection` must be a valid... | account_collection | +| `account_collection_get_identity_invitation` | Get the identity invitation account if it exists # Safety - `collection`... | account_collection | +| `account_collection_get_identity_registration` | Get the identity registration account if it exists # Safety - `collection`... | account_collection | +| `account_collection_get_identity_topup` | Get an identity topup account by registration index # Safety -... | account_collection | +| `account_collection_get_identity_topup_indices` | Get all identity topup registration indices # Safety - `collection` must... | account_collection | +| `account_collection_get_identity_topup_not_bound` | Get the identity topup not bound account if it exists # Safety -... | account_collection | +| `account_collection_get_provider_operator_keys` | Get the provider operator keys account if it exists Note: Returns null if... | account_collection | +| `account_collection_get_provider_owner_keys` | Get the provider owner keys account if it exists # Safety - `collection`... | account_collection | +| `account_collection_get_provider_platform_keys` | Get the provider platform keys account if it exists Note: Returns null if... | account_collection | +| `account_collection_get_provider_voting_keys` | Get the provider voting keys account if it exists # Safety - `collection`... | account_collection | +| `account_collection_has_identity_invitation` | Check if identity invitation account exists # Safety - `collection` must... | account_collection | +| `account_collection_has_identity_registration` | Check if identity registration account exists # Safety - `collection` must... | account_collection | +| `account_collection_has_identity_topup_not_bound` | Check if identity topup not bound account exists # Safety - `collection`... | account_collection | +| `account_collection_has_provider_operator_keys` | Check if provider operator keys account exists # Safety - `collection`... | account_collection | +| `account_collection_has_provider_owner_keys` | Check if provider owner keys account exists # Safety - `collection` must... | account_collection | +| `account_collection_has_provider_platform_keys` | Check if provider platform keys account exists # Safety - `collection`... | account_collection | +| `account_collection_has_provider_voting_keys` | Check if provider voting keys account exists # Safety - `collection` must... | account_collection | +| `account_collection_summary` | Get a human-readable summary of all accounts in the collection Returns a... | account_collection | +| `account_collection_summary_data` | Get structured account collection summary data Returns a struct containing... 
| account_collection | +| `account_collection_summary_free` | Free an account collection summary and all its allocated memory # Safety -... | account_collection | +| `account_derive_extended_private_key_at` | Derive an extended private key from an account at a given index, using the... | account_derivation | +| `account_derive_extended_private_key_from_mnemonic` | Derive an extended private key from a mnemonic + optional passphrase at the... | account_derivation | | `account_derive_extended_private_key_from_seed` | Derive an extended private key from a raw seed buffer at the given index | account_derivation | -| `account_derive_private_key_as_wif_at` | Derive a private key from an account at a given chain/index and return as WIF... | account_derivation | -| `account_derive_private_key_at` | Derive a private key (secp256k1) from an account at a given chain/index, using... | account_derivation | +| `account_derive_private_key_as_wif_at` | Derive a private key from an account at a given chain/index and return as... | account_derivation | +| `account_derive_private_key_at` | Derive a private key (secp256k1) from an account at a given chain/index,... | account_derivation | | `account_derive_private_key_from_mnemonic` | Derive a private key from a mnemonic + optional passphrase at the given index | account_derivation | | `account_derive_private_key_from_seed` | Derive a private key from a raw seed buffer at the given index | account_derivation | -| `account_free` | Free an account handle # Safety - `account` must be a valid pointer to an F... | account | -| `account_get_account_type` | Get the account type of an account # Safety - `account` must be a valid poi... | account | -| `account_get_extended_public_key_as_string` | Get the extended public key of an account as a string # Safety - `account` ... | account | -| `account_get_is_watch_only` | Check if an account is watch-only # Safety - `account` must be a valid point... | account | -| `account_get_network` | Get the network of an account # Safety - `account` must be a valid pointer ... | account | +| `account_free` | Free an account handle # Safety - `account` must be a valid pointer to an... | account | +| `account_get_account_type` | Get the account type of an account # Safety - `account` must be a valid... | account | +| `account_get_extended_public_key_as_string` | Get the extended public key of an account as a string # Safety - `account`... | account | +| `account_get_is_watch_only` | Check if an account is watch-only # Safety - `account` must be a valid... | account | +| `account_get_network` | Get the network of an account # Safety - `account` must be a valid pointer... | account | | `bls_account_derive_private_key_from_mnemonic` | No description | account_derivation | | `bls_account_derive_private_key_from_seed` | No description | account_derivation | | `bls_account_free` | No description | account | @@ -195,46 +195,46 @@ Functions: 94 | `eddsa_account_get_extended_public_key_as_string` | No description | account | | `eddsa_account_get_is_watch_only` | No description | account | | `eddsa_account_get_network` | No description | account | -| `managed_account_collection_count` | Get the total number of accounts in the managed collection # Safety - `coll... | managed_account_collection | -| `managed_account_collection_free` | Free a managed account collection handle # Safety - `collection` must be a ... 
| managed_account_collection | -| `managed_account_collection_get_bip32_account` | Get a BIP32 account by index from the managed collection # Safety - `collec... | managed_account_collection | -| `managed_account_collection_get_bip32_indices` | Get all BIP32 account indices from managed collection # Safety - `collectio... | managed_account_collection | -| `managed_account_collection_get_bip44_account` | Get a BIP44 account by index from the managed collection # Safety - `collec... | managed_account_collection | -| `managed_account_collection_get_bip44_indices` | Get all BIP44 account indices from managed collection # Safety - `collectio... | managed_account_collection | -| `managed_account_collection_get_coinjoin_account` | Get a CoinJoin account by index from the managed collection # Safety - `col... | managed_account_collection | -| `managed_account_collection_get_coinjoin_indices` | Get all CoinJoin account indices from managed collection # Safety - `collec... | managed_account_collection | -| `managed_account_collection_get_identity_invitation` | Get the identity invitation account if it exists in managed collection # Saf... | managed_account_collection | -| `managed_account_collection_get_identity_registration` | Get the identity registration account if it exists in managed collection # S... | managed_account_collection | -| `managed_account_collection_get_identity_topup` | Get an identity topup account by registration index from managed collection ... | managed_account_collection | -| `managed_account_collection_get_identity_topup_indices` | Get all identity topup registration indices from managed collection # Safety... | managed_account_collection | -| `managed_account_collection_get_identity_topup_not_bound` | Get the identity topup not bound account if it exists in managed collection ... | managed_account_collection | -| `managed_account_collection_get_provider_operator_keys` | Get the provider operator keys account if it exists in managed collection Not... | managed_account_collection | -| `managed_account_collection_get_provider_owner_keys` | Get the provider owner keys account if it exists in managed collection # Saf... | managed_account_collection | -| `managed_account_collection_get_provider_platform_keys` | Get the provider platform keys account if it exists in managed collection Not... | managed_account_collection | -| `managed_account_collection_get_provider_voting_keys` | Get the provider voting keys account if it exists in managed collection # Sa... | managed_account_collection | -| `managed_account_collection_has_identity_invitation` | Check if identity invitation account exists in managed collection # Safety ... | managed_account_collection | -| `managed_account_collection_has_identity_registration` | Check if identity registration account exists in managed collection # Safety... | managed_account_collection | -| `managed_account_collection_has_identity_topup_not_bound` | Check if identity topup not bound account exists in managed collection # Saf... | managed_account_collection | -| `managed_account_collection_has_provider_operator_keys` | Check if provider operator keys account exists in managed collection # Safet... | managed_account_collection | -| `managed_account_collection_has_provider_owner_keys` | Check if provider owner keys account exists in managed collection # Safety ... | managed_account_collection | -| `managed_account_collection_has_provider_platform_keys` | Check if provider platform keys account exists in managed collection # Safet... 
| managed_account_collection | -| `managed_account_collection_has_provider_voting_keys` | Check if provider voting keys account exists in managed collection # Safety ... | managed_account_collection | -| `managed_account_collection_summary` | Get a human-readable summary of all accounts in the managed collection Return... | managed_account_collection | -| `managed_account_collection_summary_data` | Get structured account collection summary data for managed collection Return... | managed_account_collection | -| `managed_account_collection_summary_free` | Free a managed account collection summary and all its allocated memory # Saf... | managed_account_collection | -| `managed_account_free` | Free a managed account handle # Safety - `account` must be a valid pointer ... | managed_account | -| `managed_account_free_transactions` | Free transactions array returned by managed_account_get_transactions # Safet... | managed_account | -| `managed_account_get_account_type` | Get the account type of a managed account # Safety - `account` must be a va... | managed_account | -| `managed_account_get_address_pool` | Get an address pool from a managed account by type This function returns the... | managed_account | -| `managed_account_get_balance` | Get the balance of a managed account # Safety - `account` must be a valid p... | managed_account | -| `managed_account_get_external_address_pool` | Get the external address pool from a managed account This function returns t... | managed_account | -| `managed_account_get_index` | Get the account index from a managed account Returns the primary account ind... | managed_account | -| `managed_account_get_internal_address_pool` | Get the internal address pool from a managed account This function returns t... | managed_account | -| `managed_account_get_is_watch_only` | Check if a managed account is watch-only # Safety - `account` must be a val... | managed_account | -| `managed_account_get_network` | Get the network of a managed account # Safety - `account` must be a valid p... | managed_account | -| `managed_account_get_transaction_count` | Get the number of transactions in a managed account # Safety - `account` mu... | managed_account | -| `managed_account_get_transactions` | Get all transactions from a managed account Returns an array of FFITransacti... | managed_account | -| `managed_account_get_utxo_count` | Get the number of UTXOs in a managed account # Safety - `account` must be a... | managed_account | +| `managed_account_collection_count` | Get the total number of accounts in the managed collection # Safety -... | managed_account_collection | +| `managed_account_collection_free` | Free a managed account collection handle # Safety - `collection` must be a... | managed_account_collection | +| `managed_account_collection_get_bip32_account` | Get a BIP32 account by index from the managed collection # Safety -... | managed_account_collection | +| `managed_account_collection_get_bip32_indices` | Get all BIP32 account indices from managed collection # Safety -... | managed_account_collection | +| `managed_account_collection_get_bip44_account` | Get a BIP44 account by index from the managed collection # Safety -... | managed_account_collection | +| `managed_account_collection_get_bip44_indices` | Get all BIP44 account indices from managed collection # Safety -... | managed_account_collection | +| `managed_account_collection_get_coinjoin_account` | Get a CoinJoin account by index from the managed collection # Safety -... 
| managed_account_collection | +| `managed_account_collection_get_coinjoin_indices` | Get all CoinJoin account indices from managed collection # Safety -... | managed_account_collection | +| `managed_account_collection_get_identity_invitation` | Get the identity invitation account if it exists in managed collection #... | managed_account_collection | +| `managed_account_collection_get_identity_registration` | Get the identity registration account if it exists in managed collection #... | managed_account_collection | +| `managed_account_collection_get_identity_topup` | Get an identity topup account by registration index from managed collection ... | managed_account_collection | +| `managed_account_collection_get_identity_topup_indices` | Get all identity topup registration indices from managed collection #... | managed_account_collection | +| `managed_account_collection_get_identity_topup_not_bound` | Get the identity topup not bound account if it exists in managed collection ... | managed_account_collection | +| `managed_account_collection_get_provider_operator_keys` | Get the provider operator keys account if it exists in managed collection... | managed_account_collection | +| `managed_account_collection_get_provider_owner_keys` | Get the provider owner keys account if it exists in managed collection #... | managed_account_collection | +| `managed_account_collection_get_provider_platform_keys` | Get the provider platform keys account if it exists in managed collection... | managed_account_collection | +| `managed_account_collection_get_provider_voting_keys` | Get the provider voting keys account if it exists in managed collection #... | managed_account_collection | +| `managed_account_collection_has_identity_invitation` | Check if identity invitation account exists in managed collection # Safety ... | managed_account_collection | +| `managed_account_collection_has_identity_registration` | Check if identity registration account exists in managed collection #... | managed_account_collection | +| `managed_account_collection_has_identity_topup_not_bound` | Check if identity topup not bound account exists in managed collection #... | managed_account_collection | +| `managed_account_collection_has_provider_operator_keys` | Check if provider operator keys account exists in managed collection #... | managed_account_collection | +| `managed_account_collection_has_provider_owner_keys` | Check if provider owner keys account exists in managed collection # Safety ... | managed_account_collection | +| `managed_account_collection_has_provider_platform_keys` | Check if provider platform keys account exists in managed collection #... | managed_account_collection | +| `managed_account_collection_has_provider_voting_keys` | Check if provider voting keys account exists in managed collection # Safety... | managed_account_collection | +| `managed_account_collection_summary` | Get a human-readable summary of all accounts in the managed collection ... | managed_account_collection | +| `managed_account_collection_summary_data` | Get structured account collection summary data for managed collection ... | managed_account_collection | +| `managed_account_collection_summary_free` | Free a managed account collection summary and all its allocated memory #... | managed_account_collection | +| `managed_account_free` | Free a managed account handle # Safety - `account` must be a valid pointer... 
| managed_account | +| `managed_account_free_transactions` | Free transactions array returned by managed_account_get_transactions #... | managed_account | +| `managed_account_get_account_type` | Get the account type of a managed account # Safety - `account` must be a... | managed_account | +| `managed_account_get_address_pool` | Get an address pool from a managed account by type This function returns... | managed_account | +| `managed_account_get_balance` | Get the balance of a managed account # Safety - `account` must be a valid... | managed_account | +| `managed_account_get_external_address_pool` | Get the external address pool from a managed account This function returns... | managed_account | +| `managed_account_get_index` | Get the account index from a managed account Returns the primary account... | managed_account | +| `managed_account_get_internal_address_pool` | Get the internal address pool from a managed account This function returns... | managed_account | +| `managed_account_get_is_watch_only` | Check if a managed account is watch-only # Safety - `account` must be a... | managed_account | +| `managed_account_get_network` | Get the network of a managed account # Safety - `account` must be a valid... | managed_account | +| `managed_account_get_transaction_count` | Get the number of transactions in a managed account # Safety - `account`... | managed_account | +| `managed_account_get_transactions` | Get all transactions from a managed account Returns an array of... | managed_account | +| `managed_account_get_utxo_count` | Get the number of UTXOs in a managed account # Safety - `account` must be... | managed_account | ### Address Management @@ -242,16 +242,16 @@ Functions: 10 | Function | Description | Module | |----------|-------------|--------| -| `address_array_free` | Free address array # Safety - `addresses` must be a valid pointer to an arr... | address | -| `address_free` | Free address string # Safety - `address` must be a valid pointer created by... | address | -| `address_get_type` | Get address type Returns: - 0: P2PKH address - 1: P2SH address - 2: Other ad... | address | -| `address_info_array_free` | Free an array of FFIAddressInfo structures # Safety - `infos` must be a val... | address_pool | -| `address_info_free` | Free a single FFIAddressInfo structure # Safety - `info` must be a valid po... | address_pool | -| `address_pool_free` | Free an address pool handle # Safety - `pool` must be a valid pointer to an... | address_pool | -| `address_pool_get_address_at_index` | Get a single address info at a specific index from the pool Returns detailed... | address_pool | -| `address_pool_get_addresses_in_range` | Get a range of addresses from the pool Returns an array of FFIAddressInfo st... | address_pool | -| `address_to_pubkey_hash` | Extract public key hash from P2PKH address # Safety - `address` must be a va... | transaction | -| `address_validate` | Validate an address # Safety - `address` must be a valid null-terminated C ... | address | +| `address_array_free` | Free address array # Safety - `addresses` must be a valid pointer to an... | address | +| `address_free` | Free address string # Safety - `address` must be a valid pointer created... | address | +| `address_get_type` | Get address type Returns: - 0: P2PKH address - 1: P2SH address - 2: Other... | address | +| `address_info_array_free` | Free an array of FFIAddressInfo structures # Safety - `infos` must be a... 
| address_pool | +| `address_info_free` | Free a single FFIAddressInfo structure # Safety - `info` must be a valid... | address_pool | +| `address_pool_free` | Free an address pool handle # Safety - `pool` must be a valid pointer to... | address_pool | +| `address_pool_get_address_at_index` | Get a single address info at a specific index from the pool Returns... | address_pool | +| `address_pool_get_addresses_in_range` | Get a range of addresses from the pool Returns an array of FFIAddressInfo... | address_pool | +| `address_to_pubkey_hash` | Extract public key hash from P2PKH address # Safety - `address` must be a... | transaction | +| `address_validate` | Validate an address # Safety - `address` must be a valid null-terminated C... | address | ### Transaction Management @@ -259,20 +259,20 @@ Functions: 14 | Function | Description | Module | |----------|-------------|--------| -| `transaction_add_input` | Add an input to a transaction # Safety - `tx` must be a valid pointer to an ... | transaction | -| `transaction_add_output` | Add an output to a transaction # Safety - `tx` must be a valid pointer to an... | transaction | -| `transaction_bytes_free` | Free transaction bytes # Safety - `tx_bytes` must be a valid pointer create... | transaction | -| `transaction_check_result_free` | Free a transaction check result # Safety - `result` must be a valid pointer... | transaction_checking | -| `transaction_classify` | Get the transaction classification for routing Returns a string describing t... | transaction_checking | -| `transaction_create` | Create a new empty transaction # Returns - Pointer to FFITransaction on succ... | transaction | -| `transaction_deserialize` | Deserialize a transaction # Safety - `data` must be a valid pointer to seria... | transaction | -| `transaction_destroy` | Destroy a transaction # Safety - `tx` must be a valid pointer to an FFITrans... | transaction | -| `transaction_get_txid` | Get the transaction ID # Safety - `tx` must be a valid pointer to an FFITran... | transaction | -| `transaction_get_txid_from_bytes` | Get transaction ID from raw transaction bytes # Safety - `tx_bytes` must be ... | transaction | -| `transaction_serialize` | Serialize a transaction # Safety - `tx` must be a valid pointer to an FFITra... | transaction | -| `transaction_sighash` | Calculate signature hash for an input # Safety - `tx` must be a valid pointe... | transaction | -| `transaction_sign_input` | Sign a transaction input # Safety - `tx` must be a valid pointer to an FFITr... | transaction | -| `utxo_array_free` | Free UTXO array # Safety - `utxos` must be a valid pointer to an array of F... | utxo | +| `transaction_add_input` | Add an input to a transaction # Safety - `tx` must be a valid pointer to an... | transaction | +| `transaction_add_output` | Add an output to a transaction # Safety - `tx` must be a valid pointer to... | transaction | +| `transaction_bytes_free` | Free transaction bytes # Safety - `tx_bytes` must be a valid pointer... | transaction | +| `transaction_check_result_free` | Free a transaction check result # Safety - `result` must be a valid... | transaction_checking | +| `transaction_classify` | Get the transaction classification for routing Returns a string describing... | transaction_checking | +| `transaction_create` | Create a new empty transaction # Returns - Pointer to FFITransaction on... | transaction | +| `transaction_deserialize` | Deserialize a transaction # Safety - `data` must be a valid pointer to... 
| transaction | +| `transaction_destroy` | Destroy a transaction # Safety - `tx` must be a valid pointer to an... | transaction | +| `transaction_get_txid` | Get the transaction ID # Safety - `tx` must be a valid pointer to an... | transaction | +| `transaction_get_txid_from_bytes` | Get transaction ID from raw transaction bytes # Safety - `tx_bytes` must be... | transaction | +| `transaction_serialize` | Serialize a transaction # Safety - `tx` must be a valid pointer to an... | transaction | +| `transaction_sighash` | Calculate signature hash for an input # Safety - `tx` must be a valid... | transaction | +| `transaction_sign_input` | Sign a transaction input # Safety - `tx` must be a valid pointer to an... | transaction | +| `utxo_array_free` | Free UTXO array # Safety - `utxos` must be a valid pointer to an array of... | utxo | ### Key Management @@ -280,20 +280,20 @@ Functions: 14 | Function | Description | Module | |----------|-------------|--------| -| `bip38_decrypt_private_key` | Decrypt a BIP38 encrypted private key # Safety This function is unsafe beca... | bip38 | -| `bip38_encrypt_private_key` | Encrypt a private key with BIP38 # Safety This function is unsafe because i... | bip38 | -| `derivation_derive_private_key_from_seed` | Derive private key for a specific path from seed # Safety - `seed` must be ... | derivation | -| `derivation_new_master_key` | Create a new master extended private key from seed # Safety - `seed` must b... | derivation | -| `extended_private_key_free` | Free an extended private key # Safety - `key` must be a valid pointer creat... | keys | -| `extended_private_key_get_private_key` | Get the private key from an extended private key Extracts the non-extended p... | keys | -| `extended_private_key_to_string` | Get extended private key as string (xprv format) Returns the extended privat... | keys | -| `extended_public_key_free` | Free an extended public key # Safety - `key` must be a valid pointer create... | keys | -| `extended_public_key_get_public_key` | Get the public key from an extended public key Extracts the non-extended pub... | keys | -| `extended_public_key_to_string` | Get extended public key as string (xpub format) Returns the extended public ... | keys | -| `private_key_free` | Free a private key # Safety - `key` must be a valid pointer created by priv... | keys | -| `private_key_to_wif` | Get private key as WIF string from FFIPrivateKey # Safety - `key` must be a... | keys | -| `public_key_free` | Free a public key # Safety - `key` must be a valid pointer created by publi... | keys | -| `public_key_to_hex` | Get public key as hex string from FFIPublicKey # Safety - `key` must be a v... | keys | +| `bip38_decrypt_private_key` | Decrypt a BIP38 encrypted private key # Safety This function is unsafe... | bip38 | +| `bip38_encrypt_private_key` | Encrypt a private key with BIP38 # Safety This function is unsafe because... | bip38 | +| `derivation_derive_private_key_from_seed` | Derive private key for a specific path from seed # Safety - `seed` must be... | derivation | +| `derivation_new_master_key` | Create a new master extended private key from seed # Safety - `seed` must... | derivation | +| `extended_private_key_free` | Free an extended private key # Safety - `key` must be a valid pointer... | keys | +| `extended_private_key_get_private_key` | Get the private key from an extended private key Extracts the non-extended... | keys | +| `extended_private_key_to_string` | Get extended private key as string (xprv format) Returns the extended... 
| keys | +| `extended_public_key_free` | Free an extended public key # Safety - `key` must be a valid pointer... | keys | +| `extended_public_key_get_public_key` | Get the public key from an extended public key Extracts the non-extended... | keys | +| `extended_public_key_to_string` | Get extended public key as string (xpub format) Returns the extended public... | keys | +| `private_key_free` | Free a private key # Safety - `key` must be a valid pointer created by... | keys | +| `private_key_to_wif` | Get private key as WIF string from FFIPrivateKey # Safety - `key` must be... | keys | +| `public_key_free` | Free a public key # Safety - `key` must be a valid pointer created by... | keys | +| `public_key_to_hex` | Get public key as hex string from FFIPublicKey # Safety - `key` must be a... | keys | ### Mnemonic Operations @@ -301,12 +301,12 @@ Functions: 6 | Function | Description | Module | |----------|-------------|--------| -| `mnemonic_free` | Free a mnemonic string # Safety - `mnemonic` must be a valid pointer create... | mnemonic | +| `mnemonic_free` | Free a mnemonic string # Safety - `mnemonic` must be a valid pointer... | mnemonic | | `mnemonic_generate` | Generate a new mnemonic with specified word count (12, 15, 18, 21, or 24) | mnemonic | | `mnemonic_generate_with_language` | Generate a new mnemonic with specified language and word count | mnemonic | -| `mnemonic_to_seed` | Convert mnemonic to seed with optional passphrase # Safety - `mnemonic` mus... | mnemonic | -| `mnemonic_validate` | Validate a mnemonic phrase # Safety - `mnemonic` must be a valid null-termi... | mnemonic | -| `mnemonic_word_count` | Get word count from mnemonic # Safety - `mnemonic` must be a valid null-ter... | mnemonic | +| `mnemonic_to_seed` | Convert mnemonic to seed with optional passphrase # Safety - `mnemonic`... | mnemonic | +| `mnemonic_validate` | Validate a mnemonic phrase # Safety - `mnemonic` must be a valid... | mnemonic | +| `mnemonic_word_count` | Get word count from mnemonic # Safety - `mnemonic` must be a valid... | mnemonic | ### Utility Functions @@ -319,19 +319,19 @@ Functions: 18 | `derivation_identity_authentication_path` | Derive identity authentication path (m/9'/5'/5'/0'/identity_index'/key_index') | derivation | | `derivation_identity_registration_path` | Derive identity registration path (m/9'/5'/5'/1'/index') | derivation | | `derivation_identity_topup_path` | Derive identity top-up path (m/9'/5'/5'/2'/identity_index'/top_up_index') | derivation | -| `derivation_path_free` | Free derivation path arrays Note: This function expects the count to properly... | keys | -| `derivation_path_parse` | Convert derivation path string to indices # Safety - `path` must be a valid... | keys | -| `derivation_string_free` | Free derivation path string # Safety - `s` must be a valid pointer to a C s... | derivation | -| `derivation_xpriv_free` | Free extended private key # Safety - `xpriv` must be a valid pointer to an ... | derivation | -| `derivation_xpriv_to_string` | Get extended private key as string # Safety - `xpriv` must be a valid point... | derivation | -| `derivation_xpriv_to_xpub` | Derive public key from extended private key # Safety - `xpriv` must be a va... | derivation | -| `derivation_xpub_fingerprint` | Get fingerprint from extended public key (4 bytes) # Safety - `xpub` must b... | derivation | -| `derivation_xpub_free` | Free extended public key # Safety - `xpub` must be a valid pointer to an FF... 
| derivation | -| `derivation_xpub_to_string` | Get extended public key as string # Safety - `xpub` must be a valid pointer... | derivation | +| `derivation_path_free` | Free derivation path arrays Note: This function expects the count to... | keys | +| `derivation_path_parse` | Convert derivation path string to indices # Safety - `path` must be a... | keys | +| `derivation_string_free` | Free derivation path string # Safety - `s` must be a valid pointer to a C... | derivation | +| `derivation_xpriv_free` | Free extended private key # Safety - `xpriv` must be a valid pointer to an... | derivation | +| `derivation_xpriv_to_string` | Get extended private key as string # Safety - `xpriv` must be a valid... | derivation | +| `derivation_xpriv_to_xpub` | Derive public key from extended private key # Safety - `xpriv` must be a... | derivation | +| `derivation_xpub_fingerprint` | Get fingerprint from extended public key (4 bytes) # Safety - `xpub` must... | derivation | +| `derivation_xpub_free` | Free extended public key # Safety - `xpub` must be a valid pointer to an... | derivation | +| `derivation_xpub_to_string` | Get extended public key as string # Safety - `xpub` must be a valid... | derivation | | `ffi_network_get_name` | No description | types | -| `free_u32_array` | Free a u32 array allocated by this library # Safety - `array` must be a val... | account_collection | -| `script_p2pkh` | Create a P2PKH script pubkey # Safety - `pubkey_hash` must be a valid pointe... | transaction | -| `string_free` | Free a string # Safety - `s` must be a valid pointer created by C string cr... | utils | +| `free_u32_array` | Free a u32 array allocated by this library # Safety - `array` must be a... | account_collection | +| `script_p2pkh` | Create a P2PKH script pubkey # Safety - `pubkey_hash` must be a valid... | transaction | +| `string_free` | Free a string # Safety - `s` must be a valid pointer created by C string... | utils | ## Detailed Function Documentation diff --git a/key-wallet-ffi/scripts/check_ffi_docs.sh b/key-wallet-ffi/scripts/check_ffi_docs.sh deleted file mode 100755 index b2c807f8b..000000000 --- a/key-wallet-ffi/scripts/check_ffi_docs.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -# Check if FFI documentation is up to date - -set -e - -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -PROJECT_DIR="$(dirname "$SCRIPT_DIR")" - -echo "Checking FFI documentation..." - -cd "$PROJECT_DIR" - -# Generate new documentation -python3 scripts/generate_ffi_docs.py > /dev/null 2>&1 - -# Check if there are any changes (tracked, staged, or untracked) -if ! git diff --quiet --exit-code -- FFI_API.md \ - || ! git diff --quiet --cached -- FFI_API.md \ - || [ -n "$(git ls-files --others --exclude-standard -- FFI_API.md)" ]; then - echo "❌ FFI documentation is out of date!" 
- echo "" - echo "Please regenerate the documentation by running:" - echo " cd key-wallet-ffi && python3 scripts/generate_ffi_docs.py" - echo "" - echo "Or use the make command:" - echo " make update-ffi-docs" - echo "" - exit 1 -else - echo "✅ FFI documentation is up to date" -fi diff --git a/key-wallet-ffi/scripts/generate_ffi_docs.py b/key-wallet-ffi/scripts/generate_ffi_docs.py index 73745e6b7..65a574e9a 100755 --- a/key-wallet-ffi/scripts/generate_ffi_docs.py +++ b/key-wallet-ffi/scripts/generate_ffi_docs.py @@ -203,7 +203,12 @@ def generate_markdown(functions: List[FFIFunction]) -> str: desc = func.doc_comment.split('.')[0] if func.doc_comment else "No description" desc = desc.replace('|', '\\|') # Escape pipes in description if len(desc) > 80: - desc = desc[:77] + "..." + # Truncate at last complete word before 77 chars to avoid mid-word breaks + truncate_pos = desc.rfind(' ', 0, 77) + if truncate_pos > 60: # Only if we find a space reasonably close + desc = desc[:truncate_pos] + "..." + else: + desc = desc[:77] + "..." md.append(f"| `{func.name}` | {desc} | {func.module} |") md.append("") From b889db01f35c74a997c6ed93d327e5f6b02af866 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Fri, 21 Nov 2025 00:45:44 +1000 Subject: [PATCH 11/14] Revert intentional typo fix for testing --- _typos.toml | 2 ++ dash/src/crypto/sighash.rs | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/_typos.toml b/_typos.toml index e964c5852..b93be9327 100644 --- a/_typos.toml +++ b/_typos.toml @@ -10,6 +10,8 @@ extend-ignore-re = [ '[0-9a-fA-F]{5,}', # Hex strings (5+ chars to catch "BA70D") 'b"[^"]*"', # Byte strings - never check these for typos "b'[^']*'", # Byte strings with single quotes + # Support for inline spellchecker directives + "(?Rm)^.*(#|//)\\s*spellchecker:disable", # Ignore lines with a "disable" marker ] [default.extend-words] diff --git a/dash/src/crypto/sighash.rs b/dash/src/crypto/sighash.rs index 86ed9c0d3..4e3861f06 100644 --- a/dash/src/crypto/sighash.rs +++ b/dash/src/crypto/sighash.rs @@ -1737,7 +1737,7 @@ mod tests { "SIGHASH_SINGLE| SIGHASH_ANYONECANPAY", "SIGHASH_ALL SIGHASH_ANYONECANPAY", "SIGHASH_NONE |", - "SIGHASH_SINGLE", + "SIGHASH_SIGNLE", // spellchecker:disable-line (intentional typo for test) "DEFAULT", "ALL", "sighash_none", From 8bcd6db75987ea87d8b58c5ffde00d10d516b602 Mon Sep 17 00:00:00 2001 From: xdustinface Date: Fri, 21 Nov 2025 23:14:01 +1000 Subject: [PATCH 12/14] Remove the redundant format/linting section in `CONTRIBUTING.md` --- CONTRIBUTING.md | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d0a06fab0..9055efc1b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -167,16 +167,6 @@ Pull request merge requirements: Follow idiomatic Rust and crate‑local patterns. -### Formatting & Linting - -Run `cargo fmt --all` before submitting PRs. Use `cargo clippy --workspace --all-targets -- -D warnings` to catch issues early. Avoid large, unrelated reformatting to keep diffs focused and `git blame` useful. - -You may check the [discussion on the formatting](https://github.com/rust-bitcoin/rust-bitcoin/issues/172) -and [how it is planned to coordinate it with crate refactoring](https://github.com/rust-bitcoin/rust-bitcoin/pull/525) - -For the new code it is recommended to follow style of the existing codebase and -avoid any end-line space characters. - ### MSRV The Minimal Supported Rust Version (MSRV) is 1.89; it is enforced by CI. 
Crates use mixed editions (2021/2024); consult `Cargo.toml` and README for details. From f84627e92217e1bdbbe4552700e93c47ada3c75c Mon Sep 17 00:00:00 2001 From: xdustinface Date: Wed, 26 Nov 2025 21:57:23 +1000 Subject: [PATCH 13/14] Change `id` from `clippy-workspace` to `clippy` --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 102980b54..b3a2a6ec3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -77,7 +77,7 @@ repos: files: ^(key-wallet-ffi|dash-spv-ffi)/.*\.(rs|toml|py)$ stages: [pre-push, manual] - - id: clippy-workspace + - id: clippy name: clippy (workspace strict) description: Strict clippy on entire workspace - deny all warnings entry: cargo clippy --workspace --all-features --all-targets -- -D warnings From d2ceb3b23f337af4b2b49198989013b43d4ed38f Mon Sep 17 00:00:00 2001 From: xdustinface Date: Thu, 27 Nov 2025 20:17:35 +1000 Subject: [PATCH 14/14] Fix `verify FFI` stage --- contrib/verify_ffi.py | 56 +++++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 29 deletions(-) diff --git a/contrib/verify_ffi.py b/contrib/verify_ffi.py index 694653b7c..9580fff2d 100755 --- a/contrib/verify_ffi.py +++ b/contrib/verify_ffi.py @@ -6,20 +6,27 @@ from pathlib import Path from concurrent.futures import ThreadPoolExecutor +FFI_CRATES = ["key-wallet-ffi", "dash-spv-ffi"] -def build_ffi_crate(crate_dir: Path) -> tuple[str, int]: - """Build crate to regenerate headers.""" - print(f" Building {crate_dir.name}...") + +def build_ffi_crates(repo_root: Path) -> bool: + """Build all FFI crates to regenerate headers.""" + print(" Building FFI crates...") result = subprocess.run( - ["cargo", "build", "--quiet"], - cwd=crate_dir, + ["cargo", "build", "--quiet"] + [f"-p={crate}" for crate in FFI_CRATES], + cwd=repo_root, capture_output=True, text=True ) - return crate_dir.name, result.returncode + if result.returncode != 0: + print("Build failed:", file=sys.stderr) + if result.stderr: + print(result.stderr, file=sys.stderr) + return False + return True -def generate_ffi_docs(crate_dir: Path) -> tuple[str, int]: +def generate_ffi_docs(crate_dir: Path) -> tuple[str, int, str]: """Generate FFI documentation for a crate.""" print(f" Generating {crate_dir.name} docs...") result = subprocess.run( @@ -28,41 +35,32 @@ def generate_ffi_docs(crate_dir: Path) -> tuple[str, int]: capture_output=True, text=True ) - if result.returncode == 0: - if result.stdout: - for line in result.stdout.strip().split('\n'): - print(f" {line}") - return crate_dir.name, result.returncode + return crate_dir.name, result.returncode, result.stdout def main(): repo_root = Path(__file__).parent.parent - ffi_crates = [ - repo_root / "key-wallet-ffi", - repo_root / "dash-spv-ffi" - ] + ffi_crate_dirs = [repo_root / crate for crate in FFI_CRATES] print("Regenerating FFI headers and documentation") - # Build and generate docs for both crates in parallel - with ThreadPoolExecutor(max_workers=4) as executor: - build_futures = [executor.submit(build_ffi_crate, crate) for crate in ffi_crates] - doc_futures = [executor.submit(generate_ffi_docs, crate) for crate in ffi_crates] + # Build all FFI crates first + if not build_ffi_crates(repo_root): + sys.exit(1) - build_results = [f.result() for f in build_futures] + # Generate docs in parallel + with ThreadPoolExecutor(max_workers=2) as executor: + doc_futures = [executor.submit(generate_ffi_docs, crate) for crate in ffi_crate_dirs] doc_results = [f.result() 
for f in doc_futures] - # Check if any builds failed - for crate_name, returncode in build_results: - if returncode != 0: - print(f"Build failed for {crate_name}", file=sys.stderr) - sys.exit(1) - - # Check if any doc generation failed - for crate_name, returncode in doc_results: + # Check results and print output + for crate_name, returncode, stdout in doc_results: if returncode != 0: print(f"Documentation generation failed for {crate_name}", file=sys.stderr) sys.exit(1) + if stdout: + for line in stdout.strip().split('\n'): + print(f" {line}") print(" Generation complete, checking for changes...")
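
The `extend-ignore-re` entry added to `_typos.toml` in PATCH 11 works because the pattern's greedy `^.*` pulls everything from the start of the line up to the `spellchecker:disable` marker into the ignored span, so an intentional misspelling such as `SIGHASH_SIGNLE` earlier on the same line falls inside text the checker skips. The standalone Python sketch below only illustrates that matching behaviour and is not part of the patch series: the `IGNORE` name is local to the illustration, the sample line is the one from `dash/src/crypto/sighash.rs`, and Python's `re` has no equivalent of the Rust-regex `R` (CRLF) flag, so `(?m)` stands in for `(?Rm)`.

```python
import re

# Python approximation of the _typos.toml pattern
#   "(?Rm)^.*(#|//)\\s*spellchecker:disable"
# (?m) stands in for the Rust-regex (?Rm) flags; the TOML-escaped \\s is just \s here.
IGNORE = re.compile(r"(?m)^.*(#|//)\s*spellchecker:disable")

line = '            "SIGHASH_SIGNLE", // spellchecker:disable-line (intentional typo for test)'

match = IGNORE.search(line)
assert match is not None
# Greedy ^.* makes the matched (ignored) span run from the start of the line
# through the directive, so the intentional typo sits inside the skipped region:
assert "SIGHASH_SIGNLE" in match.group(0)
print(match.group(0))
```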