From 9eacf08c4062da43d1eaebc12f575c01db9cdd62 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 19:27:04 +1100 Subject: [PATCH 01/12] Add F# Type Provider --- .github/workflows/ci.yml | 35 + .vscode/launch.json | 11 + Agents.md | 34 +- CLAUDE.md | 3 +- DataProvider.sln | 78 +- .../DataProvider.Example.FSharp.fsproj | 38 + .../GetCustomers.lql | 4 + .../GetInvoices.lql | 4 + .../LqlValidator.fs | 66 + .../DataProvider.Example.FSharp/Program.fs | 45 + .../DataProviderIntegrationTests.cs | 30 +- .../DataProvider.SQLite.Cli/Program.cs | 45 +- .../DataProvider.SQLite.FSharp.fsproj | 25 + .../DataProvider.SQLite.FSharp/Program.fs | 11 + .../SimpleSqlite.fs | 21 + .../DataProvider.SQLite.FSharp/test.db | Bin 0 -> 12288 bytes .../DataProvider.SQLite.csproj | 38 +- .../DataProvider.SqlServer.csproj | 38 +- .../DbConnectionExtensionsTests.cs | 26 +- .../DataProvider.Tests/DbTransactTests.cs | 27 +- .../DbTransactionExtensionsTests.cs | 21 +- DataProvider/DataProvider/DataProvider.csproj | 35 +- .../DataProvider/DbConnectionExtensions.cs | 2 +- .../AuthorizationTests.cs | 10 +- .../Gatekeeper.Api.Tests/TokenServiceTests.cs | 580 ++-- Gatekeeper/Gatekeeper.Api/DataProvider.json | 2 +- .../Gatekeeper.Api/FileLoggerProvider.cs | 3 +- .../Gatekeeper.Api/Gatekeeper.Api.csproj | 2 +- Gatekeeper/Gatekeeper.Api/GlobalUsings.cs | 8 +- Gatekeeper/Gatekeeper.Api/TokenService.cs | 1 + .../DataProvider.json | 38 + .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 53 + .../TestDataSeeder.cs | 210 ++ .../DataProvider.json | 38 + .../Lql.TypeProvider.FSharp.Tests.fsproj | 52 + .../TypeProviderE2ETests.fs | 351 ++ .../typeprovider-test-schema.yaml | 84 + .../Lql.TypeProvider.FSharp.fsproj | 32 + .../LqlTypeProvider.fs | 78 + Lql/LqlExtension/examples/sample.lql | 279 +- Lql/LqlExtension/snippets/lql.json | 234 +- Lql/LqlExtension/syntaxes/lql.tmLanguage.json | 2 +- Lql/README.md | 252 +- 
Migration/Migration.Tests/LqlDefaultsTests.cs | 12 +- .../MigrationCornerCaseTests.cs | 1208 ++++--- .../SchemaYamlSerializerTests.cs | 62 +- .../Migration.Tests/SqliteMigrationTests.cs | 2905 +++++++++-------- .../Clinical/Clinical.Api/Clinical.Api.csproj | 2 +- .../Clinical/Clinical.Api/DataProvider.json | 2 +- .../Clinical.Api/FileLoggerProvider.cs | 3 +- .../Scheduling.Api/DataProvider.json | 2 +- .../Scheduling.Api/FileLoggerProvider.cs | 3 +- .../Scheduling.Api/Scheduling.Api.csproj | 2 +- .../Sync.Http.Tests/CrossDatabaseSyncTests.cs | 22 +- Sync/Sync.Http.Tests/HttpEndpointTests.cs | 5 +- .../HttpMappingSyncTests.cs | 24 +- .../CrossDatabaseSyncTests.cs | 12 +- .../ChangeApplierIntegrationTests.cs | 21 +- Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs | 22 +- .../SchemaAndTriggerTests.cs | 22 +- Sync/Sync.SQLite.Tests/SpecComplianceTests.cs | 22 +- .../Sync.SQLite.Tests/SpecConformanceTests.cs | 22 +- .../SqliteExtensionIntegrationTests.cs | 22 +- .../SubscriptionIntegrationTests.cs | 22 +- .../SyncRepositoryIntegrationTests.cs | 22 +- .../TombstoneIntegrationTests.cs | 40 +- Sync/Sync.Tests/SyncCoordinatorTests.cs | 38 +- Sync/Sync.Tests/SyncIntegrationTests.cs | 38 +- Sync/Sync.Tests/TestDb.cs | 25 +- Website/src/docs/lql.md | 243 -- 70 files changed, 4787 insertions(+), 2982 deletions(-) create mode 100644 DataProvider/DataProvider.Example.FSharp/DataProvider.Example.FSharp.fsproj create mode 100644 DataProvider/DataProvider.Example.FSharp/GetCustomers.lql create mode 100644 DataProvider/DataProvider.Example.FSharp/GetInvoices.lql create mode 100644 DataProvider/DataProvider.Example.FSharp/LqlValidator.fs create mode 100644 DataProvider/DataProvider.Example.FSharp/Program.fs create mode 100644 DataProvider/DataProvider.SQLite.FSharp/DataProvider.SQLite.FSharp.fsproj create mode 100644 DataProvider/DataProvider.SQLite.FSharp/Program.fs create mode 100644 DataProvider/DataProvider.SQLite.FSharp/SimpleSqlite.fs create mode 100644 
DataProvider/DataProvider.SQLite.FSharp/test.db create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests.Data/DataProvider.json create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests.Data/TestDataSeeder.cs create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests/DataProvider.json create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests/TypeProviderE2ETests.fs create mode 100644 Lql/Lql.TypeProvider.FSharp.Tests/typeprovider-test-schema.yaml create mode 100644 Lql/Lql.TypeProvider.FSharp/Lql.TypeProvider.FSharp.fsproj create mode 100644 Lql/Lql.TypeProvider.FSharp/LqlTypeProvider.fs delete mode 100644 Website/src/docs/lql.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e2e3752..3ea8147 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -206,6 +206,41 @@ jobs: name: test-results-lql-${{ strategy.job-index }} path: '**/TestResults/*.trx' + # LQL F# Type Provider tests + lql-fsharp-typeprovider-tests: + name: LQL F# Type Provider Tests + runs-on: ubuntu-latest + needs: [build, changes] + if: needs.changes.outputs.lql == 'true' + steps: + - uses: actions/checkout@v4 + + - name: Setup .NET + uses: actions/setup-dotnet@v4 + with: + dotnet-version: ${{ env.DOTNET_VERSION }} + + - name: Cache NuGet packages + uses: actions/cache@v4 + with: + path: ~/.nuget/packages + key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj', '**/*.fsproj') }} + restore-keys: | + ${{ runner.os }}-nuget- + + - name: Restore + run: dotnet restore Lql/Lql.TypeProvider.FSharp.Tests + + - name: Test + run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --no-restore --verbosity normal --logger "trx;LogFileName=test-results.trx" + + - name: Upload test results + uses: actions/upload-artifact@v4 + if: always() + with: + name: test-results-lql-fsharp-typeprovider + path: 
'**/TestResults/*.trx' + # Migration tests migration-tests: name: Migration Tests diff --git a/.vscode/launch.json b/.vscode/launch.json index 332add0..b415a89 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -39,6 +39,17 @@ "console": "internalConsole", "stopAtEntry": false }, + { + "name": "Run DataProvider.Example.FSharp", + "type": "coreclr", + "request": "launch", + "preLaunchTask": "build", + "program": "${workspaceFolder}/DataProvider/DataProvider.Example.FSharp/bin/Debug/net9.0/DataProvider.Example.FSharp.dll", + "args": [], + "cwd": "${workspaceFolder}/DataProvider/DataProvider.Example.FSharp", + "console": "internalConsole", + "stopAtEntry": false + }, { "name": "DB Browser", "type": "coreclr", diff --git a/Agents.md b/Agents.md index 7fa5299..e1ec4ec 100644 --- a/Agents.md +++ b/Agents.md @@ -8,7 +8,7 @@ ## Coding Rules -- **NEVER THROW** - Return `Result`. Wrap failures in try/catch +- **NEVER THROW** - Return `Result`. Wrap failures in try/catch - **No casting/!** - Pattern match on type only - **NO GIT** - Source control is illegal - **No suppressing warnings** - Illegal @@ -18,22 +18,32 @@ - **Copious ILogger** - Especially sync projects - **NO INTERFACES** - Use `Action`/`Func` - **Expressions over assignments** +- **Routinely format with csharpier** - `dotnet csharpier .` <- In root folder - **Named parameters** - No ordinal calls - **Close type hierarchies** - Private constructors: ```csharp public abstract partial record Result { private Result() { } } ``` -- **Extension methods on IDbConnection/IDbTransaction only** -- **Pattern match, don't if** - Switch expressions on type -- **No skipping tests** - Failing = OK, Skip = illegal -- **E2E tests only** - No mocks, integration testing -- **Type aliases for Results** - `using XResult = Result` -- **Immutable** - Records, `ImmutableList`, `FrozenSet`, `ImmutableArray` -- **NO REGEX** - ANTLR or SqlParserCS -- **XMLDOC on public members** - Except tests -- **< 450 LOC per file** -- **No 
commented code** - Delete it -- **No placeholders** - Leave compile errors with TODO +- **Skipping tests = ā›”ļø ILLEGAL** - Failing tests = OK. Aggressively unskip tests +- **Test at the highest level** - Avoid mocks. Only full integration testing +- **Keep files under 450 LOC and functions under 20 LOC** +- **Always use type aliases (using) for result types** - Don't write like this: `new Result.Ok` +- **All tables must have a SINGLE primary key** +- **Primary keys MUST be UUIDs** +- **No singletons** - Inject `Func` into static methods +- **Immutable types!** - Use records. Don't use `List`. Use `ImmutableList` `FrozenSet` or `ImmutableArray` +- **No in-memory dbs** - Real dbs all the way +- **NO REGEX** - Parse SQL with ANTLR .g4 grammars or SqlParserCS library +- **All public members require XMLDOC** - Except in test projects +- **One type per file** (except small records) +- **No commented-out code** - Delete it +- **No consecutive Console.WriteLine** - Use single string interpolation +- **No placeholders** - If incomplete, leave LOUD compilation error with TODO +- **Never use Fluent Assertions** + +## CSS +- **MINIMAL CSS** - Do not duplicate CSS clases +- **Name classes after component, NOT section** - Sections should not have their own CSS classes ## Testing - E2E with zero mocking diff --git a/CLAUDE.md b/CLAUDE.md index 82fe71c..045bd52 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -8,7 +8,7 @@ ## Coding Rules -- **NEVER THROW** - Return `Result`. Wrap failures in try/catch +- **NEVER THROW** - Return `Result``. Wrap failures in try/catch - **No casting/!** - Pattern match on type only - **NO GIT** - Source control is illegal - **No suppressing warnings** - Illegal @@ -32,6 +32,7 @@ public abstract partial record Result { private Result() { } - **Primary keys MUST be UUIDs** - **No singletons** - Inject `Func` into static methods - **Immutable types!** - Use records. Don't use `List`. 
Use `ImmutableList` `FrozenSet` or `ImmutableArray` +- **No in-memory dbs** - Real dbs all the way - **NO REGEX** - Parse SQL with ANTLR .g4 grammars or SqlParserCS library - **All public members require XMLDOC** - Except in test projects - **One type per file** (except small records) diff --git a/DataProvider.sln b/DataProvider.sln index 3a9342c..18c4532 100644 --- a/DataProvider.sln +++ b/DataProvider.sln @@ -31,7 +31,13 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DataProvider.Example.Tests" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DataProvider.Example", "DataProvider\DataProvider.Example\DataProvider.Example.csproj", "{EA9A0385-249F-4141-AD03-D67649110A84}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lql.Browser", "Lql\Lql.Browser\Lql.Browser.csproj", "{1B5BAB33-4256-400B-A4F8-F318418A3548}" +Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "Lql.TypeProvider.FSharp", "Lql\Lql.TypeProvider.FSharp\Lql.TypeProvider.FSharp.fsproj", "{B1234567-89AB-CDEF-0123-456789ABCDEF}" +EndProject +Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "DataProvider.SQLite.FSharp", "DataProvider\DataProvider.SQLite.FSharp\DataProvider.SQLite.FSharp.fsproj", "{D1234567-89AB-CDEF-0123-456789ABCDEF}" +EndProject +Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "DataProvider.Example.FSharp", "DataProvider\DataProvider.Example.FSharp\DataProvider.Example.FSharp.fsproj", "{5C11B1F1-F6FF-45B9-B037-EDD054EED3F3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lql.Browser", "Lql\Lql.Browser\Lql.Browser.csproj", "{0D96933C-DE5D-472B-9E9F-68DD15B85CF7}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Sync", "Sync", "{5E63119C-E70B-5D45-ECC9-8CBACC584223}" EndProject @@ -107,6 +113,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Dashboard.Web", "Samples\Da EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Dashboard.Web.Tests", 
"Samples\Dashboard\Dashboard.Web.Tests\Dashboard.Web.Tests.csproj", "{25C125F3-B766-4DCD-8032-DB89818FFBC3}" EndProject +Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Lql.TypeProvider.FSharp.Tests", "Lql\Lql.TypeProvider.FSharp.Tests\Lql.TypeProvider.FSharp.Tests.fsproj", "{B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lql.TypeProvider.FSharp.Tests.Data", "Lql\Lql.TypeProvider.FSharp.Tests.Data\Lql.TypeProvider.FSharp.Tests.Data.csproj", "{0D6A831B-4759-46F2-8527-51C8A9CB6F6F}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -261,18 +271,24 @@ Global {EA9A0385-249F-4141-AD03-D67649110A84}.Release|x64.Build.0 = Release|Any CPU {EA9A0385-249F-4141-AD03-D67649110A84}.Release|x86.ActiveCfg = Release|Any CPU {EA9A0385-249F-4141-AD03-D67649110A84}.Release|x86.Build.0 = Release|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Debug|x64.ActiveCfg = Debug|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Debug|x64.Build.0 = Debug|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Debug|x86.ActiveCfg = Debug|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Debug|x86.Build.0 = Debug|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Release|Any CPU.Build.0 = Release|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Release|x64.ActiveCfg = Release|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Release|x64.Build.0 = Release|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Release|x86.ActiveCfg = Release|Any CPU - {1B5BAB33-4256-400B-A4F8-F318418A3548}.Release|x86.Build.0 = Release|Any CPU + {B1234567-89AB-CDEF-0123-456789ABCDEF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{B1234567-89AB-CDEF-0123-456789ABCDEF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D1234567-89AB-CDEF-0123-456789ABCDEF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D1234567-89AB-CDEF-0123-456789ABCDEF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5C11B1F1-F6FF-45B9-B037-EDD054EED3F3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5C11B1F1-F6FF-45B9-B037-EDD054EED3F3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Debug|x64.ActiveCfg = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Debug|x64.Build.0 = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Debug|x86.ActiveCfg = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Debug|x86.Build.0 = Debug|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Release|Any CPU.Build.0 = Release|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Release|x64.ActiveCfg = Release|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Release|x64.Build.0 = Release|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Release|x86.ActiveCfg = Release|Any CPU + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7}.Release|x86.Build.0 = Release|Any CPU {C0B4116E-0635-4597-971D-6B70229FA30A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C0B4116E-0635-4597-971D-6B70229FA30A}.Debug|Any CPU.Build.0 = Debug|Any CPU {C0B4116E-0635-4597-971D-6B70229FA30A}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -645,6 +661,30 @@ Global {25C125F3-B766-4DCD-8032-DB89818FFBC3}.Release|x64.Build.0 = Release|Any CPU {25C125F3-B766-4DCD-8032-DB89818FFBC3}.Release|x86.ActiveCfg = Release|Any CPU {25C125F3-B766-4DCD-8032-DB89818FFBC3}.Release|x86.Build.0 = Release|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Debug|Any CPU.Build.0 = Debug|Any 
CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Debug|x64.ActiveCfg = Debug|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Debug|x64.Build.0 = Debug|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Debug|x86.ActiveCfg = Debug|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Debug|x86.Build.0 = Debug|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Release|Any CPU.Build.0 = Release|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Release|x64.ActiveCfg = Release|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Release|x64.Build.0 = Release|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Release|x86.ActiveCfg = Release|Any CPU + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92}.Release|x86.Build.0 = Release|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Debug|x64.ActiveCfg = Debug|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Debug|x64.Build.0 = Debug|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Debug|x86.ActiveCfg = Debug|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Debug|x86.Build.0 = Debug|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Release|Any CPU.Build.0 = Release|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Release|x64.ActiveCfg = Release|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Release|x64.Build.0 = Release|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Release|x86.ActiveCfg = Release|Any CPU + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -662,7 +702,10 @@ Global {A7EC2050-FE5E-4BBD-AF5F-7F07D3688118} = {43BAF0A3-C050-BE83-B489-7FC6F9FDE235} {16FA9B36-CB2A-4B79-A3BE-937C94BF03F8} = 
{43BAF0A3-C050-BE83-B489-7FC6F9FDE235} {EA9A0385-249F-4141-AD03-D67649110A84} = {43BAF0A3-C050-BE83-B489-7FC6F9FDE235} - {1B5BAB33-4256-400B-A4F8-F318418A3548} = {54B846BA-A27D-B76F-8730-402A5742FF43} + {B1234567-89AB-CDEF-0123-456789ABCDEF} = {54B846BA-A27D-B76F-8730-402A5742FF43} + {D1234567-89AB-CDEF-0123-456789ABCDEF} = {43BAF0A3-C050-BE83-B489-7FC6F9FDE235} + {5C11B1F1-F6FF-45B9-B037-EDD054EED3F3} = {43BAF0A3-C050-BE83-B489-7FC6F9FDE235} + {0D96933C-DE5D-472B-9E9F-68DD15B85CF7} = {54B846BA-A27D-B76F-8730-402A5742FF43} {C0B4116E-0635-4597-971D-6B70229FA30A} = {5E63119C-E70B-5D45-ECC9-8CBACC584223} {9B303409-0052-45B9-8616-CC1ED80A5595} = {5E63119C-E70B-5D45-ECC9-8CBACC584223} {50CFDEC4-66C8-4330-8D5F-9D96A764378B} = {5E63119C-E70B-5D45-ECC9-8CBACC584223} @@ -688,8 +731,15 @@ Global {4EB6CC28-7D1B-4E39-80F2-84CA4494AF23} = {048F5F03-6DDC-C04F-70D5-B8139DC8E373} {2FD305AC-927E-4D24-9FA6-923C30E4E4A8} = {048F5F03-6DDC-C04F-70D5-B8139DC8E373} {57572A45-33CD-4928-9C30-13480AEDB313} = {C7F49633-8D5E-7E19-1580-A6459B2EAE66} + {A8A70E6D-1D43-437F-9971-44A4FA1BDD74} = {43BAF0A3-C050-BE83-B489-7FC6F9FDE235} + {0858FE19-C59B-4A77-B76E-7053E8AFCC8D} = {C7F49633-8D5E-7E19-1580-A6459B2EAE66} + {CA395494-F072-4A5B-9DD4-950530A69E0E} = {5D20AA90-6969-D8BD-9DCD-8634F4692FDA} + {1AE87774-E914-40BC-95BA-56FB45D78C0D} = {54B846BA-A27D-B76F-8730-402A5742FF43} + {6AB2EA96-4A75-49DB-AC65-B247BBFAE9A3} = {54B846BA-A27D-B76F-8730-402A5742FF43} {A82453CD-8E3C-44B7-A78F-97F392016385} = {B03CA193-C175-FB88-B41C-CBBC0E037C7E} {25C125F3-B766-4DCD-8032-DB89818FFBC3} = {B03CA193-C175-FB88-B41C-CBBC0E037C7E} + {B0104C42-1B46-4CA5-9E91-A5F09D7E5B92} = {54B846BA-A27D-B76F-8730-402A5742FF43} + {0D6A831B-4759-46F2-8527-51C8A9CB6F6F} = {54B846BA-A27D-B76F-8730-402A5742FF43} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {53128A75-E7B6-4B83-B079-A309FCC2AD9C} diff --git a/DataProvider/DataProvider.Example.FSharp/DataProvider.Example.FSharp.fsproj 
b/DataProvider/DataProvider.Example.FSharp/DataProvider.Example.FSharp.fsproj new file mode 100644 index 0000000..aece4b1 --- /dev/null +++ b/DataProvider/DataProvider.Example.FSharp/DataProvider.Example.FSharp.fsproj @@ -0,0 +1,38 @@ + + + + Exe + net9.0 + true + preview + false + 3 + + + + + + + + + + PreserveNewest + + + PreserveNewest + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/DataProvider/DataProvider.Example.FSharp/GetCustomers.lql b/DataProvider/DataProvider.Example.FSharp/GetCustomers.lql new file mode 100644 index 0000000..18db85f --- /dev/null +++ b/DataProvider/DataProvider.Example.FSharp/GetCustomers.lql @@ -0,0 +1,4 @@ +Customer +|> join(Address, on = Customer.Id = Address.CustomerId) +|> select(Customer.Id, Customer.CustomerName, Customer.Email, Customer.Phone, Customer.CreatedDate, Address.Id AS AddressId, Address.CustomerId, Address.Street, Address.City, Address.State, Address.ZipCode, Address.Country) +|> order_by(Customer.CustomerName) \ No newline at end of file diff --git a/DataProvider/DataProvider.Example.FSharp/GetInvoices.lql b/DataProvider/DataProvider.Example.FSharp/GetInvoices.lql new file mode 100644 index 0000000..9c4fc9d --- /dev/null +++ b/DataProvider/DataProvider.Example.FSharp/GetInvoices.lql @@ -0,0 +1,4 @@ +Invoice +|> join(InvoiceLine, on = Invoice.Id = InvoiceLine.InvoiceId) +|> select(Invoice.Id, Invoice.InvoiceNumber, Invoice.InvoiceDate, Invoice.CustomerName, Invoice.CustomerEmail, Invoice.TotalAmount, InvoiceLine.Description, InvoiceLine.Quantity, InvoiceLine.UnitPrice, InvoiceLine.Amount) +|> order_by(Invoice.InvoiceDate) \ No newline at end of file diff --git a/DataProvider/DataProvider.Example.FSharp/LqlValidator.fs b/DataProvider/DataProvider.Example.FSharp/LqlValidator.fs new file mode 100644 index 0000000..94f03a5 --- /dev/null +++ b/DataProvider/DataProvider.Example.FSharp/LqlValidator.fs @@ -0,0 +1,66 @@ +module LqlValidator + +open System +open Microsoft.Data.Sqlite +open Lql +open 
Lql.SQLite +open Outcome +open Selecta + +//TODO: this does not belong here. Move to core code + +/// Validates LQL at compile time and provides execution methods +type LqlQuery private() = + + /// Validates and executes an LQL query + static member inline Execute(connectionString: string, [] lqlQuery: string) = + // Validate at compile time + let statementResult = LqlStatementConverter.ToStatement(lqlQuery) + match statementResult with + | :? Outcome.Result.Ok as success -> + let lqlStatement = success.Value + match lqlStatement.AstNode with + | :? Pipeline as pipeline -> + let sqliteContext = SQLiteContext() + let sql = PipelineProcessor.ConvertPipelineToSql(pipeline, sqliteContext) + + // Execute the query + use conn = new SqliteConnection(connectionString) + conn.Open() + use cmd = new SqliteCommand(sql, conn) + use reader = cmd.ExecuteReader() + + let results = ResizeArray>() + while reader.Read() do + let row = + [| for i in 0 .. reader.FieldCount - 1 -> + let name = reader.GetName(i) + let value = if reader.IsDBNull(i) then box DBNull.Value else reader.GetValue(i) + (name, value) |] + |> Map.ofArray + results.Add(row) + + Ok(results |> List.ofSeq) + | _ -> + Error "Invalid LQL statement type" + | :? Outcome.Result.Error as failure -> + Error(sprintf "Invalid LQL syntax: %s" failure.Value.Message) + | _ -> + Error "Unknown result type from LQL parser" + + /// Gets the SQL for an LQL query (for debugging) + static member inline ToSql([] lqlQuery: string) = + let statementResult = LqlStatementConverter.ToStatement(lqlQuery) + match statementResult with + | :? Outcome.Result.Ok as success -> + let lqlStatement = success.Value + match lqlStatement.AstNode with + | :? Pipeline as pipeline -> + let sqliteContext = SQLiteContext() + Ok(PipelineProcessor.ConvertPipelineToSql(pipeline, sqliteContext)) + | _ -> + Error "Invalid LQL statement type" + | :? 
Outcome.Result.Error as failure -> + Error(sprintf "Invalid LQL syntax: %s" failure.Value.Message) + | _ -> + Error "Unknown result type from LQL parser" \ No newline at end of file diff --git a/DataProvider/DataProvider.Example.FSharp/Program.fs b/DataProvider/DataProvider.Example.FSharp/Program.fs new file mode 100644 index 0000000..3c4b1fc --- /dev/null +++ b/DataProvider/DataProvider.Example.FSharp/Program.fs @@ -0,0 +1,45 @@ +open Microsoft.Data.Sqlite +open Lql + +// āœ… VALID LQL using TRUE type provider with static parameter +type ValidQuery = LqlCommand<"Customer |> select(*)"> + +// āŒ INVALID LQL - This WILL cause COMPILATION FAILURE +// Uncomment the line below to test: +// type InvalidQuery = LqlCommand<"Customer |> seflect(*)"> // misspelled "select" + +[] +let main _ = + let connStr = "Data Source=test.db" + + // Setup database with data + use conn = new SqliteConnection(connStr) + conn.Open() + use cmd = new SqliteCommand("DROP TABLE IF EXISTS Customer; CREATE TABLE Customer (Id INTEGER PRIMARY KEY, CustomerName TEXT); INSERT INTO Customer VALUES (1, 'Acme Corp'), (2, 'Tech Corp');", conn) + cmd.ExecuteNonQuery() |> ignore + + printfn "šŸ”„ TESTING TRUE F# TYPE PROVIDER WITH STATIC PARAMETERS šŸ”„" + printfn "============================================================" + + printfn "āœ… Valid LQL compiles successfully:" + printfn " LQL: %s" ValidQuery.Query + printfn " SQL: %s" ValidQuery.Sql + + // Execute the generated SQL directly + use queryCmd = new SqliteCommand(ValidQuery.Sql, conn) + use reader = queryCmd.ExecuteReader() + + printfn "\nāœ… Execution Results:" + while reader.Read() do + let id = reader.GetValue(0) + let name = reader.GetValue(1) + printfn " ID: %A, Name: %A" id name + + conn.Close() + + printfn "\nšŸŽ‰ TRUE TYPE PROVIDER WORKING!" 
+ printfn " - Valid LQL with static parameter compiles successfully" + printfn " - Invalid LQL (when uncommented) WILL cause TRUE COMPILATION FAILURE" + printfn " - This follows the EXACT FSharp.Data.SqlClient pattern" + + 0 \ No newline at end of file diff --git a/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs b/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs index 2a8df7d..b61291d 100644 --- a/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs +++ b/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs @@ -13,11 +13,14 @@ namespace DataProvider.Example.Tests; /// public sealed class DataProviderIntegrationTests : IDisposable { - private readonly string _connectionString = "Data Source=:memory:"; + private readonly string _dbPath; + private readonly string _connectionString; private readonly SqliteConnection _connection; public DataProviderIntegrationTests() { + _dbPath = Path.Combine(Path.GetTempPath(), $"dataprovider_integration_tests_{Guid.NewGuid()}.db"); + _connectionString = $"Data Source={_dbPath}"; _connection = new SqliteConnection(_connectionString); } @@ -711,7 +714,10 @@ public async Task PredicateBuilder_Or_E2E_CombinesPredicatesWithOrLogic() var predicate = PredicateBuilder.False(); predicate = predicate.Or(c => c.CustomerName == "Acme Corp"); predicate = predicate.Or(c => c.CustomerName == "Tech Solutions"); - var query = SelectStatement.From("Customer").Where(predicate).OrderBy(c => c.CustomerName); + var query = SelectStatement + .From("Customer") + .Where(predicate) + .OrderBy(c => c.CustomerName); // Act var statement = query.ToSqlStatement(); @@ -817,7 +823,10 @@ public async Task PredicateBuilder_DynamicAndConditions_E2E_BuildsFilterChains() predicate = predicate.And(c => c.Email != null); predicate = predicate.And(c => c.CustomerName != null); - var query = SelectStatement.From("Customer").Where(predicate).OrderBy(c => c.CustomerName); + var query = 
SelectStatement + .From("Customer") + .Where(predicate) + .OrderBy(c => c.CustomerName); var statement = query.ToSqlStatement(); var result = _connection.GetRecords(statement, s => s.ToSQLite(), MapCustomer); @@ -1221,23 +1230,18 @@ public void FluentQueryBuilder_AllComparisonOperators_GenerateCorrectSQL() public void Dispose() { _connection?.Dispose(); - - // Clean up test database file - var dbFileName = _connectionString.Replace("Data Source=", "", StringComparison.Ordinal); - if (File.Exists(dbFileName)) + if (File.Exists(_dbPath)) { try { - File.Delete(dbFileName); + File.Delete(_dbPath); } +#pragma warning disable CA1031 // Do not catch general exception types - file cleanup is best-effort catch (IOException) { - // File might be in use, ignore - } - catch (UnauthorizedAccessException) - { - // No permission to delete, ignore + /* File may be locked */ } +#pragma warning restore CA1031 } } } diff --git a/DataProvider/DataProvider.SQLite.Cli/Program.cs b/DataProvider/DataProvider.SQLite.Cli/Program.cs index f036f13..56c9a56 100644 --- a/DataProvider/DataProvider.SQLite.Cli/Program.cs +++ b/DataProvider/DataProvider.SQLite.Cli/Program.cs @@ -85,10 +85,16 @@ DirectoryInfo outDir return 1; } + // Make the connection string path absolute relative to project directory + var absoluteConnectionString = MakeConnectionStringAbsolute( + cfg.ConnectionString, + projectDir.FullName + ); + // Verify DB exists and is accessible; if empty, run schema file try { - using var conn = new Microsoft.Data.Sqlite.SqliteConnection(cfg.ConnectionString); + using var conn = new Microsoft.Data.Sqlite.SqliteConnection(absoluteConnectionString); await conn.OpenAsync().ConfigureAwait(false); // Check if any tables exist @@ -196,7 +202,7 @@ is Result.Error< ).Value; var colsResult = await SqliteCodeGenerator - .GetColumnMetadataFromSqlAsync(cfg.ConnectionString, sql, stmt.Parameters) + .GetColumnMetadataFromSqlAsync(absoluteConnectionString, sql, stmt.Parameters) .ConfigureAwait(false); if 
( colsResult @@ -238,7 +244,7 @@ as Result, SqlError>.Error< baseName, sql, stmt, - cfg.ConnectionString, + absoluteConnectionString, cols.Value, hasCustomImplementation: false, grouping @@ -296,7 +302,7 @@ as Result, SqlError>.Error< // Use SQLite's native schema inspection to get table metadata using var conn = new Microsoft.Data.Sqlite.SqliteConnection( - cfg.ConnectionString + absoluteConnectionString ); await conn.OpenAsync().ConfigureAwait(false); @@ -484,6 +490,37 @@ private static string FormatSqliteMetadataMessage(string detailed) return final; } + /// + /// Makes a SQLite connection string's Data Source path absolute relative to a project directory. + /// + private static string MakeConnectionStringAbsolute(string connectionString, string projectDir) + { + // Parse "Data Source=path" from connection string + const string dataSourcePrefix = "Data Source="; + var idx = connectionString.IndexOf(dataSourcePrefix, StringComparison.OrdinalIgnoreCase); + if (idx < 0) + return connectionString; + + var pathStart = idx + dataSourcePrefix.Length; + var semicolonIdx = connectionString.IndexOf(';', pathStart); + var dbPath = + semicolonIdx >= 0 + ? connectionString[pathStart..semicolonIdx] + : connectionString[pathStart..]; + + // If already absolute or special (like :memory:), return as-is + if (Path.IsPathRooted(dbPath) || dbPath.StartsWith(':')) + return connectionString; + + // Make path absolute relative to project directory + var absolutePath = Path.GetFullPath(Path.Combine(projectDir, dbPath)); + + // Reconstruct connection string + var prefix = connectionString[..idx]; + var suffix = semicolonIdx >= 0 ? connectionString[semicolonIdx..] 
: string.Empty; + return $"{prefix}{dataSourcePrefix}{absolutePath}{suffix}"; + } + /// /// Maps SQLite types to C# types /// diff --git a/DataProvider/DataProvider.SQLite.FSharp/DataProvider.SQLite.FSharp.fsproj b/DataProvider/DataProvider.SQLite.FSharp/DataProvider.SQLite.FSharp.fsproj new file mode 100644 index 0000000..4c051d3 --- /dev/null +++ b/DataProvider/DataProvider.SQLite.FSharp/DataProvider.SQLite.FSharp.fsproj @@ -0,0 +1,25 @@ + + + + Exe + net9.0 + true + preview + false + 3 + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/DataProvider/DataProvider.SQLite.FSharp/Program.fs b/DataProvider/DataProvider.SQLite.FSharp/Program.fs new file mode 100644 index 0000000..a1db064 --- /dev/null +++ b/DataProvider/DataProvider.SQLite.FSharp/Program.fs @@ -0,0 +1,11 @@ +open System + +printfn "F# SQLite Data Provider Example" +printfn "================================" + +printfn "āœ… F# project references the C# DataProvider.SQLite library" +printfn "āœ… No code duplication - uses existing C# implementation" + +[] +let main args = + 0 \ No newline at end of file diff --git a/DataProvider/DataProvider.SQLite.FSharp/SimpleSqlite.fs b/DataProvider/DataProvider.SQLite.FSharp/SimpleSqlite.fs new file mode 100644 index 0000000..b280c03 --- /dev/null +++ b/DataProvider/DataProvider.SQLite.FSharp/SimpleSqlite.fs @@ -0,0 +1,21 @@ +namespace DataProvider.SQLite.FSharp + +open System.Data +open DataProvider + +/// +/// F# bindings for the existing C# DataProvider functionality +/// +module SimpleSqlite = + + /// + /// Execute query using existing C# DbConnectionExtensions + /// + let executeQuery (connection: IDbConnection) (sql: string) mapper = + DbConnectionExtensions.Query(connection, sql, null, mapper) + + /// + /// Execute parameterized query using existing C# DbConnectionExtensions + /// + let executeQueryWithParams (connection: IDbConnection) (sql: string) (parameters: IDataParameter seq) mapper = + DbConnectionExtensions.Query(connection, sql, 
parameters, mapper) \ No newline at end of file diff --git a/DataProvider/DataProvider.SQLite.FSharp/test.db b/DataProvider/DataProvider.SQLite.FSharp/test.db new file mode 100644 index 0000000000000000000000000000000000000000..98434a018f1135d7df88b296a3839e9fd0d7733b GIT binary patch literal 12288 zcmeI%!HUyB7zgl4+HPH;jkg|Taq7iZ?3#ApQ7^K1 z@#@V-@EJV&2p)X~AHiuPxWx@5;6d;|kYxJtec_wmoF<(8&zy*wAIv9X_xaeU`?a)6P|zGt7XRw#&xMp^qH%;m|pu z$L!=zZvJS!N<_rt3w8FyrS=oc$}dX0u-DOy$JAv{WqPraLr)$r3l_K{@j@yWZxwj^ z(Zpfq*xoFac789ZE9;M#!|V~8(kx6nX3)LVqx - false - false + DataProvider.SQLite - 1.0.0 - DataProvider - SQLite source generator for DataProvider + 0.1.0-beta + ChristianFindlay + SQLite source generator for DataProvider. Provides compile-time safe database access with automatic code generation from SQL files for SQLite databases. + source-generator;sql;sqlite;database;compile-time-safety;code-generation + https://github.com/MelbourneDeveloper/DataProvider + https://github.com/MelbourneDeveloper/DataProvider + git + MIT + README.md + false + Initial beta release of DataProvider.SQLite source generator. + + + true CA1849;CA2100;EPC13;CA1307;CS3021;CS0108 @@ -36,4 +46,22 @@ + + + + + + + + + true + portable + true + true + snupkg + true + true + true + true + \ No newline at end of file diff --git a/DataProvider/DataProvider.SqlServer/DataProvider.SqlServer.csproj b/DataProvider/DataProvider.SqlServer/DataProvider.SqlServer.csproj index e4e57ce..476a2d1 100644 --- a/DataProvider/DataProvider.SqlServer/DataProvider.SqlServer.csproj +++ b/DataProvider/DataProvider.SqlServer/DataProvider.SqlServer.csproj @@ -1,11 +1,21 @@ - false - false + DataProvider.SqlServer - 1.0.0 - DataProvider - SQL Server source generator for DataProvider + 0.1.0-beta + ChristianFindlay + SQL Server source generator for DataProvider. Provides compile-time safe database access with automatic code generation from SQL files for SQL Server databases. 
+ source-generator;sql;sqlserver;database;compile-time-safety;code-generation + https://github.com/MelbourneDeveloper/DataProvider + https://github.com/MelbourneDeveloper/DataProvider + git + MIT + README.md + false + Initial beta release of DataProvider.SqlServer source generator. + + + true @@ -16,4 +26,22 @@ + + + + + + + + + true + portable + true + true + snupkg + true + true + true + true + \ No newline at end of file diff --git a/DataProvider/DataProvider.Tests/DbConnectionExtensionsTests.cs b/DataProvider/DataProvider.Tests/DbConnectionExtensionsTests.cs index d114538..be6041c 100644 --- a/DataProvider/DataProvider.Tests/DbConnectionExtensionsTests.cs +++ b/DataProvider/DataProvider.Tests/DbConnectionExtensionsTests.cs @@ -10,10 +10,12 @@ namespace DataProvider.Tests; public sealed class DbConnectionExtensionsTests : IDisposable { private readonly SqliteConnection _connection; + private readonly string _dbPath; public DbConnectionExtensionsTests() { - _connection = new SqliteConnection("Data Source=:memory:"); + _dbPath = Path.Combine(Path.GetTempPath(), $"dbconn_ext_tests_{Guid.NewGuid()}.db"); + _connection = new SqliteConnection($"Data Source={_dbPath}"); _connection.Open(); CreateSchema(); } @@ -316,7 +318,27 @@ public void Scalar_WithInvalidSql_ReturnsError() Assert.False(result is NullableStringOk); } - public void Dispose() => _connection?.Dispose(); + public void Dispose() + { + _connection?.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } +#pragma warning disable CA1031 // Cleanup is best-effort + catch (IOException) + { + // File may be locked by another process + } + catch (UnauthorizedAccessException) + { + // May not have permission + } +#pragma warning restore CA1031 + } + } private sealed record TestRecord { diff --git a/DataProvider/DataProvider.Tests/DbTransactTests.cs b/DataProvider/DataProvider.Tests/DbTransactTests.cs index adb122e..dabb27d 100644 --- a/DataProvider/DataProvider.Tests/DbTransactTests.cs 
+++ b/DataProvider/DataProvider.Tests/DbTransactTests.cs @@ -10,11 +10,14 @@ namespace DataProvider.Tests; /// public sealed class DbTransactTests : IDisposable { - private readonly string _connectionString = "Data Source=:memory:"; + private readonly string _dbPath; + private readonly string _connectionString; private readonly SqliteConnection _connection; public DbTransactTests() { + _dbPath = Path.Combine(Path.GetTempPath(), $"dbtransact_tests_{Guid.NewGuid()}.db"); + _connectionString = $"Data Source={_dbPath}"; _connection = new SqliteConnection(_connectionString); } @@ -241,5 +244,25 @@ Name TEXT NOT NULL await command.ExecuteNonQueryAsync().ConfigureAwait(false); } - public void Dispose() => _connection?.Dispose(); + public void Dispose() + { + _connection?.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } +#pragma warning disable CA1031 // Cleanup is best-effort + catch (IOException) + { + // File may be locked by another process + } + catch (UnauthorizedAccessException) + { + // May not have permission + } +#pragma warning restore CA1031 + } + } } diff --git a/DataProvider/DataProvider.Tests/DbTransactionExtensionsTests.cs b/DataProvider/DataProvider.Tests/DbTransactionExtensionsTests.cs index a8dc8ab..8ff1710 100644 --- a/DataProvider/DataProvider.Tests/DbTransactionExtensionsTests.cs +++ b/DataProvider/DataProvider.Tests/DbTransactionExtensionsTests.cs @@ -24,13 +24,15 @@ public sealed class DbTransactionExtensionsTests : IDisposable { private readonly SqliteConnection _connection; private readonly SqliteTransaction _transaction; + private readonly string _dbPath; /// /// Initializes a new instance of . 
/// public DbTransactionExtensionsTests() { - _connection = new SqliteConnection("Data Source=:memory:"); + _dbPath = Path.Combine(Path.GetTempPath(), $"dbtrans_ext_tests_{Guid.NewGuid()}.db"); + _connection = new SqliteConnection($"Data Source={_dbPath}"); _connection.Open(); CreateSchema(); _transaction = _connection.BeginTransaction(); @@ -151,6 +153,23 @@ public void Dispose() { _transaction?.Dispose(); _connection?.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } +#pragma warning disable CA1031 // Cleanup is best-effort + catch (IOException) + { + // File may be locked by another process + } + catch (UnauthorizedAccessException) + { + // May not have permission + } +#pragma warning restore CA1031 + } } [Fact] diff --git a/DataProvider/DataProvider/DataProvider.csproj b/DataProvider/DataProvider/DataProvider.csproj index b5cd412..b7a79ed 100644 --- a/DataProvider/DataProvider/DataProvider.csproj +++ b/DataProvider/DataProvider/DataProvider.csproj @@ -1,9 +1,22 @@ - - false + + DataProvider + 0.1.0-beta + ChristianFindlay + A source generator that creates compile-time safe extension methods for database operations from SQL files. Generates strongly-typed C# code based on your SQL queries and database schema, ensuring type safety and eliminating runtime SQL errors. + source-generator;sql;database;compile-time-safety;code-generation;sqlite;sqlserver + https://github.com/christianfindlay/DataProvider + https://github.com/christianfindlay/DataProvider + git + MIT + README.md false + Initial release of DataProvider source generator for compile-time safe database operations. 
+ + + true @@ -12,9 +25,27 @@ + + + + + $(NoWarn);EPC13;EPS06;CA1002;CA1822;CA1859 + + + true + portable + true + true + snupkg + true + true + true + true + + diff --git a/DataProvider/DataProvider/DbConnectionExtensions.cs b/DataProvider/DataProvider/DbConnectionExtensions.cs index 25fa74d..a023be0 100644 --- a/DataProvider/DataProvider/DbConnectionExtensions.cs +++ b/DataProvider/DataProvider/DbConnectionExtensions.cs @@ -10,7 +10,7 @@ namespace DataProvider; /// /// /// -/// using var connection = new SqliteConnection("Data Source=:memory:"); +/// using var connection = new SqliteConnection("Data Source=mydb.db"); /// connection.Open(); /// /// // Execute a query with mapping diff --git a/Gatekeeper/Gatekeeper.Api.Tests/AuthorizationTests.cs b/Gatekeeper/Gatekeeper.Api.Tests/AuthorizationTests.cs index 3d1faa1..cbeda03 100644 --- a/Gatekeeper/Gatekeeper.Api.Tests/AuthorizationTests.cs +++ b/Gatekeeper/Gatekeeper.Api.Tests/AuthorizationTests.cs @@ -528,7 +528,8 @@ string permissionCode ), }; - var permId = existingPerm?.id + var permId = + existingPerm?.id ?? throw new InvalidOperationException( $"Permission '{permissionCode}' not found in seeded database" ); @@ -552,7 +553,9 @@ string permissionCode if (grantResult is Result.Error grantErr) { - throw new InvalidOperationException($"Failed to insert grant: {grantErr.Value.Message}"); + throw new InvalidOperationException( + $"Failed to insert grant: {grantErr.Value.Message}" + ); } tx.Commit(); @@ -585,7 +588,8 @@ string permissionCode ), }; - var permId = existingPerm?.id + var permId = + existingPerm?.id ?? 
throw new InvalidOperationException( $"Permission '{permissionCode}' not found in seeded database" ); diff --git a/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs b/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs index eee498a..edb35bd 100644 --- a/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs +++ b/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs @@ -124,300 +124,348 @@ public void CreateToken_ExpirationIsCorrect() [Fact] public async Task ValidateTokenAsync_ValidToken_ReturnsOk() { - using var conn = CreateInMemoryDb(); - - var token = TokenService.CreateToken( - "user-valid", - "Valid User", - "valid@example.com", - ["user"], - TestSigningKey, - TimeSpan.FromHours(1) - ); - - var result = await TokenService.ValidateTokenAsync( - conn, - token, - TestSigningKey, - checkRevocation: false - ); - - Assert.IsType(result); - var ok = (TokenService.TokenValidationOk)result; - Assert.Equal("user-valid", ok.Claims.UserId); - Assert.Equal("Valid User", ok.Claims.DisplayName); - Assert.Equal("valid@example.com", ok.Claims.Email); - Assert.Contains("user", ok.Claims.Roles); + var (conn, dbPath) = CreateTestDb(); + try + { + var token = TokenService.CreateToken( + "user-valid", + "Valid User", + "valid@example.com", + ["user"], + TestSigningKey, + TimeSpan.FromHours(1) + ); + + var result = await TokenService.ValidateTokenAsync( + conn, + token, + TestSigningKey, + checkRevocation: false + ); + + Assert.IsType(result); + var ok = (TokenService.TokenValidationOk)result; + Assert.Equal("user-valid", ok.Claims.UserId); + Assert.Equal("Valid User", ok.Claims.DisplayName); + Assert.Equal("valid@example.com", ok.Claims.Email); + Assert.Contains("user", ok.Claims.Roles); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task ValidateTokenAsync_InvalidFormat_ReturnsError() { - using var conn = CreateInMemoryDb(); - - var result = await TokenService.ValidateTokenAsync( - conn, - "not-a-jwt", - TestSigningKey, - checkRevocation: false - ); 
- - Assert.IsType(result); - var error = (TokenService.TokenValidationError)result; - Assert.Equal("Invalid token format", error.Reason); + var (conn, dbPath) = CreateTestDb(); + try + { + var result = await TokenService.ValidateTokenAsync( + conn, + "not-a-jwt", + TestSigningKey, + checkRevocation: false + ); + + Assert.IsType(result); + var error = (TokenService.TokenValidationError)result; + Assert.Equal("Invalid token format", error.Reason); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task ValidateTokenAsync_TwoPartToken_ReturnsError() { - using var conn = CreateInMemoryDb(); - - var result = await TokenService.ValidateTokenAsync( - conn, - "header.payload", - TestSigningKey, - checkRevocation: false - ); - - Assert.IsType(result); - var error = (TokenService.TokenValidationError)result; - Assert.Equal("Invalid token format", error.Reason); + var (conn, dbPath) = CreateTestDb(); + try + { + var result = await TokenService.ValidateTokenAsync( + conn, + "header.payload", + TestSigningKey, + checkRevocation: false + ); + + Assert.IsType(result); + var error = (TokenService.TokenValidationError)result; + Assert.Equal("Invalid token format", error.Reason); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task ValidateTokenAsync_InvalidSignature_ReturnsError() { - using var conn = CreateInMemoryDb(); - - var token = TokenService.CreateToken( - "user-sig", - "Sig User", - "sig@example.com", - [], - TestSigningKey, - TimeSpan.FromHours(1) - ); - - // Use different key for validation - var differentKey = new byte[32]; - differentKey[0] = 0xFF; - - var result = await TokenService.ValidateTokenAsync( - conn, - token, - differentKey, - checkRevocation: false - ); - - Assert.IsType(result); - var error = (TokenService.TokenValidationError)result; - Assert.Equal("Invalid signature", error.Reason); + var (conn, dbPath) = CreateTestDb(); + try + { + var token = TokenService.CreateToken( + "user-sig", + "Sig User", + 
"sig@example.com", + [], + TestSigningKey, + TimeSpan.FromHours(1) + ); + + // Use different key for validation + var differentKey = new byte[32]; + differentKey[0] = 0xFF; + + var result = await TokenService.ValidateTokenAsync( + conn, + token, + differentKey, + checkRevocation: false + ); + + Assert.IsType(result); + var error = (TokenService.TokenValidationError)result; + Assert.Equal("Invalid signature", error.Reason); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task ValidateTokenAsync_ExpiredToken_ReturnsError() { - using var conn = CreateInMemoryDb(); - - // Create token that expired 1 hour ago - var token = TokenService.CreateToken( - "user-expired", - "Expired User", - "expired@example.com", - [], - TestSigningKey, - TimeSpan.FromHours(-2) // Negative = already expired - ); - - var result = await TokenService.ValidateTokenAsync( - conn, - token, - TestSigningKey, - checkRevocation: false - ); - - Assert.IsType(result); - var error = (TokenService.TokenValidationError)result; - Assert.Equal("Token expired", error.Reason); + var (conn, dbPath) = CreateTestDb(); + try + { + // Create token that expired 1 hour ago + var token = TokenService.CreateToken( + "user-expired", + "Expired User", + "expired@example.com", + [], + TestSigningKey, + TimeSpan.FromHours(-2) // Negative = already expired + ); + + var result = await TokenService.ValidateTokenAsync( + conn, + token, + TestSigningKey, + checkRevocation: false + ); + + Assert.IsType(result); + var error = (TokenService.TokenValidationError)result; + Assert.Equal("Token expired", error.Reason); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task ValidateTokenAsync_RevokedToken_ReturnsError() { - using var conn = CreateInMemoryDb(); - - var token = TokenService.CreateToken( - "user-revoked", - "Revoked User", - "revoked@example.com", - [], - TestSigningKey, - TimeSpan.FromHours(1) - ); - - // Extract JTI and revoke - var parts = token.Split('.'); - var 
payloadJson = Base64UrlDecode(parts[1]); - var payload = JsonDocument.Parse(payloadJson); - var jti = payload.RootElement.GetProperty("jti").GetString()!; - - var now = DateTime.UtcNow.ToString("o", CultureInfo.InvariantCulture); - var exp = DateTime.UtcNow.AddHours(1).ToString("o", CultureInfo.InvariantCulture); - - // Insert user and revoked session using raw SQL (consistent with other tests) - using var tx = conn.BeginTransaction(); - - using var userCmd = conn.CreateCommand(); - userCmd.Transaction = tx; - userCmd.CommandText = - @"INSERT INTO gk_user (id, display_name, email, created_at, last_login_at, is_active, metadata) - VALUES (@id, @name, @email, @now, NULL, 1, NULL)"; - userCmd.Parameters.AddWithValue("@id", "user-revoked"); - userCmd.Parameters.AddWithValue("@name", "Revoked User"); - userCmd.Parameters.AddWithValue("@email", DBNull.Value); - userCmd.Parameters.AddWithValue("@now", now); - await userCmd.ExecuteNonQueryAsync().ConfigureAwait(false); - - using var sessionCmd = conn.CreateCommand(); - sessionCmd.Transaction = tx; - sessionCmd.CommandText = - @"INSERT INTO gk_session (id, user_id, credential_id, created_at, expires_at, last_activity_at, ip_address, user_agent, is_revoked) - VALUES (@id, @user_id, NULL, @created, @expires, @activity, NULL, NULL, 1)"; - sessionCmd.Parameters.AddWithValue("@id", jti); - sessionCmd.Parameters.AddWithValue("@user_id", "user-revoked"); - sessionCmd.Parameters.AddWithValue("@created", now); - sessionCmd.Parameters.AddWithValue("@expires", exp); - sessionCmd.Parameters.AddWithValue("@activity", now); - await sessionCmd.ExecuteNonQueryAsync().ConfigureAwait(false); - - tx.Commit(); - - var result = await TokenService.ValidateTokenAsync( - conn, - token, - TestSigningKey, - checkRevocation: true - ); - - Assert.IsType(result); - var error = (TokenService.TokenValidationError)result; - Assert.Equal("Token revoked", error.Reason); + var (conn, dbPath) = CreateTestDb(); + try + { + var token = TokenService.CreateToken( 
+ "user-revoked", + "Revoked User", + "revoked@example.com", + [], + TestSigningKey, + TimeSpan.FromHours(1) + ); + + // Extract JTI and revoke + var parts = token.Split('.'); + var payloadJson = Base64UrlDecode(parts[1]); + var payload = JsonDocument.Parse(payloadJson); + var jti = payload.RootElement.GetProperty("jti").GetString()!; + + var now = DateTime.UtcNow.ToString("o", CultureInfo.InvariantCulture); + var exp = DateTime.UtcNow.AddHours(1).ToString("o", CultureInfo.InvariantCulture); + + // Insert user and revoked session using raw SQL (consistent with other tests) + using var tx = conn.BeginTransaction(); + + using var userCmd = conn.CreateCommand(); + userCmd.Transaction = tx; + userCmd.CommandText = + @"INSERT INTO gk_user (id, display_name, email, created_at, last_login_at, is_active, metadata) + VALUES (@id, @name, @email, @now, NULL, 1, NULL)"; + userCmd.Parameters.AddWithValue("@id", "user-revoked"); + userCmd.Parameters.AddWithValue("@name", "Revoked User"); + userCmd.Parameters.AddWithValue("@email", DBNull.Value); + userCmd.Parameters.AddWithValue("@now", now); + await userCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + using var sessionCmd = conn.CreateCommand(); + sessionCmd.Transaction = tx; + sessionCmd.CommandText = + @"INSERT INTO gk_session (id, user_id, credential_id, created_at, expires_at, last_activity_at, ip_address, user_agent, is_revoked) + VALUES (@id, @user_id, NULL, @created, @expires, @activity, NULL, NULL, 1)"; + sessionCmd.Parameters.AddWithValue("@id", jti); + sessionCmd.Parameters.AddWithValue("@user_id", "user-revoked"); + sessionCmd.Parameters.AddWithValue("@created", now); + sessionCmd.Parameters.AddWithValue("@expires", exp); + sessionCmd.Parameters.AddWithValue("@activity", now); + await sessionCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + tx.Commit(); + + var result = await TokenService.ValidateTokenAsync( + conn, + token, + TestSigningKey, + checkRevocation: true + ); + + Assert.IsType(result); + var 
error = (TokenService.TokenValidationError)result; + Assert.Equal("Token revoked", error.Reason); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task ValidateTokenAsync_RevokedToken_IgnoredWhenCheckRevocationFalse() { - using var conn = CreateInMemoryDb(); - - var token = TokenService.CreateToken( - "user-revoked2", - "Revoked User 2", - "revoked2@example.com", - [], - TestSigningKey, - TimeSpan.FromHours(1) - ); - - // Extract JTI and revoke - var parts = token.Split('.'); - var payloadJson = Base64UrlDecode(parts[1]); - var payload = JsonDocument.Parse(payloadJson); - var jti = payload.RootElement.GetProperty("jti").GetString()!; - - var now = DateTime.UtcNow.ToString("o", CultureInfo.InvariantCulture); - var exp = DateTime.UtcNow.AddHours(1).ToString("o", CultureInfo.InvariantCulture); - - // Insert user and revoked session using raw SQL (consistent with other tests) - using var tx = conn.BeginTransaction(); - - using var userCmd = conn.CreateCommand(); - userCmd.Transaction = tx; - userCmd.CommandText = - @"INSERT INTO gk_user (id, display_name, email, created_at, last_login_at, is_active, metadata) - VALUES (@id, @name, @email, @now, NULL, 1, NULL)"; - userCmd.Parameters.AddWithValue("@id", "user-revoked2"); - userCmd.Parameters.AddWithValue("@name", "Revoked User 2"); - userCmd.Parameters.AddWithValue("@email", DBNull.Value); - userCmd.Parameters.AddWithValue("@now", now); - await userCmd.ExecuteNonQueryAsync().ConfigureAwait(false); - - using var sessionCmd = conn.CreateCommand(); - sessionCmd.Transaction = tx; - sessionCmd.CommandText = - @"INSERT INTO gk_session (id, user_id, credential_id, created_at, expires_at, last_activity_at, ip_address, user_agent, is_revoked) - VALUES (@id, @user_id, NULL, @created, @expires, @activity, NULL, NULL, 1)"; - sessionCmd.Parameters.AddWithValue("@id", jti); - sessionCmd.Parameters.AddWithValue("@user_id", "user-revoked2"); - sessionCmd.Parameters.AddWithValue("@created", now); - 
sessionCmd.Parameters.AddWithValue("@expires", exp); - sessionCmd.Parameters.AddWithValue("@activity", now); - await sessionCmd.ExecuteNonQueryAsync().ConfigureAwait(false); - - tx.Commit(); - - // With checkRevocation: false, should still validate - var result = await TokenService.ValidateTokenAsync( - conn, - token, - TestSigningKey, - checkRevocation: false - ); - - Assert.IsType(result); + var (conn, dbPath) = CreateTestDb(); + try + { + var token = TokenService.CreateToken( + "user-revoked2", + "Revoked User 2", + "revoked2@example.com", + [], + TestSigningKey, + TimeSpan.FromHours(1) + ); + + // Extract JTI and revoke + var parts = token.Split('.'); + var payloadJson = Base64UrlDecode(parts[1]); + var payload = JsonDocument.Parse(payloadJson); + var jti = payload.RootElement.GetProperty("jti").GetString()!; + + var now = DateTime.UtcNow.ToString("o", CultureInfo.InvariantCulture); + var exp = DateTime.UtcNow.AddHours(1).ToString("o", CultureInfo.InvariantCulture); + + // Insert user and revoked session using raw SQL (consistent with other tests) + using var tx = conn.BeginTransaction(); + + using var userCmd = conn.CreateCommand(); + userCmd.Transaction = tx; + userCmd.CommandText = + @"INSERT INTO gk_user (id, display_name, email, created_at, last_login_at, is_active, metadata) + VALUES (@id, @name, @email, @now, NULL, 1, NULL)"; + userCmd.Parameters.AddWithValue("@id", "user-revoked2"); + userCmd.Parameters.AddWithValue("@name", "Revoked User 2"); + userCmd.Parameters.AddWithValue("@email", DBNull.Value); + userCmd.Parameters.AddWithValue("@now", now); + await userCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + using var sessionCmd = conn.CreateCommand(); + sessionCmd.Transaction = tx; + sessionCmd.CommandText = + @"INSERT INTO gk_session (id, user_id, credential_id, created_at, expires_at, last_activity_at, ip_address, user_agent, is_revoked) + VALUES (@id, @user_id, NULL, @created, @expires, @activity, NULL, NULL, 1)"; + 
sessionCmd.Parameters.AddWithValue("@id", jti); + sessionCmd.Parameters.AddWithValue("@user_id", "user-revoked2"); + sessionCmd.Parameters.AddWithValue("@created", now); + sessionCmd.Parameters.AddWithValue("@expires", exp); + sessionCmd.Parameters.AddWithValue("@activity", now); + await sessionCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + tx.Commit(); + + // With checkRevocation: false, should still validate + var result = await TokenService.ValidateTokenAsync( + conn, + token, + TestSigningKey, + checkRevocation: false + ); + + Assert.IsType(result); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] public async Task RevokeTokenAsync_SetsIsRevokedFlag() { - using var conn = CreateInMemoryDb(); - - var jti = Guid.NewGuid().ToString(); - var userId = "user-test"; - var now = DateTime.UtcNow.ToString("o", CultureInfo.InvariantCulture); - var exp = DateTime.UtcNow.AddHours(1).ToString("o", CultureInfo.InvariantCulture); - - // Insert user and session using raw SQL (TEXT PK doesn't return rowid) - using var tx = conn.BeginTransaction(); - - using var userCmd = conn.CreateCommand(); - userCmd.Transaction = tx; - userCmd.CommandText = - @"INSERT INTO gk_user (id, display_name, email, created_at, last_login_at, is_active, metadata) - VALUES (@id, @name, @email, @now, NULL, 1, NULL)"; - userCmd.Parameters.AddWithValue("@id", userId); - userCmd.Parameters.AddWithValue("@name", "Test User"); - userCmd.Parameters.AddWithValue("@email", DBNull.Value); - userCmd.Parameters.AddWithValue("@now", now); - await userCmd.ExecuteNonQueryAsync().ConfigureAwait(false); - - using var sessionCmd = conn.CreateCommand(); - sessionCmd.Transaction = tx; - sessionCmd.CommandText = - @"INSERT INTO gk_session (id, user_id, credential_id, created_at, expires_at, last_activity_at, ip_address, user_agent, is_revoked) - VALUES (@id, @user_id, NULL, @created, @expires, @activity, NULL, NULL, 0)"; - sessionCmd.Parameters.AddWithValue("@id", jti); - 
sessionCmd.Parameters.AddWithValue("@user_id", userId); - sessionCmd.Parameters.AddWithValue("@created", now); - sessionCmd.Parameters.AddWithValue("@expires", exp); - sessionCmd.Parameters.AddWithValue("@activity", now); - await sessionCmd.ExecuteNonQueryAsync().ConfigureAwait(false); - - tx.Commit(); - - // Revoke - await TokenService.RevokeTokenAsync(conn, jti); - - // Verify using DataProvider generated method - var revokedResult = await conn.GetSessionRevokedAsync(jti); - var isRevoked = revokedResult switch + var (conn, dbPath) = CreateTestDb(); + try { - GetSessionRevokedOk ok => ok.Value.FirstOrDefault()?.is_revoked ?? -1L, - GetSessionRevokedError err => throw new InvalidOperationException( - $"GetSessionRevoked failed: {err.Value.Message}, {err.Value.InnerException?.Message}" - ), - }; + var jti = Guid.NewGuid().ToString(); + var userId = "user-test"; + var now = DateTime.UtcNow.ToString("o", CultureInfo.InvariantCulture); + var exp = DateTime.UtcNow.AddHours(1).ToString("o", CultureInfo.InvariantCulture); + + // Insert user and session using raw SQL (TEXT PK doesn't return rowid) + using var tx = conn.BeginTransaction(); + + using var userCmd = conn.CreateCommand(); + userCmd.Transaction = tx; + userCmd.CommandText = + @"INSERT INTO gk_user (id, display_name, email, created_at, last_login_at, is_active, metadata) + VALUES (@id, @name, @email, @now, NULL, 1, NULL)"; + userCmd.Parameters.AddWithValue("@id", userId); + userCmd.Parameters.AddWithValue("@name", "Test User"); + userCmd.Parameters.AddWithValue("@email", DBNull.Value); + userCmd.Parameters.AddWithValue("@now", now); + await userCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + using var sessionCmd = conn.CreateCommand(); + sessionCmd.Transaction = tx; + sessionCmd.CommandText = + @"INSERT INTO gk_session (id, user_id, credential_id, created_at, expires_at, last_activity_at, ip_address, user_agent, is_revoked) + VALUES (@id, @user_id, NULL, @created, @expires, @activity, NULL, NULL, 0)"; + 
sessionCmd.Parameters.AddWithValue("@id", jti); + sessionCmd.Parameters.AddWithValue("@user_id", userId); + sessionCmd.Parameters.AddWithValue("@created", now); + sessionCmd.Parameters.AddWithValue("@expires", exp); + sessionCmd.Parameters.AddWithValue("@activity", now); + await sessionCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + tx.Commit(); + + // Revoke + await TokenService.RevokeTokenAsync(conn, jti); + + // Verify using DataProvider generated method + var revokedResult = await conn.GetSessionRevokedAsync(jti); + var isRevoked = revokedResult switch + { + GetSessionRevokedOk ok => ok.Value.FirstOrDefault()?.is_revoked ?? -1L, + GetSessionRevokedError err => throw new InvalidOperationException( + $"GetSessionRevoked failed: {err.Value.Message}, {err.Value.InnerException?.Message}" + ), + }; - Assert.Equal(1L, isRevoked); + Assert.Equal(1L, isRevoked); + } + finally + { + CleanupTestDb(conn, dbPath); + } } [Fact] @@ -460,9 +508,10 @@ public void ExtractBearerToken_BearerWithoutSpace_ReturnsNull() Assert.Null(token); } - private static SqliteConnection CreateInMemoryDb() + private static (SqliteConnection Connection, string DbPath) CreateTestDb() { - var conn = new SqliteConnection("Data Source=:memory:"); + var dbPath = Path.Combine(Path.GetTempPath(), $"tokenservice_{Guid.NewGuid():N}.db"); + var conn = new SqliteConnection($"Data Source={dbPath}"); conn.Open(); // Use the YAML schema to create only the needed tables @@ -491,7 +540,18 @@ var statement in ddl.Split( } } - return conn; + return (conn, dbPath); + } + + private static void CleanupTestDb(SqliteConnection connection, string dbPath) + { + connection.Close(); + connection.Dispose(); + if (File.Exists(dbPath)) + { + try { File.Delete(dbPath); } + catch { /* File may be locked */ } + } } private static string Base64UrlDecode(string input) diff --git a/Gatekeeper/Gatekeeper.Api/DataProvider.json b/Gatekeeper/Gatekeeper.Api/DataProvider.json index 1eade90..5aa5ad0 100644 --- 
a/Gatekeeper/Gatekeeper.Api/DataProvider.json +++ b/Gatekeeper/Gatekeeper.Api/DataProvider.json @@ -30,5 +30,5 @@ { "schema": "main", "name": "gk_role", "generateInsert": true, "excludeColumns": ["id"], "primaryKeyColumns": ["id"] }, { "schema": "main", "name": "gk_role_permission", "generateInsert": true, "excludeColumns": [], "primaryKeyColumns": ["role_id", "permission_id"] } ], - "connectionString": "Data Source=gatekeeper-build.db" + "connectionString": "Data Source=gatekeeper.db" } diff --git a/Gatekeeper/Gatekeeper.Api/FileLoggerProvider.cs b/Gatekeeper/Gatekeeper.Api/FileLoggerProvider.cs index 7846a68..8514a99 100644 --- a/Gatekeeper/Gatekeeper.Api/FileLoggerProvider.cs +++ b/Gatekeeper/Gatekeeper.Api/FileLoggerProvider.cs @@ -95,8 +95,7 @@ public void Log( } var message = formatter(state, exception); - var line = - $"{DateTime.UtcNow:yyyy-MM-dd HH:mm:ss.fff} [{logLevel}] {_category}: {message}"; + var line = $"{DateTime.UtcNow:yyyy-MM-dd HH:mm:ss.fff} [{logLevel}] {_category}: {message}"; if (exception != null) { line += Environment.NewLine + exception; diff --git a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj index d5b0435..732a5f7 100644 --- a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj +++ b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj @@ -35,7 +35,7 @@ - + diff --git a/Gatekeeper/Gatekeeper.Api/GlobalUsings.cs b/Gatekeeper/Gatekeeper.Api/GlobalUsings.cs index 43efc50..21a8113 100644 --- a/Gatekeeper/Gatekeeper.Api/GlobalUsings.cs +++ b/Gatekeeper/Gatekeeper.Api/GlobalUsings.cs @@ -28,14 +28,14 @@ System.Collections.Immutable.ImmutableList, Selecta.SqlError >.Ok, Selecta.SqlError>; -global using GetSessionRevokedOk = Outcome.Result< - System.Collections.Immutable.ImmutableList, - Selecta.SqlError ->.Ok, Selecta.SqlError>; global using GetSessionRevokedError = Outcome.Result< System.Collections.Immutable.ImmutableList, Selecta.SqlError >.Error, Selecta.SqlError>; +global using 
GetSessionRevokedOk = Outcome.Result< + System.Collections.Immutable.ImmutableList, + Selecta.SqlError +>.Ok, Selecta.SqlError>; // Query result type aliases global using GetUserByEmailOk = Outcome.Result< System.Collections.Immutable.ImmutableList, diff --git a/Gatekeeper/Gatekeeper.Api/TokenService.cs b/Gatekeeper/Gatekeeper.Api/TokenService.cs index 1ee8a3a..73d47ec 100644 --- a/Gatekeeper/Gatekeeper.Api/TokenService.cs +++ b/Gatekeeper/Gatekeeper.Api/TokenService.cs @@ -2,6 +2,7 @@ using System.Text; namespace Gatekeeper.Api; + /// /// JWT token generation and validation service. /// diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/DataProvider.json b/Lql/Lql.TypeProvider.FSharp.Tests.Data/DataProvider.json new file mode 100644 index 0000000..43eb1e3 --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/DataProvider.json @@ -0,0 +1,38 @@ +{ + "queries": [], + "tables": [ + { + "schema": "main", + "name": "Customer", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + }, + { + "schema": "main", + "name": "Users", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + }, + { + "schema": "main", + "name": "Orders", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + }, + { + "schema": "main", + "name": "Products", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + } + ], + "connectionString": "Data Source=../Lql.TypeProvider.FSharp.Tests/typeprovider-test.db" +} diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj new file mode 100644 index 0000000..8478b1a --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -0,0 +1,53 @@ + + + net9.0 + enable + enable + 
false + false + + + + + + + + + + + + + + + + + + + + + + + + PreserveNewest + typeprovider-test-schema.yaml + + + + + + + + + + + + + + + + + + + + + diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/TestDataSeeder.cs b/Lql/Lql.TypeProvider.FSharp.Tests.Data/TestDataSeeder.cs new file mode 100644 index 0000000..333b765 --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/TestDataSeeder.cs @@ -0,0 +1,210 @@ +using System.Data; +using Generated; +using Microsoft.Data.Sqlite; +using Outcome; +using Selecta; + +namespace Lql.TypeProvider.FSharp.Tests.Data; + +/// +/// Seeds the test database with sample data using generated DataProvider extensions +/// +public static class TestDataSeeder +{ + /// + /// Clears all test data from the database using generated delete extension + /// + /// The database transaction + /// Result indicating success or failure + public static async Task> ClearDataAsync(IDbTransaction transaction) + { + if (transaction.Connection is null) + return new Result.Error( + new SqlError("Transaction has no connection") + ); + + // Delete in order respecting foreign keys (Orders references Users) + using ( + var cmd = new SqliteCommand( + "DELETE FROM Orders", + (SqliteConnection)transaction.Connection, + (SqliteTransaction)transaction + ) + ) + await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + using ( + var cmd = new SqliteCommand( + "DELETE FROM Users", + (SqliteConnection)transaction.Connection, + (SqliteTransaction)transaction + ) + ) + await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + using ( + var cmd = new SqliteCommand( + "DELETE FROM Products", + (SqliteConnection)transaction.Connection, + (SqliteTransaction)transaction + ) + ) + await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + using ( + var cmd = new SqliteCommand( + "DELETE FROM Customer", + (SqliteConnection)transaction.Connection, + (SqliteTransaction)transaction + ) + ) + await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + return new 
Result.Ok(0); + } + + /// + /// Seeds the database with test data using generated insert methods + /// + /// The database transaction + /// Result indicating success or failure + public static async Task> SeedDataAsync(IDbTransaction transaction) + { + // Clear existing data first + var clearResult = await ClearDataAsync(transaction).ConfigureAwait(false); + if (clearResult is Result.Error clearErr) + return new Result.Error(clearErr.Value); + + // Insert Customers using generated extensions + var c1 = await transaction + .InsertCustomerAsync("c1", "Acme Corp", "acme@example.com", 10, "active") + .ConfigureAwait(false); + if (c1 is Result.Error c1Err) + return new Result.Error(c1Err.Value); + + var c2 = await transaction + .InsertCustomerAsync("c2", "Tech Corp", "tech@example.com", 5, "active") + .ConfigureAwait(false); + if (c2 is Result.Error c2Err) + return new Result.Error(c2Err.Value); + + var c3 = await transaction + .InsertCustomerAsync("c3", "New Corp", "new@example.com", 1, "pending") + .ConfigureAwait(false); + if (c3 is Result.Error c3Err) + return new Result.Error(c3Err.Value); + + // Insert Users using generated extensions + var u1 = await transaction + .InsertUsersAsync( + "u1", + "Alice", + "alice@example.com", + 30, + "active", + "admin", + "2024-01-01" + ) + .ConfigureAwait(false); + if (u1 is Result.Error u1Err) + return new Result.Error(u1Err.Value); + + var u2 = await transaction + .InsertUsersAsync("u2", "Bob", "bob@example.com", 16, "active", "user", "2024-01-02") + .ConfigureAwait(false); + if (u2 is Result.Error u2Err) + return new Result.Error(u2Err.Value); + + var u3 = await transaction + .InsertUsersAsync( + "u3", + "Charlie", + "charlie@example.com", + 25, + "inactive", + "user", + "2024-01-03" + ) + .ConfigureAwait(false); + if (u3 is Result.Error u3Err) + return new Result.Error(u3Err.Value); + + var u4 = await transaction + .InsertUsersAsync( + "u4", + "Diana", + "diana@example.com", + 15, + "active", + "admin", + "2024-01-04" + ) + 
.ConfigureAwait(false); + if (u4 is Result.Error u4Err) + return new Result.Error(u4Err.Value); + + // Insert Products using generated extensions + var p1 = await transaction + .InsertProductsAsync("p1", "Widget", 10.00, 100) + .ConfigureAwait(false); + if (p1 is Result.Error p1Err) + return new Result.Error(p1Err.Value); + + var p2 = await transaction + .InsertProductsAsync("p2", "Gadget", 25.50, 50) + .ConfigureAwait(false); + if (p2 is Result.Error p2Err) + return new Result.Error(p2Err.Value); + + var p3 = await transaction + .InsertProductsAsync("p3", "Gizmo", 5.00, 200) + .ConfigureAwait(false); + if (p3 is Result.Error p3Err) + return new Result.Error(p3Err.Value); + + // Insert Orders using generated extensions (user 1 has 6 orders, user 2 has 1) + var o1 = await transaction + .InsertOrdersAsync("o1", "u1", "p1", 100.00, 90.00, 10.00, 0.00, "completed") + .ConfigureAwait(false); + if (o1 is Result.Error o1Err) + return new Result.Error(o1Err.Value); + + var o2 = await transaction + .InsertOrdersAsync("o2", "u1", "p2", 50.00, 45.00, 5.00, 0.00, "completed") + .ConfigureAwait(false); + if (o2 is Result.Error o2Err) + return new Result.Error(o2Err.Value); + + var o3 = await transaction + .InsertOrdersAsync("o3", "u1", "p1", 75.00, 68.00, 7.00, 0.00, "pending") + .ConfigureAwait(false); + if (o3 is Result.Error o3Err) + return new Result.Error(o3Err.Value); + + var o4 = await transaction + .InsertOrdersAsync("o4", "u1", "p3", 25.00, 22.50, 2.50, 0.00, "completed") + .ConfigureAwait(false); + if (o4 is Result.Error o4Err) + return new Result.Error(o4Err.Value); + + var o5 = await transaction + .InsertOrdersAsync("o5", "u1", "p2", 125.00, 112.50, 12.50, 0.00, "completed") + .ConfigureAwait(false); + if (o5 is Result.Error o5Err) + return new Result.Error(o5Err.Value); + + var o6 = await transaction + .InsertOrdersAsync("o6", "u1", "p1", 200.00, 180.00, 20.00, 0.00, "pending") + .ConfigureAwait(false); + if (o6 is Result.Error o6Err) + return new 
Result.Error(o6Err.Value); + + var o7 = await transaction + .InsertOrdersAsync("o7", "u2", "p3", 30.00, 27.00, 3.00, 0.00, "completed") + .ConfigureAwait(false); + if (o7 is Result.Error o7Err) + return new Result.Error(o7Err.Value); + + return new Result.Ok("Test data seeded successfully"); + } +} diff --git a/Lql/Lql.TypeProvider.FSharp.Tests/DataProvider.json b/Lql/Lql.TypeProvider.FSharp.Tests/DataProvider.json new file mode 100644 index 0000000..e5c32d2 --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests/DataProvider.json @@ -0,0 +1,38 @@ +{ + "queries": [], + "tables": [ + { + "schema": "main", + "name": "Customer", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + }, + { + "schema": "main", + "name": "Users", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + }, + { + "schema": "main", + "name": "Products", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + }, + { + "schema": "main", + "name": "Orders", + "generateInsert": true, + "generateUpdate": false, + "generateDelete": true, + "primaryKeyColumns": ["Id"] + } + ], + "connectionString": "Data Source=typeprovider-test.db" +} diff --git a/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj b/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj new file mode 100644 index 0000000..2dfebbe --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj @@ -0,0 +1,52 @@ + + + + net9.0 + preview + false + true + false + false + 3 + $(NoWarn);1591 + + + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + PreserveNewest + + + + + + + + + diff --git a/Lql/Lql.TypeProvider.FSharp.Tests/TypeProviderE2ETests.fs 
b/Lql/Lql.TypeProvider.FSharp.Tests/TypeProviderE2ETests.fs new file mode 100644 index 0000000..e0d7a0f --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests/TypeProviderE2ETests.fs @@ -0,0 +1,351 @@ +module Lql.TypeProvider.Tests + +open System +open System.IO +open Microsoft.Data.Sqlite +open Xunit +open Lql +open Lql.TypeProvider +open Lql.TypeProvider.FSharp.Tests.Data + +// ============================================================================= +// COMPILE-TIME VALIDATED LQL QUERIES +// These are validated at COMPILE TIME by the F# type provider +// Invalid LQL will cause a COMPILATION ERROR, not a runtime error +// ============================================================================= + +// Basic select queries +type SelectAll = LqlCommand<"Customer |> select(*)"> +type SelectColumns = LqlCommand<"Users |> select(Users.Id, Users.Name, Users.Email)"> +type SelectWithAlias = LqlCommand<"Users |> select(Users.Id, Users.Name as username)"> + +// Filter queries +type FilterSimple = LqlCommand<"Users |> filter(fn(row) => row.Users.Age > 18) |> select(Users.Name)"> +type FilterComplex = LqlCommand<"Users |> filter(fn(row) => row.Users.Age > 18 and row.Users.Status = 'active') |> select(*)"> +type FilterOr = LqlCommand<"Users |> filter(fn(row) => row.Users.Age < 18 or row.Users.Role = 'admin') |> select(*)"> + +// Join queries +type JoinSimple = LqlCommand<"Users |> join(Orders, on = Users.Id = Orders.UserId) |> select(Users.Name, Orders.Total)"> +type JoinLeft = LqlCommand<"Users |> left_join(Orders, on = Users.Id = Orders.UserId) |> select(Users.Name, Orders.Total)"> +type JoinMultiple = LqlCommand<"Users |> join(Orders, on = Users.Id = Orders.UserId) |> join(Products, on = Orders.ProductId = Products.Id) |> select(Users.Name, Products.Name)"> + +// Aggregation queries +type GroupBy = LqlCommand<"Orders |> group_by(Orders.UserId) |> select(Orders.UserId, count(*) as order_count)"> +type Aggregates = LqlCommand<"Orders |> group_by(Orders.Status) |> 
select(Orders.Status, sum(Orders.Total) as total_sum, avg(Orders.Total) as avg_total)"> +type Having = LqlCommand<"Orders |> group_by(Orders.UserId) |> having(fn(g) => count(*) > 5) |> select(Orders.UserId, count(*) as cnt)"> + +// Order and limit +type OrderBy = LqlCommand<"Users |> order_by(Users.Name asc) |> select(*)"> +type OrderByDesc = LqlCommand<"Users |> order_by(Users.CreatedAt desc) |> select(*)"> +type Limit = LqlCommand<"Users |> order_by(Users.Id) |> limit(10) |> select(*)"> +type Offset = LqlCommand<"Users |> order_by(Users.Id) |> limit(10) |> offset(20) |> select(*)"> + +// Arithmetic expressions +type ArithmeticBasic = LqlCommand<"Products |> select(Products.Price * Products.Quantity as total)"> +type ArithmeticComplex = LqlCommand<"Orders |> select(Orders.Subtotal + Orders.Tax - Orders.Discount as final_total)"> + +// ============================================================================= +// E2E TEST FIXTURES - Test the type provider with REAL SQLite database file +// Schema is created by Migration.CLI from YAML - NO raw SQL for schema! +// ============================================================================= + +module TestFixtures = + /// Get the path to the test database file (created by Migration.CLI from YAML) + let getTestDbPath() = + let baseDir = AppDomain.CurrentDomain.BaseDirectory + // The database is created in the project directory by MSBuild target + // bin/Debug/net9.0 -> go up 3 levels to project dir + let projectDir = Path.GetFullPath(Path.Combine(baseDir, "..", "..", "..")) + Path.Combine(projectDir, "typeprovider-test.db") + + /// Open connection to the REAL SQLite database file + let openTestDatabase() = + let dbPath = getTestDbPath() + if not (File.Exists(dbPath)) then + failwithf "Test database not found at %s. Run 'dotnet build' first to create it via Migration.CLI." 
dbPath + let conn = new SqliteConnection($"Data Source={dbPath}") + conn.Open() + conn + + /// Create test database connection with fresh test data using C# seeder with generated extensions + let createTestDatabase() = + let conn = openTestDatabase() + use transaction = conn.BeginTransaction() + let result = TestDataSeeder.SeedDataAsync(transaction).GetAwaiter().GetResult() + match result with + | :? Outcome.Result.Ok -> + transaction.Commit() + | :? Outcome.Result.Error as err -> + transaction.Rollback() + failwithf "Failed to seed test data: %s" (err.Value.ToString()) + | _ -> + transaction.Rollback() + failwith "Unknown result type from SeedDataAsync" + conn + + let executeQuery (conn: SqliteConnection) (sql: string) = + use cmd = new SqliteCommand(sql, conn) + use reader = cmd.ExecuteReader() + let results = ResizeArray>() + while reader.Read() do + let row = + [| for i in 0 .. reader.FieldCount - 1 -> + let name = reader.GetName(i) + let value = if reader.IsDBNull(i) then box DBNull.Value else reader.GetValue(i) + (name, value) |] + |> Map.ofArray + results.Add(row) + results |> List.ofSeq + +// ============================================================================= +// E2E TESTS - Comprehensive tests for the F# Type Provider +// ============================================================================= + +[] +type TypeProviderCompileTimeValidationTests() = + + [] + member _.``Type provider generates Query property for simple select``() = + Assert.Equal("Customer |> select(*)", SelectAll.Query) + + [] + member _.``Type provider generates Sql property for simple select``() = + Assert.NotNull(SelectAll.Sql) + Assert.Contains("SELECT", SelectAll.Sql.ToUpperInvariant()) + + [] + member _.``Type provider generates correct SQL for column selection``() = + let sql = SelectColumns.Sql.ToUpperInvariant() + Assert.Contains("SELECT", sql) + Assert.Contains("USERS", sql) + + [] + member _.``Type provider generates SQL with alias``() = + let sql = 
SelectWithAlias.Sql + Assert.Contains("AS", sql.ToUpperInvariant()) + +[] +type TypeProviderFilterTests() = + + [] + member _.``Filter query generates WHERE clause``() = + let sql = FilterSimple.Sql.ToUpperInvariant() + Assert.Contains("WHERE", sql) + + [] + member _.``Complex filter with AND generates correct SQL``() = + let sql = FilterComplex.Sql.ToUpperInvariant() + Assert.Contains("WHERE", sql) + Assert.Contains("AND", sql) + + [] + member _.``Filter with OR generates correct SQL``() = + let sql = FilterOr.Sql.ToUpperInvariant() + Assert.Contains("WHERE", sql) + Assert.Contains("OR", sql) + +[] +type TypeProviderJoinTests() = + + [] + member _.``Simple join generates JOIN clause``() = + let sql = JoinSimple.Sql.ToUpperInvariant() + Assert.Contains("JOIN", sql) + Assert.Contains("ON", sql) + + [] + member _.``Left join generates LEFT JOIN clause``() = + let sql = JoinLeft.Sql.ToUpperInvariant() + Assert.Contains("LEFT", sql) + Assert.Contains("JOIN", sql) + + [] + member _.``Multiple joins are chained correctly``() = + let sql = JoinMultiple.Sql.ToUpperInvariant() + // Should have at least 2 JOINs + let joinCount = sql.Split([|"JOIN"|], StringSplitOptions.None).Length - 1 + Assert.True(joinCount >= 2, sprintf "Expected at least 2 JOINs but got %d" joinCount) + +[] +type TypeProviderAggregationTests() = + + [] + member _.``Group by generates GROUP BY clause``() = + let sql = GroupBy.Sql.ToUpperInvariant() + Assert.Contains("GROUP BY", sql) + Assert.Contains("COUNT", sql) + + [] + member _.``Multiple aggregates work correctly``() = + let sql = Aggregates.Sql.ToUpperInvariant() + Assert.Contains("SUM", sql) + Assert.Contains("AVG", sql) + + [] + member _.``Having clause generates HAVING``() = + let sql = Having.Sql.ToUpperInvariant() + Assert.Contains("HAVING", sql) + +[] +type TypeProviderOrderingTests() = + + [] + member _.``Order by generates ORDER BY clause``() = + let sql = OrderBy.Sql.ToUpperInvariant() + Assert.Contains("ORDER BY", sql) + + [] + member 
_.``Order by desc includes DESC``() = + let sql = OrderByDesc.Sql.ToUpperInvariant() + Assert.Contains("DESC", sql) + + [] + member _.``Limit generates LIMIT clause``() = + let sql = Limit.Sql.ToUpperInvariant() + Assert.Contains("LIMIT", sql) + + [] + member _.``Offset generates OFFSET clause``() = + let sql = Offset.Sql.ToUpperInvariant() + Assert.Contains("OFFSET", sql) + +[] +type TypeProviderArithmeticTests() = + + [] + member _.``Basic arithmetic in select``() = + let sql = ArithmeticBasic.Sql + Assert.Contains("*", sql) // multiplication + + [] + member _.``Complex arithmetic with multiple operators``() = + let sql = ArithmeticComplex.Sql + Assert.Contains("+", sql) + Assert.Contains("-", sql) + +[] +type TypeProviderE2EExecutionTests() = + + [] + member _.``Execute simple select against real SQLite database``() = + use conn = TestFixtures.createTestDatabase() + let results = TestFixtures.executeQuery conn SelectAll.Sql + Assert.Equal(3, results.Length) + + [] + member _.``Execute filter query and verify results``() = + use conn = TestFixtures.createTestDatabase() + let results = TestFixtures.executeQuery conn FilterSimple.Sql + // Should return users with age > 18 (Alice=30, Charlie=25) + Assert.Equal(2, results.Length) + + [] + member _.``Execute join query and verify results``() = + use conn = TestFixtures.createTestDatabase() + let results = TestFixtures.executeQuery conn JoinSimple.Sql + // Should return joined user-order records + Assert.True(results.Length > 0) + + [] + member _.``Execute group by query and verify aggregation``() = + use conn = TestFixtures.createTestDatabase() + let results = TestFixtures.executeQuery conn GroupBy.Sql + // Should have aggregated results + Assert.True(results.Length > 0) + for row in results do + Assert.True(row.ContainsKey("order_count") || row.ContainsKey("COUNT(*)")) + + [] + member _.``Execute having query and verify filtering on aggregates``() = + use conn = TestFixtures.createTestDatabase() + let results = 
TestFixtures.executeQuery conn Having.Sql + // User 1 has 6 orders, which is > 5 + Assert.True(results.Length > 0) + + [] + member _.``Execute order by with limit``() = + use conn = TestFixtures.createTestDatabase() + let results = TestFixtures.executeQuery conn Limit.Sql + Assert.True(results.Length <= 10) + + [] + member _.``Execute arithmetic expression query``() = + use conn = TestFixtures.createTestDatabase() + let results = TestFixtures.executeQuery conn ArithmeticBasic.Sql + Assert.True(results.Length > 0) + // Verify the computed column exists + for row in results do + Assert.True(row.ContainsKey("total")) + +[] +type TypeProviderRealWorldScenarioTests() = + + [] + member _.``E2E: Query customers and execute against database``() = + use conn = TestFixtures.createTestDatabase() + + // Use the type provider validated query + let sql = SelectAll.Sql + let results = TestFixtures.executeQuery conn sql + + // Verify we got all customers + Assert.Equal(3, results.Length) + + // Verify customer data + let names = results |> List.map (fun r -> r.["Name"] :?> string) |> Set.ofList + Assert.Contains("Acme Corp", names) + Assert.Contains("Tech Corp", names) + + [] + member _.``E2E: Filter active adult users``() = + use conn = TestFixtures.createTestDatabase() + + // The type provider validates this at compile time + let sql = FilterComplex.Sql + let results = TestFixtures.executeQuery conn sql + + // Should only get Alice (age 30, active) + // Charlie is inactive, Bob and Diana are under 18 + Assert.Equal(1, results.Length) + + [] + member _.``E2E: Join users with orders and calculate totals``() = + use conn = TestFixtures.createTestDatabase() + + let sql = JoinSimple.Sql + let results = TestFixtures.executeQuery conn sql + + // Alice has 6 orders, Bob has 1 + Assert.Equal(7, results.Length) + + [] + member _.``E2E: Aggregate order totals by user``() = + use conn = TestFixtures.createTestDatabase() + + let sql = GroupBy.Sql + let results = TestFixtures.executeQuery 
conn sql + + // Should have 2 users with orders (user 1 and user 2) + Assert.Equal(2, results.Length) + +[] +type TypeProviderQueryPropertyTests() = + + [] + member _.``Query property returns original LQL for all query types``() = + // Verify each type provider generated type has correct Query property + Assert.Equal("Customer |> select(*)", SelectAll.Query) + Assert.Equal("Users |> select(Users.Id, Users.Name, Users.Email)", SelectColumns.Query) + Assert.Equal("Users |> filter(fn(row) => row.Users.Age > 18) |> select(Users.Name)", FilterSimple.Query) + Assert.Equal("Users |> join(Orders, on = Users.Id = Orders.UserId) |> select(Users.Name, Orders.Total)", JoinSimple.Query) + Assert.Equal("Orders |> group_by(Orders.UserId) |> select(Orders.UserId, count(*) as order_count)", GroupBy.Query) + + [] + member _.``Sql property is never null or empty``() = + Assert.False(String.IsNullOrWhiteSpace(SelectAll.Sql)) + Assert.False(String.IsNullOrWhiteSpace(SelectColumns.Sql)) + Assert.False(String.IsNullOrWhiteSpace(FilterSimple.Sql)) + Assert.False(String.IsNullOrWhiteSpace(JoinSimple.Sql)) + Assert.False(String.IsNullOrWhiteSpace(GroupBy.Sql)) + Assert.False(String.IsNullOrWhiteSpace(OrderBy.Sql)) + Assert.False(String.IsNullOrWhiteSpace(Limit.Sql)) diff --git a/Lql/Lql.TypeProvider.FSharp.Tests/typeprovider-test-schema.yaml b/Lql/Lql.TypeProvider.FSharp.Tests/typeprovider-test-schema.yaml new file mode 100644 index 0000000..6b3e900 --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp.Tests/typeprovider-test-schema.yaml @@ -0,0 +1,84 @@ +name: typeprovider_test +tables: +- name: Customer + columns: + - name: Id + type: Text + - name: Name + type: Text + - name: Email + type: Text + - name: Age + type: Integer + - name: Status + type: Text + primaryKey: + name: PK_Customer + columns: + - Id + +- name: Users + columns: + - name: Id + type: Text + - name: Name + type: Text + - name: Email + type: Text + - name: Age + type: Integer + - name: Status + type: Text + - name: Role + 
type: Text + - name: CreatedAt + type: Text + primaryKey: + name: PK_Users + columns: + - Id + +- name: Orders + columns: + - name: Id + type: Text + - name: UserId + type: Text + - name: ProductId + type: Text + - name: Total + type: Double + - name: Subtotal + type: Double + - name: Tax + type: Double + - name: Discount + type: Double + - name: Status + type: Text + foreignKeys: + - name: FK_Orders_UserId + columns: + - UserId + referencedTable: Users + referencedColumns: + - Id + primaryKey: + name: PK_Orders + columns: + - Id + +- name: Products + columns: + - name: Id + type: Text + - name: Name + type: Text + - name: Price + type: Double + - name: Quantity + type: Integer + primaryKey: + name: PK_Products + columns: + - Id diff --git a/Lql/Lql.TypeProvider.FSharp/Lql.TypeProvider.FSharp.fsproj b/Lql/Lql.TypeProvider.FSharp/Lql.TypeProvider.FSharp.fsproj new file mode 100644 index 0000000..e9ff345 --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp/Lql.TypeProvider.FSharp.fsproj @@ -0,0 +1,32 @@ + + + + net9.0 + true + preview + false + 3 + false + false + true + + + + + + + + + + Always + true + + + + + + + + + + \ No newline at end of file diff --git a/Lql/Lql.TypeProvider.FSharp/LqlTypeProvider.fs b/Lql/Lql.TypeProvider.FSharp/LqlTypeProvider.fs new file mode 100644 index 0000000..a1a63ae --- /dev/null +++ b/Lql/Lql.TypeProvider.FSharp/LqlTypeProvider.fs @@ -0,0 +1,78 @@ +namespace Lql.TypeProvider + +open System +open System.Reflection +open Microsoft.FSharp.Core.CompilerServices +open Microsoft.FSharp.Quotations +open ProviderImplementation.ProvidedTypes +open Lql +open Lql.SQLite +open Outcome +open Selecta + +[] +type public LqlTypeProvider(config: TypeProviderConfig) as this = + inherit TypeProviderForNamespaces(config) + + let namespaceName = "Lql" + let thisAssembly = Assembly.GetExecutingAssembly() + + let createValidatedType(typeName: string, lqlQuery: string, sql: string) = + let t = ProvidedTypeDefinition(thisAssembly, namespaceName, typeName, Some 
typeof, isErased = true) + + // Add static Query property + let queryProp = ProvidedProperty("Query", typeof, isStatic = true, getterCode = fun _ -> <@@ lqlQuery @@>) + queryProp.AddXmlDoc(sprintf "The validated LQL query: %s" lqlQuery) + t.AddMember(queryProp) + + // Add static Sql property + let sqlProp = ProvidedProperty("Sql", typeof, isStatic = true, getterCode = fun _ -> <@@ sql @@>) + sqlProp.AddXmlDoc(sprintf "The generated SQL: %s" sql) + t.AddMember(sqlProp) + + t.AddXmlDoc(sprintf "✅ Compile-time validated LQL: '%s' → SQL: '%s'" lqlQuery sql) + t + + let rootType = ProvidedTypeDefinition(thisAssembly, namespaceName, "LqlCommand", Some typeof, isErased = true) + + do + rootType.DefineStaticParameters( + [ProvidedStaticParameter("Query", typeof)], + fun typeName args -> + let lqlQuery = args.[0] :?> string + + // *** COMPILE-TIME VALIDATION *** + if String.IsNullOrWhiteSpace lqlQuery then + invalidArg "Query" "LQL query cannot be null or empty!" + + try + let result = LqlStatementConverter.ToStatement lqlQuery + match result with + | :? Outcome.Result.Ok as success -> + // Valid LQL - convert to SQL + let sqlResult = success.Value.ToSQLite() + match sqlResult with + | :? Outcome.Result.Ok as sqlSuccess -> + let sql = sqlSuccess.Value + createValidatedType(typeName, lqlQuery, sql) + | :? Outcome.Result.Error as sqlFailure -> + failwith (sprintf "❌ COMPILATION FAILED: SQL generation error - %s for LQL: '%s'" sqlFailure.Value.Message lqlQuery) + | _ -> + failwith (sprintf "❌ COMPILATION FAILED: Unknown SQL generation error for LQL: '%s'" lqlQuery) + | :?
Outcome.Result.Error as failure -> + let error = failure.Value + let position = + match error.Position with + | null -> "" + | pos -> sprintf " at line %d, column %d" pos.Line pos.Column + failwith (sprintf "❌ COMPILATION FAILED: Invalid LQL syntax - %s%s in query: '%s'" error.Message position lqlQuery) + | _ -> + failwith (sprintf "❌ COMPILATION FAILED: Unknown LQL parsing error in query: '%s'" lqlQuery) + with ex -> + failwith (sprintf "❌ COMPILATION FAILED: Exception during validation: %s for LQL: '%s'" ex.Message lqlQuery) + ) + + this.AddNamespace(namespaceName, [rootType]) + +[] +do () \ No newline at end of file diff --git a/Lql/LqlExtension/examples/sample.lql b/Lql/LqlExtension/examples/sample.lql index 74dfdc3..3c6615a 100644 --- a/Lql/LqlExtension/examples/sample.lql +++ b/Lql/LqlExtension/examples/sample.lql @@ -1,156 +1,137 @@ -- Lambda Query Language Example -- This file demonstrates LQL syntax and features --- Simple select with filter -users -|> filter (age > 18 and status = 'active') -|> select name, email, age, created_at -|> order_by created_at desc -|> limit 10 - --- Join operation with aggregation -let active_users = users |> filter (status = 'active') in -orders -|> join active_users on orders.user_id = active_users.id -|> group_by active_users.name, active_users.email -|> select - active_users.name, - active_users.email, - count(*) as total_orders, - sum(orders.amount) as total_spent, - avg(orders.amount) as avg_order_value -|> having total_orders > 5 -|> order_by total_spent desc - --- Complex query with arithmetic and functions -products -|> select - name, - category, - price, - round(price * 0.1, 2) as tax, - round(price + (price * 0.1), 2) as total_price, - upper(category) as category_upper, - length(name) as name_length -|> filter (total_price > 100 and category in ('electronics', 'books')) -|> order_by total_price desc, name asc +-- Simple select all columns +Customer +|> select(*) + +-- Select specific columns +Users +|>
select(Users.Id, Users.Name, Users.Email) + +-- Select with column alias +Users +|> select(Users.Id, Users.Name as username) + +-- Filter using lambda syntax with single condition +Users +|> filter(fn(row) => row.Users.Age > 18) +|> select(Users.Name) + +-- Filter with AND condition +Users +|> filter(fn(row) => row.Users.Age > 18 and row.Users.Status = 'active') +|> select(*) + +-- Filter with OR condition +Users +|> filter(fn(row) => row.Users.Age < 18 or row.Users.Role = 'admin') +|> select(*) + +-- Inner join +Users +|> join(Orders, on = Users.Id = Orders.UserId) +|> select(Users.Name, Orders.Total) + +-- Left join +Users +|> left_join(Orders, on = Users.Id = Orders.UserId) +|> select(Users.Name, Orders.Total) + +-- Multiple joins +Users +|> join(Orders, on = Users.Id = Orders.UserId) +|> join(Products, on = Orders.ProductId = Products.Id) +|> select(Users.Name, Products.Name) + +-- Cross join +Categories +|> cross_join(Statuses) +|> select(Categories.Name, Statuses.Value) + +-- Group by with count +Orders +|> group_by(Orders.UserId) +|> select(Orders.UserId, count(*) as order_count) + +-- Group by with multiple aggregates +Orders +|> group_by(Orders.Status) +|> select(Orders.Status, sum(Orders.Total) as total_sum, avg(Orders.Total) as avg_total) + +-- Group by with having clause (lambda syntax) +Orders +|> group_by(Orders.UserId) +|> having(fn(g) => count(*) > 5) +|> select(Orders.UserId, count(*) as cnt) + +-- Order by ascending +Users +|> order_by(Users.Name asc) +|> select(*) + +-- Order by descending +Users +|> order_by(Users.CreatedAt desc) +|> select(*) + +-- Limit results +Users +|> order_by(Users.Id) +|> limit(10) +|> select(*) + +-- Limit with offset for pagination +Users +|> order_by(Users.Id) +|> limit(10) +|> offset(20) +|> select(*) + +-- Select distinct +Users +|> select_distinct(Users.Status) + +-- Arithmetic expressions in select +Products +|> select(Products.Price * Products.Quantity as total) + +-- Complex arithmetic +Orders +|> 
select(Orders.Subtotal + Orders.Tax - Orders.Discount as final_total) + +-- Let binding for query variables +let active_users = Users |> filter(fn(row) => row.Users.Status = 'active') |> select(*) -- Union query -let recent_orders = orders |> filter (created_at > '2023-01-01') in -let old_orders = orders |> filter (created_at <= '2023-01-01') in - -recent_orders -|> select user_id, amount, 'recent' as order_type -union -old_orders -|> select user_id, amount, 'old' as order_type -|> order_by amount desc - --- Insert statement -insert into users ( - name, - email, - age, - status, - created_at -) values ( - 'John Doe', - 'john.doe@example.com', - 25, - 'active', - now() -) +let q1 = Table1 |> select(Table1.Name) +let q2 = Table2 |> select(Table2.Name) --- Update with filter -users -|> filter (last_login < '2023-01-01') -|> update { - status = 'inactive', - updated_at = now() -} - --- Lambda function example -let calculate_discount = fn price => - case - when price > 1000 then price * 0.1 - when price > 500 then price * 0.05 - else 0 - end -in - -products -|> select - name, - price, - calculate_discount(price) as discount, - price - calculate_discount(price) as final_price -|> filter (discount > 0) - --- Nested query with case expression -employees -|> select - name, - department, - salary, - case - when salary > 100000 then 'Senior' - when salary > 50000 then 'Mid-level' - else 'Junior' - end as level, +Table1 +|> select(Table1.Name) +|> union(q2) + +-- Union all query +Table1 +|> select(Table1.Name) +|> union_all(Table2 |> select(Table2.Name)) + +-- Case expression in select +Users +|> select( + Users.Name, case - when department = 'Engineering' then salary * 1.1 - when department = 'Sales' then salary * 1.05 - else salary - end as adjusted_salary -|> group_by department, level -|> select - department, - level, - count(*) as employee_count, - avg(adjusted_salary) as avg_salary, - min(adjusted_salary) as min_salary, - max(adjusted_salary) as max_salary -|> order_by 
department, level - --- String manipulation functions -customers -|> select - concat(first_name, ' ', last_name) as full_name, - upper(email) as email_upper, - substring(phone, 1, 3) as area_code, - trim(address) as clean_address, - length(notes) as notes_length -|> filter (notes_length > 0) - --- Date functions -events -|> select - title, - event_date, - year(event_date) as event_year, - month(event_date) as event_month, - day(event_date) as event_day, - hour(event_date) as event_hour -|> filter (event_year = 2023 and event_month >= 6) -|> order_by event_date - --- Mathematical functions -measurements -|> select - sample_id, - value, - abs(value) as absolute_value, - round(value, 2) as rounded_value, - floor(value) as floor_value, - ceil(value) as ceiling_value, - sqrt(abs(value)) as square_root -|> filter (abs(value) > 10) - --- Conditional functions -user_profiles -|> select - user_id, - coalesce(nickname, username, 'Anonymous') as display_name, - nullif(bio, '') as clean_bio, - case when avatar_url is null then false else true end as has_avatar -|> filter (clean_bio is not null) \ No newline at end of file + when Users.Age > 65 then 'Senior' + when Users.Age > 18 then 'Adult' + else 'Minor' + end as age_group +) + +-- Full pipeline example +Users +|> filter(fn(row) => row.Users.Age > 18 and row.Users.Status = 'active') +|> join(Orders, on = Users.Id = Orders.UserId) +|> group_by(Users.Id, Users.Name) +|> select(Users.Name, sum(Orders.Total) as TotalSpent) +|> order_by(TotalSpent desc) +|> limit(10) diff --git a/Lql/LqlExtension/snippets/lql.json b/Lql/LqlExtension/snippets/lql.json index 4e8d2af..40bb0a7 100644 --- a/Lql/LqlExtension/snippets/lql.json +++ b/Lql/LqlExtension/snippets/lql.json @@ -2,104 +2,172 @@ "Select All": { "prefix": "select", "body": [ - "${1:table_name}", - "|> select ${2:*}" + "${1:TableName}", + "|> select(*)" ], - "description": "Basic select statement" + "description": "Basic select all columns" + }, + "Select Columns": { + "prefix": 
"selectc", + "body": [ + "${1:TableName}", + "|> select(${1:TableName}.${2:Column1}, ${1:TableName}.${3:Column2})" + ], + "description": "Select specific columns" }, "Select with Filter": { "prefix": "selectf", "body": [ - "${1:table_name}", - "|> filter (${2:condition})", - "|> select ${3:*}" + "${1:TableName}", + "|> filter(fn(row) => row.${1:TableName}.${2:Column} ${3|=,>,<,>=,<=,!=|} ${4:value})", + "|> select(*)" + ], + "description": "Select with filter using lambda syntax" + }, + "Filter with AND": { + "prefix": "filterand", + "body": [ + "${1:TableName}", + "|> filter(fn(row) => row.${1:TableName}.${2:Column1} ${3|=,>,<|} ${4:value1} and row.${1:TableName}.${5:Column2} ${6|=,>,<|} ${7:value2})", + "|> select(*)" + ], + "description": "Filter with AND condition" + }, + "Filter with OR": { + "prefix": "filteror", + "body": [ + "${1:TableName}", + "|> filter(fn(row) => row.${1:TableName}.${2:Column1} ${3|=,>,<|} ${4:value1} or row.${1:TableName}.${5:Column2} ${6|=,>,<|} ${7:value2})", + "|> select(*)" ], - "description": "Select with filter" + "description": "Filter with OR condition" }, "Join Tables": { "prefix": "join", "body": [ - "${1:table1}", - "|> join ${2:table2} on ${3:condition}", - "|> select ${4:*}" + "${1:Table1}", + "|> join(${2:Table2}, on = ${1:Table1}.${3:Id} = ${2:Table2}.${4:ForeignKey})", + "|> select(${1:Table1}.${5:Column1}, ${2:Table2}.${6:Column2})" + ], + "description": "Inner join two tables" + }, + "Left Join": { + "prefix": "leftjoin", + "body": [ + "${1:Table1}", + "|> left_join(${2:Table2}, on = ${1:Table1}.${3:Id} = ${2:Table2}.${4:ForeignKey})", + "|> select(${1:Table1}.${5:Column1}, ${2:Table2}.${6:Column2})" ], - "description": "Join two tables" + "description": "Left join two tables" }, - "Group By with Aggregation": { + "Cross Join": { + "prefix": "crossjoin", + "body": [ + "${1:Table1}", + "|> cross_join(${2:Table2})", + "|> select(${1:Table1}.${3:Column1}, ${2:Table2}.${4:Column2})" + ], + "description": "Cross join two 
tables" + }, + "Group By with Count": { "prefix": "groupby", "body": [ - "${1:table_name}", - "|> group_by ${2:column}", - "|> select ${2:column}, ${3:count(*)} as ${4:count}" + "${1:TableName}", + "|> group_by(${1:TableName}.${2:Column})", + "|> select(${1:TableName}.${2:Column}, count(*) as ${3:count})" + ], + "description": "Group by with count aggregation" + }, + "Group By with Sum": { + "prefix": "groupbysum", + "body": [ + "${1:TableName}", + "|> group_by(${1:TableName}.${2:GroupColumn})", + "|> select(${1:TableName}.${2:GroupColumn}, sum(${1:TableName}.${3:SumColumn}) as ${4:total})" + ], + "description": "Group by with sum aggregation" + }, + "Group By with Having": { + "prefix": "groupbyhaving", + "body": [ + "${1:TableName}", + "|> group_by(${1:TableName}.${2:Column})", + "|> having(fn(g) => count(*) > ${3:5})", + "|> select(${1:TableName}.${2:Column}, count(*) as ${4:cnt})" ], - "description": "Group by with aggregation" + "description": "Group by with having clause" }, - "Order By": { + "Order By Ascending": { "prefix": "orderby", "body": [ - "${1:table_name}", - "|> order_by ${2:column} ${3|asc,desc|}", - "|> select ${4:*}" + "${1:TableName}", + "|> order_by(${1:TableName}.${2:Column} asc)", + "|> select(*)" ], - "description": "Order by clause" + "description": "Order by ascending" }, - "Let Binding": { - "prefix": "let", + "Order By Descending": { + "prefix": "orderbydesc", + "body": [ + "${1:TableName}", + "|> order_by(${1:TableName}.${2:Column} desc)", + "|> select(*)" + ], + "description": "Order by descending" + }, + "Limit": { + "prefix": "limit", "body": [ - "let ${1:variable_name} = ${2:expression} in", - "${3:query}" + "${1:TableName}", + "|> order_by(${1:TableName}.${2:Column})", + "|> limit(${3:10})", + "|> select(*)" ], - "description": "Let binding for variables" + "description": "Limit results" }, - "Insert Statement": { - "prefix": "insert", + "Limit with Offset": { + "prefix": "limitoffset", "body": [ - "insert into ${1:table_name} (", 
- " ${2:column1}, ${3:column2}", - ") values (", - " ${4:value1}, ${5:value2}", - ")" + "${1:TableName}", + "|> order_by(${1:TableName}.${2:Column})", + "|> limit(${3:10})", + "|> offset(${4:20})", + "|> select(*)" ], - "description": "Insert statement" + "description": "Limit with offset for pagination" }, - "Update Statement": { - "prefix": "update", + "Select Distinct": { + "prefix": "distinct", "body": [ - "${1:table_name}", - "|> filter (${2:condition})", - "|> update {", - " ${3:column} = ${4:value}", - "}" + "${1:TableName}", + "|> select_distinct(${1:TableName}.${2:Column})" ], - "description": "Update statement" + "description": "Select distinct values" }, - "Union Query": { + "Union": { "prefix": "union", "body": [ - "${1:query1}", - "union", - "${2:query2}" + "${1:Table1}", + "|> select(${1:Table1}.${2:Column})", + "|> union(${3:Table2} |> select(${3:Table2}.${4:Column}))" ], "description": "Union of two queries" }, - "Having Clause": { - "prefix": "having", + "Union All": { + "prefix": "unionall", "body": [ - "${1:table_name}", - "|> group_by ${2:column}", - "|> having ${3:condition}", - "|> select ${4:*}" + "${1:Table1}", + "|> select(${1:Table1}.${2:Column})", + "|> union_all(${3:Table2} |> select(${3:Table2}.${4:Column}))" ], - "description": "Having clause with group by" + "description": "Union all of two queries" }, - "Limit and Offset": { - "prefix": "limit", + "Let Binding": { + "prefix": "let", "body": [ - "${1:table_name}", - "|> limit ${2:10} offset ${3:0}", - "|> select ${4:*}" + "let ${1:queryName} = ${2:TableName} |> select(*)" ], - "description": "Limit with offset" + "description": "Let binding for query variables" }, "Case Expression": { "prefix": "case", @@ -112,35 +180,41 @@ ], "description": "Case expression" }, - "Lambda Function": { - "prefix": "lambda", + "Lambda Filter": { + "prefix": "fn", "body": [ - "fn ${1:param} => ${2:expression}" + "fn(${1:row}) => ${2:row.Table.Column} ${3|=,>,<,>=,<=,!=|} ${4:value}" ], - "description": 
"Lambda function" + "description": "Lambda function for filter/having" }, - "Arithmetic Operations": { + "Arithmetic in Select": { "prefix": "arith", "body": [ - "${1:table_name}", - "|> select ${2:column1} ${3|+,-,*,/,%|} ${4:column2} as ${5:result}" + "${1:TableName}", + "|> select(${1:TableName}.${2:Column1} ${3|+,-,*,/|} ${1:TableName}.${4:Column2} as ${5:result})" ], - "description": "Arithmetic operations" + "description": "Arithmetic operations in select" }, - "String Functions": { - "prefix": "string", + "Aggregate Functions": { + "prefix": "agg", "body": [ - "${1:table_name}", - "|> select ${2|concat,substring,length,trim,upper,lower|}(${3:column}) as ${4:result}" + "${1:TableName}", + "|> group_by(${1:TableName}.${2:GroupColumn})", + "|> select(${1:TableName}.${2:GroupColumn}, ${3|count,sum,avg,min,max|}(${1:TableName}.${4:Column}) as ${5:result})" ], - "description": "String functions" + "description": "Aggregate function in select" }, - "Math Functions": { - "prefix": "math", - "body": [ - "${1:table_name}", - "|> select ${2|round,floor,ceil,abs,sqrt|}(${3:column}) as ${4:result}" - ], - "description": "Math functions" + "Full Pipeline": { + "prefix": "pipeline", + "body": [ + "${1:TableName}", + "|> filter(fn(row) => row.${1:TableName}.${2:Column} ${3|=,>,<|} ${4:value})", + "|> join(${5:OtherTable}, on = ${1:TableName}.${6:Id} = ${5:OtherTable}.${7:ForeignKey})", + "|> group_by(${1:TableName}.${8:GroupColumn})", + "|> order_by(${1:TableName}.${8:GroupColumn} ${9|asc,desc|})", + "|> limit(${10:10})", + "|> select(${1:TableName}.${8:GroupColumn}, count(*) as count)" + ], + "description": "Full pipeline with filter, join, group, order, limit" } -} \ No newline at end of file +} diff --git a/Lql/LqlExtension/syntaxes/lql.tmLanguage.json b/Lql/LqlExtension/syntaxes/lql.tmLanguage.json index ad3e23e..2291c67 100644 --- a/Lql/LqlExtension/syntaxes/lql.tmLanguage.json +++ b/Lql/LqlExtension/syntaxes/lql.tmLanguage.json @@ -87,7 +87,7 @@ "patterns": [ { 
"name": "entity.name.function.query.lql", - "match": "\\b(select|filter|join|group_by|order_by|having|limit|offset|union|union_all|insert|update|delete)\\b" + "match": "\\b(select|select_distinct|filter|join|left_join|cross_join|group_by|order_by|having|limit|offset|union|union_all|insert|distinct)\\b" }, { "name": "entity.name.function.aggregate.lql", diff --git a/Lql/README.md b/Lql/README.md index b1f9a9b..93d5c49 100644 --- a/Lql/README.md +++ b/Lql/README.md @@ -1,238 +1,106 @@ # Lambda Query Language (LQL) -A functional pipeline-style DSL that transpiles to SQL. LQL provides an intuitive, composable way to write database queries using lambda expressions and pipeline operators, making complex queries more readable and maintainable. +A functional pipeline-style DSL that transpiles to SQL. Write database logic once, run it anywhere. -## Website +## The Problem -Visit [lql.dev](https://lql.dev) for interactive playground and documentation. +SQL dialects differ. PostgreSQL, SQLite, and SQL Server each have their own quirks. This creates problems: -## Features +- **Migrations** - Schema changes need different SQL for each database +- **Business Logic** - Triggers, stored procedures, and constraints vary by vendor +- **Sync Logic** - Offline-first apps need identical logic on client (SQLite) and server (Postgres) +- **Testing** - Running tests against SQLite while production uses Postgres -- **Pipeline Syntax** - Chain operations using `|>` operator -- **Lambda Expressions** - Use familiar lambda syntax for filtering -- **Cross-Database Support** - Transpiles to PostgreSQL, SQLite, and SQL Server -- **Type Safety** - Integrates with DataProvider for compile-time validation -- **VS Code Extension** - Syntax highlighting and IntelliSense support -- **CLI Tools** - Command-line transpilation and validation +## The Solution -## Syntax Overview +LQL is a single query language that transpiles to any SQL dialect. Write once, deploy everywhere. 
-### Basic Pipeline ```lql -users |> select(id, name, email) -``` - -### With Filtering -```lql -employees -|> filter(fn(row) => row.salary > 50000) -|> select(id, name, salary) -``` - -### Joins -```lql -Customer -|> join(Order, on = Customer.Id = Order.CustomerId) -|> select(Customer.Name, Order.Total) -``` - -### Complex Queries -```lql -let high_value_customers = Customer -|> join(Order, on = Customer.Id = Order.CustomerId) -|> filter(fn(row) => row.Order.Total > 1000) -|> group_by(Customer.Id, Customer.Name) -|> having(fn(row) => SUM(row.Order.Total) > 5000) -|> select(Customer.Name, SUM(Order.Total) AS TotalSpent) -|> order_by(TotalSpent DESC) +Users +|> filter(fn(row) => row.Age > 18 and row.Status = 'active') +|> join(Orders, on = Users.Id = Orders.UserId) +|> group_by(Users.Id, Users.Name) +|> select(Users.Name, sum(Orders.Total) as TotalSpent) +|> order_by(TotalSpent desc) |> limit(10) ``` -## Pipeline Operations +This transpiles to correct SQL for PostgreSQL, SQLite, or SQL Server. 
-| Operation | Description | SQL Equivalent | -|-----------|-------------|----------------| -| `select(cols...)` | Choose columns | `SELECT` | -| `filter(fn(row) => ...)` | Filter rows | `WHERE` | -| `join(table, on = ...)` | Join tables | `JOIN` | -| `left_join(table, on = ...)` | Left join | `LEFT JOIN` | -| `group_by(cols...)` | Group rows | `GROUP BY` | -| `having(fn(row) => ...)` | Filter groups | `HAVING` | -| `order_by(col [ASC/DESC])` | Sort results | `ORDER BY` | -| `limit(n)` | Limit rows | `LIMIT` | -| `offset(n)` | Skip rows | `OFFSET` | -| `distinct()` | Unique rows | `DISTINCT` | -| `union(query)` | Combine queries | `UNION` | -| `union_all(query)` | Combine with duplicates | `UNION ALL` | - -## Installation - -### CLI Tool (SQLite) -```bash -dotnet tool install -g LqlCli.SQLite -``` +## Use Cases -### VS Code Extension -Search for "LQL" in VS Code Extensions or: -```bash -code --install-extension lql-lang -``` +### Cross-Database Migrations +Define schema changes in LQL. Migration.CLI generates the right SQL for your target database. -### NuGet Packages -```xml - - +### Business Logic +Write triggers and constraints in LQL. Deploy the same logic to any database. - - +### Offline-First Sync +Sync framework uses LQL for conflict resolution. Same logic runs on mobile (SQLite) and server (Postgres). - - -``` +### Integration Testing +Test against SQLite locally, deploy to Postgres in production. Same queries, same results. 
-## CLI Usage +## Quick Start -### Transpile to SQL +### CLI Tool ```bash +dotnet tool install -g LqlCli.SQLite lql --input query.lql --output query.sql ``` -### Validate Syntax -```bash -lql --input query.lql --validate -``` - -### Print to Console -```bash -lql --input query.lql +### NuGet Packages +```xml + + + ``` -## Programmatic Usage - +### Programmatic Usage ```csharp using Lql; using Lql.SQLite; -// Parse LQL -var lqlCode = "users |> filter(fn(row) => row.age > 21) |> select(name, email)"; -var statement = LqlCodeParser.Parse(lqlCode); - -// Convert to SQL -var context = new SQLiteContext(); -var sql = statement.ToSql(context); - -Console.WriteLine(sql); -// Output: SELECT name, email FROM users WHERE age > 21 -``` - -## Function Support - -### Aggregate Functions -- `COUNT()`, `SUM()`, `AVG()`, `MIN()`, `MAX()` - -### String Functions -- `UPPER()`, `LOWER()`, `LENGTH()`, `CONCAT()` - -### Date Functions -- `NOW()`, `DATE()`, `YEAR()`, `MONTH()` - -### Conditional -- `CASE WHEN ... THEN ... ELSE ... END` -- `COALESCE()`, `NULLIF()` - -## Expression Support - -### Arithmetic -```lql -products |> select(price * quantity AS total) -``` - -### Comparisons -```lql -orders |> filter(fn(row) => row.date >= '2024-01-01' AND row.status != 'cancelled') -``` - -### Pattern Matching -```lql -customers |> filter(fn(row) => row.name LIKE 'John%') -``` - -### Subqueries -```lql -orders |> filter(fn(row) => row.customer_id IN ( - customers |> filter(fn(c) => c.country = 'USA') |> select(id) -)) +var lql = "Users |> filter(fn(row) => row.Age > 21) |> select(Name, Email)"; +var sql = LqlCodeParser.Parse(lql).ToSql(new SQLiteContext()); ``` -## VS Code Extension Features - -- Syntax highlighting -- Auto-completion -- Error diagnostics -- Format on save -- Snippets for common patterns +## F# Type Provider -## Architecture +Validate LQL queries at compile time. Invalid queries cause compilation errors, not runtime errors. 
-``` -Lql/ -ā”œā”€ā”€ Lql/ # Core transpiler -│ ā”œā”€ā”€ Parsing/ # ANTLR grammar and parser -│ ā”œā”€ā”€ FunctionMapping/ # Database-specific functions -│ └── Pipeline steps # AST transformation -ā”œā”€ā”€ Lql.SQLite/ # SQLite dialect -ā”œā”€ā”€ Lql.SqlServer/ # SQL Server dialect -ā”œā”€ā”€ Lql.Postgres/ # PostgreSQL dialect -ā”œā”€ā”€ LqlCli.SQLite/ # CLI tool -ā”œā”€ā”€ LqlExtension/ # VS Code extension -└── Website/ # lql.dev website -``` +```fsharp +open Lql.TypeProvider -## Testing +type GetUsers = LqlCommand<"Users |> select(Id, Name, Email)"> +type ActiveUsers = LqlCommand<"Users |> filter(fn(row) => row.Status = 'active') |> select(*)"> -```bash -dotnet test Lql.Tests/Lql.Tests.csproj +let sql = GetUsers.Sql // SQL generated at compile time ``` -## Examples - -See the `Lql.Tests/TestData/Lql/` directory for comprehensive examples of LQL queries and their SQL equivalents. - -## Error Handling - -LQL provides detailed error messages: - -```lql -// Invalid: Identifier cannot start with number -123table |> select(id) -// Error: Syntax error at line 1:0 - Identifier cannot start with a number - -// Invalid: Undefined variable -undefined_var |> select(name) -// Error: Syntax error at line 1:0 - Undefined variable -``` +## Pipeline Operations -## Integration with DataProvider +| Operation | Description | +|-----------|-------------| +| `select(cols...)` | Choose columns | +| `filter(fn(row) => ...)` | Filter rows | +| `join(table, on = ...)` | Join tables | +| `left_join(table, on = ...)` | Left join | +| `group_by(cols...)` | Group rows | +| `having(fn(row) => ...)` | Filter groups | +| `order_by(col [asc/desc])` | Sort results | +| `limit(n)` / `offset(n)` | Pagination | +| `distinct()` | Unique rows | +| `union(query)` | Combine queries | -LQL files are automatically processed by DataProvider source generators: +## VS Code Extension -1. Write `.lql` files in your project -2. DataProvider transpiles to SQL during build -3. 
Generates type-safe C# extension methods -4. Use with full IntelliSense support +Search for "LQL" in VS Code Extensions for syntax highlighting and IntelliSense. -## Contributing +## Website -1. Follow functional programming principles -2. Add tests for new features -3. Update grammar file for syntax changes -4. Ensure all dialects are supported -5. Run tests before submitting PRs +Visit [lql.dev](https://lql.dev) for interactive playground. ## License MIT License - -## Author - -MelbourneDeveloper - [ChristianFindlay.com](https://christianfindlay.com) \ No newline at end of file diff --git a/Migration/Migration.Tests/LqlDefaultsTests.cs b/Migration/Migration.Tests/LqlDefaultsTests.cs index 25db195..b20c527 100644 --- a/Migration/Migration.Tests/LqlDefaultsTests.cs +++ b/Migration/Migration.Tests/LqlDefaultsTests.cs @@ -13,6 +13,7 @@ public sealed class LqlDefaultsTests : IAsyncLifetime private PostgreSqlContainer _postgres = null!; private NpgsqlConnection _pgConnection = null!; private SqliteConnection _sqliteConnection = null!; + private string _sqliteDbPath = null!; private readonly ILogger _logger = NullLogger.Instance; public async Task InitializeAsync() @@ -30,8 +31,9 @@ public async Task InitializeAsync() _pgConnection = new NpgsqlConnection(_postgres.GetConnectionString()); await _pgConnection.OpenAsync().ConfigureAwait(false); - // Setup SQLite (in-memory) - _sqliteConnection = new SqliteConnection("Data Source=:memory:"); + // Setup SQLite with file-based database + _sqliteDbPath = Path.Combine(Path.GetTempPath(), $"lql_defaults_{Guid.NewGuid():N}.db"); + _sqliteConnection = new SqliteConnection($"Data Source={_sqliteDbPath}"); await _sqliteConnection.OpenAsync().ConfigureAwait(false); } @@ -40,6 +42,12 @@ public async Task DisposeAsync() await _pgConnection.DisposeAsync().ConfigureAwait(false); await _postgres.DisposeAsync().ConfigureAwait(false); _sqliteConnection.Dispose(); + if (File.Exists(_sqliteDbPath)) + { + try { File.Delete(_sqliteDbPath); } + 
catch (IOException) { /* File may be locked */ } + catch (UnauthorizedAccessException) { /* May not have permission */ } + } } // ========================================================================= diff --git a/Migration/Migration.Tests/MigrationCornerCaseTests.cs b/Migration/Migration.Tests/MigrationCornerCaseTests.cs index 0f402cc..ac049ee 100644 --- a/Migration/Migration.Tests/MigrationCornerCaseTests.cs +++ b/Migration/Migration.Tests/MigrationCornerCaseTests.cs @@ -8,108 +8,153 @@ public sealed class MigrationCornerCaseTests { private readonly ILogger _logger = NullLogger.Instance; + private static (SqliteConnection Connection, string DbPath) CreateTestDb() + { + var dbPath = Path.Combine(Path.GetTempPath(), $"cornercases_{Guid.NewGuid()}.db"); + var connection = new SqliteConnection($"Data Source={dbPath}"); + connection.Open(); + return (connection, dbPath); + } + + private static void CleanupTestDb(SqliteConnection connection, string dbPath) + { + connection.Close(); + connection.Dispose(); + if (File.Exists(dbPath)) + { + try + { + File.Delete(dbPath); + } + catch + { + /* File may be locked */ + } + } + } + #region Special Characters and Reserved Words [Fact] public void TableName_WithUnderscores_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "user_roles_history", - t => - t.Column("id", PortableTypes.BigInt, c => c.PrimaryKey()) - .Column("user_id", PortableTypes.Uuid) - .Column("role_name", PortableTypes.VarChar(100)) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); - VerifyTableExists(connection, "user_roles_history"); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "user_roles_history", + t => + t.Column("id", PortableTypes.BigInt, c => c.PrimaryKey()) + .Column("user_id", PortableTypes.Uuid) + 
.Column("role_name", PortableTypes.VarChar(100)) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + VerifyTableExists(connection, "user_roles_history"); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void ColumnName_IsReservedWord_HandledCorrectly() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - // Common reserved words as column names - var schema = Schema - .Define("Test") - .Table( - "DataTable", - t => - t.Column("index", PortableTypes.Int, c => c.PrimaryKey()) - .Column("order", PortableTypes.Int) - .Column("group", PortableTypes.VarChar(50)) - .Column("select", PortableTypes.Text) - .Column("where", PortableTypes.Boolean) - .Column("from", PortableTypes.DateTime()) - .Column("table", PortableTypes.VarChar(100)) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); - - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - var table = inspected.Tables.Single(); - Assert.Equal(7, table.Columns.Count); + var (connection, dbPath) = CreateTestDb(); + try + { + // Common reserved words as column names + var schema = Schema + .Define("Test") + .Table( + "DataTable", + t => + t.Column("index", PortableTypes.Int, c => c.PrimaryKey()) + .Column("order", PortableTypes.Int) + .Column("group", PortableTypes.VarChar(50)) + .Column("select", PortableTypes.Text) + .Column("where", PortableTypes.Boolean) + .Column("from", PortableTypes.DateTime()) + .Column("table", PortableTypes.VarChar(100)) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var table = inspected.Tables.Single(); + Assert.Equal(7, table.Columns.Count); + } + finally + { + CleanupTestDb(connection, 
dbPath); + } } [Fact] public void TableName_CamelCase_PreservedCorrectly() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "UserAccountSettings", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("UserId", PortableTypes.Uuid) - .Column("EnableNotifications", PortableTypes.Boolean) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); - - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - Assert.Contains(inspected.Tables, t => t.Name == "UserAccountSettings"); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "UserAccountSettings", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("UserId", PortableTypes.Uuid) + .Column("EnableNotifications", PortableTypes.Boolean) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + Assert.Contains(inspected.Tables, t => t.Name == "UserAccountSettings"); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void ColumnName_WithNumbers_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Metrics", - t => - t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("value1", PortableTypes.Decimal(10, 2)) - .Column("value2", PortableTypes.Decimal(10, 2)) - .Column("metric99", PortableTypes.Float) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Metrics", + t => + 
t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("value1", PortableTypes.Decimal(10, 2)) + .Column("value2", PortableTypes.Decimal(10, 2)) + .Column("metric99", PortableTypes.Float) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -119,81 +164,96 @@ public void ColumnName_WithNumbers_Success() [Fact] public void Table_ManyColumns_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - // Build wide table with many columns - var schema = Schema - .Define("Test") - .Table( - "WideTable", - t => - { - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()); - // Add 20 columns (enough to test wide tables) - for (var i = 1; i <= 20; i++) + var (connection, dbPath) = CreateTestDb(); + try + { + // Build wide table with many columns + var schema = Schema + .Define("Test") + .Table( + "WideTable", + t => { - t.Column($"Col{i}", PortableTypes.VarChar(100)); + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()); + // Add 20 columns (enough to test wide tables) + for (var i = 1; i <= 20; i++) + { + t.Column($"Col{i}", PortableTypes.VarChar(100)); + } } - } - ) - .Build(); + ) + .Build(); - var result = ApplySchema(connection, schema); + var result = ApplySchema(connection, schema); - Assert.True(result is MigrationApplyResultOk); + Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - Assert.Equal(21, inspected.Tables.Single().Columns.Count); + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + Assert.Equal(21, inspected.Tables.Single().Columns.Count); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void Column_MaximumVarCharLength_Success() { - using var connection = new SqliteConnection("Data 
Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "LargeText", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("ShortText", PortableTypes.VarChar(10)) - .Column("MediumText", PortableTypes.VarChar(4000)) - .Column("LargeText", PortableTypes.VarChar(8000)) - .Column("MaxText", PortableTypes.Text) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "LargeText", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("ShortText", PortableTypes.VarChar(10)) + .Column("MediumText", PortableTypes.VarChar(4000)) + .Column("LargeText", PortableTypes.VarChar(8000)) + .Column("MaxText", PortableTypes.Text) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void Decimal_ExtremeScaleAndPrecision_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Financials", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("TinyMoney", PortableTypes.Decimal(5, 2)) - .Column("StandardMoney", PortableTypes.Decimal(10, 2)) - .Column("BigMoney", PortableTypes.Decimal(18, 4)) - .Column("HugeMoney", PortableTypes.Decimal(28, 8)) - .Column("CryptoValue", PortableTypes.Decimal(38, 18)) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Financials", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("TinyMoney", PortableTypes.Decimal(5, 2)) + .Column("StandardMoney", 
PortableTypes.Decimal(10, 2)) + .Column("BigMoney", PortableTypes.Decimal(18, 4)) + .Column("HugeMoney", PortableTypes.Decimal(28, 8)) + .Column("CryptoValue", PortableTypes.Decimal(38, 18)) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -203,118 +263,138 @@ public void Decimal_ExtremeScaleAndPrecision_Success() [Fact] public void Table_MultiColumnUniqueConstraint_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - // Test multi-column unique constraint (composite PK requires different builder API) - var schema = Schema - .Define("Test") - .Table( - "CompositeUnique", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("TenantId", PortableTypes.Uuid, c => c.NotNull()) - .Column("EntityId", PortableTypes.Uuid, c => c.NotNull()) - .Column("Data", PortableTypes.Text) - .Unique("UQ_tenant_entity", "TenantId", "EntityId") - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + // Test multi-column unique constraint (composite PK requires different builder API) + var schema = Schema + .Define("Test") + .Table( + "CompositeUnique", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("TenantId", PortableTypes.Uuid, c => c.NotNull()) + .Column("EntityId", PortableTypes.Uuid, c => c.NotNull()) + .Column("Data", PortableTypes.Text) + .Unique("UQ_tenant_entity", "TenantId", "EntityId") + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void Table_MultiColumnIndex_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = 
Schema - .Define("Test") - .Table( - "Events", - t => - t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey()) - .Column("TenantId", PortableTypes.Uuid) - .Column("EntityType", PortableTypes.VarChar(100)) - .Column("EntityId", PortableTypes.Uuid) - .Column("EventDate", PortableTypes.DateTime()) - .Index("idx_events_tenant_entity", ["TenantId", "EntityType", "EntityId"]) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); - - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - Assert.Single(inspected.Tables.Single().Indexes); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Events", + t => + t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey()) + .Column("TenantId", PortableTypes.Uuid) + .Column("EntityType", PortableTypes.VarChar(100)) + .Column("EntityId", PortableTypes.Uuid) + .Column("EventDate", PortableTypes.DateTime()) + .Index("idx_events_tenant_entity", ["TenantId", "EntityType", "EntityId"]) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + Assert.Single(inspected.Tables.Single().Indexes); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void Table_SelfReferencingForeignKey_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - // Enable foreign keys - using (var cmd = connection.CreateCommand()) + var (connection, dbPath) = CreateTestDb(); + try { - cmd.CommandText = "PRAGMA foreign_keys = ON"; - cmd.ExecuteNonQuery(); + // Enable foreign keys + using (var cmd = connection.CreateCommand()) + { + cmd.CommandText = "PRAGMA foreign_keys = ON"; + cmd.ExecuteNonQuery(); + } + + var schema = Schema + .Define("Test") + .Table( + "Categories", + t => + 
t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(100), c => c.NotNull()) + .Column("ParentId", PortableTypes.Int) + .ForeignKey("ParentId", "Categories", "Id", ForeignKeyAction.SetNull) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var table = inspected.Tables.Single(); + Assert.Single(table.ForeignKeys); + Assert.Equal("Categories", table.ForeignKeys[0].ReferencedTable); + } + finally + { + CleanupTestDb(connection, dbPath); } - - var schema = Schema - .Define("Test") - .Table( - "Categories", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(100), c => c.NotNull()) - .Column("ParentId", PortableTypes.Int) - .ForeignKey("ParentId", "Categories", "Id", ForeignKeyAction.SetNull) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); - - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - var table = inspected.Tables.Single(); - Assert.Single(table.ForeignKeys); - Assert.Equal("Categories", table.ForeignKeys[0].ReferencedTable); } [Fact] public void Table_MultipleIndexesOnSameColumn_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Documents", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Title", PortableTypes.VarChar(500)) - .Column("Status", PortableTypes.VarChar(20)) - .Column("CreatedAt", PortableTypes.DateTime()) - .Index("idx_docs_title", "Title") - .Index("idx_docs_status", "Status") - .Index("idx_docs_status_created", ["Status", "CreatedAt"]) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); 
- - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - Assert.Equal(3, inspected.Tables.Single().Indexes.Count); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Documents", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Title", PortableTypes.VarChar(500)) + .Column("Status", PortableTypes.VarChar(20)) + .Column("CreatedAt", PortableTypes.DateTime()) + .Index("idx_docs_title", "Title") + .Index("idx_docs_status", "Status") + .Index("idx_docs_status_created", ["Status", "CreatedAt"]) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + Assert.Equal(3, inspected.Tables.Single().Indexes.Count); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -324,61 +404,71 @@ public void Table_MultipleIndexesOnSameColumn_Success() [Fact] public void AllColumnsNullable_ExceptPrimaryKey_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "OptionalData", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(100)) - .Column("Email", PortableTypes.VarChar(255)) - .Column("Age", PortableTypes.Int) - .Column("Balance", PortableTypes.Decimal(10, 2)) - .Column("Active", PortableTypes.Boolean) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); - - // Verify all columns except Id are nullable - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - var table = inspected.Tables.Single(); - foreach (var col in table.Columns.Where(c => c.Name != "Id")) + var (connection, dbPath) = CreateTestDb(); + try { - 
Assert.True(col.IsNullable, $"Column {col.Name} should be nullable"); + var schema = Schema + .Define("Test") + .Table( + "OptionalData", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(100)) + .Column("Email", PortableTypes.VarChar(255)) + .Column("Age", PortableTypes.Int) + .Column("Balance", PortableTypes.Decimal(10, 2)) + .Column("Active", PortableTypes.Boolean) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + + // Verify all columns except Id are nullable + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var table = inspected.Tables.Single(); + foreach (var col in table.Columns.Where(c => c.Name != "Id")) + { + Assert.True(col.IsNullable, $"Column {col.Name} should be nullable"); + } + } + finally + { + CleanupTestDb(connection, dbPath); } } [Fact] public void AllColumnsNotNull_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "RequiredData", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(100), c => c.NotNull()) - .Column("Email", PortableTypes.VarChar(255), c => c.NotNull()) - .Column( - "Status", - PortableTypes.VarChar(20), - c => c.NotNull().Default("'active'") - ) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "RequiredData", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(100), c => c.NotNull()) + .Column("Email", PortableTypes.VarChar(255), c => c.NotNull()) + .Column( + "Status", + PortableTypes.VarChar(20), + c => c.NotNull().Default("'active'") + ) + ) + .Build(); + + var result = 
ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -388,93 +478,113 @@ public void AllColumnsNotNull_Success() [Fact] public void DefaultValue_StringWithQuotes_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Defaults", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Status", PortableTypes.VarChar(50), c => c.Default("'pending'")) - .Column("Type", PortableTypes.VarChar(50), c => c.Default("'default'")) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Defaults", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Status", PortableTypes.VarChar(50), c => c.Default("'pending'")) + .Column("Type", PortableTypes.VarChar(50), c => c.Default("'default'")) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void DefaultValue_NumericZero_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Counters", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Count", PortableTypes.Int, c => c.Default("0")) - .Column("Balance", PortableTypes.Decimal(10, 2), c => c.Default("0.00")) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Counters", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + 
.Column("Count", PortableTypes.Int, c => c.Default("0")) + .Column("Balance", PortableTypes.Decimal(10, 2), c => c.Default("0.00")) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void DefaultValue_BooleanFalse_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Flags", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("IsActive", PortableTypes.Boolean, c => c.Default("0")) - .Column("IsVerified", PortableTypes.Boolean, c => c.Default("1")) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Flags", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("IsActive", PortableTypes.Boolean, c => c.Default("0")) + .Column("IsVerified", PortableTypes.Boolean, c => c.Default("1")) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void DefaultValue_CurrentTimestamp_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Auditable", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column( - "CreatedAt", - PortableTypes.DateTime(), - c => c.Default("CURRENT_TIMESTAMP") - ) - .Column("UpdatedAt", PortableTypes.DateTime()) - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Auditable", + t 
=> + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column( + "CreatedAt", + PortableTypes.DateTime(), + c => c.Default("CURRENT_TIMESTAMP") + ) + .Column("UpdatedAt", PortableTypes.DateTime()) + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -484,73 +594,88 @@ public void DefaultValue_CurrentTimestamp_Success() [Fact] public void EmptySchema_NoOperations() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema.Define("Empty").Build(); - var schema = Schema.Define("Empty").Build(); + var emptyDbSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; - var emptyDbSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptyDbSchema, schema, logger: _logger) + ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptyDbSchema, schema, logger: _logger) - ).Value; - - Assert.Empty(operations); + Assert.Empty(operations); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void TableWithOnlyPrimaryKey_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table("Simple", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Build(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table("Simple", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Build(); - var result = ApplySchema(connection, schema); + var result = ApplySchema(connection, schema); - Assert.True(result is MigrationApplyResultOk); + Assert.True(result is MigrationApplyResultOk); + } + finally + { + 
CleanupTestDb(connection, dbPath); + } } [Fact] public void MultipleTables_CircularForeignKeys_DeferredConstraints() { // This tests a common real-world scenario where tables reference each other - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - using (var cmd = connection.CreateCommand()) - { - cmd.CommandText = "PRAGMA foreign_keys = ON"; - cmd.ExecuteNonQuery(); - } - - // Create tables without FK first, then add FKs - var schema = Schema - .Define("Test") - .Table( - "Authors", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(100)) - ) - .Table( - "Books", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Title", PortableTypes.VarChar(200)) - .Column("AuthorId", PortableTypes.Int) - .ForeignKey("AuthorId", "Authors", "Id") - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + using (var cmd = connection.CreateCommand()) + { + cmd.CommandText = "PRAGMA foreign_keys = ON"; + cmd.ExecuteNonQuery(); + } + + // Create tables without FK first, then add FKs + var schema = Schema + .Define("Test") + .Table( + "Authors", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(100)) + ) + .Table( + "Books", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Title", PortableTypes.VarChar(200)) + .Column("AuthorId", PortableTypes.Int) + .ForeignKey("AuthorId", "Authors", "Id") + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -560,119 +685,129 @@ public void MultipleTables_CircularForeignKeys_DeferredConstraints() [Fact] public void UpgradeFrom_EmptyTable_ToFullSchema_Success() { - using var connection = new 
SqliteConnection("Data Source=:memory:"); - connection.Open(); - - // Start with minimal table - var v1 = Schema - .Define("Test") - .Table("Products", t => t.Column("Id", PortableTypes.Int, c => c.PrimaryKey())) - .Build(); - - ApplySchema(connection, v1); - - // Upgrade to full table - var v2 = Schema - .Define("Test") - .Table( - "Products", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .Column("Price", PortableTypes.Decimal(10, 2)) - .Column("CategoryId", PortableTypes.Int) - .Column("CreatedAt", PortableTypes.DateTime()) - .Index("idx_products_name", "Name") - .Index("idx_products_category", "CategoryId") - ) - .Build(); - - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) - ).Value; - - // Should have 4 AddColumn + 2 CreateIndex - Assert.Equal(6, operations.Count); - Assert.Equal(4, operations.Count(op => op is AddColumnOperation)); - Assert.Equal(2, operations.Count(op => op is CreateIndexOperation)); - - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + // Start with minimal table + var v1 = Schema + .Define("Test") + .Table("Products", t => t.Column("Id", PortableTypes.Int, c => c.PrimaryKey())) + .Build(); + + ApplySchema(connection, v1); + + // Upgrade to full table + var v2 = Schema + .Define("Test") + .Table( + "Products", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .Column("Price", PortableTypes.Decimal(10, 2)) + .Column("CategoryId", PortableTypes.Int) + .Column("CreatedAt", PortableTypes.DateTime()) + .Index("idx_products_name", "Name") + 
.Index("idx_products_category", "CategoryId") + ) + .Build(); + + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) + ).Value; + + // Should have 4 AddColumn + 2 CreateIndex + Assert.Equal(6, operations.Count); + Assert.Equal(4, operations.Count(op => op is AddColumnOperation)); + Assert.Equal(2, operations.Count(op => op is CreateIndexOperation)); + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void AddIndex_ThenAddAnother_Success() { - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - // V1 with one index - var v1 = Schema - .Define("Test") - .Table( - "Items", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Code", PortableTypes.VarChar(50)) - .Column("Category", PortableTypes.VarChar(50)) - .Index("idx_items_code", "Code") - ) - .Build(); - - ApplySchema(connection, v1); - - // V2 - add another index (additive change) - var v2 = Schema - .Define("Test") - .Table( - "Items", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Code", PortableTypes.VarChar(50)) - .Column("Category", PortableTypes.VarChar(50)) - .Index("idx_items_code", "Code") - .Index("idx_items_category", "Category") - ) - .Build(); - - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var operations = ( - (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, allowDestructive: false, logger: _logger) - ).Value; - - // Should add the new index - Assert.Single(operations); - Assert.IsType(operations[0]); - - var result = MigrationRunner.Apply( - connection, - operations, - 
SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - Assert.True(result is MigrationApplyResultOk); - - var finalSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - Assert.Equal(2, finalSchema.Tables.Single().Indexes.Count); + var (connection, dbPath) = CreateTestDb(); + try + { + // V1 with one index + var v1 = Schema + .Define("Test") + .Table( + "Items", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Code", PortableTypes.VarChar(50)) + .Column("Category", PortableTypes.VarChar(50)) + .Index("idx_items_code", "Code") + ) + .Build(); + + ApplySchema(connection, v1); + + // V2 - add another index (additive change) + var v2 = Schema + .Define("Test") + .Table( + "Items", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Code", PortableTypes.VarChar(50)) + .Column("Category", PortableTypes.VarChar(50)) + .Index("idx_items_code", "Code") + .Index("idx_items_category", "Category") + ) + .Build(); + + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk) + SchemaDiff.Calculate(currentSchema, v2, allowDestructive: false, logger: _logger) + ).Value; + + // Should add the new index + Assert.Single(operations); + Assert.IsType(operations[0]); + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + Assert.True(result is MigrationApplyResultOk); + + var finalSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + Assert.Equal(2, finalSchema.Tables.Single().Indexes.Count); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion @@ -682,24 +817,29 @@ public void AddIndex_ThenAddAnother_Success() [Fact] public void Table_MultipleIdentityColumns_OnePerTable() { - using var connection = new SqliteConnection("Data Source=:memory:"); - 
connection.Open(); - - // SQLite only allows one ROWID alias (INTEGER PRIMARY KEY) - var schema = Schema - .Define("Test") - .Table( - "Sequenced", - t => - t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey().Identity()) - .Column("Name", PortableTypes.VarChar(100)) - .Column("OrderNum", PortableTypes.Int) // Not identity - ) - .Build(); - - var result = ApplySchema(connection, schema); - - Assert.True(result is MigrationApplyResultOk); + var (connection, dbPath) = CreateTestDb(); + try + { + // SQLite only allows one ROWID alias (INTEGER PRIMARY KEY) + var schema = Schema + .Define("Test") + .Table( + "Sequenced", + t => + t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey().Identity()) + .Column("Name", PortableTypes.VarChar(100)) + .Column("OrderNum", PortableTypes.Int) // Not identity + ) + .Build(); + + var result = ApplySchema(connection, schema); + + Assert.True(result is MigrationApplyResultOk); + } + finally + { + CleanupTestDb(connection, dbPath); + } } #endregion diff --git a/Migration/Migration.Tests/SchemaYamlSerializerTests.cs b/Migration/Migration.Tests/SchemaYamlSerializerTests.cs index 9f02834..740ec95 100644 --- a/Migration/Migration.Tests/SchemaYamlSerializerTests.cs +++ b/Migration/Migration.Tests/SchemaYamlSerializerTests.cs @@ -575,29 +575,49 @@ public void IntegrationTest_YamlToSqlite_CreatesDatabaseSuccessfully() var schema = SchemaYamlSerializer.FromYaml(yaml); // Act - Apply to SQLite - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - foreach (var table in schema.Tables) + var dbPath = Path.Combine(Path.GetTempPath(), $"schemayaml_{Guid.NewGuid()}.db"); + var connection = new SqliteConnection($"Data Source={dbPath}"); + try { - var ddl = SqliteDdlGenerator.Generate(new CreateTableOperation(table)); - using var cmd = connection.CreateCommand(); - cmd.CommandText = ddl; - cmd.ExecuteNonQuery(); + connection.Open(); + + foreach (var table in schema.Tables) + { + var ddl = 
SqliteDdlGenerator.Generate(new CreateTableOperation(table)); + using var cmd = connection.CreateCommand(); + cmd.CommandText = ddl; + cmd.ExecuteNonQuery(); + } + + // Assert - Verify tables exist + using var verifyCmd = connection.CreateCommand(); + verifyCmd.CommandText = + "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name IN ('Users', 'Orders')"; + var tableCount = Convert.ToInt32(verifyCmd.ExecuteScalar(), CultureInfo.InvariantCulture); + Assert.Equal(2, tableCount); + + // Verify index exists + verifyCmd.CommandText = + "SELECT COUNT(*) FROM sqlite_master WHERE type='index' AND name='idx_users_email'"; + var indexCount = Convert.ToInt32(verifyCmd.ExecuteScalar(), CultureInfo.InvariantCulture); + Assert.Equal(1, indexCount); + } + finally + { + connection.Close(); + connection.Dispose(); + if (File.Exists(dbPath)) + { + try + { + File.Delete(dbPath); + } + catch + { + /* File may be locked */ + } + } } - - // Assert - Verify tables exist - using var verifyCmd = connection.CreateCommand(); - verifyCmd.CommandText = - "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name IN ('Users', 'Orders')"; - var tableCount = Convert.ToInt32(verifyCmd.ExecuteScalar(), CultureInfo.InvariantCulture); - Assert.Equal(2, tableCount); - - // Verify index exists - verifyCmd.CommandText = - "SELECT COUNT(*) FROM sqlite_master WHERE type='index' AND name='idx_users_email'"; - var indexCount = Convert.ToInt32(verifyCmd.ExecuteScalar(), CultureInfo.InvariantCulture); - Assert.Equal(1, indexCount); } [Fact] diff --git a/Migration/Migration.Tests/SqliteMigrationTests.cs b/Migration/Migration.Tests/SqliteMigrationTests.cs index 4bb550c..f3fe112 100644 --- a/Migration/Migration.Tests/SqliteMigrationTests.cs +++ b/Migration/Migration.Tests/SqliteMigrationTests.cs @@ -7,969 +7,1013 @@ public sealed class SqliteMigrationTests { private readonly ILogger _logger = NullLogger.Instance; + private static (SqliteConnection Connection, string DbPath) CreateTestDb() + { + var 
dbPath = Path.Combine(Path.GetTempPath(), $"sqlitemigration_{Guid.NewGuid()}.db"); + var connection = new SqliteConnection($"Data Source={dbPath}"); + connection.Open(); + return (connection, dbPath); + } + + private static void CleanupTestDb(SqliteConnection connection, string dbPath) + { + connection.Close(); + connection.Dispose(); + if (File.Exists(dbPath)) + { + try { File.Delete(dbPath); } + catch { /* File may be locked */ } + } + } + [Fact] public void CreateDatabaseFromScratch_SingleTable_Success() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255), c => c.NotNull()) + .Column("Name", PortableTypes.NVarChar(100)) + .Index("idx_users_email", "Email", unique: true) + ) + .Build(); + + // Act + var emptySchema = SqliteSchemaInspector.Inspect(connection, _logger); + Assert.True(emptySchema is SchemaResultOk); + + var operations = SchemaDiff.Calculate( + ((SchemaResultOk)emptySchema).Value, + schema, + logger: _logger + ); + Assert.True(operations is OperationsResultOk); + + var ops = ((OperationsResultOk)operations).Value; + + var result = MigrationRunner.Apply( + connection, + ops, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); - var schema = Schema - .Define("Test") - .Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255), c => c.NotNull()) - .Column("Name", PortableTypes.NVarChar(100)) - .Index("idx_users_email", "Email", unique: true) - ) - .Build(); - - // Act - var emptySchema = SqliteSchemaInspector.Inspect(connection, _logger); - Assert.True(emptySchema is SchemaResultOk); - - var operations = 
SchemaDiff.Calculate( - ((SchemaResultOk)emptySchema).Value, - schema, - logger: _logger - ); - Assert.True(operations is OperationsResultOk); - - var ops = ((OperationsResultOk)operations).Value; - - var result = MigrationRunner.Apply( - connection, - ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Verify table exists - var inspected = SqliteSchemaInspector.Inspect(connection, _logger); - Assert.True(inspected is SchemaResultOk); - var inspectedSchema = ((SchemaResultOk)inspected).Value; - Assert.Single(inspectedSchema.Tables); - Assert.Equal("Users", inspectedSchema.Tables[0].Name); - Assert.Equal(3, inspectedSchema.Tables[0].Columns.Count); + // Verify table exists + var inspected = SqliteSchemaInspector.Inspect(connection, _logger); + Assert.True(inspected is SchemaResultOk); + var inspectedSchema = ((SchemaResultOk)inspected).Value; + Assert.Single(inspectedSchema.Tables); + Assert.Equal("Users", inspectedSchema.Tables[0].Name); + Assert.Equal(3, inspectedSchema.Tables[0].Columns.Count); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void CreateDatabaseFromScratch_MultipleTablesWithForeignKeys_Success() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + // Enable foreign keys + using (var cmd = connection.CreateCommand()) + { + cmd.CommandText = "PRAGMA foreign_keys = ON"; + cmd.ExecuteNonQuery(); + } - // Enable foreign keys - using (var cmd = connection.CreateCommand()) + var schema = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255), c => c.NotNull()) + ) + .Table( + "Orders", + t => + t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey().Identity()) + .Column("UserId", PortableTypes.Uuid, c => c.NotNull()) + 
.Column("Total", PortableTypes.Decimal(10, 2), c => c.NotNull()) + .Column( + "CreatedAt", + PortableTypes.DateTime(), + c => c.NotNull().Default("CURRENT_TIMESTAMP") + ) + .ForeignKey("UserId", "Users", "Id", ForeignKeyAction.Cascade) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + + Assert.Equal(2, inspected.Tables.Count); + Assert.Contains(inspected.Tables, t => t.Name == "Users"); + Assert.Contains(inspected.Tables, t => t.Name == "Orders"); + + var ordersTable = inspected.Tables.First(t => t.Name == "Orders"); + Assert.Single(ordersTable.ForeignKeys); + Assert.Equal("Users", ordersTable.ForeignKeys[0].ReferencedTable); + } + finally { - cmd.CommandText = "PRAGMA foreign_keys = ON"; - cmd.ExecuteNonQuery(); + CleanupTestDb(connection, dbPath); } - - var schema = Schema - .Define("Test") - .Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255), c => c.NotNull()) - ) - .Table( - "Orders", - t => - t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey().Identity()) - .Column("UserId", PortableTypes.Uuid, c => c.NotNull()) - .Column("Total", PortableTypes.Decimal(10, 2), c => c.NotNull()) - .Column( - "CreatedAt", - PortableTypes.DateTime(), - c => c.NotNull().Default("CURRENT_TIMESTAMP") - ) - .ForeignKey("UserId", "Users", "Id", ForeignKeyAction.Cascade) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var 
operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - - Assert.Equal(2, inspected.Tables.Count); - Assert.Contains(inspected.Tables, t => t.Name == "Users"); - Assert.Contains(inspected.Tables, t => t.Name == "Orders"); - - var ordersTable = inspected.Tables.First(t => t.Name == "Orders"); - Assert.Single(ordersTable.ForeignKeys); - Assert.Equal("Users", ordersTable.ForeignKeys[0].ReferencedTable); } [Fact] public void UpgradeExistingDatabase_AddColumn_Success() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + // Create initial schema + var v1 = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255)) + ) + .Build(); + + // Apply v1 + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Define v2 with new columns + var v2 = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255)) + .Column("Name", PortableTypes.NVarChar(100)) + .Column("CreatedAt", PortableTypes.DateTime()) + ) + .Build(); + + // Act - upgrade to v2 + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var 
upgradeOps = ( + (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) + ).Value; - // Create initial schema - var v1 = Schema - .Define("Test") - .Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255)) - ) - .Build(); - - // Apply v1 - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Define v2 with new columns - var v2 = Schema - .Define("Test") - .Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255)) - .Column("Name", PortableTypes.NVarChar(100)) - .Column("CreatedAt", PortableTypes.DateTime()) - ) - .Build(); - - // Act - upgrade to v2 - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var upgradeOps = ( - (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) - ).Value; - - // Should have 2 AddColumn operations - Assert.Equal(2, upgradeOps.Count); - Assert.All(upgradeOps, op => Assert.IsType(op)); - - var result = MigrationRunner.Apply( - connection, - upgradeOps, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - var finalSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var users = finalSchema.Tables.Single(t => t.Name == "Users"); - Assert.Equal(4, users.Columns.Count); - Assert.Contains(users.Columns, c => c.Name == "Name"); - Assert.Contains(users.Columns, c => c.Name == "CreatedAt"); + // Should have 2 AddColumn operations + Assert.Equal(2, upgradeOps.Count); + Assert.All(upgradeOps, op => 
Assert.IsType(op)); + + var result = MigrationRunner.Apply( + connection, + upgradeOps, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); + + var finalSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var users = finalSchema.Tables.Single(t => t.Name == "Users"); + Assert.Equal(4, users.Columns.Count); + Assert.Contains(users.Columns, c => c.Name == "Name"); + Assert.Contains(users.Columns, c => c.Name == "CreatedAt"); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void UpgradeExistingDatabase_AddTable_Success() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var v1 = Schema + .Define("Test") + .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Build(); - var v1 = Schema - .Define("Test") - .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Build(); - - // Apply v1 - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // v2 adds a new table - var v2 = Schema - .Define("Test") - .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Table( - "Products", - t => - t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey().Identity()) - .Column("Name", PortableTypes.NVarChar(200), c => c.NotNull()) - .Column("Price", PortableTypes.Decimal(10, 2)) - ) - .Build(); - - // Act - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var upgradeOps = ( - 
(OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) - ).Value; - - // Should have 1 CreateTable operation - Assert.Single(upgradeOps); - Assert.IsType(upgradeOps[0]); - - var result = MigrationRunner.Apply( - connection, - upgradeOps, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - var finalSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - Assert.Equal(2, finalSchema.Tables.Count); - Assert.Contains(finalSchema.Tables, t => t.Name == "Products"); - } + // Apply v1 + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); - [Fact] - public void UpgradeExistingDatabase_AddIndex_Success() - { - // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + // v2 adds a new table + var v2 = Schema + .Define("Test") + .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Table( + "Products", + t => + t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey().Identity()) + .Column("Name", PortableTypes.NVarChar(200), c => c.NotNull()) + .Column("Price", PortableTypes.Decimal(10, 2)) + ) + .Build(); + + // Act + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var upgradeOps = ( + (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) + ).Value; + + // Should have 1 CreateTable operation + Assert.Single(upgradeOps); + Assert.IsType(upgradeOps[0]); + + var result = MigrationRunner.Apply( + connection, + upgradeOps, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + 
Assert.True(result is MigrationApplyResultOk); - var v1 = Schema - .Define("Test") - .Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255)) - ) - .Build(); - - // Apply v1 - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // v2 adds an index - var v2 = Schema - .Define("Test") - .Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255)) - .Index("idx_users_email", "Email", unique: true) - ) - .Build(); - - // Act - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var upgradeOps = ( - (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) - ).Value; - - Assert.Single(upgradeOps); - Assert.IsType(upgradeOps[0]); - - var result = MigrationRunner.Apply( - connection, - upgradeOps, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - var finalSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var users = finalSchema.Tables.Single(t => t.Name == "Users"); - Assert.Single(users.Indexes); - Assert.Equal("idx_users_email", users.Indexes[0].Name); - Assert.True(users.Indexes[0].IsUnique); + var finalSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + Assert.Equal(2, finalSchema.Tables.Count); + Assert.Contains(finalSchema.Tables, t => t.Name == "Products"); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] - public void Migration_IsIdempotent_NoErrorOnRerun() + public void 
UpgradeExistingDatabase_AddIndex_Success() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Items", - t => - t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Name", PortableTypes.NVarChar(50)) - .Index("idx_items_name", "Name") - ) - .Build(); - - // Act - Run migration twice - for (var i = 0; i < 2; i++) + var (connection, dbPath) = CreateTestDb(); + try { + var v1 = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255)) + ) + .Build(); + + // Apply v1 + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // v2 adds an index + var v2 = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255)) + .Index("idx_users_email", "Email", unique: true) + ) + .Build(); + + // Act var currentSchema = ( (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(currentSchema, schema, logger: _logger) + var upgradeOps = ( + (OperationsResultOk)SchemaDiff.Calculate(currentSchema, v2, logger: _logger) ).Value; + Assert.Single(upgradeOps); + Assert.IsType(upgradeOps[0]); + var result = MigrationRunner.Apply( connection, - operations, + upgradeOps, SqliteDdlGenerator.Generate, MigrationOptions.Default, _logger ); + // Assert Assert.True(result is MigrationApplyResultOk); - // Second run should have 0 operations - if (i == 1) + var finalSchema = ( + 
(SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var users = finalSchema.Tables.Single(t => t.Name == "Users"); + Assert.Single(users.Indexes); + Assert.Equal("idx_users_email", users.Indexes[0].Name); + Assert.True(users.Indexes[0].IsUnique); + } + finally + { + CleanupTestDb(connection, dbPath); + } + } + + [Fact] + public void Migration_IsIdempotent_NoErrorOnRerun() + { + // Arrange + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Items", + t => + t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("Name", PortableTypes.NVarChar(50)) + .Index("idx_items_name", "Name") + ) + .Build(); + + // Act - Run migration twice + for (var i = 0; i < 2; i++) { - Assert.Empty(operations); + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(currentSchema, schema, logger: _logger) + ).Value; + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + Assert.True(result is MigrationApplyResultOk); + + // Second run should have 0 operations + if (i == 1) + { + Assert.Empty(operations); + } } } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void CreateTable_AllPortableTypes_Success() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "TypeTest", + t => + t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey()) + .Column("TinyInt", PortableTypes.TinyInt) + .Column("SmallInt", PortableTypes.SmallInt) + .Column("Int", PortableTypes.Int) + .Column("BigInt", PortableTypes.BigInt) + .Column("Decimal", PortableTypes.Decimal(18, 2)) + .Column("Float", PortableTypes.Float) + .Column("Double", 
PortableTypes.Double) + .Column("Money", PortableTypes.Money) + .Column("Bool", PortableTypes.Boolean) + .Column("Char", PortableTypes.Char(10)) + .Column("VarChar", PortableTypes.VarChar(50)) + .Column("NChar", PortableTypes.NChar(10)) + .Column("NVarChar", PortableTypes.NVarChar(100)) + .Column("Text", PortableTypes.Text) + .Column("Binary", PortableTypes.Binary(16)) + .Column("VarBinary", PortableTypes.VarBinary(256)) + .Column("Blob", PortableTypes.Blob) + .Column("Date", PortableTypes.Date) + .Column("Time", PortableTypes.Time()) + .Column("DateTime", PortableTypes.DateTime()) + .Column("DateTimeOffset", PortableTypes.DateTimeOffset) + .Column("Uuid", PortableTypes.Uuid) + .Column("Json", PortableTypes.Json) + .Column("Xml", PortableTypes.Xml) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; - var schema = Schema - .Define("Test") - .Table( - "TypeTest", - t => - t.Column("Id", PortableTypes.BigInt, c => c.PrimaryKey()) - .Column("TinyInt", PortableTypes.TinyInt) - .Column("SmallInt", PortableTypes.SmallInt) - .Column("Int", PortableTypes.Int) - .Column("BigInt", PortableTypes.BigInt) - .Column("Decimal", PortableTypes.Decimal(18, 2)) - .Column("Float", PortableTypes.Float) - .Column("Double", PortableTypes.Double) - .Column("Money", PortableTypes.Money) - .Column("Bool", PortableTypes.Boolean) - .Column("Char", PortableTypes.Char(10)) - .Column("VarChar", PortableTypes.VarChar(50)) - .Column("NChar", PortableTypes.NChar(10)) - .Column("NVarChar", PortableTypes.NVarChar(100)) - .Column("Text", PortableTypes.Text) - .Column("Binary", PortableTypes.Binary(16)) - .Column("VarBinary", PortableTypes.VarBinary(256)) - .Column("Blob", PortableTypes.Blob) - .Column("Date", PortableTypes.Date) - .Column("Time", PortableTypes.Time()) - .Column("DateTime", PortableTypes.DateTime()) 
- .Column("DateTimeOffset", PortableTypes.DateTimeOffset) - .Column("Uuid", PortableTypes.Uuid) - .Column("Json", PortableTypes.Json) - .Column("Xml", PortableTypes.Xml) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - var table = inspected.Tables.Single(); - Assert.Equal(25, table.Columns.Count); + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); + + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var table = inspected.Tables.Single(); + Assert.Equal(25, table.Columns.Count); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void Destructive_DropTable_BlockedByDefault() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + // Create initial schema with 2 tables + var v1 = Schema + .Define("Test") + .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Table("Products", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Build(); + + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, 
+ MigrationOptions.Default, + _logger + ); + + // v2 removes Products table + var v2 = Schema + .Define("Test") + .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Build(); - // Create initial schema with 2 tables - var v1 = Schema - .Define("Test") - .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Table("Products", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Build(); - - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // v2 removes Products table - var v2 = Schema - .Define("Test") - .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Build(); - - // Act - Calculate diff WITHOUT AllowDestructive - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var operations = ( - (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, allowDestructive: false, logger: _logger) - ).Value; - - // Assert - No drop operations should be generated - Assert.Empty(operations); + // Act - Calculate diff WITHOUT AllowDestructive + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk) + SchemaDiff.Calculate(currentSchema, v2, allowDestructive: false, logger: _logger) + ).Value; + + // Assert - No drop operations should be generated + Assert.Empty(operations); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void Destructive_DropTable_AllowedWithOption() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var 
v1 = Schema + .Define("Test") + .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Table("Products", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Build(); - var v1 = Schema - .Define("Test") - .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Table("Products", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Build(); - - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - var v2 = Schema - .Define("Test") - .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) - .Build(); - - // Act - Calculate diff WITH AllowDestructive - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - - var operations = ( - (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, allowDestructive: true, logger: _logger) - ).Value; - - // Should have DropTableOperation - Assert.Single(operations); - Assert.IsType(operations[0]); - - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Destructive, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - var finalSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - Assert.Single(finalSchema.Tables); - Assert.DoesNotContain(finalSchema.Tables, t => t.Name == "Products"); + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, + 
MigrationOptions.Default, + _logger + ); + + var v2 = Schema + .Define("Test") + .Table("Users", t => t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey())) + .Build(); + + // Act - Calculate diff WITH AllowDestructive + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk) + SchemaDiff.Calculate(currentSchema, v2, allowDestructive: true, logger: _logger) + ).Value; + + // Should have DropTableOperation + Assert.Single(operations); + Assert.IsType(operations[0]); + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Destructive, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); + + var finalSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + Assert.Single(finalSchema.Tables); + Assert.DoesNotContain(finalSchema.Tables, t => t.Name == "Products"); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void SchemaInspector_RoundTrip_Matches() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Users", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Email", PortableTypes.NVarChar(255), c => c.NotNull()) + .Column("Active", PortableTypes.Boolean, c => c.Default("1")) + .Index("idx_users_email", "Email", unique: true) + ) + .Build(); + + // Create schema + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); - var schema = Schema - .Define("Test") - 
.Table( - "Users", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Email", PortableTypes.NVarChar(255), c => c.NotNull()) - .Column("Active", PortableTypes.Boolean, c => c.Default("1")) - .Index("idx_users_email", "Email", unique: true) - ) - .Build(); - - // Create schema - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Act - Inspect and compare - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; - - // Calculate diff between original and inspected - should be empty - var diff = ( - (OperationsResultOk)SchemaDiff.Calculate(inspected, schema, logger: _logger) - ).Value; - - // Assert - Assert.Empty(diff); + // Act - Inspect and compare + var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + + // Calculate diff between original and inspected - should be empty + var diff = ( + (OperationsResultOk)SchemaDiff.Calculate(inspected, schema, logger: _logger) + ).Value; + + // Assert + Assert.Empty(diff); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void DestructiveOperation_BlockedByDefault_ReturnsUsefulError() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + // Create table first + using var cmd = connection.CreateCommand(); + cmd.CommandText = "CREATE TABLE ToBeDropped (Id INTEGER PRIMARY KEY)"; + cmd.ExecuteNonQuery(); - // Create table first - using var cmd = connection.CreateCommand(); - cmd.CommandText = "CREATE TABLE ToBeDropped (Id INTEGER PRIMARY KEY)"; - cmd.ExecuteNonQuery(); - - var dropOperation = new 
DropTableOperation("main", "ToBeDropped"); - - // Act - try to apply destructive operation with default options - var result = MigrationRunner.Apply( - connection, - [dropOperation], - SqliteDdlGenerator.Generate, - MigrationOptions.Default, // AllowDestructive = false - _logger - ); - - // Assert - should fail with useful error message - Assert.True(result is MigrationApplyResultError); - var error = ((MigrationApplyResultError)result).Value; - Assert.Contains("Destructive", error.Message); - Assert.Contains("DropTableOperation", error.Message); + var dropOperation = new DropTableOperation("main", "ToBeDropped"); + + // Act - try to apply destructive operation with default options + var result = MigrationRunner.Apply( + connection, + [dropOperation], + SqliteDdlGenerator.Generate, + MigrationOptions.Default, // AllowDestructive = false + _logger + ); + + // Assert - should fail with useful error message + Assert.True(result is MigrationApplyResultError); + var error = ((MigrationApplyResultError)result).Value; + Assert.Contains("Destructive", error.Message); + Assert.Contains("DropTableOperation", error.Message); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void InvalidSql_ReturnsUsefulError() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + // Create a custom operation that generates invalid SQL + var badTable = new TableDefinition + { + Schema = "main", + Name = "Bad\"Table", // Invalid table name with quote + Columns = [new ColumnDefinition { Name = "Id", Type = PortableTypes.Int }], + }; + + var createOp = new CreateTableOperation(badTable); + + // Act + var result = MigrationRunner.Apply( + connection, + [createOp], + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); - // Create a custom operation that generates invalid SQL - var badTable = new TableDefinition + // Assert - should fail (invalid 
SQL) but not crash + // Note: SQLite may accept this - adjust test if needed + Assert.NotNull(result); + } + finally { - Schema = "main", - Name = "Bad\"Table", // Invalid table name with quote - Columns = [new ColumnDefinition { Name = "Id", Type = PortableTypes.Int }], - }; - - var createOp = new CreateTableOperation(badTable); - - // Act - var result = MigrationRunner.Apply( - connection, - [createOp], - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - should fail (invalid SQL) but not crash - // Note: SQLite may accept this - adjust test if needed - Assert.NotNull(result); + CleanupTestDb(connection, dbPath); + } } [Fact] public void SchemaCapture_ExistingDatabase_ReturnsCompleteSchema() { // Arrange - Create database with raw SQL (simulate existing DB) - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + using var cmd = connection.CreateCommand(); + cmd.CommandText = """ + CREATE TABLE customers ( + id TEXT PRIMARY KEY, + email TEXT NOT NULL, + name TEXT, + created_at TEXT DEFAULT CURRENT_TIMESTAMP + ); + CREATE UNIQUE INDEX idx_customers_email ON customers(email); + CREATE TABLE orders ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + customer_id TEXT NOT NULL, + total REAL, + FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE + ); + CREATE INDEX idx_orders_customer ON orders(customer_id); + """; + cmd.ExecuteNonQuery(); - using var cmd = connection.CreateCommand(); - cmd.CommandText = """ - CREATE TABLE customers ( - id TEXT PRIMARY KEY, - email TEXT NOT NULL, - name TEXT, - created_at TEXT DEFAULT CURRENT_TIMESTAMP - ); - CREATE UNIQUE INDEX idx_customers_email ON customers(email); - CREATE TABLE orders ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - customer_id TEXT NOT NULL, - total REAL, - FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE CASCADE - ); - CREATE INDEX idx_orders_customer ON 
orders(customer_id); - """; - cmd.ExecuteNonQuery(); - - // Act - CAPTURE the existing schema - var captureResult = SqliteSchemaInspector.Inspect(connection, _logger); - - // Assert - schema captured successfully - Assert.True(captureResult is SchemaResultOk); - var schema = ((SchemaResultOk)captureResult).Value; - - // Verify tables captured - Assert.Equal(2, schema.Tables.Count); - - var customers = schema.Tables.Single(t => t.Name == "customers"); - Assert.Equal(4, customers.Columns.Count); - Assert.Contains(customers.Columns, c => c.Name == "id"); - Assert.Contains(customers.Columns, c => c.Name == "email"); - Assert.Single(customers.Indexes); - Assert.Equal("idx_customers_email", customers.Indexes[0].Name); - Assert.True(customers.Indexes[0].IsUnique); - - var orders = schema.Tables.Single(t => t.Name == "orders"); - Assert.Equal(3, orders.Columns.Count); - Assert.Single(orders.ForeignKeys); - Assert.Equal("customers", orders.ForeignKeys[0].ReferencedTable); - Assert.Equal(ForeignKeyAction.Cascade, orders.ForeignKeys[0].OnDelete); - Assert.Single(orders.Indexes); - Assert.Equal("idx_orders_customer", orders.Indexes[0].Name); + // Act - CAPTURE the existing schema + var captureResult = SqliteSchemaInspector.Inspect(connection, _logger); + + // Assert - schema captured successfully + Assert.True(captureResult is SchemaResultOk); + var schema = ((SchemaResultOk)captureResult).Value; + + // Verify tables captured + Assert.Equal(2, schema.Tables.Count); + + var customers = schema.Tables.Single(t => t.Name == "customers"); + Assert.Equal(4, customers.Columns.Count); + Assert.Contains(customers.Columns, c => c.Name == "id"); + Assert.Contains(customers.Columns, c => c.Name == "email"); + Assert.Single(customers.Indexes); + Assert.Equal("idx_customers_email", customers.Indexes[0].Name); + Assert.True(customers.Indexes[0].IsUnique); + + var orders = schema.Tables.Single(t => t.Name == "orders"); + Assert.Equal(3, orders.Columns.Count); + 
Assert.Single(orders.ForeignKeys); + Assert.Equal("customers", orders.ForeignKeys[0].ReferencedTable); + Assert.Equal(ForeignKeyAction.Cascade, orders.ForeignKeys[0].OnDelete); + Assert.Single(orders.Indexes); + Assert.Equal("idx_orders_customer", orders.Indexes[0].Name); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void SchemaCapture_SerializesToJson_RoundTrip() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + using var cmd = connection.CreateCommand(); + cmd.CommandText = """ + CREATE TABLE products ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + price REAL, + active INTEGER DEFAULT 1 + ); + CREATE INDEX idx_products_name ON products(name); + """; + cmd.ExecuteNonQuery(); - using var cmd = connection.CreateCommand(); - cmd.CommandText = """ - CREATE TABLE products ( - id TEXT PRIMARY KEY, - name TEXT NOT NULL, - price REAL, - active INTEGER DEFAULT 1 + // Act - Capture and serialize to JSON + var captureResult = SqliteSchemaInspector.Inspect(connection, _logger); + Assert.True(captureResult is SchemaResultOk); + var schema = ((SchemaResultOk)captureResult).Value; + + var json = SchemaSerializer.ToJson(schema); + + // Assert - JSON is valid and contains expected data + Assert.NotNull(json); + Assert.Contains("products", json); + Assert.Contains("name", json); + Assert.Contains("idx_products_name", json); + + // Deserialize and verify round-trip + var restored = SchemaSerializer.FromJson(json); + Assert.NotNull(restored); + Assert.Single(restored.Tables); + Assert.Equal("products", restored.Tables[0].Name); + Assert.Equal(4, restored.Tables[0].Columns.Count); + Assert.Single(restored.Tables[0].Indexes); + } + finally + { + CleanupTestDb(connection, dbPath); + } + } + + // ============================================================================= + // Expression Index Tests + // 
============================================================================= + + [Fact] + public void ExpressionIndex_CreateWithLowerFunction_Success() + { + // Arrange - Create table with expression index for case-insensitive uniqueness + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Artists", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .ExpressionIndex("uq_artists_name", "lower(Name)", unique: true) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger ); - CREATE INDEX idx_products_name ON products(name); - """; - cmd.ExecuteNonQuery(); - - // Act - Capture and serialize to JSON - var captureResult = SqliteSchemaInspector.Inspect(connection, _logger); - Assert.True(captureResult is SchemaResultOk); - var schema = ((SchemaResultOk)captureResult).Value; - - var json = SchemaSerializer.ToJson(schema); - - // Assert - JSON is valid and contains expected data - Assert.NotNull(json); - Assert.Contains("products", json); - Assert.Contains("name", json); - Assert.Contains("idx_products_name", json); - - // Deserialize and verify round-trip - var restored = SchemaSerializer.FromJson(json); - Assert.NotNull(restored); - Assert.Single(restored.Tables); - Assert.Equal("products", restored.Tables[0].Name); - Assert.Equal(4, restored.Tables[0].Columns.Count); - Assert.Single(restored.Tables[0].Indexes); - } - // ============================================================================= - // Expression Index Tests - // ============================================================================= + // Assert - 
Migration succeeded + Assert.True( + result is MigrationApplyResultOk, + $"Migration failed: {(result as MigrationApplyResultError)?.Value}" + ); - [Fact] - public void ExpressionIndex_CreateWithLowerFunction_Success() - { - // Arrange - Create table with expression index for case-insensitive uniqueness - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + // Verify index exists + using var cmd = connection.CreateCommand(); + cmd.CommandText = + "SELECT sql FROM sqlite_master WHERE type='index' AND name='uq_artists_name'"; + var indexDef = cmd.ExecuteScalar() as string; - var schema = Schema - .Define("Test") - .Table( - "Artists", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .ExpressionIndex("uq_artists_name", "lower(Name)", unique: true) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Migration succeeded - Assert.True( - result is MigrationApplyResultOk, - $"Migration failed: {(result as MigrationApplyResultError)?.Value}" - ); - - // Verify index exists - using var cmd = connection.CreateCommand(); - cmd.CommandText = - "SELECT sql FROM sqlite_master WHERE type='index' AND name='uq_artists_name'"; - var indexDef = cmd.ExecuteScalar() as string; - - Assert.NotNull(indexDef); - Assert.Contains("UNIQUE", indexDef); - Assert.Contains("lower", indexDef); + Assert.NotNull(indexDef); + Assert.Contains("UNIQUE", indexDef); + Assert.Contains("lower", indexDef); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void ExpressionIndex_EnforcesCaseInsensitiveUniqueness() { // 
Arrange - Create table with expression index - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Venues", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .ExpressionIndex("uq_venues_name", "lower(Name)", unique: true) + ) + .Build(); + + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); - var schema = Schema - .Define("Test") - .Table( - "Venues", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .ExpressionIndex("uq_venues_name", "lower(Name)", unique: true) - ) - .Build(); - - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Act - Insert first venue - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = - "INSERT INTO Venues (Id, Name) VALUES ('11111111-1111-1111-1111-111111111111', 'The Corner Hotel')"; - insertCmd.ExecuteNonQuery(); - - // Try to insert duplicate with different case - should fail - using var duplicateCmd = connection.CreateCommand(); - duplicateCmd.CommandText = - "INSERT INTO Venues (Id, Name) VALUES ('22222222-2222-2222-2222-222222222222', 'THE CORNER HOTEL')"; - - // Assert - Should throw unique constraint violation - var ex = 
Assert.Throws(() => duplicateCmd.ExecuteNonQuery()); - Assert.Contains("UNIQUE", ex.Message); - } + // Act - Insert first venue + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = + "INSERT INTO Venues (Id, Name) VALUES ('11111111-1111-1111-1111-111111111111', 'The Corner Hotel')"; + insertCmd.ExecuteNonQuery(); - [Fact] - public void ExpressionIndex_MultiExpression_CompositeIndexSuccess() - { - // Arrange - Create table with multi-expression index - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + // Try to insert duplicate with different case - should fail + using var duplicateCmd = connection.CreateCommand(); + duplicateCmd.CommandText = + "INSERT INTO Venues (Id, Name) VALUES ('22222222-2222-2222-2222-222222222222', 'THE CORNER HOTEL')"; - var schema = Schema - .Define("Test") - .Table( - "Suburbs", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(100), c => c.NotNull()) - ) - .Table( - "Places", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .Column("SuburbId", PortableTypes.Uuid, c => c.NotNull()) - .ExpressionIndex( - "uq_places_name_suburb", - ["lower(Name)", "SuburbId"], - unique: true - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True( - result is MigrationApplyResultOk, - $"Migration failed: {(result as MigrationApplyResultError)?.Value}" - ); - - // Verify composite expression index exists - using var cmd = connection.CreateCommand(); - cmd.CommandText = - "SELECT sql FROM sqlite_master 
WHERE type='index' AND name='uq_places_name_suburb'"; - var indexDef = cmd.ExecuteScalar() as string; - - Assert.NotNull(indexDef); - Assert.Contains("UNIQUE", indexDef); - Assert.Contains("lower", indexDef); - Assert.Contains("SuburbId", indexDef); + // Assert - Should throw unique constraint violation + var ex = Assert.Throws(() => duplicateCmd.ExecuteNonQuery()); + Assert.Contains("UNIQUE", ex.Message); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] - public void ExpressionIndex_Idempotent_NoErrorOnRerun() + public void ExpressionIndex_MultiExpression_CompositeIndexSuccess() { - // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "Bands", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .ExpressionIndex("uq_bands_name", "lower(Name)", unique: true) - ) - .Build(); - - // Act - Run migration twice - for (var i = 0; i < 2; i++) + // Arrange - Create table with multi-expression index + var (connection, dbPath) = CreateTestDb(); + try { - var currentSchema = ( + var schema = Schema + .Define("Test") + .Table( + "Suburbs", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(100), c => c.NotNull()) + ) + .Table( + "Places", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .Column("SuburbId", PortableTypes.Uuid, c => c.NotNull()) + .ExpressionIndex( + "uq_places_name_suburb", + ["lower(Name)", "SuburbId"], + unique: true + ) + ) + .Build(); + + // Act + var emptySchema = ( (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(currentSchema, schema, logger: _logger) + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) 
).Value; var result = MigrationRunner.Apply( @@ -980,50 +1024,120 @@ public void ExpressionIndex_Idempotent_NoErrorOnRerun() _logger ); + // Assert Assert.True( result is MigrationApplyResultOk, - $"Migration {i + 1} failed: {(result as MigrationApplyResultError)?.Value}" + $"Migration failed: {(result as MigrationApplyResultError)?.Value}" ); - // Second run should have 0 operations (schema already matches) - if (i == 1) + // Verify composite expression index exists + using var cmd = connection.CreateCommand(); + cmd.CommandText = + "SELECT sql FROM sqlite_master WHERE type='index' AND name='uq_places_name_suburb'"; + var indexDef = cmd.ExecuteScalar() as string; + + Assert.NotNull(indexDef); + Assert.Contains("UNIQUE", indexDef); + Assert.Contains("lower", indexDef); + Assert.Contains("SuburbId", indexDef); + } + finally + { + CleanupTestDb(connection, dbPath); + } + } + + [Fact] + public void ExpressionIndex_Idempotent_NoErrorOnRerun() + { + // Arrange + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "Bands", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .ExpressionIndex("uq_bands_name", "lower(Name)", unique: true) + ) + .Build(); + + // Act - Run migration twice + for (var i = 0; i < 2; i++) { - Assert.Empty(operations); + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(currentSchema, schema, logger: _logger) + ).Value; + + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + Assert.True( + result is MigrationApplyResultOk, + $"Migration {i + 1} failed: {(result as MigrationApplyResultError)?.Value}" + ); + + // Second run should have 0 operations (schema already matches) + if (i == 1) + { + 
Assert.Empty(operations); + } } } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void ExpressionIndex_SchemaInspector_DetectsExpressionIndex() { // Arrange - Create expression index via raw SQL - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + using var cmd = connection.CreateCommand(); + cmd.CommandText = """ + CREATE TABLE artists ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL + ); + CREATE UNIQUE INDEX uq_artists_name ON artists(lower(name)); + """; + cmd.ExecuteNonQuery(); - using var cmd = connection.CreateCommand(); - cmd.CommandText = """ - CREATE TABLE artists ( - id TEXT PRIMARY KEY, - name TEXT NOT NULL - ); - CREATE UNIQUE INDEX uq_artists_name ON artists(lower(name)); - """; - cmd.ExecuteNonQuery(); - - // Act - Inspect schema - var result = SqliteSchemaInspector.Inspect(connection, _logger); - - // Assert - Assert.True(result is SchemaResultOk); - var schema = ((SchemaResultOk)result).Value; - var artists = schema.Tables.Single(t => t.Name == "artists"); - - Assert.Single(artists.Indexes); - var index = artists.Indexes[0]; - Assert.Equal("uq_artists_name", index.Name); - Assert.True(index.IsUnique); - Assert.NotEmpty(index.Expressions); - Assert.Contains("lower(name)", index.Expressions); + // Act - Inspect schema + var result = SqliteSchemaInspector.Inspect(connection, _logger); + + // Assert + Assert.True(result is SchemaResultOk); + var schema = ((SchemaResultOk)result).Value; + var artists = schema.Tables.Single(t => t.Name == "artists"); + + Assert.Single(artists.Indexes); + var index = artists.Indexes[0]; + Assert.Equal("uq_artists_name", index.Name); + Assert.True(index.IsUnique); + Assert.NotEmpty(index.Expressions); + Assert.Contains("lower(name)", index.Expressions); + } + finally + { + CleanupTestDb(connection, dbPath); + } } // ============================================================================= @@ -1034,157 
+1148,167 @@ name TEXT NOT NULL public void UpgradeIndex_ColumnToExpression_RequiresDropAndCreate() { // Arrange - Create table with regular column index - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var v1 = Schema + .Define("Test") + .Table( + "Artists", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .Index("idx_artists_name", "Name", unique: true) + ) + .Build(); + + // Apply v1 + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); - var v1 = Schema - .Define("Test") - .Table( - "Artists", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .Index("idx_artists_name", "Name", unique: true) - ) - .Build(); - - // Apply v1 - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // v2 changes to expression index (different name since it's semantically different) - var v2 = Schema - .Define("Test") - .Table( - "Artists", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .ExpressionIndex("uq_artists_name_ci", "lower(Name)", unique: true) - ) - .Build(); - - // Act - Calculate upgrade operations - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, 
_logger) - ).Value; - var upgradeOps = ( - (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, allowDestructive: true, logger: _logger) - ).Value; - - // Assert - Should have drop old index + create new expression index - Assert.Equal(2, upgradeOps.Count); - Assert.Contains(upgradeOps, op => op is DropIndexOperation); - Assert.Contains(upgradeOps, op => op is CreateIndexOperation); - - // Apply the upgrade - var result = MigrationRunner.Apply( - connection, - upgradeOps, - SqliteDdlGenerator.Generate, - MigrationOptions.Destructive, - _logger - ); - - Assert.True(result is MigrationApplyResultOk); - - // Verify new expression index exists - using var cmd = connection.CreateCommand(); - cmd.CommandText = - "SELECT sql FROM sqlite_master WHERE type='index' AND name='uq_artists_name_ci'"; - var indexDef = cmd.ExecuteScalar() as string; - Assert.NotNull(indexDef); - Assert.Contains("lower", indexDef); + // v2 changes to expression index (different name since it's semantically different) + var v2 = Schema + .Define("Test") + .Table( + "Artists", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .ExpressionIndex("uq_artists_name_ci", "lower(Name)", unique: true) + ) + .Build(); + + // Act - Calculate upgrade operations + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var upgradeOps = ( + (OperationsResultOk) + SchemaDiff.Calculate(currentSchema, v2, allowDestructive: true, logger: _logger) + ).Value; + + // Assert - Should have drop old index + create new expression index + Assert.Equal(2, upgradeOps.Count); + Assert.Contains(upgradeOps, op => op is DropIndexOperation); + Assert.Contains(upgradeOps, op => op is CreateIndexOperation); + + // Apply the upgrade + var result = MigrationRunner.Apply( + connection, + upgradeOps, + SqliteDdlGenerator.Generate, + MigrationOptions.Destructive, + _logger + ); + + Assert.True(result is 
MigrationApplyResultOk); + + // Verify new expression index exists + using var cmd = connection.CreateCommand(); + cmd.CommandText = + "SELECT sql FROM sqlite_master WHERE type='index' AND name='uq_artists_name_ci'"; + var indexDef = cmd.ExecuteScalar() as string; + Assert.NotNull(indexDef); + Assert.Contains("lower", indexDef); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void UpgradeIndex_ExpressionToColumn_RequiresDropAndCreate() { // Arrange - Create table with expression index - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var v1 = Schema + .Define("Test") + .Table( + "Venues", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .ExpressionIndex("uq_venues_name", "lower(Name)", unique: true) + ) + .Build(); + + // Apply v1 + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var v1Ops = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) + ).Value; + _ = MigrationRunner.Apply( + connection, + v1Ops, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // v2 changes back to simple column index (different name) + var v2 = Schema + .Define("Test") + .Table( + "Venues", + t => + t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) + .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) + .Index("idx_venues_name", "Name", unique: true) + ) + .Build(); + + // Act + var currentSchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var upgradeOps = ( + (OperationsResultOk) + SchemaDiff.Calculate(currentSchema, v2, allowDestructive: true, logger: _logger) + ).Value; - var v1 = Schema - .Define("Test") - .Table( - "Venues", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", 
PortableTypes.VarChar(200), c => c.NotNull()) - .ExpressionIndex("uq_venues_name", "lower(Name)", unique: true) - ) - .Build(); - - // Apply v1 - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var v1Ops = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, v1, logger: _logger) - ).Value; - _ = MigrationRunner.Apply( - connection, - v1Ops, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // v2 changes back to simple column index (different name) - var v2 = Schema - .Define("Test") - .Table( - "Venues", - t => - t.Column("Id", PortableTypes.Uuid, c => c.PrimaryKey()) - .Column("Name", PortableTypes.VarChar(200), c => c.NotNull()) - .Index("idx_venues_name", "Name", unique: true) - ) - .Build(); - - // Act - var currentSchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var upgradeOps = ( - (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, allowDestructive: true, logger: _logger) - ).Value; - - // Assert - Should have drop + create - Assert.Equal(2, upgradeOps.Count); - Assert.Contains(upgradeOps, op => op is DropIndexOperation); - Assert.Contains(upgradeOps, op => op is CreateIndexOperation); - - var result = MigrationRunner.Apply( - connection, - upgradeOps, - SqliteDdlGenerator.Generate, - MigrationOptions.Destructive, - _logger - ); - - Assert.True(result is MigrationApplyResultOk); - - // Verify new column index exists (no lower() function) - using var cmd = connection.CreateCommand(); - cmd.CommandText = - "SELECT sql FROM sqlite_master WHERE type='index' AND name='idx_venues_name'"; - var indexDef = cmd.ExecuteScalar() as string; - Assert.NotNull(indexDef); - Assert.DoesNotContain("lower", indexDef); + // Assert - Should have drop + create + Assert.Equal(2, upgradeOps.Count); + Assert.Contains(upgradeOps, op => op is DropIndexOperation); + Assert.Contains(upgradeOps, op => op is CreateIndexOperation); + + var result = 
MigrationRunner.Apply( + connection, + upgradeOps, + SqliteDdlGenerator.Generate, + MigrationOptions.Destructive, + _logger + ); + + Assert.True(result is MigrationApplyResultOk); + + // Verify new column index exists (no lower() function) + using var cmd = connection.CreateCommand(); + cmd.CommandText = + "SELECT sql FROM sqlite_master WHERE type='index' AND name='idx_venues_name'"; + var indexDef = cmd.ExecuteScalar() as string; + Assert.NotNull(indexDef); + Assert.DoesNotContain("lower", indexDef); + } + finally + { + CleanupTestDb(connection, dbPath); + } } // ============================================================================= @@ -1195,419 +1319,454 @@ public void UpgradeIndex_ExpressionToColumn_RequiresDropAndCreate() public void LqlDefault_NowFunction_TranslatesToCurrentTimestamp() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "events", + t => + t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) + .Column( + "created_at", + PortableTypes.DateTime(), + c => c.NotNull().DefaultLql("now()") + ) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); - var schema = Schema - .Define("Test") - .Table( - "events", - t => - t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) - .Column( - "created_at", - PortableTypes.DateTime(), - c => c.NotNull().DefaultLql("now()") - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var 
operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Verify table DDL contains datetime('now') - the SQLite translation of now() - using var cmd = connection.CreateCommand(); - cmd.CommandText = "SELECT sql FROM sqlite_master WHERE type='table' AND name='events'"; - var tableDef = cmd.ExecuteScalar() as string; - Assert.NotNull(tableDef); - Assert.Contains("(datetime('now'))", tableDef); - - // Insert and verify default works - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO events (id) VALUES (1)"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT created_at FROM events WHERE id = 1"; - var createdAt = selectCmd.ExecuteScalar() as string; - Assert.NotNull(createdAt); - Assert.NotEmpty(createdAt); + // Verify table DDL contains datetime('now') - the SQLite translation of now() + using var cmd = connection.CreateCommand(); + cmd.CommandText = "SELECT sql FROM sqlite_master WHERE type='table' AND name='events'"; + var tableDef = cmd.ExecuteScalar() as string; + Assert.NotNull(tableDef); + Assert.Contains("(datetime('now'))", tableDef); + + // Insert and verify default works + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO events (id) VALUES (1)"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT created_at FROM events WHERE id = 1"; + var createdAt = selectCmd.ExecuteScalar() as string; + Assert.NotNull(createdAt); + Assert.NotEmpty(createdAt); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void LqlDefault_BooleanTrue_TranslatesTo1() { // Arrange - using var 
connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "flags", + t => + t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) + .Column( + "is_active", + PortableTypes.Boolean, + c => c.NotNull().DefaultLql("true") + ) + .Column( + "is_deleted", + PortableTypes.Boolean, + c => c.NotNull().DefaultLql("false") + ) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); - var schema = Schema - .Define("Test") - .Table( - "flags", - t => - t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) - .Column( - "is_active", - PortableTypes.Boolean, - c => c.NotNull().DefaultLql("true") - ) - .Column( - "is_deleted", - PortableTypes.Boolean, - c => c.NotNull().DefaultLql("false") - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Verify table DDL contains 1 and 0 for boolean defaults - using var cmd = connection.CreateCommand(); - cmd.CommandText = "SELECT sql FROM sqlite_master WHERE type='table' AND name='flags'"; - var tableDef = cmd.ExecuteScalar() as string; - Assert.NotNull(tableDef); - Assert.Contains("DEFAULT 1", tableDef); // true -> 1 - 
Assert.Contains("DEFAULT 0", tableDef); // false -> 0 - - // Insert and verify defaults work - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO flags (id) VALUES (1)"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT is_active, is_deleted FROM flags WHERE id = 1"; - using var reader = selectCmd.ExecuteReader(); - Assert.True(reader.Read()); - Assert.Equal(1, reader.GetInt64(0)); // is_active = true = 1 - Assert.Equal(0, reader.GetInt64(1)); // is_deleted = false = 0 + // Verify table DDL contains 1 and 0 for boolean defaults + using var cmd = connection.CreateCommand(); + cmd.CommandText = "SELECT sql FROM sqlite_master WHERE type='table' AND name='flags'"; + var tableDef = cmd.ExecuteScalar() as string; + Assert.NotNull(tableDef); + Assert.Contains("DEFAULT 1", tableDef); // true -> 1 + Assert.Contains("DEFAULT 0", tableDef); // false -> 0 + + // Insert and verify defaults work + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO flags (id) VALUES (1)"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT is_active, is_deleted FROM flags WHERE id = 1"; + using var reader = selectCmd.ExecuteReader(); + Assert.True(reader.Read()); + Assert.Equal(1, reader.GetInt64(0)); // is_active = true = 1 + Assert.Equal(0, reader.GetInt64(1)); // is_deleted = false = 0 + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void LqlDefault_NumericValues_PassThrough() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "counters", + t => + t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) + .Column("count", PortableTypes.Int, c => c.NotNull().DefaultLql("0")) + .Column("priority", 
PortableTypes.Int, c => c.NotNull().DefaultLql("100")) + .Column( + "rate", + PortableTypes.Decimal(5, 2), + c => c.NotNull().DefaultLql("1.5") + ) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); - var schema = Schema - .Define("Test") - .Table( - "counters", - t => - t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("count", PortableTypes.Int, c => c.NotNull().DefaultLql("0")) - .Column("priority", PortableTypes.Int, c => c.NotNull().DefaultLql("100")) - .Column( - "rate", - PortableTypes.Decimal(5, 2), - c => c.NotNull().DefaultLql("1.5") - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Insert and verify defaults work - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO counters (id) VALUES (1)"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT count, priority, rate FROM counters WHERE id = 1"; - using var reader = selectCmd.ExecuteReader(); - Assert.True(reader.Read()); - Assert.Equal(0, reader.GetInt64(0)); - Assert.Equal(100, reader.GetInt64(1)); - Assert.Equal(1.5, reader.GetDouble(2), 2); + // Insert and verify defaults work + using var insertCmd = 
connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO counters (id) VALUES (1)"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT count, priority, rate FROM counters WHERE id = 1"; + using var reader = selectCmd.ExecuteReader(); + Assert.True(reader.Read()); + Assert.Equal(0, reader.GetInt64(0)); + Assert.Equal(100, reader.GetInt64(1)); + Assert.Equal(1.5, reader.GetDouble(2), 2); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void LqlDefault_StringLiteral_PassThrough() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "items", + t => + t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) + .Column( + "status", + PortableTypes.VarChar(20), + c => c.NotNull().DefaultLql("'pending'") + ) + .Column( + "category", + PortableTypes.VarChar(50), + c => c.DefaultLql("'uncategorized'") + ) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); - var schema = Schema - .Define("Test") - .Table( - "items", - t => - t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) - .Column( - "status", - PortableTypes.VarChar(20), - c => c.NotNull().DefaultLql("'pending'") - ) - .Column( - "category", - PortableTypes.VarChar(50), - c => c.DefaultLql("'uncategorized'") - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - 
(OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Insert and verify defaults work - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO items (id) VALUES (1)"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT status, category FROM items WHERE id = 1"; - using var reader = selectCmd.ExecuteReader(); - Assert.True(reader.Read()); - Assert.Equal("pending", reader.GetString(0)); - Assert.Equal("uncategorized", reader.GetString(1)); + // Insert and verify defaults work + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO items (id) VALUES (1)"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT status, category FROM items WHERE id = 1"; + using var reader = selectCmd.ExecuteReader(); + Assert.True(reader.Read()); + Assert.Equal("pending", reader.GetString(0)); + Assert.Equal("uncategorized", reader.GetString(1)); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void LqlDefault_GenUuid_GeneratesValidUuidFormat() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); - - var schema = Schema - .Define("Test") - .Table( - "records", - t => - t.Column("id", PortableTypes.Uuid, c => c.PrimaryKey().DefaultLql("gen_uuid()")) - .Column("name", PortableTypes.VarChar(100)) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - 
connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Insert multiple rows and verify UUIDs are generated and unique - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO records (name) VALUES ('test1'), ('test2'), ('test3')"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT id FROM records"; - using var reader = selectCmd.ExecuteReader(); - - var uuids = new List(); - while (reader.Read()) + var (connection, dbPath) = CreateTestDb(); + try { - var uuid = reader.GetString(0); - Assert.NotNull(uuid); - Assert.Matches( - @"^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$", - uuid + var schema = Schema + .Define("Test") + .Table( + "records", + t => + t.Column("id", PortableTypes.Uuid, c => c.PrimaryKey().DefaultLql("gen_uuid()")) + .Column("name", PortableTypes.VarChar(100)) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger ); - uuids.Add(uuid); - } - // All UUIDs should be unique - Assert.Equal(3, uuids.Count); - Assert.Equal(3, uuids.Distinct().Count()); + // Assert + Assert.True(result is MigrationApplyResultOk); + + // Insert multiple rows and verify UUIDs are generated and unique + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO records (name) VALUES ('test1'), ('test2'), ('test3')"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT id FROM records"; + using var reader = 
selectCmd.ExecuteReader(); + + var uuids = new List(); + while (reader.Read()) + { + var uuid = reader.GetString(0); + Assert.NotNull(uuid); + Assert.Matches( + @"^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$", + uuid + ); + uuids.Add(uuid); + } + + // All UUIDs should be unique + Assert.Equal(3, uuids.Count); + Assert.Equal(3, uuids.Distinct().Count()); + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void LqlDefault_CurrentDate_ReturnsDateOnly() { // Arrange - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "logs", + t => + t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) + .Column( + "log_date", + PortableTypes.Date, + c => c.NotNull().DefaultLql("current_date()") + ) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + ).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); - var schema = Schema - .Define("Test") - .Table( - "logs", - t => - t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) - .Column( - "log_date", - PortableTypes.Date, - c => c.NotNull().DefaultLql("current_date()") - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Insert and verify default date - using var insertCmd = 
connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO logs (id) VALUES (1)"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT log_date FROM logs WHERE id = 1"; - var logDate = selectCmd.ExecuteScalar() as string; - Assert.NotNull(logDate); - Assert.Matches(@"^\d{4}-\d{2}-\d{2}$", logDate); // YYYY-MM-DD format + // Assert + Assert.True(result is MigrationApplyResultOk); + + // Insert and verify default date + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO logs (id) VALUES (1)"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT log_date FROM logs WHERE id = 1"; + var logDate = selectCmd.ExecuteScalar() as string; + Assert.NotNull(logDate); + Assert.Matches(@"^\d{4}-\d{2}-\d{2}$", logDate); // YYYY-MM-DD format + } + finally + { + CleanupTestDb(connection, dbPath); + } } [Fact] public void LqlDefault_MixedDefaults_AllWorkTogether() { // Arrange - A complex table with multiple LQL defaults - using var connection = new SqliteConnection("Data Source=:memory:"); - connection.Open(); + var (connection, dbPath) = CreateTestDb(); + try + { + var schema = Schema + .Define("Test") + .Table( + "orders", + t => + t.Column("id", PortableTypes.Uuid, c => c.PrimaryKey().DefaultLql("gen_uuid()")) + .Column( + "status", + PortableTypes.VarChar(20), + c => c.NotNull().DefaultLql("'pending'") + ) + .Column("quantity", PortableTypes.Int, c => c.NotNull().DefaultLql("1")) + .Column( + "is_urgent", + PortableTypes.Boolean, + c => c.NotNull().DefaultLql("false") + ) + .Column( + "created_at", + PortableTypes.DateTime(), + c => c.NotNull().DefaultLql("now()") + ) + ) + .Build(); + + // Act + var emptySchema = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; + var operations = ( + (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) + 
).Value; + var result = MigrationRunner.Apply( + connection, + operations, + SqliteDdlGenerator.Generate, + MigrationOptions.Default, + _logger + ); + + // Assert + Assert.True(result is MigrationApplyResultOk); - var schema = Schema - .Define("Test") - .Table( - "orders", - t => - t.Column("id", PortableTypes.Uuid, c => c.PrimaryKey().DefaultLql("gen_uuid()")) - .Column( - "status", - PortableTypes.VarChar(20), - c => c.NotNull().DefaultLql("'pending'") - ) - .Column("quantity", PortableTypes.Int, c => c.NotNull().DefaultLql("1")) - .Column( - "is_urgent", - PortableTypes.Boolean, - c => c.NotNull().DefaultLql("false") - ) - .Column( - "created_at", - PortableTypes.DateTime(), - c => c.NotNull().DefaultLql("now()") - ) - ) - .Build(); - - // Act - var emptySchema = ( - (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) - ).Value; - var operations = ( - (OperationsResultOk)SchemaDiff.Calculate(emptySchema, schema, logger: _logger) - ).Value; - var result = MigrationRunner.Apply( - connection, - operations, - SqliteDdlGenerator.Generate, - MigrationOptions.Default, - _logger - ); - - // Assert - Assert.True(result is MigrationApplyResultOk); - - // Insert with no columns specified - all defaults should apply - using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO orders DEFAULT VALUES"; - insertCmd.ExecuteNonQuery(); - - using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT id, status, quantity, is_urgent, created_at FROM orders"; - using var reader = selectCmd.ExecuteReader(); - Assert.True(reader.Read()); - - var id = reader.GetString(0); - var status = reader.GetString(1); - var quantity = reader.GetInt64(2); - var isUrgent = reader.GetInt64(3); - var createdAt = reader.GetString(4); - - Assert.Matches(@"^[0-9a-f-]{36}$", id); // UUID format - Assert.Equal("pending", status); // String default - Assert.Equal(1, quantity); // Numeric default - Assert.Equal(0, isUrgent); // Boolean 
false = 0 - Assert.NotEmpty(createdAt); // Timestamp generated + // Insert with no columns specified - all defaults should apply + using var insertCmd = connection.CreateCommand(); + insertCmd.CommandText = "INSERT INTO orders DEFAULT VALUES"; + insertCmd.ExecuteNonQuery(); + + using var selectCmd = connection.CreateCommand(); + selectCmd.CommandText = "SELECT id, status, quantity, is_urgent, created_at FROM orders"; + using var reader = selectCmd.ExecuteReader(); + Assert.True(reader.Read()); + + var id = reader.GetString(0); + var status = reader.GetString(1); + var quantity = reader.GetInt64(2); + var isUrgent = reader.GetInt64(3); + var createdAt = reader.GetString(4); + + Assert.Matches(@"^[0-9a-f-]{36}$", id); // UUID format + Assert.Equal("pending", status); // String default + Assert.Equal(1, quantity); // Numeric default + Assert.Equal(0, isUrgent); // Boolean false = 0 + Assert.NotEmpty(createdAt); // Timestamp generated + } + finally + { + CleanupTestDb(connection, dbPath); + } } } diff --git a/Samples/Clinical/Clinical.Api/Clinical.Api.csproj b/Samples/Clinical/Clinical.Api/Clinical.Api.csproj index a262ee4..c9f7eb6 100644 --- a/Samples/Clinical/Clinical.Api/Clinical.Api.csproj +++ b/Samples/Clinical/Clinical.Api/Clinical.Api.csproj @@ -34,7 +34,7 @@ - + diff --git a/Samples/Clinical/Clinical.Api/DataProvider.json b/Samples/Clinical/Clinical.Api/DataProvider.json index e42e1df..dba35e3 100644 --- a/Samples/Clinical/Clinical.Api/DataProvider.json +++ b/Samples/Clinical/Clinical.Api/DataProvider.json @@ -63,5 +63,5 @@ "primaryKeyColumns": ["Id"] } ], - "connectionString": "Data Source=clinical-build.db" + "connectionString": "Data Source=clinical.db" } diff --git a/Samples/Clinical/Clinical.Api/FileLoggerProvider.cs b/Samples/Clinical/Clinical.Api/FileLoggerProvider.cs index 0e21fbb..74cc9a9 100644 --- a/Samples/Clinical/Clinical.Api/FileLoggerProvider.cs +++ b/Samples/Clinical/Clinical.Api/FileLoggerProvider.cs @@ -95,8 +95,7 @@ public void Log( } var 
message = formatter(state, exception); - var line = - $"{DateTime.UtcNow:yyyy-MM-dd HH:mm:ss.fff} [{logLevel}] {_category}: {message}"; + var line = $"{DateTime.UtcNow:yyyy-MM-dd HH:mm:ss.fff} [{logLevel}] {_category}: {message}"; if (exception != null) { line += Environment.NewLine + exception; diff --git a/Samples/Scheduling/Scheduling.Api/DataProvider.json b/Samples/Scheduling/Scheduling.Api/DataProvider.json index d653388..2ac8588 100644 --- a/Samples/Scheduling/Scheduling.Api/DataProvider.json +++ b/Samples/Scheduling/Scheduling.Api/DataProvider.json @@ -49,5 +49,5 @@ "primaryKeyColumns": ["Id"] } ], - "connectionString": "Data Source=scheduling-build.db" + "connectionString": "Data Source=scheduling.db" } diff --git a/Samples/Scheduling/Scheduling.Api/FileLoggerProvider.cs b/Samples/Scheduling/Scheduling.Api/FileLoggerProvider.cs index 760f5cc..2434cab 100644 --- a/Samples/Scheduling/Scheduling.Api/FileLoggerProvider.cs +++ b/Samples/Scheduling/Scheduling.Api/FileLoggerProvider.cs @@ -95,8 +95,7 @@ public void Log( } var message = formatter(state, exception); - var line = - $"{DateTime.UtcNow:yyyy-MM-dd HH:mm:ss.fff} [{logLevel}] {_category}: {message}"; + var line = $"{DateTime.UtcNow:yyyy-MM-dd HH:mm:ss.fff} [{logLevel}] {_category}: {message}"; if (exception != null) { line += Environment.NewLine + exception; diff --git a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj index ff1daa0..822b4ff 100644 --- a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj +++ b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj @@ -34,7 +34,7 @@ - + diff --git a/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs b/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs index a4289a1..4bacc86 100644 --- a/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs +++ b/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs @@ -12,6 +12,7 @@ public sealed class CrossDatabaseSyncTests : IAsyncLifetime private PostgreSqlContainer 
_postgresContainer = null!; private string _postgresConnectionString = null!; private readonly ILogger _logger = NullLogger.Instance; + private readonly List _sqliteDbPaths = []; public async Task InitializeAsync() { @@ -26,15 +27,28 @@ public async Task InitializeAsync() _postgresConnectionString = _postgresContainer.GetConnectionString(); } - public async Task DisposeAsync() => + public async Task DisposeAsync() + { await _postgresContainer.DisposeAsync().ConfigureAwait(false); + foreach (var dbPath in _sqliteDbPaths) + { + if (File.Exists(dbPath)) + { + try { File.Delete(dbPath); } + catch { /* File may be locked */ } + } + } + } + /// - /// Creates a fresh SQLite in-memory database with sync schema and triggers. + /// Creates a fresh SQLite file database with sync schema and triggers. /// - private static SqliteConnection CreateSqliteDb(string originId) + private SqliteConnection CreateSqliteDb(string originId) { - var conn = new SqliteConnection("Data Source=:memory:"); + var dbPath = Path.Combine(Path.GetTempPath(), $"http_sync_{Guid.NewGuid()}.db"); + _sqliteDbPaths.Add(dbPath); + var conn = new SqliteConnection($"Data Source={dbPath}"); conn.Open(); // Create sync schema diff --git a/Sync/Sync.Http.Tests/HttpEndpointTests.cs b/Sync/Sync.Http.Tests/HttpEndpointTests.cs index d838153..f1ee8b4 100644 --- a/Sync/Sync.Http.Tests/HttpEndpointTests.cs +++ b/Sync/Sync.Http.Tests/HttpEndpointTests.cs @@ -120,9 +120,10 @@ public async Task PushChanges_WithTooManyChanges_ReturnsBadRequest() "application/json" ); - // Act + // Act - using temp file path (test expects BadRequest before connection is used) + var tempDbPath = Path.Combine(Path.GetTempPath(), $"temp_test_{Guid.NewGuid()}.db"); var response = await _client.PostAsync( - "/sync/changes?dbType=sqlite&connectionString=Data Source=:memory:", + $"/sync/changes?dbType=sqlite&connectionString=Data Source={Uri.EscapeDataString(tempDbPath)}", content ); diff --git a/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs 
b/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs index 973693c..0b3524d 100644 --- a/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs +++ b/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs @@ -12,6 +12,7 @@ public sealed class HttpMappingSyncTests : IAsyncLifetime private PostgreSqlContainer _postgresContainer = null!; private string _postgresConnectionString = null!; private readonly ILogger _logger = NullLogger.Instance; + private readonly List _sqliteDbPaths = []; public async Task InitializeAsync() { @@ -26,16 +27,29 @@ public async Task InitializeAsync() _postgresConnectionString = _postgresContainer.GetConnectionString(); } - public async Task DisposeAsync() => + public async Task DisposeAsync() + { await _postgresContainer.DisposeAsync().ConfigureAwait(false); + foreach (var dbPath in _sqliteDbPaths) + { + if (File.Exists(dbPath)) + { + try { File.Delete(dbPath); } + catch { /* File may be locked */ } + } + } + } + /// /// Creates SQLite source DB with User table (source schema). /// Columns: Id, FullName, EmailAddress (DIFFERENT from target!) 
/// - private static SqliteConnection CreateSourceDb(string originId) + private SqliteConnection CreateSourceDb(string originId) { - var conn = new SqliteConnection("Data Source=:memory:"); + var dbPath = Path.Combine(Path.GetTempPath(), $"mapping_source_{Guid.NewGuid()}.db"); + _sqliteDbPaths.Add(dbPath); + var conn = new SqliteConnection($"Data Source={dbPath}"); conn.Open(); SyncSchema.CreateSchema(conn); @@ -286,7 +300,9 @@ public void MultiTargetMapping_OneSourceToManyTargets() { // Arrange var sourceOrigin = Guid.NewGuid().ToString(); - using var source = new SqliteConnection("Data Source=:memory:"); + var dbPath = Path.Combine(Path.GetTempPath(), $"multi_target_{Guid.NewGuid()}.db"); + _sqliteDbPaths.Add(dbPath); + using var source = new SqliteConnection($"Data Source={dbPath}"); source.Open(); SyncSchema.CreateSchema(source); diff --git a/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs b/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs index 05a5eff..5dc4483 100644 --- a/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs +++ b/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs @@ -17,6 +17,7 @@ public sealed class CrossDatabaseSyncTests : IAsyncLifetime private PostgreSqlContainer _postgres = null!; private NpgsqlConnection _pgConn = null!; private SqliteConnection _sqliteConn = null!; + private string _sqliteDbPath = null!; private readonly string _sqliteOrigin = Guid.NewGuid().ToString(); private readonly string _postgresOrigin = Guid.NewGuid().ToString(); private static readonly ILogger Logger = NullLogger.Instance; @@ -37,8 +38,9 @@ public async Task InitializeAsync() _pgConn = new NpgsqlConnection(_postgres.GetConnectionString()); await _pgConn.OpenAsync().ConfigureAwait(false); - // Create SQLite in-memory - _sqliteConn = new SqliteConnection("Data Source=:memory:"); + // Create SQLite file database + _sqliteDbPath = Path.Combine(Path.GetTempPath(), $"cross_db_sync_{Guid.NewGuid()}.db"); + _sqliteConn = new SqliteConnection($"Data 
Source={_sqliteDbPath}"); _sqliteConn.Open(); // Initialize sync schemas @@ -63,6 +65,12 @@ public async Task DisposeAsync() await _pgConn.CloseAsync().ConfigureAwait(false); await _pgConn.DisposeAsync().ConfigureAwait(false); await _postgres.DisposeAsync(); + + if (File.Exists(_sqliteDbPath)) + { + try { File.Delete(_sqliteDbPath); } + catch { /* File may be locked */ } + } } private static void CreateTestTable(NpgsqlConnection conn) diff --git a/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs b/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs index 0a70950..8a44c4a 100644 --- a/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs +++ b/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs @@ -6,17 +6,22 @@ namespace Sync.SQLite.Tests; /// /// Integration tests for ChangeApplierSQLite. /// Tests applying sync changes (insert, update, delete) to SQLite database. -/// NO MOCKS - real SQLite databases only! +/// NO MOCKS - real file-based SQLite databases only! NO :memory:! /// public sealed class ChangeApplierIntegrationTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath; private readonly string _originId = Guid.NewGuid().ToString(); private const string Timestamp = "2025-01-01T00:00:00.000Z"; + /// + /// Initializes test with file-based SQLite database. 
+ /// public ChangeApplierIntegrationTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _dbPath = Path.Combine(Path.GetTempPath(), $"change_applier_{Guid.NewGuid():N}.db"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); SyncSchema.CreateSchema(_db); SyncSchema.SetOriginId(_db, _originId); @@ -589,7 +594,17 @@ private void InsertPerson(string id, string name, int age) cmd.ExecuteNonQuery(); } - public void Dispose() => _db.Dispose(); + /// + public void Dispose() + { + _db.Close(); + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try { File.Delete(_dbPath); } + catch { /* File may be locked */ } + } + } #endregion } diff --git a/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs b/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs index 2c03279..358a525 100644 --- a/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs +++ b/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs @@ -13,11 +13,15 @@ public sealed class EndToEndSyncTests : IDisposable private readonly SqliteConnection _targetDb; private readonly string _sourceOrigin = Guid.NewGuid().ToString(); private readonly string _targetOrigin = Guid.NewGuid().ToString(); + private readonly string _sourceDbPath; + private readonly string _targetDbPath; public EndToEndSyncTests() { - _sourceDb = CreateDatabase(); - _targetDb = CreateDatabase(); + _sourceDbPath = Path.Combine(Path.GetTempPath(), $"e2e_source_{Guid.NewGuid()}.db"); + _targetDbPath = Path.Combine(Path.GetTempPath(), $"e2e_target_{Guid.NewGuid()}.db"); + _sourceDb = CreateDatabase(_sourceDbPath); + _targetDb = CreateDatabase(_targetDbPath); SetupSchema(_sourceDb, _sourceOrigin); SetupSchema(_targetDb, _targetOrigin); @@ -217,9 +221,9 @@ public void Sync_BiDirectional_BothDbsGetChanges() Assert.NotNull(GetPerson(_targetDb, "p2")); } - private static SqliteConnection CreateDatabase() + private static SqliteConnection CreateDatabase(string dbPath) { - var connection = new SqliteConnection("Data Source=:memory:"); + var connection = new 
SqliteConnection($"Data Source={dbPath}"); connection.Open(); return connection; } @@ -405,5 +409,15 @@ public void Dispose() { _sourceDb.Dispose(); _targetDb.Dispose(); + if (File.Exists(_sourceDbPath)) + { + try { File.Delete(_sourceDbPath); } + catch { /* File may be locked */ } + } + if (File.Exists(_targetDbPath)) + { + try { File.Delete(_targetDbPath); } + catch { /* File may be locked */ } + } } } diff --git a/Sync/Sync.SQLite.Tests/SchemaAndTriggerTests.cs b/Sync/Sync.SQLite.Tests/SchemaAndTriggerTests.cs index 70f3427..172bc79 100644 --- a/Sync/Sync.SQLite.Tests/SchemaAndTriggerTests.cs +++ b/Sync/Sync.SQLite.Tests/SchemaAndTriggerTests.cs @@ -11,11 +11,15 @@ namespace Sync.SQLite.Tests; public sealed class SchemaAndTriggerTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"schemaandtriggertests_{Guid.NewGuid()}.db" + ); private const string OriginId = "test-origin-id"; public SchemaAndTriggerTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); } @@ -835,5 +839,19 @@ public void EnableDisable_MultipleToggles_WorksCorrectly() #endregion - public void Dispose() => _db.Dispose(); + public void Dispose() + { + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } } diff --git a/Sync/Sync.SQLite.Tests/SpecComplianceTests.cs b/Sync/Sync.SQLite.Tests/SpecComplianceTests.cs index 491b98c..bd7af0d 100644 --- a/Sync/Sync.SQLite.Tests/SpecComplianceTests.cs +++ b/Sync/Sync.SQLite.Tests/SpecComplianceTests.cs @@ -12,11 +12,15 @@ namespace Sync.SQLite.Tests; public sealed class SpecComplianceTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"speccompliancetests_{Guid.NewGuid()}.db" + ); private readonly string _originId = 
Guid.NewGuid().ToString(); public SpecComplianceTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); SyncSchema.CreateSchema(_db); SyncSchema.SetOriginId(_db, _originId); @@ -982,5 +986,19 @@ private List GetTableColumns(string tableName) #endregion - public void Dispose() => _db.Dispose(); + public void Dispose() + { + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } } diff --git a/Sync/Sync.SQLite.Tests/SpecConformanceTests.cs b/Sync/Sync.SQLite.Tests/SpecConformanceTests.cs index a77cf64..65365ee 100644 --- a/Sync/Sync.SQLite.Tests/SpecConformanceTests.cs +++ b/Sync/Sync.SQLite.Tests/SpecConformanceTests.cs @@ -14,11 +14,15 @@ namespace Sync.SQLite.Tests; public sealed partial class SpecConformanceTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"specconformancetests_{Guid.NewGuid()}.db" + ); private readonly string _originId = Guid.NewGuid().ToString(); public SpecConformanceTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); SyncSchema.CreateSchema(_db); SyncSchema.SetOriginId(_db, _originId); @@ -688,7 +692,21 @@ private List FetchAllChanges() [GeneratedRegex(@"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$")] private static partial Regex UuidRegex(); - public void Dispose() => _db.Dispose(); + public void Dispose() + { + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } #endregion } diff --git a/Sync/Sync.SQLite.Tests/SqliteExtensionIntegrationTests.cs b/Sync/Sync.SQLite.Tests/SqliteExtensionIntegrationTests.cs index 552337a..3104773 100644 --- a/Sync/Sync.SQLite.Tests/SqliteExtensionIntegrationTests.cs +++ 
b/Sync/Sync.SQLite.Tests/SqliteExtensionIntegrationTests.cs @@ -11,12 +11,16 @@ namespace Sync.SQLite.Tests; public sealed class SqliteExtensionIntegrationTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"sqliteextensionintegrationtests_{Guid.NewGuid()}.db" + ); private readonly string _originId = Guid.NewGuid().ToString(); private const string Timestamp = "2025-01-01T00:00:00.000Z"; public SqliteExtensionIntegrationTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); SyncSchema.CreateSchema(_db); SyncSchema.SetOriginId(_db, _originId); @@ -532,5 +536,19 @@ public void InsertAndRetrieve_WithExpiresAt_PreservesValue() #endregion - public void Dispose() => _db.Dispose(); + public void Dispose() + { + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } } diff --git a/Sync/Sync.SQLite.Tests/SubscriptionIntegrationTests.cs b/Sync/Sync.SQLite.Tests/SubscriptionIntegrationTests.cs index 0db64c3..c9ade20 100644 --- a/Sync/Sync.SQLite.Tests/SubscriptionIntegrationTests.cs +++ b/Sync/Sync.SQLite.Tests/SubscriptionIntegrationTests.cs @@ -11,12 +11,16 @@ namespace Sync.SQLite.Tests; public sealed class SubscriptionIntegrationTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"subscriptionintegrationtests_{Guid.NewGuid()}.db" + ); private readonly string _originId = Guid.NewGuid().ToString(); private const string Timestamp = "2025-01-01T00:00:00.000Z"; public SubscriptionIntegrationTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); SyncSchema.CreateSchema(_db); SyncSchema.SetOriginId(_db, _originId); @@ -387,7 +391,21 @@ private static SyncLogEntry CreateChange( 
Timestamp ); - public void Dispose() => _db.Dispose(); + public void Dispose() + { + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } #endregion } diff --git a/Sync/Sync.SQLite.Tests/SyncRepositoryIntegrationTests.cs b/Sync/Sync.SQLite.Tests/SyncRepositoryIntegrationTests.cs index 4e70161..d91881d 100644 --- a/Sync/Sync.SQLite.Tests/SyncRepositoryIntegrationTests.cs +++ b/Sync/Sync.SQLite.Tests/SyncRepositoryIntegrationTests.cs @@ -11,12 +11,16 @@ namespace Sync.SQLite.Tests; public sealed class SyncRepositoryIntegrationTests : IDisposable { private readonly SqliteConnection _db; + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"syncrepositoryintegrationtests_{Guid.NewGuid()}.db" + ); private readonly string _originId = Guid.NewGuid().ToString(); private const string Timestamp = "2025-01-01T00:00:00.000Z"; public SyncRepositoryIntegrationTests() { - _db = new SqliteConnection("Data Source=:memory:"); + _db = new SqliteConnection($"Data Source={_dbPath}"); _db.Open(); SyncSchema.CreateSchema(_db); SyncSchema.SetOriginId(_db, _originId); @@ -774,5 +778,19 @@ public void TombstoneManager_PurgeTombstones_RemovesOldDeletes() #endregion - public void Dispose() => _db.Dispose(); + public void Dispose() + { + _db.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } } diff --git a/Sync/Sync.SQLite.Tests/TombstoneIntegrationTests.cs b/Sync/Sync.SQLite.Tests/TombstoneIntegrationTests.cs index 3dee118..7a6a2ae 100644 --- a/Sync/Sync.SQLite.Tests/TombstoneIntegrationTests.cs +++ b/Sync/Sync.SQLite.Tests/TombstoneIntegrationTests.cs @@ -14,13 +14,21 @@ public sealed class TombstoneIntegrationTests : IDisposable { private readonly SqliteConnection _serverDb; private readonly SqliteConnection _clientDb; + private readonly string _serverDbPath = Path.Combine( + Path.GetTempPath(), + 
$"tombstoneintegrationtests_server_{Guid.NewGuid()}.db" + ); + private readonly string _clientDbPath = Path.Combine( + Path.GetTempPath(), + $"tombstoneintegrationtests_client_{Guid.NewGuid()}.db" + ); private readonly string _serverOrigin = "server-" + Guid.NewGuid(); private readonly string _clientOrigin = "client-" + Guid.NewGuid(); public TombstoneIntegrationTests() { - _serverDb = CreateSyncDatabase(_serverOrigin); - _clientDb = CreateSyncDatabase(_clientOrigin); + _serverDb = CreateSyncDatabase(_serverDbPath, _serverOrigin); + _clientDb = CreateSyncDatabase(_clientDbPath, _clientOrigin); } #region Section 13.3: Server Tracking @@ -315,9 +323,9 @@ public void Spec13_7_TombstonesPreserved_UntilAllClientsSynced() #region Helpers - private static SqliteConnection CreateSyncDatabase(string originId) + private static SqliteConnection CreateSyncDatabase(string dbPath, string originId) { - var conn = new SqliteConnection("Data Source=:memory:"); + var conn = new SqliteConnection($"Data Source={dbPath}"); conn.Open(); SyncSchema.CreateSchema(conn); SyncSchema.SetOriginId(conn, originId); @@ -360,6 +368,30 @@ public void Dispose() { _serverDb.Dispose(); _clientDb.Dispose(); + + if (File.Exists(_serverDbPath)) + { + try + { + File.Delete(_serverDbPath); + } + catch + { + /* File may be locked */ + } + } + + if (File.Exists(_clientDbPath)) + { + try + { + File.Delete(_clientDbPath); + } + catch + { + /* File may be locked */ + } + } } #endregion diff --git a/Sync/Sync.Tests/SyncCoordinatorTests.cs b/Sync/Sync.Tests/SyncCoordinatorTests.cs index ad6a151..3378dee 100644 --- a/Sync/Sync.Tests/SyncCoordinatorTests.cs +++ b/Sync/Sync.Tests/SyncCoordinatorTests.cs @@ -12,6 +12,14 @@ namespace Sync.Tests; public sealed class SyncCoordinatorTests : IDisposable { private static readonly ILogger Logger = NullLogger.Instance; + private readonly string _serverDbPath = Path.Combine( + Path.GetTempPath(), + $"synccoord_server_{Guid.NewGuid()}.db" + ); + private readonly string 
_clientDbPath = Path.Combine( + Path.GetTempPath(), + $"synccoord_client_{Guid.NewGuid()}.db" + ); private readonly SqliteConnection _serverDb; private readonly SqliteConnection _clientDb; private const string ServerOrigin = "server-coord-001"; @@ -19,8 +27,8 @@ public sealed class SyncCoordinatorTests : IDisposable public SyncCoordinatorTests() { - _serverDb = CreateSyncDatabase(ServerOrigin); - _clientDb = CreateSyncDatabase(ClientOrigin); + _serverDb = CreateSyncDatabase(ServerOrigin, _serverDbPath); + _clientDb = CreateSyncDatabase(ClientOrigin, _clientDbPath); } #region Pull Tests @@ -452,9 +460,9 @@ public void Sync_IncrementalSync_OnlyNewChanges() #region Helper Methods - private static SqliteConnection CreateSyncDatabase(string originId) + private static SqliteConnection CreateSyncDatabase(string originId, string dbPath) { - var conn = new SqliteConnection("Data Source=:memory:"); + var conn = new SqliteConnection($"Data Source={dbPath}"); conn.Open(); using var cmd = conn.CreateCommand(); @@ -807,6 +815,28 @@ public void Dispose() { _serverDb.Dispose(); _clientDb.Dispose(); + if (File.Exists(_serverDbPath)) + { + try + { + File.Delete(_serverDbPath); + } + catch + { + /* File may be locked */ + } + } + if (File.Exists(_clientDbPath)) + { + try + { + File.Delete(_clientDbPath); + } + catch + { + /* File may be locked */ + } + } } #endregion diff --git a/Sync/Sync.Tests/SyncIntegrationTests.cs b/Sync/Sync.Tests/SyncIntegrationTests.cs index 851eed2..96fbe4a 100644 --- a/Sync/Sync.Tests/SyncIntegrationTests.cs +++ b/Sync/Sync.Tests/SyncIntegrationTests.cs @@ -11,6 +11,14 @@ namespace Sync.Tests; public sealed class SyncIntegrationTests : IDisposable { private static readonly ILogger Logger = NullLogger.Instance; + private readonly string _serverDbPath = Path.Combine( + Path.GetTempPath(), + $"syncintegration_server_{Guid.NewGuid()}.db" + ); + private readonly string _clientDbPath = Path.Combine( + Path.GetTempPath(), + 
$"syncintegration_client_{Guid.NewGuid()}.db" + ); private readonly SqliteConnection _serverDb; private readonly SqliteConnection _clientDb; private const string ServerOrigin = "server-origin-001"; @@ -18,8 +26,8 @@ public sealed class SyncIntegrationTests : IDisposable public SyncIntegrationTests() { - _serverDb = CreateSyncDatabase(ServerOrigin); - _clientDb = CreateSyncDatabase(ClientOrigin); + _serverDb = CreateSyncDatabase(ServerOrigin, _serverDbPath); + _clientDb = CreateSyncDatabase(ClientOrigin, _clientDbPath); } [Fact] @@ -183,9 +191,9 @@ public void HashVerification_AfterSync_HashesMatch() // === Helper Methods === - private static SqliteConnection CreateSyncDatabase(string originId) + private static SqliteConnection CreateSyncDatabase(string originId, string dbPath) { - var conn = new SqliteConnection("Data Source=:memory:"); + var conn = new SqliteConnection($"Data Source={dbPath}"); conn.Open(); using var cmd = conn.CreateCommand(); @@ -612,5 +620,27 @@ public void Dispose() { _serverDb.Dispose(); _clientDb.Dispose(); + if (File.Exists(_serverDbPath)) + { + try + { + File.Delete(_serverDbPath); + } + catch + { + /* File may be locked */ + } + } + if (File.Exists(_clientDbPath)) + { + try + { + File.Delete(_clientDbPath); + } + catch + { + /* File may be locked */ + } + } } } diff --git a/Sync/Sync.Tests/TestDb.cs b/Sync/Sync.Tests/TestDb.cs index 344f07b..d3293a3 100644 --- a/Sync/Sync.Tests/TestDb.cs +++ b/Sync/Sync.Tests/TestDb.cs @@ -3,15 +3,20 @@ namespace Sync.Tests; /// -/// In-memory SQLite database for integration testing. +/// File-based SQLite database for integration testing. 
/// public sealed class TestDb : IDisposable { + private readonly string _dbPath = Path.Combine( + Path.GetTempPath(), + $"testdb_{Guid.NewGuid()}.db" + ); + public SqliteConnection Connection { get; } public TestDb() { - Connection = new SqliteConnection("Data Source=:memory:"); + Connection = new SqliteConnection($"Data Source={_dbPath}"); Connection.Open(); InitializeSyncSchema(); } @@ -131,5 +136,19 @@ private static SyncOperation ParseOperation(string op) => _ => throw new ArgumentException($"Unknown operation: {op}"), }; - public void Dispose() => Connection.Dispose(); + public void Dispose() + { + Connection.Dispose(); + if (File.Exists(_dbPath)) + { + try + { + File.Delete(_dbPath); + } + catch + { + /* File may be locked */ + } + } + } } diff --git a/Website/src/docs/lql.md b/Website/src/docs/lql.md deleted file mode 100644 index bd0e107..0000000 --- a/Website/src/docs/lql.md +++ /dev/null @@ -1,243 +0,0 @@ ---- -layout: layouts/docs.njk -title: "Lambda Query Language (LQL)" ---- - -# Lambda Query Language (LQL) - -A functional pipeline-style DSL that transpiles to SQL. LQL provides an intuitive, composable way to write database queries using lambda expressions and pipeline operators, making complex queries more readable and maintainable. - -## Website - -Visit [lql.dev](https://lql.dev) for interactive playground and documentation. 
- -## Features - -- **Pipeline Syntax** - Chain operations using `|>` operator -- **Lambda Expressions** - Use familiar lambda syntax for filtering -- **Cross-Database Support** - Transpiles to PostgreSQL, SQLite, and SQL Server -- **Type Safety** - Integrates with DataProvider for compile-time validation -- **VS Code Extension** - Syntax highlighting and IntelliSense support -- **CLI Tools** - Command-line transpilation and validation - -## Syntax Overview - -### Basic Pipeline -```lql -users |> select(id, name, email) -``` - -### With Filtering -```lql -employees -|> filter(fn(row) => row.salary > 50000) -|> select(id, name, salary) -``` - -### Joins -```lql -Customer -|> join(Order, on = Customer.Id = Order.CustomerId) -|> select(Customer.Name, Order.Total) -``` - -### Complex Queries -```lql -let high_value_customers = Customer -|> join(Order, on = Customer.Id = Order.CustomerId) -|> filter(fn(row) => row.Order.Total > 1000) -|> group_by(Customer.Id, Customer.Name) -|> having(fn(row) => SUM(row.Order.Total) > 5000) -|> select(Customer.Name, SUM(Order.Total) AS TotalSpent) -|> order_by(TotalSpent DESC) -|> limit(10) -``` - -## Pipeline Operations - -| Operation | Description | SQL Equivalent | -|-----------|-------------|----------------| -| `select(cols...)` | Choose columns | `SELECT` | -| `filter(fn(row) => ...)` | Filter rows | `WHERE` | -| `join(table, on = ...)` | Join tables | `JOIN` | -| `left_join(table, on = ...)` | Left join | `LEFT JOIN` | -| `group_by(cols...)` | Group rows | `GROUP BY` | -| `having(fn(row) => ...)` | Filter groups | `HAVING` | -| `order_by(col [ASC/DESC])` | Sort results | `ORDER BY` | -| `limit(n)` | Limit rows | `LIMIT` | -| `offset(n)` | Skip rows | `OFFSET` | -| `distinct()` | Unique rows | `DISTINCT` | -| `union(query)` | Combine queries | `UNION` | -| `union_all(query)` | Combine with duplicates | `UNION ALL` | - -## Installation - -### CLI Tool (SQLite) -```bash -dotnet tool install -g LqlCli.SQLite -``` - -### VS Code 
Extension -Search for "LQL" in VS Code Extensions or: -```bash -code --install-extension lql-lang -``` - -### NuGet Packages -```xml - - - - - - - - -``` - -## CLI Usage - -### Transpile to SQL -```bash -lql --input query.lql --output query.sql -``` - -### Validate Syntax -```bash -lql --input query.lql --validate -``` - -### Print to Console -```bash -lql --input query.lql -``` - -## Programmatic Usage - -```csharp -using Lql; -using Lql.SQLite; - -// Parse LQL -var lqlCode = "users |> filter(fn(row) => row.age > 21) |> select(name, email)"; -var statement = LqlCodeParser.Parse(lqlCode); - -// Convert to SQL -var context = new SQLiteContext(); -var sql = statement.ToSql(context); - -Console.WriteLine(sql); -// Output: SELECT name, email FROM users WHERE age > 21 -``` - -## Function Support - -### Aggregate Functions -- `COUNT()`, `SUM()`, `AVG()`, `MIN()`, `MAX()` - -### String Functions -- `UPPER()`, `LOWER()`, `LENGTH()`, `CONCAT()` - -### Date Functions -- `NOW()`, `DATE()`, `YEAR()`, `MONTH()` - -### Conditional -- `CASE WHEN ... THEN ... ELSE ... 
END` -- `COALESCE()`, `NULLIF()` - -## Expression Support - -### Arithmetic -```lql -products |> select(price * quantity AS total) -``` - -### Comparisons -```lql -orders |> filter(fn(row) => row.date >= '2024-01-01' AND row.status != 'cancelled') -``` - -### Pattern Matching -```lql -customers |> filter(fn(row) => row.name LIKE 'John%') -``` - -### Subqueries -```lql -orders |> filter(fn(row) => row.customer_id IN ( - customers |> filter(fn(c) => c.country = 'USA') |> select(id) -)) -``` - -## VS Code Extension Features - -- Syntax highlighting -- Auto-completion -- Error diagnostics -- Format on save -- Snippets for common patterns - -## Architecture - -``` -Lql/ -ā”œā”€ā”€ Lql/ # Core transpiler -│ ā”œā”€ā”€ Parsing/ # ANTLR grammar and parser -│ ā”œā”€ā”€ FunctionMapping/ # Database-specific functions -│ └── Pipeline steps # AST transformation -ā”œā”€ā”€ Lql.SQLite/ # SQLite dialect -ā”œā”€ā”€ Lql.SqlServer/ # SQL Server dialect -ā”œā”€ā”€ Lql.Postgres/ # PostgreSQL dialect -ā”œā”€ā”€ LqlCli.SQLite/ # CLI tool -ā”œā”€ā”€ LqlExtension/ # VS Code extension -└── Website/ # lql.dev website -``` - -## Testing - -```bash -dotnet test Lql.Tests/Lql.Tests.csproj -``` - -## Examples - -See the `Lql.Tests/TestData/Lql/` directory for comprehensive examples of LQL queries and their SQL equivalents. - -## Error Handling - -LQL provides detailed error messages: - -```lql -// Invalid: Identifier cannot start with number -123table |> select(id) -// Error: Syntax error at line 1:0 - Identifier cannot start with a number - -// Invalid: Undefined variable -undefined_var |> select(name) -// Error: Syntax error at line 1:0 - Undefined variable -``` - -## Integration with DataProvider - -LQL files are automatically processed by DataProvider source generators: - -1. Write `.lql` files in your project -2. DataProvider transpiles to SQL during build -3. Generates type-safe C# extension methods -4. Use with full IntelliSense support - -## Contributing - -1. 
Follow functional programming principles -2. Add tests for new features -3. Update grammar file for syntax changes -4. Ensure all dialects are supported -5. Run tests before submitting PRs - -## License - -MIT License - -## Author - -MelbourneDeveloper - [ChristianFindlay.com](https://christianfindlay.com) From d073722222f587a417766d25eea3cd7cdabd51f8 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:03:10 +1100 Subject: [PATCH 02/12] Fixes --- .github/workflows/ci.yml | 7 ++- .../DataProviderIntegrationTests.cs | 5 +- .../DataProvider.SQLite.Cli/Program.cs | 10 +++- .../Gatekeeper.Api.Tests/TokenServiceTests.cs | 9 +++- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 6 ++- .../Lql.TypeProvider.FSharp.Tests.fsproj | 8 +-- Migration/Migration.Tests/LqlDefaultsTests.cs | 13 +++-- .../MigrationCornerCaseTests.cs | 46 ++++++++++++---- .../SchemaYamlSerializerTests.cs | 10 +++- .../Migration.Tests/SqliteMigrationTests.cs | 52 +++++++++++++++---- .../Sync.Http.Tests/CrossDatabaseSyncTests.cs | 9 +++- .../HttpMappingSyncTests.cs | 9 +++- .../CrossDatabaseSyncTests.cs | 9 +++- .../ChangeApplierIntegrationTests.cs | 9 +++- Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs | 18 +++++-- 15 files changed, 170 insertions(+), 50 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3ea8147..13ba9bb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -231,8 +231,13 @@ jobs: - name: Restore run: dotnet restore Lql/Lql.TypeProvider.FSharp.Tests + - name: Build CLI tools (needed by MSBuild targets in test project) + run: | + dotnet build Migration/Migration.Cli -c Release --no-restore + dotnet build DataProvider/DataProvider.SQLite.Cli -c Release --no-restore + - name: Test - run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --no-restore --verbosity normal --logger "trx;LogFileName=test-results.trx" + run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests 
--verbosity normal --logger "trx;LogFileName=test-results.trx" - name: Upload test results uses: actions/upload-artifact@v4 diff --git a/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs b/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs index b61291d..de845d7 100644 --- a/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs +++ b/DataProvider/DataProvider.Example.Tests/DataProviderIntegrationTests.cs @@ -19,7 +19,10 @@ public sealed class DataProviderIntegrationTests : IDisposable public DataProviderIntegrationTests() { - _dbPath = Path.Combine(Path.GetTempPath(), $"dataprovider_integration_tests_{Guid.NewGuid()}.db"); + _dbPath = Path.Combine( + Path.GetTempPath(), + $"dataprovider_integration_tests_{Guid.NewGuid()}.db" + ); _connectionString = $"Data Source={_dbPath}"; _connection = new SqliteConnection(_connectionString); } diff --git a/DataProvider/DataProvider.SQLite.Cli/Program.cs b/DataProvider/DataProvider.SQLite.Cli/Program.cs index 56c9a56..2ebaa4f 100644 --- a/DataProvider/DataProvider.SQLite.Cli/Program.cs +++ b/DataProvider/DataProvider.SQLite.Cli/Program.cs @@ -94,7 +94,9 @@ DirectoryInfo outDir // Verify DB exists and is accessible; if empty, run schema file try { - using var conn = new Microsoft.Data.Sqlite.SqliteConnection(absoluteConnectionString); + using var conn = new Microsoft.Data.Sqlite.SqliteConnection( + absoluteConnectionString + ); await conn.OpenAsync().ConfigureAwait(false); // Check if any tables exist @@ -202,7 +204,11 @@ is Result.Error< ).Value; var colsResult = await SqliteCodeGenerator - .GetColumnMetadataFromSqlAsync(absoluteConnectionString, sql, stmt.Parameters) + .GetColumnMetadataFromSqlAsync( + absoluteConnectionString, + sql, + stmt.Parameters + ) .ConfigureAwait(false); if ( colsResult diff --git a/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs b/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs index edb35bd..fda81f0 100644 --- 
a/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs +++ b/Gatekeeper/Gatekeeper.Api.Tests/TokenServiceTests.cs @@ -549,8 +549,13 @@ private static void CleanupTestDb(SqliteConnection connection, string dbPath) connection.Dispose(); if (File.Exists(dbPath)) { - try { File.Delete(dbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(dbPath); + } + catch + { /* File may be locked */ + } } } diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index 8478b1a..d986e68 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -34,17 +34,19 @@ + - + + - + diff --git a/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj b/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj index 2dfebbe..eb8830e 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj @@ -44,9 +44,9 @@ - - - - + diff --git a/Migration/Migration.Tests/LqlDefaultsTests.cs b/Migration/Migration.Tests/LqlDefaultsTests.cs index b20c527..6f23ea4 100644 --- a/Migration/Migration.Tests/LqlDefaultsTests.cs +++ b/Migration/Migration.Tests/LqlDefaultsTests.cs @@ -44,9 +44,16 @@ public async Task DisposeAsync() _sqliteConnection.Dispose(); if (File.Exists(_sqliteDbPath)) { - try { File.Delete(_sqliteDbPath); } - catch (IOException) { /* File may be locked */ } - catch (UnauthorizedAccessException) { /* May not have permission */ } + try + { + File.Delete(_sqliteDbPath); + } + catch (IOException) + { /* File may be locked */ + } + catch (UnauthorizedAccessException) + { /* May not have permission */ + } } } diff --git a/Migration/Migration.Tests/MigrationCornerCaseTests.cs 
b/Migration/Migration.Tests/MigrationCornerCaseTests.cs index ac049ee..cf2902a 100644 --- a/Migration/Migration.Tests/MigrationCornerCaseTests.cs +++ b/Migration/Migration.Tests/MigrationCornerCaseTests.cs @@ -89,7 +89,9 @@ public void ColumnName_IsReservedWord_HandledCorrectly() Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; var table = inspected.Tables.Single(); Assert.Equal(7, table.Columns.Count); } @@ -120,7 +122,9 @@ public void TableName_CamelCase_PreservedCorrectly() Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; Assert.Contains(inspected.Tables, t => t.Name == "UserAccountSettings"); } finally @@ -188,7 +192,9 @@ public void Table_ManyColumns_Success() Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; Assert.Equal(21, inspected.Tables.Single().Columns.Count); } finally @@ -306,7 +312,10 @@ public void Table_MultiColumnIndex_Success() .Column("EntityType", PortableTypes.VarChar(100)) .Column("EntityId", PortableTypes.Uuid) .Column("EventDate", PortableTypes.DateTime()) - .Index("idx_events_tenant_entity", ["TenantId", "EntityType", "EntityId"]) + .Index( + "idx_events_tenant_entity", + ["TenantId", "EntityType", "EntityId"] + ) ) .Build(); @@ -314,7 +323,9 @@ public void Table_MultiColumnIndex_Success() Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + 
(SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; Assert.Single(inspected.Tables.Single().Indexes); } finally @@ -352,7 +363,9 @@ public void Table_SelfReferencingForeignKey_Success() Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; var table = inspected.Tables.Single(); Assert.Single(table.ForeignKeys); Assert.Equal("Categories", table.ForeignKeys[0].ReferencedTable); @@ -388,7 +401,9 @@ public void Table_MultipleIndexesOnSameColumn_Success() Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; Assert.Equal(3, inspected.Tables.Single().Indexes.Count); } finally @@ -426,7 +441,9 @@ public void AllColumnsNullable_ExceptPrimaryKey_Success() Assert.True(result is MigrationApplyResultOk); // Verify all columns except Id are nullable - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; var table = inspected.Tables.Single(); foreach (var col in table.Columns.Where(c => c.Name != "Id")) { @@ -487,7 +504,11 @@ public void DefaultValue_StringWithQuotes_Success() "Defaults", t => t.Column("Id", PortableTypes.Int, c => c.PrimaryKey()) - .Column("Status", PortableTypes.VarChar(50), c => c.Default("'pending'")) + .Column( + "Status", + PortableTypes.VarChar(50), + c => c.Default("'pending'") + ) .Column("Type", PortableTypes.VarChar(50), c => c.Default("'default'")) ) .Build(); @@ -782,7 +803,12 @@ public void AddIndex_ThenAddAnother_Success() var operations = ( (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, 
allowDestructive: false, logger: _logger) + SchemaDiff.Calculate( + currentSchema, + v2, + allowDestructive: false, + logger: _logger + ) ).Value; // Should add the new index diff --git a/Migration/Migration.Tests/SchemaYamlSerializerTests.cs b/Migration/Migration.Tests/SchemaYamlSerializerTests.cs index 740ec95..c195ef5 100644 --- a/Migration/Migration.Tests/SchemaYamlSerializerTests.cs +++ b/Migration/Migration.Tests/SchemaYamlSerializerTests.cs @@ -593,13 +593,19 @@ public void IntegrationTest_YamlToSqlite_CreatesDatabaseSuccessfully() using var verifyCmd = connection.CreateCommand(); verifyCmd.CommandText = "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name IN ('Users', 'Orders')"; - var tableCount = Convert.ToInt32(verifyCmd.ExecuteScalar(), CultureInfo.InvariantCulture); + var tableCount = Convert.ToInt32( + verifyCmd.ExecuteScalar(), + CultureInfo.InvariantCulture + ); Assert.Equal(2, tableCount); // Verify index exists verifyCmd.CommandText = "SELECT COUNT(*) FROM sqlite_master WHERE type='index' AND name='idx_users_email'"; - var indexCount = Convert.ToInt32(verifyCmd.ExecuteScalar(), CultureInfo.InvariantCulture); + var indexCount = Convert.ToInt32( + verifyCmd.ExecuteScalar(), + CultureInfo.InvariantCulture + ); Assert.Equal(1, indexCount); } finally diff --git a/Migration/Migration.Tests/SqliteMigrationTests.cs b/Migration/Migration.Tests/SqliteMigrationTests.cs index f3fe112..a7d9458 100644 --- a/Migration/Migration.Tests/SqliteMigrationTests.cs +++ b/Migration/Migration.Tests/SqliteMigrationTests.cs @@ -21,8 +21,13 @@ private static void CleanupTestDb(SqliteConnection connection, string dbPath) connection.Dispose(); if (File.Exists(dbPath)) { - try { File.Delete(dbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(dbPath); + } + catch + { /* File may be locked */ + } } } @@ -140,7 +145,9 @@ public void CreateDatabaseFromScratch_MultipleTablesWithForeignKeys_Success() // Assert Assert.True(result is 
MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; Assert.Equal(2, inspected.Tables.Count); Assert.Contains(inspected.Tables, t => t.Name == "Users"); @@ -508,7 +515,9 @@ public void CreateTable_AllPortableTypes_Success() // Assert Assert.True(result is MigrationApplyResultOk); - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; var table = inspected.Tables.Single(); Assert.Equal(25, table.Columns.Count); } @@ -559,7 +568,12 @@ public void Destructive_DropTable_BlockedByDefault() var operations = ( (OperationsResultOk) - SchemaDiff.Calculate(currentSchema, v2, allowDestructive: false, logger: _logger) + SchemaDiff.Calculate( + currentSchema, + v2, + allowDestructive: false, + logger: _logger + ) ).Value; // Assert - No drop operations should be generated @@ -675,7 +689,9 @@ public void SchemaInspector_RoundTrip_Matches() ); // Act - Inspect and compare - var inspected = ((SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger)).Value; + var inspected = ( + (SchemaResultOk)SqliteSchemaInspector.Inspect(connection, _logger) + ).Value; // Calculate diff between original and inspected - should be empty var diff = ( @@ -1462,7 +1478,11 @@ public void LqlDefault_NumericValues_PassThrough() t => t.Column("id", PortableTypes.Int, c => c.PrimaryKey()) .Column("count", PortableTypes.Int, c => c.NotNull().DefaultLql("0")) - .Column("priority", PortableTypes.Int, c => c.NotNull().DefaultLql("100")) + .Column( + "priority", + PortableTypes.Int, + c => c.NotNull().DefaultLql("100") + ) .Column( "rate", PortableTypes.Decimal(5, 2), @@ -1582,7 +1602,11 @@ public void LqlDefault_GenUuid_GeneratesValidUuidFormat() .Table( "records", t => - t.Column("id", 
PortableTypes.Uuid, c => c.PrimaryKey().DefaultLql("gen_uuid()")) + t.Column( + "id", + PortableTypes.Uuid, + c => c.PrimaryKey().DefaultLql("gen_uuid()") + ) .Column("name", PortableTypes.VarChar(100)) ) .Build(); @@ -1607,7 +1631,8 @@ public void LqlDefault_GenUuid_GeneratesValidUuidFormat() // Insert multiple rows and verify UUIDs are generated and unique using var insertCmd = connection.CreateCommand(); - insertCmd.CommandText = "INSERT INTO records (name) VALUES ('test1'), ('test2'), ('test3')"; + insertCmd.CommandText = + "INSERT INTO records (name) VALUES ('test1'), ('test2'), ('test3')"; insertCmd.ExecuteNonQuery(); using var selectCmd = connection.CreateCommand(); @@ -1704,7 +1729,11 @@ public void LqlDefault_MixedDefaults_AllWorkTogether() .Table( "orders", t => - t.Column("id", PortableTypes.Uuid, c => c.PrimaryKey().DefaultLql("gen_uuid()")) + t.Column( + "id", + PortableTypes.Uuid, + c => c.PrimaryKey().DefaultLql("gen_uuid()") + ) .Column( "status", PortableTypes.VarChar(20), @@ -1748,7 +1777,8 @@ public void LqlDefault_MixedDefaults_AllWorkTogether() insertCmd.ExecuteNonQuery(); using var selectCmd = connection.CreateCommand(); - selectCmd.CommandText = "SELECT id, status, quantity, is_urgent, created_at FROM orders"; + selectCmd.CommandText = + "SELECT id, status, quantity, is_urgent, created_at FROM orders"; using var reader = selectCmd.ExecuteReader(); Assert.True(reader.Read()); diff --git a/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs b/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs index 4bacc86..5b5cbc7 100644 --- a/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs +++ b/Sync/Sync.Http.Tests/CrossDatabaseSyncTests.cs @@ -35,8 +35,13 @@ public async Task DisposeAsync() { if (File.Exists(dbPath)) { - try { File.Delete(dbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(dbPath); + } + catch + { /* File may be locked */ + } } } } diff --git a/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs 
b/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs index 0b3524d..c67cd3e 100644 --- a/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs +++ b/Sync/Sync.Integration.Tests/HttpMappingSyncTests.cs @@ -35,8 +35,13 @@ public async Task DisposeAsync() { if (File.Exists(dbPath)) { - try { File.Delete(dbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(dbPath); + } + catch + { /* File may be locked */ + } } } } diff --git a/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs b/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs index 5dc4483..f53679f 100644 --- a/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs +++ b/Sync/Sync.Postgres.Tests/CrossDatabaseSyncTests.cs @@ -68,8 +68,13 @@ public async Task DisposeAsync() if (File.Exists(_sqliteDbPath)) { - try { File.Delete(_sqliteDbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(_sqliteDbPath); + } + catch + { /* File may be locked */ + } } } diff --git a/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs b/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs index 8a44c4a..9185339 100644 --- a/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs +++ b/Sync/Sync.SQLite.Tests/ChangeApplierIntegrationTests.cs @@ -601,8 +601,13 @@ public void Dispose() _db.Dispose(); if (File.Exists(_dbPath)) { - try { File.Delete(_dbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(_dbPath); + } + catch + { /* File may be locked */ + } } } diff --git a/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs b/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs index 358a525..3288c53 100644 --- a/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs +++ b/Sync/Sync.SQLite.Tests/EndToEndSyncTests.cs @@ -411,13 +411,23 @@ public void Dispose() _targetDb.Dispose(); if (File.Exists(_sourceDbPath)) { - try { File.Delete(_sourceDbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(_sourceDbPath); + } + catch + { /* File may be locked */ + } } if (File.Exists(_targetDbPath)) { - try { 
File.Delete(_targetDbPath); } - catch { /* File may be locked */ } + try + { + File.Delete(_targetDbPath); + } + catch + { /* File may be locked */ + } } } } From 9408a1d035895a5341bf533a0f1218b4e39d4acb Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:22:31 +1100 Subject: [PATCH 03/12] type provider fixes --- .github/workflows/ci.yml | 7 +- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 22 +++--- Lql/LqlExtension/README.md | 29 ++++++-- .../Components/Layout/MainLayout.razor | 1 + Lql/README.md | 71 ++++++++++++++++++- Website/src/_data/navigation.json | 1 + .../src/blog/lql-simplifies-development.md | 66 +++++++++++++---- Website/src/docs/getting-started.md | 3 +- Website/src/docs/installation.md | 28 ++++++++ Website/src/docs/quick-start.md | 24 ++++++- 10 files changed, 209 insertions(+), 43 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 13ba9bb..ba99bdd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -122,7 +122,7 @@ jobs: run: dotnet restore - name: Build - run: dotnet build --no-restore -c Release + run: dotnet build --no-restore -c Release -p:SkipTypeProviderCodeGen=true # DataProvider tests dataprovider-tests: @@ -231,11 +231,6 @@ jobs: - name: Restore run: dotnet restore Lql/Lql.TypeProvider.FSharp.Tests - - name: Build CLI tools (needed by MSBuild targets in test project) - run: | - dotnet build Migration/Migration.Cli -c Release --no-restore - dotnet build DataProvider/DataProvider.SQLite.Cli -c Release --no-restore - - name: Test run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --verbosity normal --logger "trx;LogFileName=test-results.trx" diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index d986e68..8706927 100644 --- 
a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -7,10 +7,8 @@ false - - - - + + @@ -34,22 +32,20 @@ - - - + + + + - - + + - + - - - diff --git a/Lql/LqlExtension/README.md b/Lql/LqlExtension/README.md index 513a6e6..c70f093 100644 --- a/Lql/LqlExtension/README.md +++ b/Lql/LqlExtension/README.md @@ -212,14 +212,33 @@ npm run lint 4. Add tests if applicable 5. Submit a pull request +## F# Type Provider + +For F# projects, LQL also offers compile-time validation through a type provider. Invalid LQL queries cause compilation errors rather than runtime failures. + +```fsharp +open Lql + +// Validated at compile time +type GetUsers = LqlCommand<"Users |> select(Users.Id, Users.Name)"> +type FilteredUsers = LqlCommand<"Users |> filter(fn(row) => row.Users.Age > 18) |> select(*)"> + +// Access generated SQL +let sql = GetUsers.Sql // SQL string generated at compile time +``` + +Install the type provider: + +```xml + +``` + +See the [LQL documentation](../README.md) for full type provider details. + ## License MIT License - see LICENSE file for details. ## Support -For issues and feature requests, please visit our [GitHub repository](https://github.com/your-org/lambda-query-language). - ---- - -**Enjoy coding with Lambda Query Language! šŸš€** \ No newline at end of file +For issues and feature requests, please visit our [GitHub repository](https://github.com/your-org/lambda-query-language). \ No newline at end of file diff --git a/Lql/LqlWebsite/Components/Layout/MainLayout.razor b/Lql/LqlWebsite/Components/Layout/MainLayout.razor index a9b6525..ada97a8 100644 --- a/Lql/LqlWebsite/Components/Layout/MainLayout.razor +++ b/Lql/LqlWebsite/Components/Layout/MainLayout.razor @@ -13,6 +13,7 @@
  • Features
  • Examples
  • Playground
  • +
  • DataProvider
  • GitHub
  • diff --git a/Lql/README.md b/Lql/README.md index 93d5c49..6a17cb7 100644 --- a/Lql/README.md +++ b/Lql/README.md @@ -69,13 +69,78 @@ var sql = LqlCodeParser.Parse(lql).ToSql(new SQLiteContext()); Validate LQL queries at compile time. Invalid queries cause compilation errors, not runtime errors. +### Installation + +```xml + +``` + +### Basic Usage + ```fsharp -open Lql.TypeProvider +open Lql -type GetUsers = LqlCommand<"Users |> select(Id, Name, Email)"> +// Define types with validated LQL - errors caught at COMPILE TIME +type GetUsers = LqlCommand<"Users |> select(Users.Id, Users.Name, Users.Email)"> type ActiveUsers = LqlCommand<"Users |> filter(fn(row) => row.Status = 'active') |> select(*)"> -let sql = GetUsers.Sql // SQL generated at compile time +// Access generated SQL and original query +let sql = GetUsers.Sql // Generated SQL string +let query = GetUsers.Query // Original LQL string +``` + +### What Gets Validated + +The type provider validates your LQL at compile time and generates two properties: +- `Query` - The original LQL query string +- `Sql` - The generated SQL (SQLite dialect) + +### Query Examples + +```fsharp +// Select with columns +type SelectColumns = LqlCommand<"Users |> select(Users.Id, Users.Name, Users.Email)"> + +// Filtering with AND/OR +type FilterComplex = LqlCommand<"Users |> filter(fn(row) => row.Users.Age > 18 and row.Users.Status = 'active') |> select(*)"> + +// Joins +type JoinQuery = LqlCommand<"Users |> join(Orders, on = Users.Id = Orders.UserId) |> select(Users.Name, Orders.Total)"> +type LeftJoin = LqlCommand<"Users |> left_join(Orders, on = Users.Id = Orders.UserId) |> select(*)"> + +// Aggregations with GROUP BY and HAVING +type GroupBy = LqlCommand<"Orders |> group_by(Orders.UserId) |> select(Orders.UserId, count(*) as order_count)"> +type Having = LqlCommand<"Orders |> group_by(Orders.UserId) |> having(fn(g) => count(*) > 5) |> select(Orders.UserId, count(*) as cnt)"> + +// Order, limit, offset +type Pagination = 
LqlCommand<"Users |> order_by(Users.Name asc) |> limit(10) |> offset(20) |> select(*)"> + +// Arithmetic expressions +type Calculated = LqlCommand<"Products |> select(Products.Price * Products.Quantity as total)"> +``` + +### Compile-Time Error Example + +Invalid LQL causes a build error with line/column position: + +```fsharp +// This FAILS to compile with: "Invalid LQL syntax at line 1, column 15" +type BadQuery = LqlCommand<"Users |> selectt(*)"> // typo: 'selectt' +``` + +### Executing Queries + +```fsharp +open Microsoft.Data.Sqlite + +let executeQuery() = + use conn = new SqliteConnection("Data Source=mydb.db") + conn.Open() + + // SQL is validated at compile time, safe to execute + use cmd = new SqliteCommand(GetUsers.Sql, conn) + use reader = cmd.ExecuteReader() + // ... process results ``` ## Pipeline Operations diff --git a/Website/src/_data/navigation.json b/Website/src/_data/navigation.json index 9675eb9..6ab48a8 100644 --- a/Website/src/_data/navigation.json +++ b/Website/src/_data/navigation.json @@ -2,6 +2,7 @@ "main": [ { "text": "Docs", "url": "/docs/getting-started/" }, { "text": "API", "url": "/apidocs/" }, + { "text": "LQL", "url": "https://lql.dev", "external": true }, { "text": "Blog", "url": "/blog/" }, { "text": "GitHub", "url": "https://github.com/MelbourneDeveloper/DataProvider", "external": true } ], diff --git a/Website/src/blog/lql-simplifies-development.md b/Website/src/blog/lql-simplifies-development.md index 69aac84..6d819e6 100644 --- a/Website/src/blog/lql-simplifies-development.md +++ b/Website/src/blog/lql-simplifies-development.md @@ -1,35 +1,73 @@ --- layout: layouts/blog.njk -title: "LQL: How Lightweight Query Language Simplifies .NET Development" -description: A deep dive into LQL and its benefits for .NET data access. +title: "LQL: How Lambda Query Language Simplifies .NET Development" +description: A deep dive into LQL and its benefits for .NET data access, including the new F# Type Provider. 
date: 2024-04-14 author: DataProvider Team tags: - .NET - LQL + - F# - post --- -LQL (Lambda Query Language) is a type-safe query syntax that transpiles to SQL. It allows you to write queries using familiar C# lambda expressions. +LQL (Lambda Query Language) is a functional pipeline-style DSL that transpiles to SQL. Write database logic once, run it anywhere. ## Why LQL? -Traditional SQL strings are error-prone and lack type safety. LQL provides: +Traditional SQL strings are error-prone and differ across database vendors. LQL provides: -- **Compile-time checking**: Catch errors before runtime -- **IntelliSense support**: Full autocomplete in your IDE -- **Refactoring support**: Rename properties safely +- **Write once, deploy everywhere**: Same query works on PostgreSQL, SQLite, and SQL Server +- **Compile-time validation**: Catch errors before runtime (especially with F# Type Provider) +- **Functional pipeline syntax**: Readable, composable query building +- **IDE support**: VS Code extension with syntax highlighting and IntelliSense ## Basic Example -```csharp -// LQL -var query = Orders - .Where(o => o.Status == "Active") - .Select(o => new { o.Id, o.Name }); +```lql +Users +|> filter(fn(row) => row.Age > 18 and row.Status = 'active') +|> join(Orders, on = Users.Id = Orders.UserId) +|> group_by(Users.Id, Users.Name) +|> select(Users.Name, sum(Orders.Total) as TotalSpent) +|> order_by(TotalSpent desc) +|> limit(10) +``` + +This transpiles to correct SQL for PostgreSQL, SQLite, or SQL Server. + +## F# Type Provider: Compile-Time Validation + +The new F# Type Provider takes LQL to the next level by validating queries at compile time. Invalid LQL causes a build error, not a runtime crash. 
+ +```fsharp +open Lql + +// These are validated when you compile - errors caught immediately +type GetUsers = LqlCommand<"Users |> select(Users.Id, Users.Name, Users.Email)"> +type ActiveUsers = LqlCommand<"Users |> filter(fn(row) => row.Status = 'active') |> select(*)"> + +// Access the generated SQL +let sql = GetUsers.Sql // SQL string ready to execute +``` + +Invalid queries fail the build with descriptive error messages: + +```fsharp +// Build error: "Invalid LQL syntax at line 1, column 15" +type BadQuery = LqlCommand<"Users |> selectt(*)"> // typo in 'select' +``` + +The type provider supports all LQL operations: +- Select, filter, join, left_join +- Group by, having, order by +- Limit, offset, distinct +- Arithmetic expressions and aggregations (sum, avg, count, min, max) + +Install it with: -// Transpiles to SQL: -// SELECT Id, Name FROM Orders WHERE Status = 'Active' +```xml + ``` ## Getting Started diff --git a/Website/src/docs/getting-started.md b/Website/src/docs/getting-started.md index 5525af4..853dcc8 100644 --- a/Website/src/docs/getting-started.md +++ b/Website/src/docs/getting-started.md @@ -39,5 +39,6 @@ DataProvider is built around these key principles: ## Next Steps - [DataProvider Documentation](/docs/dataprovider/) -- [LQL Query Language](/docs/lql/) +- [LQL Query Language](/docs/lql/) - Write once, deploy to any SQL database +- [F# Type Provider](/docs/lql/#f-type-provider) - Compile-time validated LQL queries - [API Reference](/api/) diff --git a/Website/src/docs/installation.md b/Website/src/docs/installation.md index 273c2b0..145b57e 100644 --- a/Website/src/docs/installation.md +++ b/Website/src/docs/installation.md @@ -40,6 +40,34 @@ dotnet add package DataProvider.MySql dotnet add package DataProvider.Sqlite ``` +## LQL (Lambda Query Language) + +Install LQL packages for cross-database query transpilation: + +```bash +# Choose your target database +dotnet add package Lql.SQLite +dotnet add package Lql.Postgres +dotnet add package 
Lql.SqlServer +``` + +### F# Type Provider + +For F# projects, install the type provider for compile-time LQL validation: + +```bash +dotnet add package Lql.TypeProvider.FSharp +``` + +This enables compile-time validated queries: + +```fsharp +open Lql + +type GetUsers = LqlCommand<"Users |> select(Users.Id, Users.Name)"> +let sql = GetUsers.Sql // Invalid LQL = build error +``` + ## Requirements - .NET 9.0 or later diff --git a/Website/src/docs/quick-start.md b/Website/src/docs/quick-start.md index 4c93203..7590e1f 100644 --- a/Website/src/docs/quick-start.md +++ b/Website/src/docs/quick-start.md @@ -75,8 +75,30 @@ catch } ``` +## Using LQL for Cross-Database Queries + +Instead of writing raw SQL, use LQL to write queries that work across all databases: + +```csharp +using Lql; +using Lql.SQLite; + +var lql = "Orders |> filter(fn(row) => row.Status = 'Active') |> select(Id, Name)"; +var sql = LqlCodeParser.Parse(lql).ToSql(new SQLiteContext()); +``` + +For F# projects, use the Type Provider for compile-time validation: + +```fsharp +open Lql + +type ActiveOrders = LqlCommand<"Orders |> filter(fn(row) => row.Status = 'Active') |> select(*)"> +let sql = ActiveOrders.Sql // SQL validated at compile time +``` + ## Next Steps - [DataProvider Documentation](/docs/dataprovider/) -- [LQL Query Language](/docs/lql/) +- [LQL Query Language](/docs/lql/) - Cross-database query language +- [F# Type Provider](/docs/lql/#f-type-provider) - Compile-time LQL validation - [API Reference](/api/) From 0ef77f29c39ac2cfe55de75aa9876ef3d13f5572 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:27:55 +1100 Subject: [PATCH 04/12] Fix ci --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ba99bdd..9a5ed66 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -232,7 +232,7 @@ jobs: run: dotnet 
restore Lql/Lql.TypeProvider.FSharp.Tests - name: Test - run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --verbosity normal --logger "trx;LogFileName=test-results.trx" + run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --verbosity normal --logger "trx;LogFileName=test-results.trx" -p:SkipTypeProviderCodeGen=true - name: Upload test results uses: actions/upload-artifact@v4 From e8544ae62ed54df0864c746aa93a99d666f2901c Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:30:51 +1100 Subject: [PATCH 05/12] Fix ci build --- .github/workflows/ci.yml | 7 ++++++- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 9 +++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9a5ed66..51b110e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -231,8 +231,13 @@ jobs: - name: Restore run: dotnet restore Lql/Lql.TypeProvider.FSharp.Tests + - name: Build CLI tools (needed by MSBuild targets) + run: | + dotnet build Migration/Migration.Cli -c Debug + dotnet build DataProvider/DataProvider.SQLite.Cli -c Debug + - name: Test - run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --verbosity normal --logger "trx;LogFileName=test-results.trx" -p:SkipTypeProviderCodeGen=true + run: dotnet test Lql/Lql.TypeProvider.FSharp.Tests --no-restore --verbosity normal --logger "trx;LogFileName=test-results.trx" - name: Upload test results uses: actions/upload-artifact@v4 diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index 8706927..8232fab 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -7,8 +7,10 @@ false - - + + + + @@ -47,5 +49,8 @@ + + +
    From e1491d77857c67c9dd79f94b6265f4fcccee28cf Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:39:29 +1100 Subject: [PATCH 06/12] migrate from yml --- .../Lql.TypeProvider.FSharp.Tests.fsproj | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj b/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj index eb8830e..2dfebbe 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests/Lql.TypeProvider.FSharp.Tests.fsproj @@ -44,9 +44,9 @@ - + + + + From b443e0fc7c1573bb3ba050c79c182acc54da2a82 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:54:27 +1100 Subject: [PATCH 07/12] Try fix build --- .github/workflows/ci.yml | 5 +--- .gitignore | 5 ++++ .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 26 +++++++------------ 3 files changed, 16 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 51b110e..eed969d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -118,11 +118,8 @@ jobs: restore-keys: | ${{ runner.os }}-nuget- - - name: Restore dependencies - run: dotnet restore - - name: Build - run: dotnet build --no-restore -c Release -p:SkipTypeProviderCodeGen=true + run: dotnet build -c Release # DataProvider tests dataprovider-tests: diff --git a/.gitignore b/.gitignore index 38eb736..da987c0 100644 --- a/.gitignore +++ b/.gitignore @@ -379,6 +379,9 @@ Generated_Code/ # Generated C# code from DataProvider source generator *.g.cs + +!Lql/Lql.TypeProvider.FSharp.Tests.Data/Generated/*.g.cs + # Build timestamps .timestamp @@ -399,3 +402,5 @@ Website/src/apidocs/ Website/src/docs/sync.md Website/src/docs/dataprovider.md Website/src/docs/lql.md + 
+Lql/Lql.TypeProvider.FSharp.Tests.Data/Generated/ diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index 8232fab..04f120b 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -7,11 +7,6 @@ false - - - - - @@ -33,24 +28,23 @@ + + + - - - + - - - + - - + - - - From 3a65f8b3320a6c93e2f1e7a10bfa16df65e29838 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:59:01 +1100 Subject: [PATCH 08/12] Fix build --- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index 04f120b..b025248 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -7,6 +7,11 @@ false + + + + + @@ -28,8 +33,7 @@ - - + + + + From 66dde98fbcfd9b6e62ca4c8b7dab0c6e99727705 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 21:23:39 +1100 Subject: [PATCH 09/12] try fix build --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eed969d..b47b392 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -119,7 +119,7 @@ jobs: ${{ runner.os }}-nuget- - name: Build - run: dotnet build -c Release + run: dotnet build -c Release -m:1 # DataProvider tests dataprovider-tests: From 85ed0055b7713403b0bafc83d9db698b54e5abc6 Mon Sep 17 00:00:00 
2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Sun, 4 Jan 2026 22:29:30 +1100 Subject: [PATCH 10/12] try fix --- .github/workflows/ci.yml | 2 +- .../DataProvider.Example/DataProvider.Example.csproj | 8 +++++--- Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj | 6 ++++-- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 5 +++-- Samples/Clinical/Clinical.Api/Clinical.Api.csproj | 6 ++++-- Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj | 6 ++++-- 6 files changed, 21 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b47b392..eed969d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -119,7 +119,7 @@ jobs: ${{ runner.os }}-nuget- - name: Build - run: dotnet build -c Release -m:1 + run: dotnet build -c Release # DataProvider tests dataprovider-tests: diff --git a/DataProvider/DataProvider.Example/DataProvider.Example.csproj b/DataProvider/DataProvider.Example/DataProvider.Example.csproj index 963dc04..cbfa070 100644 --- a/DataProvider/DataProvider.Example/DataProvider.Example.csproj +++ b/DataProvider/DataProvider.Example/DataProvider.Example.csproj @@ -41,11 +41,13 @@ + - + + @@ -55,9 +57,9 @@ - + - + diff --git a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj index 732a5f7..e4bdcdc 100644 --- a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj +++ b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj @@ -34,15 +34,17 @@ + - + + - + diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index b025248..4ac6dc0 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -34,12 +34,13 @@ + - + @@ -48,7 +49,7 @@ 
Outputs="$(MSBuildProjectDirectory)/Generated/.timestamp"> - + diff --git a/Samples/Clinical/Clinical.Api/Clinical.Api.csproj b/Samples/Clinical/Clinical.Api/Clinical.Api.csproj index c9f7eb6..e163986 100644 --- a/Samples/Clinical/Clinical.Api/Clinical.Api.csproj +++ b/Samples/Clinical/Clinical.Api/Clinical.Api.csproj @@ -33,15 +33,17 @@ + - + + - + diff --git a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj index 822b4ff..284a544 100644 --- a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj +++ b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj @@ -33,15 +33,17 @@ + - + + - + From 64e4235e2e28d6bf45ef58466d1a43854d5e2b92 Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Mon, 5 Jan 2026 17:49:49 +1100 Subject: [PATCH 11/12] Try fix --- DataProvider/DataProvider.Example/DataProvider.Example.csproj | 2 -- Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj | 2 -- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 1 - Samples/Clinical/Clinical.Api/Clinical.Api.csproj | 2 -- Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj | 2 -- 5 files changed, 9 deletions(-) diff --git a/DataProvider/DataProvider.Example/DataProvider.Example.csproj b/DataProvider/DataProvider.Example/DataProvider.Example.csproj index cbfa070..acdac3c 100644 --- a/DataProvider/DataProvider.Example/DataProvider.Example.csproj +++ b/DataProvider/DataProvider.Example/DataProvider.Example.csproj @@ -41,13 +41,11 @@ - - diff --git a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj index e4bdcdc..50b6881 100644 --- a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj +++ b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj @@ -34,13 +34,11 @@ - - diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index 
4ac6dc0..a9f2566 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -34,7 +34,6 @@ - - - diff --git a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj index 284a544..29c89d8 100644 --- a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj +++ b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj @@ -33,13 +33,11 @@ - - From 2b42fe62cf1390ee2c1c0eaae7040eae9999aeca Mon Sep 17 00:00:00 2001 From: Christian Findlay <16697547+MelbourneDeveloper@users.noreply.github.com> Date: Mon, 5 Jan 2026 17:56:29 +1100 Subject: [PATCH 12/12] Try fix --- .github/workflows/ci.yml | 2 +- .../DataProvider.Example.csproj | 6 +- .../Gatekeeper.Api/Gatekeeper.Api.csproj | 4 +- .../Lql.TypeProvider.FSharp.Tests.Data.csproj | 18 +- .../Clinical/Clinical.Api/Clinical.Api.csproj | 4 +- .../Scheduling.Api/Scheduling.Api.csproj | 4 +- Website/src/docs/dataprovider.md | 2 - Website/src/docs/migrations.md | 340 ++++++++++++++++++ Website/src/docs/sync.md | 2 - 9 files changed, 357 insertions(+), 25 deletions(-) create mode 100644 Website/src/docs/migrations.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eed969d..b47b392 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -119,7 +119,7 @@ jobs: ${{ runner.os }}-nuget- - name: Build - run: dotnet build -c Release + run: dotnet build -c Release -m:1 # DataProvider tests dataprovider-tests: diff --git a/DataProvider/DataProvider.Example/DataProvider.Example.csproj b/DataProvider/DataProvider.Example/DataProvider.Example.csproj index acdac3c..963dc04 100644 --- a/DataProvider/DataProvider.Example/DataProvider.Example.csproj +++ b/DataProvider/DataProvider.Example/DataProvider.Example.csproj @@ -42,7 +42,7 @@ - + @@ -55,9 +55,9 @@ - + - + diff --git a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj 
b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj index 50b6881..732a5f7 100644 --- a/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj +++ b/Gatekeeper/Gatekeeper.Api/Gatekeeper.Api.csproj @@ -35,14 +35,14 @@ - + - + diff --git a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj index a9f2566..8478b1a 100644 --- a/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj +++ b/Lql/Lql.TypeProvider.FSharp.Tests.Data/Lql.TypeProvider.FSharp.Tests.Data.csproj @@ -7,7 +7,7 @@ false - + @@ -33,22 +33,18 @@ - - - - + + - + + - + + diff --git a/Samples/Clinical/Clinical.Api/Clinical.Api.csproj b/Samples/Clinical/Clinical.Api/Clinical.Api.csproj index bebe033..c9f7eb6 100644 --- a/Samples/Clinical/Clinical.Api/Clinical.Api.csproj +++ b/Samples/Clinical/Clinical.Api/Clinical.Api.csproj @@ -34,14 +34,14 @@ - + - + diff --git a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj index 29c89d8..822b4ff 100644 --- a/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj +++ b/Samples/Scheduling/Scheduling.Api/Scheduling.Api.csproj @@ -34,14 +34,14 @@ - + - + diff --git a/Website/src/docs/dataprovider.md b/Website/src/docs/dataprovider.md index 261cea3..d17ba98 100644 --- a/Website/src/docs/dataprovider.md +++ b/Website/src/docs/dataprovider.md @@ -3,8 +3,6 @@ layout: layouts/docs.njk title: "DataProvider SQL Parser" --- -# DataProvider SQL Parser - A .NET source generator project that aims to parse SQL files and generate strongly-typed extension methods for multiple SQL database platforms. CRITICAL: The generator connects to the database at compile time to get query metadata. If it doesn't connect, the generation fails with a compiler error. 
diff --git a/Website/src/docs/migrations.md b/Website/src/docs/migrations.md new file mode 100644 index 0000000..d13ebb8 --- /dev/null +++ b/Website/src/docs/migrations.md @@ -0,0 +1,340 @@ +--- +layout: layouts/docs.njk +title: "Migrations" +description: Database-agnostic schema migration framework for .NET applications. +--- + +A database-agnostic schema migration framework for .NET applications. Define your schema once in YAML, deploy to SQLite, PostgreSQL, or SQL Server. + +## Overview + +The Migration framework provides: + +- **Database-agnostic definitions** - Single schema definition works across SQLite, PostgreSQL, SQL Server +- **Additive-only by default** - Safe upgrades that only add, never remove +- **Idempotent operations** - Running migrations multiple times produces same result +- **Schema introspection** - Compare desired schema against actual database state +- **YAML-based schemas** - Version control friendly, human-readable definitions + +## Quick Start + +### 1. Define Your Schema (YAML) + +Create a `schema.yaml` file: + +```yaml +name: MyApp +tables: + - name: Users + columns: + - name: Id + type: { kind: uuid } + nullable: false + - name: Email + type: { kind: varchar, maxLength: 255 } + nullable: false + - name: Name + type: { kind: varchar, maxLength: 100 } + nullable: true + - name: CreatedAt + type: { kind: datetime } + nullable: false + default: "CURRENT_TIMESTAMP" + primaryKey: + columns: [Id] + indexes: + - name: IX_Users_Email + columns: [Email] + unique: true +``` + +### 2. Run the Migration CLI + +```bash +dotnet run --project Migration.Cli -- \ + --schema schema.yaml \ + --output myapp.db \ + --provider sqlite +``` + +This creates the database and applies the schema. + +### 3. Update Your Schema + +Add new tables or columns to your YAML file. Run the CLI again - it will only apply the changes. 
+ +## CLI Reference + +```bash +Migration.Cli --schema --output --provider +``` + +| Option | Required | Description | +|--------|----------|-------------| +| `--schema` | Yes | Path to YAML schema file | +| `--output` | Yes | Database connection string or file path | +| `--provider` | Yes | Database provider: `sqlite`, `postgres`, `sqlserver` | + +## YAML Schema Reference + +### Schema Structure + +```yaml +name: SchemaName # Required: Schema identifier +tables: # Required: List of tables + - name: TableName # Required: Table name + schema: public # Optional: Schema namespace (default: none for SQLite) + comment: Description # Optional: Table documentation + columns: [...] # Required: Column definitions + primaryKey: {...} # Optional: Primary key definition + indexes: [...] # Optional: Index definitions + foreignKeys: [...] # Optional: Foreign key definitions +``` + +### Column Definition + +```yaml +columns: + - name: ColumnName # Required + type: { kind: ... } # Required: Type definition (see Type Reference) + nullable: true # Optional: Allow NULL (default: true) + default: "expression" # Optional: SQL default expression + identity: # Optional: Auto-increment + seed: 1 + increment: 1 + computed: # Optional: Computed column + expression: "Price * Quantity" + persisted: false + checkConstraint: "Age > 0" # Optional: Column-level CHECK + collation: "NOCASE" # Optional: String collation + comment: "Description" # Optional: Column documentation +``` + +### Type Reference + +#### Simple Types (no parameters) + +| Kind | Description | +|------|-------------| +| `tinyint` | 8-bit integer | +| `smallint` | 16-bit integer | +| `int` | 32-bit integer | +| `bigint` | 64-bit integer | +| `float` | Single precision float | +| `double` | Double precision float | +| `text` | Unlimited text | +| `blob` | Binary data | +| `date` | Date only | +| `uuid` | UUID/GUID | +| `boolean` | True/false | + +#### Parameterized Types + +| Kind | Parameters | Example | 
+|------|------------|---------| +| `char` | `length` | `{ kind: char, length: 10 }` | +| `varchar` | `maxLength` | `{ kind: varchar, maxLength: 255 }` | +| `decimal` | `precision`, `scale` | `{ kind: decimal, precision: 18, scale: 2 }` | +| `datetime` | `precision` | `{ kind: datetime, precision: 3 }` | + +### Type Mapping by Platform + +| Portable Type | SQLite | PostgreSQL | SQL Server | +|---------------|--------|------------|------------| +| `uuid` | TEXT | UUID | UNIQUEIDENTIFIER | +| `varchar(n)` | TEXT | VARCHAR(n) | VARCHAR(n) | +| `int` | INTEGER | INTEGER | INT | +| `bigint` | INTEGER | BIGINT | BIGINT | +| `decimal(p,s)` | REAL | NUMERIC(p,s) | DECIMAL(p,s) | +| `boolean` | INTEGER | BOOLEAN | BIT | +| `datetime` | TEXT | TIMESTAMP | DATETIME2 | +| `text` | TEXT | TEXT | NVARCHAR(MAX) | +| `blob` | BLOB | BYTEA | VARBINARY(MAX) | + +### Primary Key Definition + +```yaml +primaryKey: + name: PK_TableName # Optional: Constraint name + columns: [Id] # Required: Column(s) in the key +``` + +### Index Definition + +```yaml +indexes: + - name: IX_TableName_Column # Required: Index name + columns: [Column1, Column2] # Required: Indexed columns + unique: false # Optional: Unique constraint (default: false) + filter: "Status = 'active'" # Optional: Partial index filter (Postgres/SQL Server) +``` + +### Foreign Key Definition + +```yaml +foreignKeys: + - name: FK_Orders_Users # Optional: Constraint name + columns: [UserId] # Required: Local column(s) + referencedTable: Users # Required: Referenced table + referencedColumns: [Id] # Required: Referenced column(s) + onDelete: Cascade # Optional: NoAction, Cascade, SetNull, SetDefault, Restrict + onUpdate: NoAction # Optional: Same options as onDelete +``` + +## Complete Example + +```yaml +name: Ecommerce +tables: + - name: Users + comment: Application users + columns: + - name: Id + type: { kind: uuid } + nullable: false + - name: Email + type: { kind: varchar, maxLength: 255 } + nullable: false + - name: Name + 
type: { kind: varchar, maxLength: 100 } + nullable: true + - name: Age + type: { kind: int } + nullable: true + checkConstraint: "Age >= 0" + - name: Status + type: { kind: varchar, maxLength: 20 } + nullable: false + default: "'active'" + - name: CreatedAt + type: { kind: datetime } + nullable: false + default: "CURRENT_TIMESTAMP" + primaryKey: + name: PK_Users + columns: [Id] + indexes: + - name: IX_Users_Email + columns: [Email] + unique: true + - name: IX_Users_Status + columns: [Status] + + - name: Products + columns: + - name: Id + type: { kind: uuid } + nullable: false + - name: Sku + type: { kind: char, length: 12 } + nullable: false + comment: Stock keeping unit + - name: Name + type: { kind: varchar, maxLength: 200 } + nullable: false + - name: Price + type: { kind: decimal, precision: 10, scale: 2 } + nullable: false + default: "0.00" + checkConstraint: "Price >= 0" + - name: Stock + type: { kind: int } + nullable: false + default: "0" + primaryKey: + columns: [Id] + indexes: + - name: IX_Products_Sku + columns: [Sku] + unique: true + + - name: Orders + columns: + - name: Id + type: { kind: uuid } + nullable: false + - name: UserId + type: { kind: uuid } + nullable: false + - name: ProductId + type: { kind: uuid } + nullable: false + - name: Quantity + type: { kind: int } + nullable: false + - name: Total + type: { kind: decimal, precision: 10, scale: 2 } + nullable: false + - name: Status + type: { kind: varchar, maxLength: 20 } + nullable: false + default: "'pending'" + - name: CreatedAt + type: { kind: datetime } + nullable: false + default: "CURRENT_TIMESTAMP" + primaryKey: + columns: [Id] + foreignKeys: + - name: FK_Orders_Users + columns: [UserId] + referencedTable: Users + referencedColumns: [Id] + onDelete: Cascade + - name: FK_Orders_Products + columns: [ProductId] + referencedTable: Products + referencedColumns: [Id] + onDelete: Restrict + indexes: + - name: IX_Orders_UserId + columns: [UserId] + - name: IX_Orders_Status + columns: [Status] 
+``` + +## MSBuild Integration + +Add migration targets to your `.csproj` to run migrations at build time: + +```xml + + + + +``` + +## Design Principles + +The Migration framework follows strict coding rules: + +- **No exceptions** - All operations return `Result` +- **Additive-only** - Destructive operations (DROP) require explicit opt-in +- **Idempotent** - Safe to run multiple times +- **Database-agnostic** - Same YAML works across all supported databases + +## Architecture + +``` ++-----------------------------------------------------------+ +| Application Layer | ++-----------------------------------------------------------+ +| Migration Engine | +| +-------------+ +-------------+ +-------------+ | +| | Schema | | Diff | | DDL | | +| | Definition | | Engine | | Generator | | +| +-------------+ +-------------+ +-------------+ | ++-----------------------------------------------------------+ +| Provider Layer | +| +----------+ +----------+ +----------+ | +| | SQLite | | Postgres | | SqlServer| | +| | Provider | | Provider | | Provider | | +| +----------+ +----------+ +----------+ | ++-----------------------------------------------------------+ +``` + +## Next Steps + +- [DataProvider Documentation](/docs/dataprovider/) - Generate code from SQL +- [Sync Documentation](/docs/sync/) - Offline-first synchronization +- [LQL Documentation](/docs/lql/) - Cross-database query language diff --git a/Website/src/docs/sync.md b/Website/src/docs/sync.md index f541ae9..ee365d6 100644 --- a/Website/src/docs/sync.md +++ b/Website/src/docs/sync.md @@ -3,8 +3,6 @@ layout: layouts/docs.njk title: "Sync Framework" --- -# Sync Framework - A database-agnostic, offline-first synchronization framework for .NET applications. Enables two-way data synchronization between distributed replicas with conflict resolution, tombstone management, and real-time subscriptions. ## Overview