From bcb5f6b35fa289dfa83398a27b95dbc456df5571 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 23 Dec 2025 15:42:35 +0000 Subject: [PATCH 1/4] docs: add new llms.txt --- llms.txt | 1097 +++++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 954 insertions(+), 143 deletions(-) diff --git a/llms.txt b/llms.txt index 8554b3b..545870e 100644 --- a/llms.txt +++ b/llms.txt @@ -1,193 +1,1004 @@ -# llms.txt — Guidance for AI systems using the Databricks AppKit (@databricks/appkit) - +# llms.txt — LLM Guide for Building Great Databricks Apps with AppKit Project: Databricks AppKit -Author: Databricks -Version: 1.0.0 - -# ===================== -# General Description -# ===================== -AppKit is a modular TypeScript SDK for building apps with workflows and plugins. -It provides a single entrypoint (createApp) where you configure and register plugins. -Each plugin is then available under AppKit[pluginName]. - -Main concepts: -- createApp(config): initializes the SDK with plugins -- Plugins: extend AppKit with functionality (server, analytics, ai, etc.) -- AppKit[pluginName]: exposes plugin API after initialization -- New plugins can be created by extending the Plugin class. - -# ===================== -# Primary Usage Pattern -# ===================== -Always use async/await. -Always initialize AppKit before using plugins. -Server and plugins already initialized, no custom endpoints. - -Example: + +This document is written *for LLMs* generating code in a brand-new project folder that installs AppKit from npm. It is intentionally prescriptive. + +## High-level mission + +Build **full-stack TypeScript apps** on Databricks using: + +- **Backend**: `@databricks/appkit` +- **Frontend**: `@databricks/appkit-ui` +- **Analytics**: SQL files in `config/queries/*.sql` executed via the AppKit analytics plugin + +This file is designed to work even when you *do not* have access to the AppKit source repo. Prefer only public package APIs and portable project structures. + +## Hard rules (LLM guardrails) + +- **Do not invent APIs**. If unsure, stick to the patterns shown in this file and only documented exports from `@databricks/appkit` and `@databricks/appkit-ui`. +- **`createApp()` is async**. Prefer **top-level `await createApp(...)`**. If you can’t, use `void createApp(...)` and do not ignore promise rejection. +- **Always memoize query parameters** passed to `useAnalyticsQuery` / charts to avoid refetch loops. +- **Always handle loading/error/empty states** in UI (use `Skeleton`, error text, empty state). +- **Always use `sql.*` helpers** for query parameters (do not pass raw strings/numbers unless the query expects none). +- **Never construct SQL strings dynamically**. Use parameterized queries with `:paramName`. +- **Never use `require()`**. Use ESM `import/export`. 
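+
+If top-level `await` is not available in your entry file, a minimal sketch of the fallback — handling the `createApp()` promise explicitly instead of ignoring it. Only the documented `createApp` and `server` exports are used; the error handling itself is illustrative:
+
+```ts
+import { createApp, server } from "@databricks/appkit";
+
+// Without top-level await, surface startup failures explicitly rather than dropping the promise.
+createApp({ plugins: [server()] }).catch((error) => {
+  console.error("AppKit failed to start:", error);
+  process.exit(1);
+});
+```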
+ +## Canonical project layout + +Recommended structure: + +``` +my-app/ +├── server.ts # backend entry point (or server/index.ts for complex apps) +├── index.html # Vite entry point +├── vite.config.ts # Vite config +├── src/ +│ ├── main.tsx # React entry point +│ └── App.tsx # Root component +├── config/ +│ └── queries/ +│ └── my_query.sql # SQL queries (optional, if using analytics) +├── app.yaml # Databricks Apps config (warehouse binding) +├── package.json +└── tsconfig.json +``` + +Alternative structure for larger apps: + +``` +my-app/ +├── server/ +│ └── index.ts # backend entry point +├── client/ +│ ├── index.html +│ ├── vite.config.ts +│ └── src/ +│ ├── main.tsx +│ └── App.tsx +├── config/ +│ └── queries/ +│ └── my_query.sql +├── app.yaml +├── package.json +└── tsconfig.json +``` + +## Project scaffolding (start here) + +### `package.json` + +```json +{ + "name": "my-app", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "NODE_ENV=development tsx watch server.ts", + "build": "vite build", + "start": "NODE_ENV=production tsx server.ts" + }, + "dependencies": { + "@databricks/appkit": "^0.0.2", + "@databricks/appkit-ui": "^0.0.2", + "react": "^18.0.0", + "react-dom": "^18.0.0" + }, + "devDependencies": { + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", + "@vitejs/plugin-react": "^5.0.0", + "tsx": "^4.19.0", + "typescript": "~5.6.0", + "vite": "^6.0.0" + } +} +``` + +### `index.html` + +```html + + + + + + My App + + +
+    <script type="module" src="/src/main.tsx"></script>
+  </body>
+</html>
+```
+
+### `src/main.tsx`
+
+```tsx
+import { StrictMode } from "react";
+import { createRoot } from "react-dom/client";
+import App from "./App";
+
+createRoot(document.getElementById("root")!).render(
+  <StrictMode>
+    <App />
+  </StrictMode>,
+);
+```
+
+### `src/App.tsx` (minimal)
+
+```tsx
+export default function App() {
+  return (
+    <div>
+      <h1>My App</h1>
+    </div>
+ ); +} +``` + +### `vite.config.ts` ```ts -import { createApp, server, analytics } from "@databricks/appkit"; +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; + +export default defineConfig({ + plugins: [react()], +}); +``` + +### `tsconfig.json` + +```json +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM", "DOM.Iterable"], + "module": "ESNext", + "moduleResolution": "bundler", + "jsx": "react-jsx", + "strict": true, + "skipLibCheck": true, + "noEmit": true, + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true + }, + "include": ["src", "server.ts"] +} +``` + +### `server.ts` + +```ts +import { createApp, server } from "@databricks/appkit"; await createApp({ - plugins: [ - server({ port: 8000 }), - analytics(), - ], + plugins: [server()], +}); +``` + +### Running the app + +```bash +# Install dependencies +npm install + +# Development (starts backend + Vite dev server) +npm run dev + +# Production build +npm run build +npm start +``` + +## Integrating into an existing app + +If you already have a React/Vite app and want to add AppKit: + +### 1. Install dependencies + +```bash +npm install @databricks/appkit @databricks/appkit-ui +npm install -D tsx +``` + +### 2. Create `server.ts` (new file) + +```ts +import { createApp, server } from "@databricks/appkit"; + +await createApp({ + plugins: [server()], }); ``` -# ============================================== -# Basic Usage Pattern starting the server -# ============================================== +### 3. Update `package.json` scripts + +```json +{ + "scripts": { + "dev": "NODE_ENV=development tsx watch server.ts", + "build": "vite build", + "start": "NODE_ENV=production tsx server.ts" + } +} +``` + +### 4. That's it -Example: +- Your existing `vite.config.ts` stays the same +- Your existing `src/` folder stays the same +- AppKit's server plugin will automatically serve your Vite app in dev mode and the `dist/` folder in production + +### Adding analytics to an existing app ```ts +// server.ts import { createApp, server, analytics } from "@databricks/appkit"; -const AppKit = await createApp({ +await createApp({ + plugins: [server(), analytics({})], +}); +``` + +Then create `config/queries/` and add your `.sql` files. + +## Environment variables + +### Required for Databricks Apps deployment + +These are typically **provided by Databricks Apps runtime** (exact set can vary by platform/version): + +| Variable | Description | +|----------|-------------| +| `DATABRICKS_HOST` | Workspace URL (e.g. `https://xxx.cloud.databricks.com`) | +| `DATABRICKS_APP_PORT` | Port to bind (default: `8000`) | +| `DATABRICKS_APP_NAME` | App name in Databricks | + +### Required for SQL queries (analytics plugin) + +| Variable | Description | How to set | +|----------|-------------|------------| +| `DATABRICKS_WAREHOUSE_ID` | SQL warehouse ID | In `app.yaml`: `valueFrom: sql-warehouse` | + +### Optional + +| Variable | Description | Default | +|----------|-------------|---------| +| `DATABRICKS_WORKSPACE_ID` | Workspace ID | Auto-fetched from API | +| `NODE_ENV` | `"development"` or `"production"` | — | +| `FLASK_RUN_HOST` | Host to bind | `0.0.0.0` | + +### Local development + +For local development, you need to authenticate with Databricks. 
Options: + +**Option 1: Databricks CLI profile (recommended)** + +```bash +# Configure once +databricks configure --profile my-profile + +# Then run with profile +DATABRICKS_CONFIG_PROFILE=my-profile npm run dev +# If your Databricks SDK expects a different variable name, try: +# DATABRICKS_PROFILE=my-profile npm run dev +``` + +**Option 2: Environment variables** + +```bash +export DATABRICKS_HOST="https://xxx.cloud.databricks.com" +export DATABRICKS_TOKEN="dapi..." +export DATABRICKS_WAREHOUSE_ID="abc123..." +npm run dev +``` + +**Option 3: `.env` file (auto-loaded by AppKit)** + +```bash +# .env (add to .gitignore!) +DATABRICKS_HOST=https://xxx.cloud.databricks.com +DATABRICKS_TOKEN=dapi... +DATABRICKS_WAREHOUSE_ID=abc123... +``` + +### Telemetry (optional) + +| Variable | Description | +|----------|-------------| +| `OTEL_EXPORTER_OTLP_ENDPOINT` | OpenTelemetry collector endpoint | +| `OTEL_SERVICE_NAME` | Service name for traces | + +## Backend: `@databricks/appkit` + +### Minimal server (golden template) + +The smallest valid AppKit server: + +```ts +// server.ts (or server/index.ts) +import { createApp, server } from "@databricks/appkit"; + +await createApp({ + plugins: [server()], +}); +``` + +### Server plugin (`server()`) + +What it does: + +- Starts an Express server (default `host=0.0.0.0`, `port=8000`) +- Mounts plugin routes under `/api//...` +- Adds `/health` (returns `{ status: "ok" }`) +- Serves frontend: + - **Development** (`NODE_ENV=development`): runs a Vite dev server in middleware mode + - **Production**: auto-detects static frontend directory (checks `dist`, `client/dist`, `build`, `public`, `out`) + +Config (real options): + +```ts +import { createApp, server } from "@databricks/appkit"; + +await createApp({ plugins: [ - server({ port: 8000, autoStart: false }), - analytics(), + server({ + port: 8000, // default: Number(process.env.DATABRICKS_APP_PORT) || 8000 + host: "0.0.0.0", // default: process.env.FLASK_RUN_HOST || "0.0.0.0" + autoStart: true, // default: true + staticPath: "dist", // optional: force a specific static directory + }), ], }); - -const app = await AppKit.server.start(); -app.get("/ping", (req, res) => res.send("pong")); ``` -# ===================== -# Plugin APIs -# ===================== +Manual server start (when you need to `.extend()` Express): + +```ts +import { createApp, server } from "@databricks/appkit"; -Each plugin exposes a set of endpoints by default. +const appkit = await createApp({ + plugins: [server({ autoStart: false })], +}); -## Server Plugin -- AppKit.server.start(): Promise -- Purpose: Start an Express server with configured port, only use if { autoStart: false } is provided in the config of the server plugin -- Usage: Add routes via the returned app -- Config - When setting the plugin, the following options can be provided: - server({ - port?: number; - staticPath?: string; // This provides the path where the frontend assets are. - autoStart?: boolean; - }) +appkit.server.extend((app) => { + app.get("/custom", (_req, res) => res.json({ ok: true })); +}); -## Analytics Plugin -- AppKit.analytics.query.executeQuery({ query, parameters }: { query: string; parameters?: Record }, options?: ExecuteOptions): Promise; -- Purpose: Provide SQL by key interface. -- Usage: Only for structured query + insert examples. SQL never goes into the call to the function. Any SQL that needs to be written, - will be written into config/queries/.sql. All queries should be parameterized (use placeholders). 
-- Default endpoints: - - POST /api/analytics/:query_key -> `query_key` will be the key to the file that contains the query. Expects a body with the shape { parameters?: Record; }. parameters will be bound into the query. +await appkit.server.start(); +``` -# ===================== -# Custom Plugins -# ===================== +### Analytics plugin (`analytics()`) -Databricks AppKit Might not cover all the cases needed, so for those cases a plugin can be created. -Here is an example: +Add SQL query execution backed by Databricks SQL Warehouses. ```ts -import { Plugin, toPlugin } from '@databricks/appkit'; +import { analytics, createApp, server } from "@databricks/appkit"; -class OpenWeatherPlugin extends Plugin { - name: string = "open-weather"; - private apiKey: string; - private url: string; +await createApp({ + plugins: [server(), analytics({})], +}); +``` - constructor(config: any, auth: IAuthManager, telemetry: ITelemetryManager) { - super(config, auth, telemetry); +Where queries live: - this.apiKey = process.env.OPEN_WEATHER_API_KEY!; - this.url = process.env.OPEN_WEATHER_URL || "https://api.openweathermap.org/data/3.0/onecall"; +- Put `.sql` files in `config/queries/`. +- Query key is the filename without `.sql` (e.g. `spend_summary.sql` → `"spend_summary"`). - // ... - } +SQL parameters: - async getWeather(lat: number, lon: number): Promise { - const url = `${this.url}?lat=${lat}&lon=${lon}&appid=${this.apiKey}`; - - try { - const response = await fetch(url); - if (!response.ok) { - console.error("Error fetching weather data:", response.statusText); - return null; - } - - const data = await response.json(); - return data; - } catch (error) { - console.error("Fetch error:", error); - return null; - } - } +- Use `:paramName` placeholders. +- Optionally annotate parameter types using SQL comments: + +```sql +-- @param startDate DATE +-- @param endDate DATE +-- @param limit NUMERIC +SELECT ... +WHERE usage_date BETWEEN :startDate AND :endDate +LIMIT :limit +``` + +Supported `-- @param` types (case-insensitive): + +- `STRING`, `NUMERIC`, `BOOLEAN`, `DATE`, `TIMESTAMP`, `BINARY` + +Server-injected params (important): + +- `:workspaceId` is **injected by the server** and **must not** be annotated. +- Example: + +```sql +WHERE workspace_id = :workspaceId +``` + +HTTP endpoints exposed (mounted under `/api/analytics`): + +- `POST /api/analytics/query/:query_key` +- `POST /api/analytics/users/me/query/:query_key` +- `GET /api/analytics/arrow-result/:jobId` +- `GET /api/analytics/users/me/arrow-result/:jobId` + +Formats: + +- `format: "JSON"` (default) returns JSON rows +- `format: "ARROW"` returns an Arrow “external links” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId` + +### Request context (`getRequestContext()`) + +If a plugin sets `requiresDatabricksClient = true`, AppKit adds middleware that provides request context. 
+ +Headers used: + +- `x-forwarded-user`: required in production; identifies the user +- `x-forwarded-access-token`: optional; enables **user token passthrough** if `DATABRICKS_HOST` is set + +Context fields (real behavior): + +- `userId`: derived from `x-forwarded-user` (in development it falls back to `serviceUserId`) +- `serviceUserId`: service principal/user ID +- `warehouseId`: `Promise` (from `DATABRICKS_WAREHOUSE_ID`, or auto-selected in development) +- `workspaceId`: `Promise` (from `DATABRICKS_WORKSPACE_ID` or fetched) +- `userDatabricksClient`: present only when passthrough is available (or in dev it equals service client) +- `serviceDatabricksClient`: always present + +### Custom plugins (backend) + +If you need custom API routes or background logic, implement an AppKit plugin. + +```ts +import { Plugin, toPlugin } from "@databricks/appkit"; +import type express from "express"; + +class MyPlugin extends Plugin { + name = "my-plugin"; + envVars = []; // list required env vars here + requiresDatabricksClient = false; // set true if you need getRequestContext() - /** - * Optionally the plugin can inject its own routes to the router - */ injectRoutes(router: express.Router) { - /** - * Each route is scoped to the plugin name. So in this case the route will be end up being - * /api/open-weather/weather - * - * and an example request would be: - * GET /api/open-weather/weather?lat=40.7128&lon=-74.0060 - */ - router.get("/weather", async (req: any, res: any) => { - const { lat, lon } = req.query; - const data = await this.getWeather(lat, lon); - res.send(data); + this.route(router, { + name: "hello", + method: "get", + path: "/hello", + handler: async (_req, res) => { + res.json({ ok: true }); + }, }); } } -export const openWeather = toPlugin(OpenWeatherPlugin, "openWeather"); +export const myPlugin = toPlugin, "my-plugin">( + MyPlugin, + "my-plugin", +); ``` -Then it would be used as the rest of the plugins +### Caching (global + plugin-level) + +Global: ```ts -import { createApp, server, analytics } from "@databricks/appkit"; -import { openWeather } from './open-weather'; +await createApp({ + plugins: [server(), analytics({})], + cache: { + enabled: true, + ttl: 3600, // seconds + strictPersistence: false, + }, +}); +``` + +- Storage auto-selects **Lakebase persistent cache when healthy**, otherwise falls back to in-memory. + +Plugin-level: + +```ts +// inside a Plugin subclass: +const value = await this.cache.getOrExecute( + ["my-plugin", "data", userId], + async () => expensiveWork(), + userKey, + { ttl: 300 }, +); +``` + +## Frontend: `@databricks/appkit-ui` + +### Imports + +- React-facing APIs: `@databricks/appkit-ui/react` +- Non-React utilities (sql markers, arrow, SSE): `@databricks/appkit-ui/js` + +```tsx +import { useAnalyticsQuery, Card, Skeleton } from "@databricks/appkit-ui/react"; +import { sql } from "@databricks/appkit-ui/js"; +``` + +### `useAnalyticsQuery(queryKey, parameters, options?)` + +Facts: + +- Uses **SSE** under the hood (not `fetch()` polling). +- By default it hits `POST /api/analytics/query/:queryKey`. +- Returns `{ data, loading, error }` where `data` is `null` until loaded. +- `format` is `"JSON"` or `"ARROW"` (uppercase). 
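+
+The hook exposes no `enabled` or `refetch` option, so gating a query means mounting or unmounting the component that calls it. A minimal sketch of that pattern, before the full usage example below — the `user_details` query key and `:userId` parameter are illustrative assumptions:
+
+```tsx
+import { useMemo } from "react";
+import { useAnalyticsQuery } from "@databricks/appkit-ui/react";
+import { sql } from "@databricks/appkit-ui/js";
+
+// The child owns the hook, so mounting it is what starts the query.
+function UserDetails({ userId }: { userId: string }) {
+  const params = useMemo(() => ({ userId: sql.string(userId) }), [userId]);
+  const { data, loading, error } = useAnalyticsQuery("user_details", params);
+
+  if (loading) return <p>Loading…</p>;
+  if (error) return <p>Error: {error}</p>;
+  return <pre>{JSON.stringify(data, null, 2)}</pre>;
+}
+
+// The parent gates execution: nothing runs until a user is selected.
+export function UserPanel({ selectedUserId }: { selectedUserId: string | null }) {
+  return selectedUserId ? <UserDetails userId={selectedUserId} /> : null;
+}
+```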
+
+Recommended usage pattern (memoized params + explicit states):
+
+```tsx
+import { useMemo } from "react";
+import { useAnalyticsQuery, Skeleton } from "@databricks/appkit-ui/react";
+import { sql } from "@databricks/appkit-ui/js";
+
+export function Users() {
+  const params = useMemo(
+    () => ({
+      status: sql.string("active"),
+      limit: sql.number(50),
+    }),
+    [],
+  );
+
+  const { data, loading, error } = useAnalyticsQuery("users_list", params);
+
+  if (loading) return <Skeleton />;
+  if (error) return <div>Error: {error}</div>;
+  if (!data || data.length === 0) return <div>No results</div>;
+
+  return <pre>{JSON.stringify(data[0], null, 2)}</pre>;
+}
+```
+
+Options:
+
+- `format?: "JSON" | "ARROW"` (default `"JSON"`)
+- `autoStart?: boolean` (default `true`)
+- `maxParametersSize?: number` (default `100 * 1024` bytes)
+
+### `useChartData({ queryKey, parameters, format, transformer })`
+
+- `format` here is **lowercase**: `"json" | "arrow" | "auto"` (default `"auto"`)
+- Auto-selection heuristics:
+  - If `parameters._preferArrow === true` → Arrow
+  - If `parameters._preferJson === true` → JSON
+  - If `parameters.limit` is a number > 500 → Arrow
+  - If `parameters.startDate` and `parameters.endDate` exist → Arrow
+
+### Charts (unified query/data API)
+
+All charts support:
+
+- **Query mode**: `queryKey` + `parameters`
+- **Data mode**: `data` (inline JSON, no server)
+
+Available chart components:
+
+- `BarChart`, `LineChart`, `AreaChart`, `PieChart`, `DonutChart`, `HeatmapChart`, `ScatterChart`, `RadarChart`
+
+Query mode (recommended for Databricks-backed analytics):
+
+```tsx
+import { LineChart } from "@databricks/appkit-ui/react";
+import { sql } from "@databricks/appkit-ui/js";
+import { useMemo } from "react";
+
+export function SpendChart() {
+  const params = useMemo(
+    () => ({
+      startDate: sql.date("2024-01-01"),
+      endDate: sql.date("2024-12-31"),
+      aggregationLevel: sql.string("day"),
+    }),
+    [],
+  );
+
+  return (
+    <LineChart queryKey="spend_data" parameters={params} />
+  );
+}
+```
+
+### SQL helpers (`sql.*`)
+
+Use these to build typed parameters (they return marker objects: `{ __sql_type, value }`):
+
+- `sql.string(value)` → STRING (accepts string|number|boolean)
+- `sql.number(value)` → NUMERIC (accepts number|string)
+- `sql.boolean(value)` → BOOLEAN (accepts boolean|string("true"/"false")|number(1/0))
+- `sql.date(value)` → DATE (accepts Date or `"YYYY-MM-DD"`)
+- `sql.timestamp(value)` → TIMESTAMP (accepts Date, ISO string, or unix time)
+
+Binary parameters (important):
+
+- Databricks SQL Warehouse doesn't support `BINARY` as a parameter type.
+- `sql.binary(value)` returns a **STRING marker containing hex**, so use `UNHEX(:param)` in SQL.
+- `sql.binary` accepts `Uint8Array`, `ArrayBuffer`, or a hex string (see the short sketch below).
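+
+A short sketch of the binary round-trip described above — the query file name, table, and column are illustrative assumptions; the only documented pieces are that `sql.binary` produces a hex STRING marker and that the SQL side must call `UNHEX(:param)`:
+
+```ts
+import { sql } from "@databricks/appkit-ui/js";
+
+// config/queries/file_lookup.sql (illustrative) would decode the hex string server-side:
+//   SELECT * FROM my_catalog.my_schema.files WHERE file_id = UNHEX(:fileId)
+
+// sql.binary emits a STRING marker containing hex, which UNHEX(:fileId) turns back into bytes.
+const params = {
+  fileId: sql.binary(new Uint8Array([0xde, 0xad, 0xbe, 0xef])),
+};
+```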
+ +### `connectSSE` (custom SSE connections) + +For custom streaming endpoints (not analytics), use the `connectSSE` utility: + +```tsx +import { connectSSE } from "@databricks/appkit-ui/js"; +import { useEffect, useState } from "react"; + +function useCustomStream(endpoint: string) { + const [messages, setMessages] = useState([]); + const [connected, setConnected] = useState(false); + + useEffect(() => { + const controller = new AbortController(); + + connectSSE({ + url: endpoint, + payload: { key: "value" }, // optional: makes it a POST + onMessage: async ({ data }) => { + setConnected(true); + setMessages((prev) => [...prev, data]); + }, + onError: (error) => { + console.error("SSE error:", error); + setConnected(false); + }, + signal: controller.signal, + maxRetries: 3, // default: 3 + retryDelay: 2000, // default: 2000ms (exponential backoff) + timeout: 300000, // default: 5 minutes + maxBufferSize: 1048576, // default: 1MB + }); + + return () => controller.abort(); + }, [endpoint]); + + return { messages, connected }; +} +``` + +Options: -const AppKit = await createApp({ +- `url`: SSE endpoint URL (required) +- `payload`: Optional request body (if provided, uses POST; otherwise GET) +- `onMessage({ id, data })`: Called for each SSE message +- `onError(error)`: Called on connection errors +- `signal`: AbortSignal to cancel the connection +- `lastEventId`: Resume from a specific event ID +- `maxRetries`: Max retry attempts (default: 3) +- `retryDelay`: Base delay between retries in ms (default: 2000) +- `timeout`: Connection timeout in ms (default: 300000) +- `maxBufferSize`: Max buffer size in bytes (default: 1MB) + +### `ArrowClient` (advanced Arrow processing) + +For low-level Arrow data handling: + +```tsx +import { ArrowClient } from "@databricks/appkit-ui/js"; + +// Process Arrow buffer +const table = await ArrowClient.processArrowBuffer(buffer); + +// Fetch and process Arrow data in one call +const table = await ArrowClient.fetchAndProcessArrow(url, headers); + +// Extract fields from table +const fields = ArrowClient.extractArrowFields(table); +// → [{ name: "date", type: ... }, { name: "value", type: ... }] + +// Extract columns as arrays +const columns = ArrowClient.extractArrowColumns(table); +// → { date: [...], value: [...] } + +// Extract chart data +const { xData, yDataMap } = ArrowClient.extractChartData(table, "date", ["value", "count"]); +// → { xData: [...], yDataMap: { value: [...], count: [...] } } + +// Auto-detect chart fields from Arrow table +const detected = ArrowClient.detectFieldsFromArrow(table); +// → { xField: "date", yFields: ["value"], chartType: "timeseries" } +``` + +### DataTable + +`DataTable` is a production-ready table integrated with `useAnalyticsQuery`. + +Key behaviors: + +- `parameters` is required (use `{}` if none) +- Supports opinionated mode (auto columns) and full-control mode (`children(table)`) + +```tsx +import { DataTable } from "@databricks/appkit-ui/react"; + +export function UsersTable() { + return ( + + ); +} +``` + +### UI components (primitives) + +AppKit-UI ships shadcn-style primitives. Import from `@databricks/appkit-ui/react`. + +Note: Exact exports can vary by AppKit-UI version. Prefer using IDE auto-import/autocomplete to confirm what your installed version exports. 
+
+**Available components:**
+
+`Accordion`, `Alert`, `AlertDialog`, `AspectRatio`, `Avatar`, `Badge`, `Breadcrumb`, `Button`, `ButtonGroup`, `Calendar`, `Card`, `CardHeader`, `CardTitle`, `CardDescription`, `CardContent`, `CardFooter`, `Carousel`, `Checkbox`, `Collapsible`, `Command`, `ContextMenu`, `Dialog`, `DialogTrigger`, `DialogContent`, `DialogHeader`, `DialogTitle`, `DialogDescription`, `DialogFooter`, `Drawer`, `DropdownMenu`, `Empty`, `Field`, `Form`, `HoverCard`, `Input`, `InputGroup`, `InputOtp`, `Item`, `Kbd`, `Label`, `Menubar`, `NavigationMenu`, `Pagination`, `Popover`, `Progress`, `RadioGroup`, `Resizable`, `ScrollArea`, `Select`, `SelectTrigger`, `SelectValue`, `SelectContent`, `SelectItem`, `Separator`, `Sheet`, `Sidebar`, `Skeleton`, `Slider`, `Sonner`, `Spinner`, `Switch`, `Table`, `Tabs`, `TabsList`, `TabsTrigger`, `TabsContent`, `Textarea`, `Toggle`, `ToggleGroup`, `Tooltip`, `TooltipTrigger`, `TooltipContent`, `TooltipProvider`
+
+### Card pattern
+
+```tsx
+import {
+  Card,
+  CardHeader,
+  CardTitle,
+  CardDescription,
+  CardContent,
+  CardFooter,
+} from "@databricks/appkit-ui/react";
+
+function MetricCard({ title, value, description }: Props) {
+  return (
+    <Card>
+      <CardHeader>
+        <CardDescription>{description}</CardDescription>
+        <CardTitle>{value}</CardTitle>
+      </CardHeader>
+      <CardContent>
+        {/* Optional content */}
+      </CardContent>
+      <CardFooter>
+        {/* Optional footer */}
+      </CardFooter>
+    </Card>
+  );
+}
+```
+
+### Select pattern
+
+```tsx
+import {
+  Select,
+  SelectTrigger,
+  SelectValue,
+  SelectContent,
+  SelectItem,
+} from "@databricks/appkit-ui/react";
+
+function DateRangeSelect({ value, onChange }: Props) {
+  return (
+    <Select value={value} onValueChange={onChange}>
+      <SelectTrigger>
+        <SelectValue placeholder="Select a date range" />
+      </SelectTrigger>
+      <SelectContent>
+        {/* Option values are illustrative; never use an empty string as a SelectItem value */}
+        <SelectItem value="7d">Last 7 days</SelectItem>
+        <SelectItem value="30d">Last 30 days</SelectItem>
+        <SelectItem value="90d">Last 90 days</SelectItem>
+      </SelectContent>
+    </Select>
+  );
+}
+```
+
+### Tabs pattern
+
+```tsx
+import { Tabs, TabsList, TabsTrigger, TabsContent } from "@databricks/appkit-ui/react";
+
+function Dashboard() {
+  return (
+    <Tabs defaultValue="overview">
+      <TabsList>
+        <TabsTrigger value="overview">Overview</TabsTrigger>
+        <TabsTrigger value="analytics">Analytics</TabsTrigger>
+      </TabsList>
+
+      <TabsContent value="overview">
+        <div>Overview content</div>
+      </TabsContent>
+
+      <TabsContent value="analytics">
+        <div>Analytics content</div>
+      </TabsContent>
+    </Tabs>
+ ); +} +``` + +### Dialog pattern + +```tsx +import { + Dialog, + DialogTrigger, + DialogContent, + DialogHeader, + DialogTitle, + DialogDescription, + DialogFooter, + Button, +} from "@databricks/appkit-ui/react"; + +function ConfirmDialog() { + return ( + + + + + + + Confirm deletion + + This action cannot be undone. + + + + + + + + + ); +} +``` + +### TooltipProvider requirement + +If using tooltips anywhere in your app, wrap your root component with `TooltipProvider`: + +```tsx +import { TooltipProvider } from "@databricks/appkit-ui/react"; + +function App() { + return ( + + {/* Your app content */} + + ); +} +``` + +### Button variants + +```tsx +import { Button } from "@databricks/appkit-ui/react"; + + + + + + + +``` + +### Loading skeleton pattern + +```tsx +import { Card, CardHeader, Skeleton } from "@databricks/appkit-ui/react"; + +function LoadingCard() { + return ( + + + + + + + + ); +} +``` + +## Type generation (QueryRegistry + IntelliSense) + +Goal: generate `client/src/appKitTypes.d.ts` so query keys, params, and result rows are type-safe. + +### Vite plugin: `appKitTypesPlugin` + +Correct option names: + +- `outFile?: string` (default `src/appKitTypes.d.ts`) +- `watchFolders?: string[]` (default `["../config/queries"]`) + +```ts +// client/vite.config.ts +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; +import { appKitTypesPlugin } from "@databricks/appkit"; + +export default defineConfig({ plugins: [ - server({ port: 8000 }), - analytics(), - openWeather(), + react(), + appKitTypesPlugin({ + outFile: "src/appKitTypes.d.ts", + watchFolders: ["../config/queries"], + }), ], }); +``` -const app = await AppKit.server.start(); -/** - * A route could also be added here - */ -app.get("/api/open-weather/weather", async (req, res) => { - const data = await AppKit.openWeather.getWeather(40.7128, -74.0060); - res.send(data); -}); +Important nuance: + +- When the frontend is served through AppKit in dev mode, AppKit’s dev server already includes `appKitTypesPlugin()` internally. +- You still want it in your client build pipeline if you run `vite build` separately. + +### CLI: `appkit-generate-types` + +```bash +# Requires DATABRICKS_WAREHOUSE_ID (or pass as 3rd arg) +npx appkit-generate-types [rootDir] [outFile] [warehouseId] + +# Example: +npx appkit-generate-types . client/src/appKitTypes.d.ts + +# Force regeneration (skip cache): +npx appkit-generate-types --no-cache ``` +## Databricks Apps config: `app.yaml` -# ===================== -# Style Guidelines for AI -# ===================== -- Always prefer async/await (never .then chaining in examples). -- Always show explicit plugin config (no hidden defaults). -- Use ESModules (import/export), not require(). -- Use TypeScript typings in advanced examples if helpful. +Bind a SQL warehouse for Apps runtime: + +```yaml +env: + - name: DATABRICKS_WAREHOUSE_ID + valueFrom: sql-warehouse +``` + +Full example with command: + +```yaml +command: + - node + - build/index.mjs +env: + - name: DATABRICKS_WAREHOUSE_ID + valueFrom: sql-warehouse +``` -# ===================== -# Anti-Patterns (avoid in examples) -# ===================== -- ❌ Do not access AppKit internals (only use AppKit[pluginName]). -- ❌ Do not assume SQL queries hit a real DB (they return demo data unless configured). -- ❌ Do not show usage without createApp first. 
+## LLM checklist (before you "finalize" code)
+
+- **Project setup**
+  - `package.json` has `"type": "module"`
+  - `tsx` is in devDependencies for dev server
+  - `dev` script uses `NODE_ENV=development tsx watch server.ts`
+  - `index.html` exists with `<div id="root">
` and script pointing to `src/main.tsx` + +- **Backend** + - `await createApp({ plugins: [...] })` is used (or `void createApp` with intent) + - `server()` is included (always) + - If using SQL: `analytics({})` included + `config/queries/*.sql` present + - Queries use `:param` placeholders, and params are passed from UI using `sql.*` + - If query needs workspace scoping: uses `:workspaceId` + +- **Frontend** + - `useMemo` wraps parameters objects + - Loading/error/empty states are explicit + - Charts use `format="auto"` unless you have a reason to force `"json"`/`"arrow"` + - If using tooltips: root is wrapped with `` + +- **Never** + - Don't build SQL strings manually + - Don't pass untyped raw params for annotated queries + - Don't ignore `createApp()`'s promise + - Don't invent UI components not listed in this file -# ===================== -# Attribution -# ===================== -If AI-generated code uses this SDK, attribute: -"Powered by Databricks AppKit (https://github.com/...)". \ No newline at end of file From f175080dfd42efce1f1f9042c83f4c4092513eac Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 23 Dec 2025 15:43:05 +0000 Subject: [PATCH 2/4] docs: remove llms-compact --- llms-compact.txt | 22 ---------------------- 1 file changed, 22 deletions(-) delete mode 100644 llms-compact.txt diff --git a/llms-compact.txt b/llms-compact.txt deleted file mode 100644 index 3f8d694..0000000 --- a/llms-compact.txt +++ /dev/null @@ -1,22 +0,0 @@ -# llms-compact.txt — Quick AI reference for @databricks/appkit - -Project: Databricks AppKit -Version: 1.0.0 - -# Core Pattern -- Always call: createApp({ plugins }) -- Plugins available at: AppKit[pluginName] -- Use async/await only (no .then) -- ESModules (import/export) - -# Server Plugin -- AppKit.server.start(): Promise - -# Analytics Plugin -- AppKit.analytics.query.executeQuery({ query, parameters }: { query: string; parameters?: Record }, options?: ExecuteOptions): Promise; - -# Rules -- Explicit config in examples (no hidden defaults) -- Show async/await -- Never access AppKit internals directly -- if something is not clear enough check [llms.txt](./llms.txt) \ No newline at end of file From 033bcce0cb7b6a8b3a4d442026631213cb689cf2 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 23 Dec 2025 15:59:20 +0000 Subject: [PATCH 3/4] docs: add llms to use client/server structures --- llms.txt | 117 ++++++++++++++++++----------- packages/appkit/src/cache/index.ts | 20 +---- 2 files changed, 75 insertions(+), 62 deletions(-) diff --git a/llms.txt b/llms.txt index 545870e..5a22e19 100644 --- a/llms.txt +++ b/llms.txt @@ -25,30 +25,13 @@ This file is designed to work even when you *do not* have access to the AppKit s ## Canonical project layout -Recommended structure: - -``` -my-app/ -├── server.ts # backend entry point (or server/index.ts for complex apps) -├── index.html # Vite entry point -├── vite.config.ts # Vite config -├── src/ -│ ├── main.tsx # React entry point -│ └── App.tsx # Root component -├── config/ -│ └── queries/ -│ └── my_query.sql # SQL queries (optional, if using analytics) -├── app.yaml # Databricks Apps config (warehouse binding) -├── package.json -└── tsconfig.json -``` - -Alternative structure for larger apps: +Recommended structure (client/server split): ``` my-app/ ├── server/ -│ └── index.ts # backend entry point +│ ├── index.ts # backend entry point (AppKit) +│ └── .env # optional local dev env vars (do not commit) ├── client/ │ ├── index.html │ ├── vite.config.ts @@ -63,6 +46,12 @@ my-app/ └── tsconfig.json ``` +Why this 
layout: + +- The AppKit `server()` plugin automatically serves: + - **Dev**: Vite dev server (HMR) from `client/` + - **Prod**: static files from `client/dist` (built by Vite) + ## Project scaffolding (start here) ### `package.json` @@ -74,28 +63,54 @@ my-app/ "version": "0.0.0", "type": "module", "scripts": { - "dev": "NODE_ENV=development tsx watch server.ts", + "dev": "NODE_ENV=development tsx watch server/index.ts", + "build": "npm run build:server && npm run build:client", + "build:server": "tsdown --out-dir build server/index.ts", + "build:client": "cd client && npm run build", + "start": "node build/index.mjs" + }, + "dependencies": { + "@databricks/appkit": "^0.0.2" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "tsdown": "^0.15.7", + "tsx": "^4.19.0", + "typescript": "~5.6.0" + } +} +``` + +### `client/package.json` + +```json +{ + "name": "client", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", "build": "vite build", - "start": "NODE_ENV=production tsx server.ts" + "preview": "vite preview" }, "dependencies": { - "@databricks/appkit": "^0.0.2", "@databricks/appkit-ui": "^0.0.2", "react": "^18.0.0", - "react-dom": "^18.0.0" + "react-dom": "^18.0.0", + "recharts": "^3.0.0" }, "devDependencies": { "@types/react": "^18.0.0", "@types/react-dom": "^18.0.0", "@vitejs/plugin-react": "^5.0.0", - "tsx": "^4.19.0", "typescript": "~5.6.0", "vite": "^6.0.0" } } ``` -### `index.html` +### `client/index.html` ```html @@ -112,7 +127,7 @@ my-app/ ``` -### `src/main.tsx` +### `client/src/main.tsx` ```tsx import { StrictMode } from "react"; @@ -126,7 +141,7 @@ createRoot(document.getElementById("root")!).render( ); ``` -### `src/App.tsx` (minimal) +### `client/src/App.tsx` (minimal) ```tsx export default function App() { @@ -138,7 +153,7 @@ export default function App() { } ``` -### `vite.config.ts` +### `client/vite.config.ts` ```ts import { defineConfig } from "vite"; @@ -165,11 +180,11 @@ export default defineConfig({ "allowImportingTsExtensions": true, "verbatimModuleSyntax": true }, - "include": ["src", "server.ts"] + "include": ["server", "client/src"] } ``` -### `server.ts` +### `server/index.ts` ```ts import { createApp, server } from "@databricks/appkit"; @@ -184,6 +199,7 @@ await createApp({ ```bash # Install dependencies npm install +cd client && npm install && cd .. # Development (starts backend + Vite dev server) npm run dev @@ -200,11 +216,22 @@ If you already have a React/Vite app and want to add AppKit: ### 1. Install dependencies ```bash -npm install @databricks/appkit @databricks/appkit-ui -npm install -D tsx +npm install @databricks/appkit +npm install -D tsx tsdown + +# If you don't already have a client/ folder, create one and move your Vite app into it: +# - move index.html -> client/index.html +# - move vite.config.ts -> client/vite.config.ts +# - move src/ -> client/src/ +# +# Then install client deps: +cd client +npm install @databricks/appkit-ui react react-dom recharts +npm install -D vite @vitejs/plugin-react typescript +cd .. ``` -### 2. Create `server.ts` (new file) +### 2. 
Create `server/index.ts` (new file) ```ts import { createApp, server } from "@databricks/appkit"; @@ -219,23 +246,24 @@ await createApp({ ```json { "scripts": { - "dev": "NODE_ENV=development tsx watch server.ts", - "build": "vite build", - "start": "NODE_ENV=production tsx server.ts" + "dev": "NODE_ENV=development tsx watch server/index.ts", + "build": "npm run build:server && npm run build:client", + "build:server": "tsdown --out-dir build server/index.ts", + "build:client": "cd client && npm run build", + "start": "node build/index.mjs" } } ``` ### 4. That's it -- Your existing `vite.config.ts` stays the same -- Your existing `src/` folder stays the same -- AppKit's server plugin will automatically serve your Vite app in dev mode and the `dist/` folder in production +- AppKit's server plugin will automatically serve your Vite app in dev mode and `client/dist` in production. +- If your Vite app must stay at the repo root (no `client/` folder), AppKit can still work, but the recommended layout is `client/` + `server/`. ### Adding analytics to an existing app ```ts -// server.ts +// server/index.ts import { createApp, server, analytics } from "@databricks/appkit"; await createApp({ @@ -319,7 +347,7 @@ DATABRICKS_WAREHOUSE_ID=abc123... The smallest valid AppKit server: ```ts -// server.ts (or server/index.ts) +// server/index.ts import { createApp, server } from "@databricks/appkit"; await createApp({ @@ -980,8 +1008,9 @@ env: - **Project setup** - `package.json` has `"type": "module"` - `tsx` is in devDependencies for dev server - - `dev` script uses `NODE_ENV=development tsx watch server.ts` - - `index.html` exists with `
` and script pointing to `src/main.tsx`
+  - `dev` script uses `NODE_ENV=development tsx watch server/index.ts`
+  - `client/index.html` exists with `<div id="root">
` and script pointing to `client/src/main.tsx` + - `client/package.json` exists and includes `@databricks/appkit-ui` - **Backend** - `await createApp({ plugins: [...] })` is used (or `void createApp` with intent) diff --git a/packages/appkit/src/cache/index.ts b/packages/appkit/src/cache/index.ts index a8f0c9f..5f80fbf 100644 --- a/packages/appkit/src/cache/index.ts +++ b/packages/appkit/src/cache/index.ts @@ -128,12 +128,7 @@ export class CacheManager { return new CacheManager(config.storage, config); } - console.warn("[Cache] Provided storage health check failed"); - if (config.strictPersistence) { - console.warn( - "[Cache] strictPersistence enabled but provided storage unhealthy. Cache disabled.", - ); const disabledConfig = { ...config, enabled: false }; return new CacheManager( new InMemoryStorage(disabledConfig), @@ -141,7 +136,6 @@ export class CacheManager { ); } - console.warn("[Cache] Falling back to in-memory cache."); return new CacheManager(new InMemoryStorage(config), config); } @@ -156,20 +150,11 @@ export class CacheManager { await persistentStorage.initialize(); return new CacheManager(persistentStorage, config); } - - console.warn( - "[Cache] Lakebase health check failed, default storage unhealthy", - ); - } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); - console.warn(`[Cache] Lakebase unavailable: ${errorMessage}`); + } catch { + // lakebase unavailable, continue with in-memory storage } if (config.strictPersistence) { - console.warn( - "[Cache] strictPersistence enabled but lakebase unavailable. Cache disabled.", - ); const disabledConfig = { ...config, enabled: false }; return new CacheManager( new InMemoryStorage(disabledConfig), @@ -177,7 +162,6 @@ export class CacheManager { ); } - console.warn("[Cache] Falling back to in-memory cache."); return new CacheManager(new InMemoryStorage(config), config); } From c294022df1ef0f1b0159763dbe3fab4739c0fb81 Mon Sep 17 00:00:00 2001 From: Ditadi Date: Tue, 23 Dec 2025 16:09:54 +0000 Subject: [PATCH 4/4] test: fix tests --- llms.txt | 44 ++++++++++ .../src/cache/tests/cache-manager.test.ts | 83 ++++--------------- 2 files changed, 59 insertions(+), 68 deletions(-) diff --git a/llms.txt b/llms.txt index 5a22e19..40873f5 100644 --- a/llms.txt +++ b/llms.txt @@ -23,6 +23,15 @@ This file is designed to work even when you *do not* have access to the AppKit s - **Never construct SQL strings dynamically**. Use parameterized queries with `:paramName`. - **Never use `require()`**. Use ESM `import/export`. +## TypeScript import rules (when using `verbatimModuleSyntax`) + +If your `tsconfig.json` uses `"verbatimModuleSyntax": true`, **always use `import type` for type-only imports** (otherwise builds can fail in strict setups): + +```ts +import type { ReactNode } from "react"; +import { useMemo } from "react"; +``` + ## Canonical project layout Recommended structure (client/server split): @@ -554,6 +563,16 @@ Facts: - Returns `{ data, loading, error }` where `data` is `null` until loaded. - `format` is `"JSON"` or `"ARROW"` (uppercase). +When to use it: + +- Use `useAnalyticsQuery` **only** when you need a custom UI (cards/KPIs/forms/conditional rendering). +- If you just need a standard chart or table, prefer the built-in components (`BarChart`, `LineChart`, `DataTable`, etc.) so you don’t re-implement loading/error/empty states. + +Limitations (common LLM pitfall): + +- There is **no `enabled` option**. Use conditional rendering to mount/unmount the component. 
+- There is **no `refetch()`**. Change `parameters` (memoized) or re-mount to re-run the query. + Recommended usage pattern (memoized params + explicit states): ```tsx @@ -606,6 +625,17 @@ Available chart components: - `BarChart`, `LineChart`, `AreaChart`, `PieChart`, `DonutChart`, `HeatmapChart`, `ScatterChart`, `RadarChart` +Avoid double-fetching: + +```tsx +// ❌ Wrong: fetches the same query twice +// const { data } = useAnalyticsQuery("spend_data", params); +// return ; + +// ✅ Correct: let the chart fetch +return ; +``` + Query mode (recommended for Databricks-backed analytics): ```tsx @@ -653,6 +683,16 @@ Binary parameters (important): - `sql.binary(value)` returns a **STRING marker containing hex**, so use `UNHEX(:param)` in SQL. - `sql.binary` accepts `Uint8Array`, `ArrayBuffer`, or a hex string. +### SQL result types (important) + +Databricks SQL JSON results can return some numeric-like fields (especially `DECIMAL`) as strings. If a field behaves like a string at runtime, convert explicitly: + +```ts +const value = Number(row.amount); +``` + +If you need more reliable numeric fidelity for large datasets, prefer `format: "ARROW"` and process Arrow on the client. + ### `connectSSE` (custom SSE connections) For custom streaming endpoints (not analytics), use the `connectSSE` utility: @@ -768,6 +808,10 @@ AppKit-UI ships shadcn-style primitives. Import from `@databricks/appkit-ui/reac Note: Exact exports can vary by AppKit-UI version. Prefer using IDE auto-import/autocomplete to confirm what your installed version exports. +Radix constraint (common bug): + +- `SelectItem` cannot have `value=""`. Use a sentinel value like `"all"` or `"none"`. + **Available components:** `Accordion`, `Alert`, `AlertDialog`, `AspectRatio`, `Avatar`, `Badge`, `Breadcrumb`, `Button`, `ButtonGroup`, `Calendar`, `Card`, `CardHeader`, `CardTitle`, `CardDescription`, `CardContent`, `CardFooter`, `Carousel`, `Checkbox`, `Collapsible`, `Command`, `ContextMenu`, `Dialog`, `DialogTrigger`, `DialogContent`, `DialogHeader`, `DialogTitle`, `DialogDescription`, `DialogFooter`, `Drawer`, `DropdownMenu`, `Empty`, `Field`, `Form`, `HoverCard`, `Input`, `InputGroup`, `InputOtp`, `Item`, `Kbd`, `Label`, `Menubar`, `NavigationMenu`, `Pagination`, `Popover`, `Progress`, `RadioGroup`, `Resizable`, `ScrollArea`, `Select`, `SelectTrigger`, `SelectValue`, `SelectContent`, `SelectItem`, `Separator`, `Sheet`, `Sidebar`, `Skeleton`, `Slider`, `Sonner`, `Spinner`, `Switch`, `Table`, `Tabs`, `TabsList`, `TabsTrigger`, `TabsContent`, `Textarea`, `Toggle`, `ToggleGroup`, `Tooltip`, `TooltipTrigger`, `TooltipContent`, `TooltipProvider` diff --git a/packages/appkit/src/cache/tests/cache-manager.test.ts b/packages/appkit/src/cache/tests/cache-manager.test.ts index 7fc45c5..65d6c0f 100644 --- a/packages/appkit/src/cache/tests/cache-manager.test.ts +++ b/packages/appkit/src/cache/tests/cache-manager.test.ts @@ -565,19 +565,12 @@ describe("CacheManager", () => { (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - // Pass an unhealthy storage with strictPersistence: true const cache = await CacheManager.getInstance({ storage: createUnhealthyMockStorage(), strictPersistence: true, }); - // Should have logged about strictPersistence - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("strictPersistence"), - ); - // Cache should be disabled const fn = vi.fn().mockResolvedValue("result"); await cache.getOrExecute(["key"], 
fn, "user1"); @@ -585,8 +578,6 @@ describe("CacheManager", () => { // Function called twice because cache is disabled expect(fn).toHaveBeenCalledTimes(2); - - consoleSpy.mockRestore(); }); }); @@ -596,45 +587,31 @@ describe("CacheManager", () => { (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - // Pass an unhealthy storage, should fallback to in-memory const cache = await CacheManager.getInstance({ storage: createUnhealthyMockStorage(), strictPersistence: false, }); - // Should log fallback message - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("[Cache]"), - ); - // Cache should still work (in-memory fallback) await cache.set("test-key", "value"); const result = await cache.get("test-key"); expect(result).toBe("value"); - - consoleSpy.mockRestore(); }); - test("should log warning when provided storage health check fails", async () => { + test("should use in-memory storage when provided storage health check fails", async () => { // Reset singleton (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - - await CacheManager.getInstance({ + const cache = await CacheManager.getInstance({ storage: createUnhealthyMockStorage(), strictPersistence: false, }); - // Should have logged about storage health check failing - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("[Cache] Provided storage health check failed"), - ); - - consoleSpy.mockRestore(); + // Should be using in-memory storage (not persistent) + const storage = (cache as any).storage; + expect(storage.isPersistent()).toBe(false); }); }); @@ -659,18 +636,11 @@ describe("CacheManager", () => { (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - // Lakebase unhealthy (default in beforeEach) mockLakebaseHealthCheck.mockResolvedValue(false); const cache = await CacheManager.getInstance({}); - // Should log fallback message - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("[Cache] Falling back to in-memory cache"), - ); - // Cache should work (in-memory fallback) await cache.set("test-key", "value"); const result = await cache.get("test-key"); @@ -679,8 +649,6 @@ describe("CacheManager", () => { // Storage should not be persistent const storage = (cache as any).storage; expect(storage.isPersistent()).toBe(false); - - consoleSpy.mockRestore(); }); test("should disable cache when Lakebase unavailable and strictPersistence is true", async () => { @@ -688,8 +656,6 @@ describe("CacheManager", () => { (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - // Lakebase unhealthy mockLakebaseHealthCheck.mockResolvedValue(false); @@ -697,13 +663,6 @@ describe("CacheManager", () => { strictPersistence: true, }); - // Should have logged about strictPersistence - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining( - "strictPersistence enabled but lakebase unavailable", - ), - ); - // Cache should be disabled const fn = vi.fn().mockResolvedValue("result"); await cache.getOrExecute(["key"], fn, "user1"); @@ -711,50 +670,38 @@ describe("CacheManager", () => { // Function called twice because cache is disabled expect(fn).toHaveBeenCalledTimes(2); - - 
consoleSpy.mockRestore(); }); - test("should log warning when Lakebase health check fails", async () => { + test("should use in-memory storage when Lakebase health check fails", async () => { // Reset singleton (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - // Lakebase unhealthy mockLakebaseHealthCheck.mockResolvedValue(false); - await CacheManager.getInstance({}); - - // Should have logged about Lakebase health check - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("[Cache] Lakebase health check failed"), - ); + const cache = await CacheManager.getInstance({}); - consoleSpy.mockRestore(); + // Should be using in-memory storage + const storage = (cache as any).storage; + expect(storage.isPersistent()).toBe(false); }); - test("should log warning when Lakebase throws an error", async () => { + test("should use in-memory storage when Lakebase throws an error", async () => { // Reset singleton (CacheManager as any).instance = null; (CacheManager as any).initPromise = null; - const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {}); - // Lakebase throws mockLakebaseHealthCheck.mockRejectedValue( new Error("Connection refused"), ); - await CacheManager.getInstance({}); - - // Should have logged about Lakebase being unavailable - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("[Cache] Lakebase unavailable"), - ); + const cache = await CacheManager.getInstance({}); - consoleSpy.mockRestore(); + // Should be using in-memory storage + const storage = (cache as any).storage; + expect(storage.isPersistent()).toBe(false); }); }); });