diff --git a/00_core.ipynb b/00_core.ipynb
index 781a766..b85edcf 100644
--- a/00_core.ipynb
+++ b/00_core.ipynb
@@ -33,15 +33,24 @@
"from pathlib import Path\n",
"from typing import Any, Optional, Union, Iterable, Generator, List, Tuple, Dict, get_args\n",
"\n",
- "from sqlalchemy.orm import Session\n",
+ "from contextlib import contextmanager\n",
+ "import contextvars, threading\n",
"from fastcore.utils import *\n",
- "from fastcore.test import test_fail, test_eq\n",
"from fastcore.xtras import dataclass_src\n",
- "from itertools import starmap\n",
"\n",
"import sqlparse, sqlalchemy as sa, subprocess"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0924c4a5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from fastcore.test import *"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -54,7 +63,9 @@
" pass\n",
"\n",
"\n",
- "DEFAULT = Default()\n"
+ "DEFAULT = Default()\n",
+ "\n",
+ "_ctx_conn = contextvars.ContextVar('fastsql_conn', default=None)\n"
]
},
{
@@ -127,15 +138,37 @@
" self.meta = sa.MetaData()\n",
" self.meta.reflect(bind=self.engine)\n",
" self.meta.bind = self.engine\n",
- " self.conn = self.engine.connect()\n",
- " self.meta.conn = self.conn\n",
- " self._tables = {}\n",
- "\n",
- " def execute(self, st, params=None, opts=None): return self.conn.execute(st, params, execution_options=opts)\n",
+ " self._tables, self._tables_lock = {}, threading.Lock()\n",
+ "\n",
+ " @contextmanager\n",
+ " def conn(self, write=False):\n",
+ " \"Yield a connection; uses current tx connection if present.\"\n",
+ " if conn := _ctx_conn.get(): yield conn; return\n",
+ " cm = self.engine.begin() if write else self.engine.connect()\n",
+ " with cm as conn: yield conn\n",
+ "\n",
+ " @contextmanager\n",
+ " def tx(self):\n",
+ " \"Transaction scope; reuses current conn, uses SAVEPOINT when nested.\"\n",
+ " if conn := _ctx_conn.get():\n",
+ " with conn.begin_nested(): yield self\n",
+ " return\n",
+ " with self.engine.begin() as conn:\n",
+ " tok = _ctx_conn.set(conn)\n",
+ " try: yield self\n",
+ " finally: _ctx_conn.reset(tok)\n",
+ "\n",
+ " def execute(self, st, params=None, opts=None):\n",
+ " \"Execute `st` and return buffered results or rowcount\"\n",
+ " with self.conn(write=True) as conn:\n",
+ " res = conn.execute(st, params, execution_options=opts)\n",
+ " return res.mappings().all() if res.returns_rows else res.rowcount\n",
+ "\n",
+ " def close(self):\n",
+ " self.engine.dispose()\n",
"\n",
- " def close(self): self.conn.close()\n",
- "\n",
- " def __repr__(self): return f\"Database({self.conn_str})\"\n"
+ " def __repr__(self):\n",
+ " return f\"Database({self.conn_str})\"\n"
]
},
{
@@ -203,8 +236,9 @@
"@patch\n",
"def q(self: Database, sql: str, **params):\n",
" \"Query database with raw SQL and optional parameters. Returns list of dicts.\"\n",
- " result = self.execute(sa.text(sql), params=params)\n",
- " if result.returns_rows: return list(map(dict, result.mappings()))\n",
+ " res = self.execute(sa.text(sql), params=params)\n",
+ " if isinstance(res, list):\n",
+ " return list(map(dict, res))\n",
" return []\n"
]
},
@@ -308,11 +342,39 @@
"\n",
" def __init__(self, table: sa.Table, db: Database, cls, _exists=None):\n",
" store_attr()\n",
+ " self._xtra_var = contextvars.ContextVar(f'xtra_{self.table.name}_{id(self)}', default={})\n",
+ " self._result_var = contextvars.ContextVar(f'result_{self.table.name}_{id(self)}', default=[])\n",
" self.xtra_id, self.result = {}, []\n",
" if len(table.columns) > 0:\n",
" table.create(self.db.engine, checkfirst=True)\n",
" self._exists = True\n",
"\n",
+ " @property\n",
+ " def xtra_id(self): return self._xtra_var.get()\n",
+ "\n",
+ " @xtra_id.setter\n",
+ " def xtra_id(self, v): self._xtra_var.set(v or {})\n",
+ "\n",
+ " @property\n",
+ " def result(self): return self._result_var.get()\n",
+ "\n",
+ " @result.setter\n",
+ " def result(self, v): self._result_var.set(v or [])\n",
+ "\n",
+ " def _one(self, stmt, params=None, write=False):\n",
+ " with self.db.conn(write) as conn: return conn.execute(stmt, params).one()\n",
+ "\n",
+ " def _one_or_none(self, stmt, params=None, write=False):\n",
+ " with self.db.conn(write) as conn: return conn.execute(stmt, params).one_or_none()\n",
+ "\n",
+ " def _all(self, stmt, params=None, write=False, mappings=False):\n",
+ " with self.db.conn(write) as conn:\n",
+ " res = conn.execute(stmt, params)\n",
+ " return res.mappings().all() if mappings else res.fetchall()\n",
+ "\n",
+ " def _scalar(self, stmt, params=None, write=False):\n",
+ " with self.db.conn(write) as conn: return conn.execute(stmt, params).scalar_one()\n",
+ "\n",
" def __repr__(self):\n",
" if self._exists is False or (self._exists is None and len(self.table.columns) == 0):\n",
" return f\"
\"\n",
@@ -321,10 +383,6 @@
" def __str__(self):\n",
" return f'\"{self.table.name}\"'\n",
"\n",
- " @property\n",
- " def conn(self):\n",
- " return self.db.conn\n",
- "\n",
" def xtra(self, **kwargs):\n",
" \"Set `xtra_id`\"\n",
" self.xtra_id = kwargs\n"
@@ -362,8 +420,7 @@
"source": [
"#| export\n",
"@patch(as_prop=True)\n",
- "def t(self: DBTable):\n",
- " return self.table, self.table.c\n"
+ "def t(self: DBTable): return self.table, self.table.c"
]
},
{
@@ -468,24 +525,26 @@
"#| export\n",
"@patch\n",
"def table(self: Database, nm: str, cls=None):\n",
- " if nm in self._tables: return self._tables[nm]\n",
+ " # Thread-safe cache: multiple requests can hit this concurrently.\n",
+ " with self._tables_lock:\n",
+ " if nm in self._tables: return self._tables[nm]\n",
"\n",
- " if nm in self.meta.tables:\n",
- " tbl = self.meta.tables[nm]\n",
- " exists = True\n",
- " else:\n",
- " inspector = sa.inspect(self.engine)\n",
- " if nm in inspector.get_table_names() or nm in inspector.get_view_names():\n",
- " tbl = sa.Table(nm, self.meta, autoload_with=self.engine)\n",
+ " if nm in self.meta.tables:\n",
+ " tbl = self.meta.tables[nm]\n",
" exists = True\n",
" else:\n",
- " tbl = sa.Table(nm, self.meta)\n",
- " exists = False\n",
+ " inspector = sa.inspect(self.engine)\n",
+ " if nm in inspector.get_table_names() or nm in inspector.get_view_names():\n",
+ " tbl = sa.Table(nm, self.meta, autoload_with=self.engine)\n",
+ " exists = True\n",
+ " else:\n",
+ " tbl = sa.Table(nm, self.meta)\n",
+ " exists = False\n",
"\n",
- " if cls is None and hasattr(tbl, \"cls\"): cls = tbl.cls\n",
- " res = DBTable(tbl, self, cls, _exists=exists)\n",
- " self._tables[nm] = res\n",
- " return res\n"
+ " if cls is None and hasattr(tbl, \"cls\"): cls = tbl.cls\n",
+ " res = DBTable(tbl, self, cls, _exists=exists)\n",
+ " self._tables[nm] = res\n",
+ " return res\n"
]
},
{
@@ -554,7 +613,7 @@
"def database(path, wal=True, **kwargs) -> Any:\n",
" \"Create a `Database` from a path or connection string\"\n",
" conn_str = _db_str(path)\n",
- " db = Database(conn_str)\n",
+ " db = Database(conn_str, engine_kws=kwargs)\n",
" if wal and str(conn_str).startswith(\"sqlite:\"): db.execute(sa.text(\"PRAGMA journal_mode=WAL\"))\n",
" return db\n"
]
@@ -742,21 +801,17 @@
" stmt = ins.on_conflict_do_update(index_elements=pk, set_=record).returning(\n",
" *self.table.columns\n",
" )\n",
- " row = self.conn.execute(stmt).one()\n",
- " self.conn.commit()\n",
- " return _row_to_obj(self, row)\n",
+ " return _row_to_obj(self, self._one(stmt, write=True))\n",
" if dialect in (\"mysql\", \"mariadb\"):\n",
" from sqlalchemy.dialects.mysql import insert as dialect_insert\n",
"\n",
" ins = dialect_insert(self.table).values(**record)\n",
" stmt = ins.on_duplicate_key_update(**record)\n",
- " result = self.conn.execute(stmt)\n",
- " self.conn.commit()\n",
- " try:\n",
- " row = result.one()\n",
- " return _row_to_obj(self, row)\n",
- " except Exception:\n",
- " return self.get([record[k] for k in pk])\n",
+ " with self.db.conn(True) as conn:\n",
+ " try: row = conn.execute(stmt).one()\n",
+ " except Exception: row = None\n",
+ " if row is not None: return _row_to_obj(self, row)\n",
+ " return self.get([record[k] for k in pk])\n",
" existing = None\n",
" try:\n",
" existing = self.get([record[k] for k in pk])\n",
@@ -987,13 +1042,10 @@
"source": [
"#| export\n",
"@patch\n",
- "def table_names(self: Database):\n",
- " return sa.inspect(self.engine).get_table_names()\n",
- "\n",
+ "def table_names(self: Database): return sa.inspect(self.engine).get_table_names()\n",
"\n",
"@patch\n",
- "def view_names(self: Database):\n",
- " return sa.inspect(self.engine).get_view_names()\n"
+ "def view_names(self: Database): return sa.inspect(self.engine).get_view_names()"
]
},
{
@@ -1378,12 +1430,8 @@
" record = {**record, **kwargs}\n",
" if not record: return {}\n",
" record = {**record, **self.xtra_id}\n",
- " result = self.conn.execute(\n",
- " sa.insert(self.table).values(**record).returning(*self.table.columns)\n",
- " )\n",
- " row = result.one()\n",
- " self.conn.commit()\n",
- " return _row_to_obj(self, row)\n"
+ " stmt = sa.insert(self.table).values(**record).returning(*self.table.columns)\n",
+ " return _row_to_obj(self, self._one(stmt, write=True))\n"
]
},
{
@@ -1498,10 +1546,7 @@
" self.result = []\n",
" return self\n",
" stmt = sa.insert(self.table).returning(*self.table.columns)\n",
- " result = self.conn.execute(stmt, recs)\n",
- " rows = result.fetchall()\n",
- " self.conn.commit()\n",
- " self.result = [_row_to_obj(self, r) for r in rows]\n",
+ " self.result = [_row_to_obj(self, r) for r in self._all(stmt, recs, write=True)]\n",
" return self\n"
]
},
@@ -1597,8 +1642,8 @@
" **kw,\n",
") -> int:\n",
" stmt = sa.select(sa.func.count()).select_from(self.table)\n",
- " if where: stmt = stmt.where(_where(where, where_args, **kw))\n",
- " return int(self.conn.execute(stmt).scalar_one())\n"
+ " if where or self.xtra_id or kw: stmt = stmt.where(_where(where, where_args, self.xtra_id, **kw))\n",
+ " return int(self._scalar(stmt))\n"
]
},
{
@@ -1676,7 +1721,7 @@
" if order_by: query = query.order_by(sa.text(order_by))\n",
" if limit is not None: query = query.limit(limit)\n",
" if offset is not None: query = query.offset(offset)\n",
- " rows = self.conn.execute(query).mappings().all()\n",
+ " rows = self._all(query, write=False, mappings=True)\n",
" for row in rows: yield dict(row)\n"
]
},
@@ -2024,8 +2069,7 @@
" if len(cols) != len(vals): raise NotFoundError(f\"Need {len(cols)} pk\")\n",
" cond = sa.and_(*[col == val for col, val in zip(cols, vals)])\n",
" qry = sa.select(self.table).where(cond)\n",
- " result = self.conn.execute(qry).first()\n",
- " if not result:\n",
+ " if not (result := self._one_or_none(qry)):\n",
" if default is UNSET: raise NotFoundError()\n",
" return default\n",
" return _row_to_obj(self, result, as_cls=as_cls)\n",
@@ -2137,12 +2181,7 @@
" if pk_values is None: pk_values = [d[o.name] for o in self.table.primary_key]\n",
" else: pk_values = listify(pk_values)\n",
" qry = self._pk_where(\"update\", pk_values).values(**d).returning(*self.table.columns)\n",
- " result = self.conn.execute(qry)\n",
- " if (row := result.one_or_none()) is None:\n",
- " self.conn.rollback()\n",
- " raise NotFoundError()\n",
- "\n",
- " self.conn.commit()\n",
+ " if (row := self._one_or_none(qry, write=True)) is None: raise NotFoundError()\n",
" return _row_to_obj(self, row)"
]
},
@@ -2204,37 +2243,9 @@
") -> list:\n",
" \"Update rows matching `where` with `updates`. Returns updated rows.\"\n",
" stmt = self.table.update().values(**updates)\n",
- " if where: stmt = stmt.where(_where(where, where_args, xtra or getattr(self, \"xtra_id\", {}), **kw))\n",
- " rows = self.conn.execute(stmt.returning(*self.table.columns)).fetchall()\n",
- " self.conn.commit()\n",
- " return [_row_to_obj(self, r) for r in rows]\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d2106980",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| export\n",
- "@patch\n",
- "def update_where(\n",
- " self: DBTable,\n",
- " updates: dict,\n",
- " where: str | None = None,\n",
- " where_args: dict | Iterable | None = None,\n",
- " xtra: dict | None = None,\n",
- " **kw,\n",
- ") -> list:\n",
- " \"Update rows matching `where` with `updates`. Returns updated rows.\"\n",
- " stmt = self.table.update().values(**updates)\n",
- " if where:\n",
- " stmt = stmt.where(\n",
- " _where(where, where_args, xtra or getattr(self, \"xtra_id\", {}), **kw)\n",
- " )\n",
- " rows = self.conn.execute(stmt.returning(*self.table.columns)).fetchall()\n",
- " self.conn.commit()\n",
+ " xtra = xtra or getattr(self, 'xtra_id', {})\n",
+ " if where or xtra or kw: stmt = stmt.where(_where(where, where_args, xtra, **kw))\n",
+ " rows = self._all(stmt.returning(*self.table.columns), write=True)\n",
" return [_row_to_obj(self, r) for r in rows]\n"
]
},
@@ -2271,12 +2282,8 @@
"@patch\n",
"def delete(self: DBTable, key):\n",
" \"Delete item with PK `key` and return the deleted object\"\n",
- " result = self.conn.execute(\n",
- " self._pk_where(\"delete\", key).returning(*self.table.columns)\n",
- " )\n",
- " row = result.one()\n",
- " self.conn.commit()\n",
- " return _row_to_obj(self, row)\n"
+ " stmt = self._pk_where('delete', key).returning(*self.table.columns)\n",
+ " return _row_to_obj(self, self._one(stmt, write=True))\n"
]
},
{
@@ -2307,9 +2314,9 @@
" **kw,\n",
"):\n",
" stmt = self.table.delete()\n",
- " if where: stmt = stmt.where(_where(where, where_args, xtra or getattr(self, \"xtra_id\", {}), **kw))\n",
- " rows = self.conn.execute(stmt.returning(*self.table.columns)).fetchall()\n",
- " self.conn.commit()\n",
+ " xtra = xtra or getattr(self, 'xtra_id', {})\n",
+ " if where or xtra or kw: stmt = stmt.where(_where(where, where_args, xtra, **kw))\n",
+ " rows = self._all(stmt.returning(*self.table.columns), write=True)\n",
" return [_row_to_obj(self, r) for r in rows]\n"
]
},
@@ -2451,8 +2458,7 @@
"def drop(self: DBTable, ignore: bool = False):\n",
" \"Drop this table from the database\"\n",
" try:\n",
- " self.table.drop(self.db.engine)\n",
- " self.conn.commit()\n",
+ " with self.db.conn(True) as conn: self.table.drop(conn)\n",
" if self.table.name in self.db._tables: del self.db._tables[self.table.name]\n",
" if self.table.name in self.db.meta.tables: self.db.meta.remove(self.table)\n",
" except Exception as e:\n",
@@ -2699,25 +2705,20 @@
"@patch\n",
"def migrate(self:Database, mdir):\n",
" if '_meta' not in self.t: self._add_meta()\n",
- " cver = self.version\n",
" for v, p in _get_migrations(mdir)[self.version:]:\n",
- " try:\n",
- " if p.suffix == '.sql':\n",
+ " if p.suffix == '.sql':\n",
+ " with self.tx():\n",
" for stmt in filter(str.strip, sqlparse.split(p.read_text())): self.execute(sa.text(stmt))\n",
- " elif p.suffix == '.py':\n",
- " subprocess.run([sys.executable, p, self.conn_str], check=True)\n",
+ " self.version = v\n",
+ " elif p.suffix == '.py':\n",
+ " subprocess.run([sys.executable, p, self.conn_str], check=True)\n",
" self.version = v\n",
- " self.conn.commit()\n",
- " print(f\"Applied migration {v}: {p.name}\")\n",
- " except Exception as e:\n",
- " self.conn.rollback()\n",
- " raise e\n",
- " self.conn.commit()\n",
- " cls_map = {nm: tbl.cls for nm, tbl in self._tables.items() if tbl.cls}\n",
+ " print(f\"Applied migration {v}: {p.name}\")\n",
+ "\n",
" self._tables.clear()\n",
" self.meta.clear()\n",
" self.meta.reflect(bind=self.engine)\n",
- " for tbl in self.t: tbl.dataclass()"
+ " for tbl in self.t: tbl.dataclass()\n"
]
},
{
@@ -2926,14 +2927,17 @@
" t = self.execute(statement)\n",
" try:\n",
" return t.tuples()\n",
- " except ResourceClosedError:\n",
+ " except sa.exc.ResourceClosedError:\n",
" pass # statement didn't return anything\n",
"\n",
"\n",
"@patch\n",
"def sql(self: MetaData, statement, *args, **kwargs):\n",
- " \"Execute `statement` string and return `DataFrame` of results (if any)\"\n",
- " return self.conn.sql(statement, *args, **kwargs)\n"
+ " \"Execute `statement` string and return results (if any)\"\n",
+ " if conn := _ctx_conn.get(): return conn.sql(statement, *args, **kwargs)\n",
+ " eng = getattr(self, 'bind', None)\n",
+ " if eng is None: raise AttributeError('MetaData.bind is not set')\n",
+ " with eng.connect() as conn: return conn.sql(statement, *args, **kwargs)\n"
]
},
{
@@ -2980,7 +2984,7 @@
"@patch\n",
"def get(self: Table, where=None, limit=None):\n",
" \"Select from table, optionally limited by `where` and `limit` clauses\"\n",
- " return self.metadata.conn.sql(self.select().where(where).limit(limit))\n"
+ " return self.metadata.sql(self.select().where(where).limit(limit))\n"
]
},
{
@@ -3044,7 +3048,8 @@
"@patch\n",
"def close(self: MetaData):\n",
" \"Close the connection\"\n",
- " self.conn.close()"
+ " eng = getattr(self, 'bind', None)\n",
+ " if eng is not None: eng.dispose()"
]
},
{
diff --git a/fastsql/_modidx.py b/fastsql/_modidx.py
index f5f1783..29db7de 100644
--- a/fastsql/_modidx.py
+++ b/fastsql/_modidx.py
@@ -15,9 +15,12 @@
'fastsql.core.DBTable.__len__': ('core.html#dbtable.__len__', 'fastsql/core.py'),
'fastsql.core.DBTable.__repr__': ('core.html#dbtable.__repr__', 'fastsql/core.py'),
'fastsql.core.DBTable.__str__': ('core.html#dbtable.__str__', 'fastsql/core.py'),
+ 'fastsql.core.DBTable._all': ('core.html#dbtable._all', 'fastsql/core.py'),
+ 'fastsql.core.DBTable._one': ('core.html#dbtable._one', 'fastsql/core.py'),
+ 'fastsql.core.DBTable._one_or_none': ('core.html#dbtable._one_or_none', 'fastsql/core.py'),
'fastsql.core.DBTable._pk_where': ('core.html#dbtable._pk_where', 'fastsql/core.py'),
+ 'fastsql.core.DBTable._scalar': ('core.html#dbtable._scalar', 'fastsql/core.py'),
'fastsql.core.DBTable.c': ('core.html#dbtable.c', 'fastsql/core.py'),
- 'fastsql.core.DBTable.conn': ('core.html#dbtable.conn', 'fastsql/core.py'),
'fastsql.core.DBTable.count': ('core.html#dbtable.count', 'fastsql/core.py'),
'fastsql.core.DBTable.count_where': ('core.html#dbtable.count_where', 'fastsql/core.py'),
'fastsql.core.DBTable.create': ('core.html#dbtable.create', 'fastsql/core.py'),
@@ -31,6 +34,7 @@
'fastsql.core.DBTable.lookup': ('core.html#dbtable.lookup', 'fastsql/core.py'),
'fastsql.core.DBTable.pks': ('core.html#dbtable.pks', 'fastsql/core.py'),
'fastsql.core.DBTable.pks_and_rows_where': ('core.html#dbtable.pks_and_rows_where', 'fastsql/core.py'),
+ 'fastsql.core.DBTable.result': ('core.html#dbtable.result', 'fastsql/core.py'),
'fastsql.core.DBTable.rows': ('core.html#dbtable.rows', 'fastsql/core.py'),
'fastsql.core.DBTable.rows_where': ('core.html#dbtable.rows_where', 'fastsql/core.py'),
'fastsql.core.DBTable.schema': ('core.html#dbtable.schema', 'fastsql/core.py'),
@@ -40,12 +44,14 @@
'fastsql.core.DBTable.update_where': ('core.html#dbtable.update_where', 'fastsql/core.py'),
'fastsql.core.DBTable.upsert': ('core.html#dbtable.upsert', 'fastsql/core.py'),
'fastsql.core.DBTable.xtra': ('core.html#dbtable.xtra', 'fastsql/core.py'),
+ 'fastsql.core.DBTable.xtra_id': ('core.html#dbtable.xtra_id', 'fastsql/core.py'),
'fastsql.core.Database': ('core.html#database', 'fastsql/core.py'),
'fastsql.core.Database.__getitem__': ('core.html#database.__getitem__', 'fastsql/core.py'),
'fastsql.core.Database.__init__': ('core.html#database.__init__', 'fastsql/core.py'),
'fastsql.core.Database.__repr__': ('core.html#database.__repr__', 'fastsql/core.py'),
'fastsql.core.Database._add_meta': ('core.html#database._add_meta', 'fastsql/core.py'),
'fastsql.core.Database.close': ('core.html#database.close', 'fastsql/core.py'),
+ 'fastsql.core.Database.conn': ('core.html#database.conn', 'fastsql/core.py'),
'fastsql.core.Database.create': ('core.html#database.create', 'fastsql/core.py'),
'fastsql.core.Database.create_view': ('core.html#database.create_view', 'fastsql/core.py'),
'fastsql.core.Database.execute': ('core.html#database.execute', 'fastsql/core.py'),
@@ -58,6 +64,7 @@
'fastsql.core.Database.t': ('core.html#database.t', 'fastsql/core.py'),
'fastsql.core.Database.table': ('core.html#database.table', 'fastsql/core.py'),
'fastsql.core.Database.table_names': ('core.html#database.table_names', 'fastsql/core.py'),
+ 'fastsql.core.Database.tx': ('core.html#database.tx', 'fastsql/core.py'),
'fastsql.core.Database.v': ('core.html#database.v', 'fastsql/core.py'),
'fastsql.core.Database.view_names': ('core.html#database.view_names', 'fastsql/core.py'),
'fastsql.core.Default': ('core.html#default', 'fastsql/core.py'),
diff --git a/fastsql/core.py b/fastsql/core.py
index b95a92a..b4f3c1e 100644
--- a/fastsql/core.py
+++ b/fastsql/core.py
@@ -10,11 +10,10 @@
from pathlib import Path
from typing import Any, Optional, Union, Iterable, Generator, List, Tuple, Dict, get_args
-from sqlalchemy.orm import Session
+from contextlib import contextmanager
+import contextvars, threading
from fastcore.utils import *
-from fastcore.test import test_fail, test_eq
from fastcore.xtras import dataclass_src
-from itertools import starmap
import sqlparse, sqlalchemy as sa, subprocess
@@ -25,6 +24,8 @@ class Default:
DEFAULT = Default()
+_ctx_conn = contextvars.ContextVar('fastsql_conn', default=None)
+
# %% ../00_core.ipynb #4a52cb19
def _db_str(path):
@@ -45,23 +46,46 @@ def __init__(self, conn_str, engine_kws=None):
self.meta = sa.MetaData()
self.meta.reflect(bind=self.engine)
self.meta.bind = self.engine
- self.conn = self.engine.connect()
- self.meta.conn = self.conn
- self._tables = {}
-
- def execute(self, st, params=None, opts=None): return self.conn.execute(st, params, execution_options=opts)
-
- def close(self): self.conn.close()
+ self._tables, self._tables_lock = {}, threading.Lock()
+
+ @contextmanager
+ def conn(self, write=False):
+ "Yield a connection; uses current tx connection if present."
+ if conn := _ctx_conn.get(): yield conn; return
+ cm = self.engine.begin() if write else self.engine.connect()
+ with cm as conn: yield conn
+
+ @contextmanager
+ def tx(self):
+ "Transaction scope; reuses current conn, uses SAVEPOINT when nested."
+ if conn := _ctx_conn.get():
+ with conn.begin_nested(): yield self
+ return
+ with self.engine.begin() as conn:
+ tok = _ctx_conn.set(conn)
+ try: yield self
+ finally: _ctx_conn.reset(tok)
+
+ def execute(self, st, params=None, opts=None):
+ "Execute `st` and return buffered results or rowcount"
+ with self.conn(write=True) as conn:
+ res = conn.execute(st, params, execution_options=opts)
+ return res.mappings().all() if res.returns_rows else res.rowcount
+
+ def close(self):
+ self.engine.dispose()
- def __repr__(self): return f"Database({self.conn_str})"
+ def __repr__(self):
+ return f"Database({self.conn_str})"
# %% ../00_core.ipynb #f120bed1
@patch
def q(self: Database, sql: str, **params):
"Query database with raw SQL and optional parameters. Returns list of dicts."
- result = self.execute(sa.text(sql), params=params)
- if result.returns_rows: return list(map(dict, result.mappings()))
+ res = self.execute(sa.text(sql), params=params)
+ if isinstance(res, list):
+ return list(map(dict, res))
return []
@@ -71,11 +95,39 @@ class DBTable:
def __init__(self, table: sa.Table, db: Database, cls, _exists=None):
store_attr()
+ self._xtra_var = contextvars.ContextVar(f'xtra_{self.table.name}_{id(self)}', default={})
+ self._result_var = contextvars.ContextVar(f'result_{self.table.name}_{id(self)}', default=[])
self.xtra_id, self.result = {}, []
if len(table.columns) > 0:
table.create(self.db.engine, checkfirst=True)
self._exists = True
+ @property
+ def xtra_id(self): return self._xtra_var.get()
+
+ @xtra_id.setter
+ def xtra_id(self, v): self._xtra_var.set(v or {})
+
+ @property
+ def result(self): return self._result_var.get()
+
+ @result.setter
+ def result(self, v): self._result_var.set(v or [])
+
+ def _one(self, stmt, params=None, write=False):
+ with self.db.conn(write) as conn: return conn.execute(stmt, params).one()
+
+ def _one_or_none(self, stmt, params=None, write=False):
+ with self.db.conn(write) as conn: return conn.execute(stmt, params).one_or_none()
+
+ def _all(self, stmt, params=None, write=False, mappings=False):
+ with self.db.conn(write) as conn:
+ res = conn.execute(stmt, params)
+ return res.mappings().all() if mappings else res.fetchall()
+
+ def _scalar(self, stmt, params=None, write=False):
+ with self.db.conn(write) as conn: return conn.execute(stmt, params).scalar_one()
+
def __repr__(self):
if self._exists is False or (self._exists is None and len(self.table.columns) == 0):
return f""
@@ -84,10 +136,6 @@ def __repr__(self):
def __str__(self):
return f'"{self.table.name}"'
- @property
- def conn(self):
- return self.db.conn
-
def xtra(self, **kwargs):
"Set `xtra_id`"
self.xtra_id = kwargs
@@ -95,9 +143,7 @@ def xtra(self, **kwargs):
# %% ../00_core.ipynb #647933f2
@patch(as_prop=True)
-def t(self: DBTable):
- return self.table, self.table.c
-
+def t(self: DBTable): return self.table, self.table.c
# %% ../00_core.ipynb #9e105008
@patch(as_prop=True)
@@ -114,24 +160,26 @@ def schema(self: DBTable):
# %% ../00_core.ipynb #a0faa16a
@patch
def table(self: Database, nm: str, cls=None):
- if nm in self._tables: return self._tables[nm]
+ # Thread-safe cache: multiple requests can hit this concurrently.
+ with self._tables_lock:
+ if nm in self._tables: return self._tables[nm]
- if nm in self.meta.tables:
- tbl = self.meta.tables[nm]
- exists = True
- else:
- inspector = sa.inspect(self.engine)
- if nm in inspector.get_table_names() or nm in inspector.get_view_names():
- tbl = sa.Table(nm, self.meta, autoload_with=self.engine)
+ if nm in self.meta.tables:
+ tbl = self.meta.tables[nm]
exists = True
else:
- tbl = sa.Table(nm, self.meta)
- exists = False
+ inspector = sa.inspect(self.engine)
+ if nm in inspector.get_table_names() or nm in inspector.get_view_names():
+ tbl = sa.Table(nm, self.meta, autoload_with=self.engine)
+ exists = True
+ else:
+ tbl = sa.Table(nm, self.meta)
+ exists = False
- if cls is None and hasattr(tbl, "cls"): cls = tbl.cls
- res = DBTable(tbl, self, cls, _exists=exists)
- self._tables[nm] = res
- return res
+ if cls is None and hasattr(tbl, "cls"): cls = tbl.cls
+ res = DBTable(tbl, self, cls, _exists=exists)
+ self._tables[nm] = res
+ return res
# %% ../00_core.ipynb #33f7426a
@@ -144,7 +192,7 @@ def __getitem__(self: Database, nm: str):
def database(path, wal=True, **kwargs) -> Any:
"Create a `Database` from a path or connection string"
conn_str = _db_str(path)
- db = Database(conn_str)
+ db = Database(conn_str, engine_kws=kwargs)
if wal and str(conn_str).startswith("sqlite:"): db.execute(sa.text("PRAGMA journal_mode=WAL"))
return db
@@ -276,21 +324,17 @@ def upsert(
stmt = ins.on_conflict_do_update(index_elements=pk, set_=record).returning(
*self.table.columns
)
- row = self.conn.execute(stmt).one()
- self.conn.commit()
- return _row_to_obj(self, row)
+ return _row_to_obj(self, self._one(stmt, write=True))
if dialect in ("mysql", "mariadb"):
from sqlalchemy.dialects.mysql import insert as dialect_insert
ins = dialect_insert(self.table).values(**record)
stmt = ins.on_duplicate_key_update(**record)
- result = self.conn.execute(stmt)
- self.conn.commit()
- try:
- row = result.one()
- return _row_to_obj(self, row)
- except Exception:
- return self.get([record[k] for k in pk])
+ with self.db.conn(True) as conn:
+ try: row = conn.execute(stmt).one()
+ except Exception: row = None
+ if row is not None: return _row_to_obj(self, row)
+ return self.get([record[k] for k in pk])
existing = None
try:
existing = self.get([record[k] for k in pk])
@@ -391,14 +435,10 @@ def create(
# %% ../00_core.ipynb #0089aca8
@patch
-def table_names(self: Database):
- return sa.inspect(self.engine).get_table_names()
-
+def table_names(self: Database): return sa.inspect(self.engine).get_table_names()
@patch
-def view_names(self: Database):
- return sa.inspect(self.engine).get_view_names()
-
+def view_names(self: Database): return sa.inspect(self.engine).get_view_names()
# %% ../00_core.ipynb #7f00cf84
@patch
@@ -595,12 +635,8 @@ def insert(
record = {**record, **kwargs}
if not record: return {}
record = {**record, **self.xtra_id}
- result = self.conn.execute(
- sa.insert(self.table).values(**record).returning(*self.table.columns)
- )
- row = result.one()
- self.conn.commit()
- return _row_to_obj(self, row)
+ stmt = sa.insert(self.table).values(**record).returning(*self.table.columns)
+ return _row_to_obj(self, self._one(stmt, write=True))
# %% ../00_core.ipynb #367b8d35
@@ -640,10 +676,7 @@ def insert_all(
self.result = []
return self
stmt = sa.insert(self.table).returning(*self.table.columns)
- result = self.conn.execute(stmt, recs)
- rows = result.fetchall()
- self.conn.commit()
- self.result = [_row_to_obj(self, r) for r in rows]
+ self.result = [_row_to_obj(self, r) for r in self._all(stmt, recs, write=True)]
return self
@@ -697,8 +730,8 @@ def count_where(
**kw,
) -> int:
stmt = sa.select(sa.func.count()).select_from(self.table)
- if where: stmt = stmt.where(_where(where, where_args, **kw))
- return int(self.conn.execute(stmt).scalar_one())
+ if where or self.xtra_id or kw: stmt = stmt.where(_where(where, where_args, self.xtra_id, **kw))
+ return int(self._scalar(stmt))
# %% ../00_core.ipynb #24e25a12
@@ -734,7 +767,7 @@ def rows_where(
if order_by: query = query.order_by(sa.text(order_by))
if limit is not None: query = query.limit(limit)
if offset is not None: query = query.offset(offset)
- rows = self.conn.execute(query).mappings().all()
+ rows = self._all(query, write=False, mappings=True)
for row in rows: yield dict(row)
@@ -845,8 +878,7 @@ def get(
if len(cols) != len(vals): raise NotFoundError(f"Need {len(cols)} pk")
cond = sa.and_(*[col == val for col, val in zip(cols, vals)])
qry = sa.select(self.table).where(cond)
- result = self.conn.execute(qry).first()
- if not result:
+ if not (result := self._one_or_none(qry)):
if default is UNSET: raise NotFoundError()
return default
return _row_to_obj(self, result, as_cls=as_cls)
@@ -898,12 +930,7 @@ def update(
if pk_values is None: pk_values = [d[o.name] for o in self.table.primary_key]
else: pk_values = listify(pk_values)
qry = self._pk_where("update", pk_values).values(**d).returning(*self.table.columns)
- result = self.conn.execute(qry)
- if (row := result.one_or_none()) is None:
- self.conn.rollback()
- raise NotFoundError()
-
- self.conn.commit()
+ if (row := self._one_or_none(qry, write=True)) is None: raise NotFoundError()
return _row_to_obj(self, row)
# %% ../00_core.ipynb #433e6a31
@@ -918,30 +945,9 @@ def update_where(
) -> list:
"Update rows matching `where` with `updates`. Returns updated rows."
stmt = self.table.update().values(**updates)
- if where: stmt = stmt.where(_where(where, where_args, xtra or getattr(self, "xtra_id", {}), **kw))
- rows = self.conn.execute(stmt.returning(*self.table.columns)).fetchall()
- self.conn.commit()
- return [_row_to_obj(self, r) for r in rows]
-
-
-# %% ../00_core.ipynb #d2106980
-@patch
-def update_where(
- self: DBTable,
- updates: dict,
- where: str | None = None,
- where_args: dict | Iterable | None = None,
- xtra: dict | None = None,
- **kw,
-) -> list:
- "Update rows matching `where` with `updates`. Returns updated rows."
- stmt = self.table.update().values(**updates)
- if where:
- stmt = stmt.where(
- _where(where, where_args, xtra or getattr(self, "xtra_id", {}), **kw)
- )
- rows = self.conn.execute(stmt.returning(*self.table.columns)).fetchall()
- self.conn.commit()
+ xtra = xtra or getattr(self, 'xtra_id', {})
+ if where or xtra or kw: stmt = stmt.where(_where(where, where_args, xtra, **kw))
+ rows = self._all(stmt.returning(*self.table.columns), write=True)
return [_row_to_obj(self, r) for r in rows]
@@ -949,12 +955,8 @@ def update_where(
@patch
def delete(self: DBTable, key):
"Delete item with PK `key` and return the deleted object"
- result = self.conn.execute(
- self._pk_where("delete", key).returning(*self.table.columns)
- )
- row = result.one()
- self.conn.commit()
- return _row_to_obj(self, row)
+ stmt = self._pk_where('delete', key).returning(*self.table.columns)
+ return _row_to_obj(self, self._one(stmt, write=True))
# %% ../00_core.ipynb #06a6e82c
@@ -967,9 +969,9 @@ def delete_where(
**kw,
):
stmt = self.table.delete()
- if where: stmt = stmt.where(_where(where, where_args, xtra or getattr(self, "xtra_id", {}), **kw))
- rows = self.conn.execute(stmt.returning(*self.table.columns)).fetchall()
- self.conn.commit()
+ xtra = xtra or getattr(self, 'xtra_id', {})
+ if where or xtra or kw: stmt = stmt.where(_where(where, where_args, xtra, **kw))
+ rows = self._all(stmt.returning(*self.table.columns), write=True)
return [_row_to_obj(self, r) for r in rows]
@@ -994,8 +996,7 @@ def __contains__(
def drop(self: DBTable, ignore: bool = False):
"Drop this table from the database"
try:
- self.table.drop(self.db.engine)
- self.conn.commit()
+ with self.db.conn(True) as conn: self.table.drop(conn)
if self.table.name in self.db._tables: del self.db._tables[self.table.name]
if self.table.name in self.db.meta.tables: self.db.meta.remove(self.table)
except Exception as e:
@@ -1033,26 +1034,22 @@ def _get_migrations(mdir):
@patch
def migrate(self:Database, mdir):
if '_meta' not in self.t: self._add_meta()
- cver = self.version
for v, p in _get_migrations(mdir)[self.version:]:
- try:
- if p.suffix == '.sql':
+ if p.suffix == '.sql':
+ with self.tx():
for stmt in filter(str.strip, sqlparse.split(p.read_text())): self.execute(sa.text(stmt))
- elif p.suffix == '.py':
- subprocess.run([sys.executable, p, self.conn_str], check=True)
+ self.version = v
+ elif p.suffix == '.py':
+ subprocess.run([sys.executable, p, self.conn_str], check=True)
self.version = v
- self.conn.commit()
- print(f"Applied migration {v}: {p.name}")
- except Exception as e:
- self.conn.rollback()
- raise e
- self.conn.commit()
- cls_map = {nm: tbl.cls for nm, tbl in self._tables.items() if tbl.cls}
+ print(f"Applied migration {v}: {p.name}")
+
self._tables.clear()
self.meta.clear()
self.meta.reflect(bind=self.engine)
for tbl in self.t: tbl.dataclass()
+
# %% ../00_core.ipynb #bd8573a8
from fastcore.net import urlsave
@@ -1099,25 +1096,29 @@ def sql(self: Connection, statement, nm="Row", *args, **kwargs):
t = self.execute(statement)
try:
return t.tuples()
- except ResourceClosedError:
+ except sa.exc.ResourceClosedError:
pass # statement didn't return anything
@patch
def sql(self: MetaData, statement, *args, **kwargs):
- "Execute `statement` string and return `DataFrame` of results (if any)"
- return self.conn.sql(statement, *args, **kwargs)
+ "Execute `statement` string and return results (if any)"
+ if conn := _ctx_conn.get(): return conn.sql(statement, *args, **kwargs)
+ eng = getattr(self, 'bind', None)
+ if eng is None: raise AttributeError('MetaData.bind is not set')
+ with eng.connect() as conn: return conn.sql(statement, *args, **kwargs)
# %% ../00_core.ipynb #e2359ec9
@patch
def get(self: Table, where=None, limit=None):
"Select from table, optionally limited by `where` and `limit` clauses"
- return self.metadata.conn.sql(self.select().where(where).limit(limit))
+ return self.metadata.sql(self.select().where(where).limit(limit))
# %% ../00_core.ipynb #2802f9e0
@patch
def close(self: MetaData):
"Close the connection"
- self.conn.close()
+ eng = getattr(self, 'bind', None)
+ if eng is not None: eng.dispose()
diff --git a/index.ipynb b/index.ipynb
index 8e28b6c..9f8cff4 100644
--- a/index.ipynb
+++ b/index.ipynb
@@ -862,7 +862,7 @@
"id": "fb6b2429",
"metadata": {},
"source": [
- "It we set `xtra` then the additional fields are used for `insert`, `update`, and `delete`:"
+ "If we set `xtra` then the additional fields are used for `insert`, `update`, and `delete`. `xtra` is stored per execution context (thread/task), so it's safe to use in concurrent apps:"
]
},
{
@@ -1103,6 +1103,151 @@
"cats.insert(cat)\n"
]
},
+ {
+ "cell_type": "markdown",
+ "id": "ed905ed4",
+ "metadata": {},
+ "source": [
+ "## Connections and transactions"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6822d723",
+ "metadata": {},
+ "source": [
+ "`Database` keeps a SQLAlchemy `Engine` (connection pool). Each call checks out a connection when needed and returns it immediately, so a global `db` is safe to share across threads (sync routes) and tasks.\n",
+ "\n",
+ "`db.execute` buffers results (lists of row mappings) and there is no `db.conn` attribute; use `db.engine` if you need the underlying SQLAlchemy engine."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ba25cc8d",
+ "metadata": {},
+ "source": [
+ "### .tx()\n",
+ "Use `db.tx()` to run multiple operations on a single connection in one transaction (handy for sync web routes). Nested `db.tx()` uses a SAVEPOINT."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "609c9e23",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/markdown": [
+ "```sql\n",
+ "CREATE TABLE tx_item (\n",
+ "\tid INTEGER, \n",
+ "\tname VARCHAR, \n",
+ "\tPRIMARY KEY (id)\n",
+ ")\n",
+ "```"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "class TxItem: id:int; name:str\n",
+ "\n",
+ "tx = db.table('tx_item')\n",
+ "tx.drop(ignore=True)\n",
+ "tx = db.create(TxItem, name='tx_item')\n",
+ "hl_md(tx.schema, 'sql')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e42db77b",
+ "metadata": {},
+ "source": [
+ "A failing block rolls back:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4736383f",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "try:\n",
+ " with db.tx():\n",
+ " tx.insert(id=1, name='a')\n",
+ " tx.insert(id=2, name='b')\n",
+ " raise RuntimeError('boom')\n",
+ "except RuntimeError: pass\n",
+ "\n",
+ "tx()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7c775f25",
+ "metadata": {},
+ "source": [
+ "Nested transactions work too:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "78ac28b5",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[TxItem(id=1, name='outer')]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "with db.tx():\n",
+ " tx.insert(id=1, name='outer')\n",
+ " try:\n",
+ " with db.tx():\n",
+ " tx.insert(id=2, name='inner')\n",
+ " raise ValueError('nope')\n",
+ " except ValueError: pass\n",
+ "\n",
+ "tx()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "36ff8682",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "tx.drop()"
+ ]
+ },
{
"cell_type": "markdown",
"id": "1551d93c",
@@ -1740,7 +1885,7 @@
"id": "h6x84dtxuyl",
"metadata": {},
"source": [
- "Migrations can also be Python scripts. Create a file like `2-update_data.py` that accepts the database connection string as a command line argument to perform more complex data transformations. Python migration scripts must handle their own commits:\n",
+ "Migrations can also be Python scripts. Create a file like `2-update_data.py` that accepts the database connection string as a command line argument to perform more complex data transformations. Use `db.tx()` to make the changes atomic:\n",
"\n",
"```python\n",
"# migrations/2-update_data.py\n",
@@ -1750,16 +1895,51 @@
"conn_str = sys.argv[1]\n",
"db = database(conn_str)\n",
"\n",
- "# Perform complex data transformations\n",
- "for cat in db.t.cat():\n",
- " if cat.weight > 10:\n",
- " db.t.cat.update({'id': cat.id, 'priority': 1})\n",
- "\n",
- "# Python migrations must commit their own changes\n",
- "db.conn.commit()\n",
+ "with db.tx():\n",
+ " for cat in db.t.cat():\n",
+ " if cat.weight > 10:\n",
+ " db.t.cat.update({'id': cat.id, 'priority': 1})\n",
"```"
]
},
+ {
+ "cell_type": "markdown",
+ "id": "9e854960",
+ "metadata": {},
+ "source": [
+ "### Concurrent writes (Postgres)\n",
+ "Postgres handles concurrent writes across multiple connections. FastSQL uses a pool, so concurrent sync requests can write safely as long as the pool has capacity.\n",
+ "\n",
+ "Set `FASTSQL_PG` to a SQLAlchemy URL (e.g. `postgresql+psycopg://...`) and pass pool settings through `database(...)`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e79909d2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| eval: false\n",
+ "from fastcore.parallel import parallel\n",
+ "\n",
+ "import os\n",
+ "\n",
+ "conn_str = os.environ['FASTSQL_PG']\n",
+ "pg = database(conn_str, pool_size=10, max_overflow=20, pool_pre_ping=True)\n",
+ "\n",
+ "class PgItem: id:int; name:str\n",
+ "\n",
+ "t = pg.table('pg_item')\n",
+ "t.drop(ignore=True)\n",
+ "t = pg.create(PgItem, name='pg_item')\n",
+ "\n",
+ "def ins(i): t.insert(id=i, name=str(i))\n",
+ "parallel(ins, range(100), n_workers=10, threadpool=True)\n",
+ "\n",
+ "len(t())"
+ ]
+ },
{
"cell_type": "markdown",
"id": "2e781445",
diff --git a/test_insert.ipynb b/test_insert.ipynb
index b6a6ca1..3e6b63f 100644
--- a/test_insert.ipynb
+++ b/test_insert.ipynb
@@ -20,9 +20,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ad470f25",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.315363+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"from fastcore.test import *\n",
@@ -41,9 +39,7 @@
"cell_type": "code",
"execution_count": null,
"id": "97dd1b48",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.361492+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"db = database(':memory:')"
@@ -53,9 +49,7 @@
"cell_type": "code",
"execution_count": null,
"id": "5102a3ac",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.390053+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"class People: id: int; name: str"
@@ -65,9 +59,7 @@
"cell_type": "code",
"execution_count": null,
"id": "9188c149",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.413604+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"people = db.create(People, pk='id')"
@@ -109,9 +101,7 @@
"cell_type": "code",
"execution_count": null,
"id": "fba0c4f7",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.437254+00:00"
- },
+ "metadata": {},
"outputs": [
{
"data": {
@@ -119,7 +109,7 @@
"{}"
]
},
- "execution_count": 0,
+ "execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
@@ -140,9 +130,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ace59c88",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.461369+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -162,9 +150,7 @@
"cell_type": "code",
"execution_count": null,
"id": "a93ec70a",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.484641+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -176,9 +162,7 @@
"cell_type": "code",
"execution_count": null,
"id": "79cd5186",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.506829+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"# Test empty dataclass doesn't change anything\n",
@@ -192,9 +176,7 @@
"cell_type": "code",
"execution_count": null,
"id": "aa988175",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.530242+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"# Test empty class instance doesn't change anything\n",
@@ -224,9 +206,7 @@
"cell_type": "code",
"execution_count": null,
"id": "1fdd0aaf",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.552858+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.insert(name='Alice').name == 'Alice'"
@@ -244,9 +224,7 @@
"cell_type": "code",
"execution_count": null,
"id": "c736aa0f",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.576603+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.insert(People(name='Bobba')).name == 'Bobba'"
@@ -264,9 +242,7 @@
"cell_type": "code",
"execution_count": null,
"id": "cfd90ab0",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.599211+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"class Student: pass\n",
@@ -288,9 +264,7 @@
"cell_type": "code",
"execution_count": null,
"id": "72a25f8d",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.727277+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.count == 3"
@@ -329,9 +303,7 @@
"cell_type": "code",
"execution_count": null,
"id": "5a968d13",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.750276+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"result = people.insert(name=None)\n",
@@ -350,9 +322,7 @@
"cell_type": "code",
"execution_count": null,
"id": "92d53608",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.772110+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"result = people.insert(name='')\n",
@@ -363,9 +333,7 @@
"cell_type": "code",
"execution_count": null,
"id": "51cb29b1",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.795407+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.get(pk_values=4).name == None"
@@ -403,9 +371,7 @@
"cell_type": "code",
"execution_count": null,
"id": "972bab86",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.818259+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.insert(name='O\\'Connor').name == \"O'Connor\"\n",
@@ -424,9 +390,7 @@
"cell_type": "code",
"execution_count": null,
"id": "55364dd6",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.841664+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"p1 = people.insert(name='Test1')\n",
@@ -446,9 +410,7 @@
"cell_type": "code",
"execution_count": null,
"id": "45a4c2aa",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.864922+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.insert({'name': 'Dict Test'}).name == 'Dict Test'"
@@ -466,9 +428,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ba6afc6e",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.888450+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"from sqlalchemy.exc import CompileError"
@@ -478,9 +438,7 @@
"cell_type": "code",
"execution_count": null,
"id": "e9fd1822",
- "metadata": {
- "time_run": "2026-02-03T15:34:01.912153+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"test_fail(people.insert, kwargs=dict(name='Extra', age=25, title='Dr'), exc=CompileError)"
@@ -522,9 +480,7 @@
"cell_type": "code",
"execution_count": null,
"id": "c8a95079",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.804848+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -544,9 +500,7 @@
"cell_type": "code",
"execution_count": null,
"id": "cee37620",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.828074+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -568,9 +522,7 @@
"cell_type": "code",
"execution_count": null,
"id": "98118662",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.851136+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -599,9 +551,7 @@
"cell_type": "code",
"execution_count": null,
"id": "96632dfb",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.874388+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -621,9 +571,7 @@
"cell_type": "code",
"execution_count": null,
"id": "b110b0a7",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.897923+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -644,9 +592,7 @@
"cell_type": "code",
"execution_count": null,
"id": "803e6bc9",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.921945+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -668,9 +614,7 @@
"cell_type": "code",
"execution_count": null,
"id": "570d5dce",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.945788+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -701,9 +645,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ca76eb12",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.969415+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -729,9 +671,7 @@
"cell_type": "code",
"execution_count": null,
"id": "5a37e482",
- "metadata": {
- "time_run": "2026-02-03T15:32:04.993004+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -756,9 +696,7 @@
"cell_type": "code",
"execution_count": null,
"id": "da81a215",
- "metadata": {
- "time_run": "2026-02-03T15:32:05.015829+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -783,9 +721,7 @@
"cell_type": "code",
"execution_count": null,
"id": "a3ede2de",
- "metadata": {
- "time_run": "2026-02-03T15:32:05.038915+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"from sqlalchemy.exc import StatementError"
@@ -795,19 +731,14 @@
"cell_type": "code",
"execution_count": null,
"id": "9682bd4d",
- "metadata": {
- "time_run": "2026-02-03T15:32:05.062179+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"test_fail(people.insert_all, args=[[{'name': 'Valid'}, {'invalid_col': 'Bad'}]], exc=StatementError)"
]
}
],
- "metadata": {
- "solveit_dialog_mode": "learning",
- "solveit_ver": 2
- },
+ "metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}
diff --git a/test_update.ipynb b/test_update.ipynb
index 69e0349..3d5c4d8 100644
--- a/test_update.ipynb
+++ b/test_update.ipynb
@@ -20,9 +20,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ad470f25",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.435843+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"from dataclasses import is_dataclass\n",
@@ -42,9 +40,7 @@
"cell_type": "code",
"execution_count": null,
"id": "97dd1b48",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.546309+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"db = database(':memory:')"
@@ -54,9 +50,7 @@
"cell_type": "code",
"execution_count": null,
"id": "5102a3ac",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.573282+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"class People: id: int; name: str"
@@ -66,9 +60,7 @@
"cell_type": "code",
"execution_count": null,
"id": "9188c149",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.596298+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"people = db.create(People, pk='id')"
@@ -110,9 +102,7 @@
"cell_type": "code",
"execution_count": null,
"id": "fba0c4f7",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.619580+00:00"
- },
+ "metadata": {},
"outputs": [
{
"data": {
@@ -120,7 +110,7 @@
"{}"
]
},
- "execution_count": 0,
+ "execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
@@ -141,9 +131,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ace59c88",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.643767+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -163,9 +151,7 @@
"cell_type": "code",
"execution_count": null,
"id": "a93ec70a",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.667050+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -177,9 +163,7 @@
"cell_type": "code",
"execution_count": null,
"id": "79cd5186",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.690443+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"# Test empty dataclass doesn't change anything\n",
@@ -193,9 +177,7 @@
"cell_type": "code",
"execution_count": null,
"id": "aa988175",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.713459+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"# Test empty class instance doesn't change anything\n",
@@ -225,9 +207,7 @@
"cell_type": "code",
"execution_count": null,
"id": "1fdd0aaf",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.736858+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"person = people.insert(name='Alice')\n",
@@ -248,9 +228,7 @@
"cell_type": "code",
"execution_count": null,
"id": "e5753017",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.761028+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people[person.id].name == 'Bob'"
@@ -268,9 +246,7 @@
"cell_type": "code",
"execution_count": null,
"id": "c736aa0f",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.784046+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"dc = People(id=person.id, name='Bobby')\n",
@@ -291,9 +267,7 @@
"cell_type": "code",
"execution_count": null,
"id": "cfd90ab0",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.807472+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"class Student: pass\n",
@@ -338,9 +312,7 @@
"cell_type": "code",
"execution_count": null,
"id": "5a968d13",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.892804+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"result = people.update(dict(id=person.id, name=None))\n",
@@ -360,9 +332,7 @@
"cell_type": "code",
"execution_count": null,
"id": "92d53608",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.893883+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"result = people.update(dict(id=person.id, name=''))\n",
@@ -390,9 +360,7 @@
"cell_type": "code",
"execution_count": null,
"id": "972bab86",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.916500+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.update(dict(id=person.id, name='O\\'Connor')).name == \"O'Connor\"\n",
@@ -413,9 +381,7 @@
"cell_type": "code",
"execution_count": null,
"id": "1fee74f4",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.939993+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"from sqlalchemy.exc import CompileError"
@@ -425,19 +391,14 @@
"cell_type": "code",
"execution_count": null,
"id": "76652d97",
- "metadata": {
- "time_run": "2026-02-03T15:33:24.963671+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"test_fail(people.update, kwargs=dict(id=person.id, name='Extra', age=25, title='Dr'), exc=CompileError)"
]
}
],
- "metadata": {
- "solveit_dialog_mode": "learning",
- "solveit_ver": 2
- },
+ "metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}
diff --git a/test_upsert.ipynb b/test_upsert.ipynb
index f8c5584..d4dbcac 100644
--- a/test_upsert.ipynb
+++ b/test_upsert.ipynb
@@ -3,9 +3,7 @@
{
"cell_type": "markdown",
"id": "fd325418",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"# Test Upsert Operations"
]
@@ -13,9 +11,7 @@
{
"cell_type": "markdown",
"id": "417f2c4e",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"## Setup"
]
@@ -24,14 +20,11 @@
"cell_type": "code",
"execution_count": null,
"id": "ad470f25",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:23:34.461281+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
- "from fastcore.parallel import parallel\n",
"from fastcore.test import *\n",
+ "from fastcore.utils import *\n",
"from fastsql import *\n",
"from sqlalchemy.exc import CompileError, ProgrammingError"
]
@@ -39,35 +32,27 @@
{
"cell_type": "markdown",
"id": "e4788661",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:24:28.876733+00:00"
- },
+ "metadata": {},
"source": [
- "Note: Make sure to use fastsql's `database()` here"
+ "Note: Make sure to use fastsql's `database()` here with a file path if you want to test thread safety, as it is not supported with `:memory:`"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "97dd1b48",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.518593+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
- "db = database(':memory:')"
+ "dbp = Path('/tmp/data.db')\n",
+ "db = database(dbp)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5102a3ac",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.546302+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"class People: id: int; name: str"
@@ -77,10 +62,7 @@
"cell_type": "code",
"execution_count": null,
"id": "9188c149",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.568581+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"people = db.create(People, pk='id')"
@@ -89,9 +71,7 @@
{
"cell_type": "markdown",
"id": "6c99cbae",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"## Test Single Upserts"
]
@@ -99,9 +79,7 @@
{
"cell_type": "markdown",
"id": "dbc67ac6",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Here we test `upsert()`"
]
@@ -109,9 +87,7 @@
{
"cell_type": "markdown",
"id": "a0673d88",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"### Test Cases for `upsert()` Where Nothing Is Inserted"
]
@@ -119,9 +95,7 @@
{
"cell_type": "markdown",
"id": "eb45e038",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test that calling `upsert()` without any parameters doesn't change anything, and returns nothing"
]
@@ -130,10 +104,7 @@
"cell_type": "code",
"execution_count": null,
"id": "fba0c4f7",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.590727+00:00"
- },
+ "metadata": {},
"outputs": [
{
"data": {
@@ -141,7 +112,7 @@
"{}"
]
},
- "execution_count": 0,
+ "execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
@@ -153,9 +124,7 @@
{
"cell_type": "markdown",
"id": "0355fe0a",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test None doesn't change anything."
]
@@ -164,10 +133,7 @@
"cell_type": "code",
"execution_count": null,
"id": "ace59c88",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.614613+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -178,9 +144,7 @@
{
"cell_type": "markdown",
"id": "2ab1795b",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test empty dict doesn't change anything "
]
@@ -189,10 +153,7 @@
"cell_type": "code",
"execution_count": null,
"id": "a93ec70a",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.637763+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -204,10 +165,7 @@
"cell_type": "code",
"execution_count": null,
"id": "79cd5186",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.661193+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"# Test empty dataclass doesn't change anything\n",
@@ -221,10 +179,7 @@
"cell_type": "code",
"execution_count": null,
"id": "aa988175",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.683902+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"# Test empty class instance doesn't change anything\n",
@@ -237,9 +192,7 @@
{
"cell_type": "markdown",
"id": "811bc666",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"### Single Insert Types"
]
@@ -247,9 +200,7 @@
{
"cell_type": "markdown",
"id": "157baebb",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test upsert with keyword argument without id. Result should be a MissingPrimaryKey error"
]
@@ -258,10 +209,7 @@
"cell_type": "code",
"execution_count": null,
"id": "1fdd0aaf",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.707315+00:00"
- },
+ "metadata": {},
"outputs": [
{
"name": "stdout",
@@ -279,9 +227,7 @@
{
"cell_type": "markdown",
"id": "e1300c26",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Use upsert to insert a new record via a dataclass. Since it can't find the id, it adds the record"
]
@@ -290,10 +236,7 @@
"cell_type": "code",
"execution_count": null,
"id": "de73d39a",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.730311+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"person = people.upsert(People(name='Alice', id=people.count+1))"
@@ -302,9 +245,7 @@
{
"cell_type": "markdown",
"id": "447e13c9",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test upsert that updates with dataclass. Since it can find the id, it updates the record."
]
@@ -313,10 +254,7 @@
"cell_type": "code",
"execution_count": null,
"id": "c736aa0f",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.753637+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.upsert(People(name='Bobba', id=person.id)).name == 'Bobba'"
@@ -325,9 +263,7 @@
{
"cell_type": "markdown",
"id": "77e6e4c0",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Use upsert to insert a new record via a class. Since it can't find the id, it adds the record"
]
@@ -336,10 +272,7 @@
"cell_type": "code",
"execution_count": null,
"id": "dd80748f",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.777160+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -355,9 +288,7 @@
{
"cell_type": "markdown",
"id": "0b4eb6df",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test upsert that updates with class. Since it can find the id, it updates the record."
]
@@ -366,10 +297,7 @@
"cell_type": "code",
"execution_count": null,
"id": "cfd90ab0",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.862206+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"count = people.count\n",
@@ -384,9 +312,7 @@
{
"cell_type": "markdown",
"id": "26a9c38a",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"### None and Empty String Handling"
]
@@ -394,9 +320,7 @@
{
"cell_type": "markdown",
"id": "37ad998d",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test upserting a record with name set to None. First assert checks the method result, the second assert tests that the database was altered correctly."
]
@@ -405,10 +329,7 @@
"cell_type": "code",
"execution_count": null,
"id": "5a968d13",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.863970+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"result = people.upsert(People(name=None, id=person.id))\n",
@@ -419,9 +340,7 @@
{
"cell_type": "markdown",
"id": "dd0c180d",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test with empty string."
]
@@ -430,10 +349,7 @@
"cell_type": "code",
"execution_count": null,
"id": "92d53608",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.886420+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"result = people.upsert(People(name='', id=person.id))\n",
@@ -444,9 +360,7 @@
{
"cell_type": "markdown",
"id": "d855c6a8",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"### Other Cases"
]
@@ -454,9 +368,7 @@
{
"cell_type": "markdown",
"id": "1ee61d32",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test upserts with special characters. Let's do updates first"
]
@@ -465,10 +377,7 @@
"cell_type": "code",
"execution_count": null,
"id": "972bab86",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.910083+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.upsert(People(name='O\\'Connor', id=person.id)).name == \"O'Connor\"\n",
@@ -480,9 +389,7 @@
{
"cell_type": "markdown",
"id": "b1069ca8",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Now test special characters with upserts that insert."
]
@@ -491,10 +398,7 @@
"cell_type": "code",
"execution_count": null,
"id": "2b702435",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.934453+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"person = people.upsert(People(name='O\\'Connor', id=people.count+1))\n",
@@ -508,9 +412,7 @@
{
"cell_type": "markdown",
"id": "f27e986a",
- "metadata": {
- "hidden": true
- },
+ "metadata": {},
"source": [
"Test dict upsert"
]
@@ -519,10 +421,7 @@
"cell_type": "code",
"execution_count": null,
"id": "45a4c2aa",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:11:03.958496+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"assert people.upsert({'name': 'Dict Test', 'id': person.id}).name == 'Dict Test'"
@@ -532,9 +431,7 @@
"cell_type": "code",
"execution_count": null,
"id": "a76aec1a",
- "metadata": {
- "time_run": "2026-02-03T15:25:48.244215+00:00"
- },
+ "metadata": {},
"outputs": [],
"source": [
"test_fail(people.upsert, kwargs=dict(name='Extra', age=25, title='Dr', id=person.id), exc=CompileError)"
@@ -543,32 +440,37 @@
{
"cell_type": "markdown",
"id": "fc63ecc4",
- "metadata": {
- "hidden": true,
- "time_run": "2026-02-03T15:26:18.547359+00:00"
- },
+ "metadata": {},
"source": [
- "Test that threaded concurrency fails on writes"
+ "Test that threaded concurrency succeeds on writes"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "3a2e7353",
- "metadata": {
- "time_run": "2026-02-03T15:26:20.086450+00:00"
- },
+ "id": "019647ec",
+ "metadata": {},
"outputs": [],
"source": [
- "def up(i): return people.upsert(People(id=i, name=f'Person {i}'))\n",
- "test_fail(parallel, args=[up, range(1, 100)], kwargs={'threadpool': True}, exc=ProgrammingError)"
+ "def up(i): return db.t.people.upsert(People(id=i, name=f'Person {i}'))\n",
+ "results = parallel(up, range(1, 100), threadpool=True)\n",
+ "test_eq(len(results), 99)\n",
+ "test_eq(people.count, 99)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e506809f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Cleanup\n",
+ "dbp.unlink(missing_ok=True)"
]
}
],
- "metadata": {
- "solveit_dialog_mode": "learning",
- "solveit_ver": 2
- },
+ "metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}