Commit 2b7da3c

Merge pull request #29 from AnswerDotAI/add-returning-to-inserts
Add returning to inserts
2 parents 2baf5e4 + d6f792d commit 2b7da3c

3 files changed: 164 additions & 71 deletions

README.md

Lines changed: 81 additions & 2 deletions
@@ -22,6 +22,12 @@
 
 ## Use
 
+``` python
+from fastcore.utils import *
+from fastcore.test import *
+from typing import Any
+```
+
 First, import the sqlite-miniutils library. Through the use of the
 __all__ attribute in our Python modules by using `import *` we only
 bring in the `Database`, `Queryable`, `Table`, `View` classes. There’s
@@ -107,7 +113,7 @@ users.insert(dict(name='Pigeon', age=3, pwd='keptsecret'))
 users.insert(dict(name='Eagle', age=7, pwd='s3cr3t'))
 ```
 
-    <Table Users (id, name, age, pwd)>
+    {'id': 5, 'name': 'Eagle', 'age': 7, 'pwd': 's3cr3t'}
 
 A simple unfiltered select can be executed using `rows` property on the
 table object.
@@ -116,7 +122,7 @@ table object.
 users.rows
 ```
 
-    <generator object Queryable.rows_where at 0x10849f6f0>
+    <generator object Queryable.rows_where>
 
 Let’s iterate over that generator to see the results:
 
@@ -169,3 +175,76 @@ except ValueError as e:
 ```
 
     Cannot use offset without limit
+
+## Transactions
+
+If you have any SQL calls outside an explicit transaction, they are
+committed instantly.
+
+To group 2 or more queries together into 1 transaction, wrap them in a
+BEGIN and COMMIT, executing ROLLBACK if an exception is caught:
+
+``` python
+users.get(1)
+```
+
+    {'id': 1, 'name': 'Raven', 'age': 8, 'pwd': 's3cret'}
+
+``` python
+db.begin()
+try:
+    users.delete([1])
+    db.execute('FNOOORD')
+    db.commit()
+except Exception as e:
+    print(e)
+    db.rollback()
+```
+
+    near "FNOOORD": syntax error
+
+Because the transaction was rolled back, the user was not deleted:
+
+``` python
+users.get(1)
+```
+
+    {'id': 1, 'name': 'Raven', 'age': 8, 'pwd': 's3cret'}
+
+Let’s do it again, but without the DB error, to check the transaction is
+successful:
+
+``` python
+db.begin()
+try:
+    users.delete([1])
+    db.commit()
+except Exception as e: db.rollback()
+```
+
+``` python
+try:
+    users.get(1)
+    print("Delete failed!")
+except: print("Delete succeeded!")
+```
+
+    Delete succeeded!
+
+## Returning
+
+sqlite-minutils is different from sqlite-utils in that write actions
+(`INSERT`, `UPDATE`, `UPSERT`) return back the record(s) they have
+affected without relying on `last_rowid`. It does this through the
+`RETURNING` SQL keyword.
+
+``` python
+user = users.insert(dict(name='Turkey', age=2, pwd='gravy'))
+user
+```
+
+    {'id': 8, 'name': 'Turkey', 'age': 2, 'pwd': 'gravy'}
+
+``` python
+test(user['name'], 'Turkey', equals)
+```
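The new Transactions section above drives this pattern through the library's own `db.begin()`, `db.commit()` and `db.rollback()` helpers. For reference, here is a minimal standalone sketch of the same BEGIN/COMMIT/ROLLBACK grouping written against Python's standard-library `sqlite3` module only; the connection, table and values below are illustrative and not part of this commit.

``` python
# Standalone sketch of the transaction grouping described above, using only the
# standard library. isolation_level=None keeps the connection in autocommit
# mode, so the explicit BEGIN below is what opens the transaction.
import sqlite3

conn = sqlite3.connect(":memory:", isolation_level=None)
conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)")
conn.execute("INSERT INTO users (name) VALUES ('Raven')")

try:
    conn.execute("BEGIN")
    conn.execute("DELETE FROM users WHERE id = 1")
    conn.execute("FNOOORD")   # deliberate syntax error, as in the README example
    conn.execute("COMMIT")
except sqlite3.OperationalError as e:
    print(e)                  # near "FNOOORD": syntax error
    conn.execute("ROLLBACK")

# The failed transaction was rolled back, so the row is still present.
print(conn.execute("SELECT * FROM users WHERE id = 1").fetchone())  # (1, 'Raven')
```

Because the sketch's connection is in autocommit mode, statements issued outside an explicit `BEGIN` are committed instantly, which mirrors the behaviour described at the top of the section.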

nbs/index.ipynb

Lines changed: 60 additions & 3 deletions
@@ -1,5 +1,25 @@
 {
  "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The autoreload extension is already loaded. To reload it, use:\n",
+      "  %reload_ext autoreload\n"
+     ]
+    }
+   ],
+   "source": [
+    "#| hide\n",
+    "%load_ext autoreload\n",
+    "%autoreload 2"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -50,8 +70,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#| hide\n",
     "from fastcore.utils import *\n",
+    "from fastcore.test import *\n",
     "from typing import Any"
    ]
   },
@@ -233,7 +253,7 @@
     {
      "data": {
       "text/plain": [
-       "<Table Users (id, name, age, pwd)>"
+       "{'id': 5, 'name': 'Eagle', 'age': 7, 'pwd': 's3cr3t'}"
       ]
      },
      "execution_count": null,
@@ -547,12 +567,49 @@
     "except: print(\"Delete succeeded!\")"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Returning"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "sqlite-minutils is different from sqlite-utils in that write actions (`INSERT`, `UPDATE`, `UPSERT`) return back the record(s) they have affected without relying on `last_rowid`. It does this through the `RETURNING` SQL keyword."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'id': 8, 'name': 'Turkey', 'age': 2, 'pwd': 'gravy'}"
+      ]
+     },
+     "execution_count": null,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "user = users.insert(dict(name='Turkey', age=2, pwd='gravy'))\n",
+    "user"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
-   "source": []
+   "source": [
+    "test(user['name'], 'Turkey', equals)"
+   ]
   }
  ],
  "metadata": {

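The notebook's new final cell asserts on the returned record with `test` and `equals` from `fastcore.test`, imported by the cell changed earlier in this file. Roughly, `test(a, b, cmp)` asserts `cmp(a, b)` and reports both values when the comparison fails; the stand-in below only illustrates that behaviour and is not fastcore's implementation.

``` python
# Hypothetical stand-in for fastcore.test.test: assert cmp(a, b), showing both
# values if the comparison fails. Only the observable behaviour is meant to match.
def check(a, b, cmp, cname=None):
    assert cmp(a, b), f"{cname or getattr(cmp, '__name__', 'cmp')}:\n{a}\n{b}"

user = {'id': 8, 'name': 'Turkey', 'age': 2, 'pwd': 'gravy'}  # value shown in the diff output
check(user['name'], 'Turkey', lambda a, b: a == b, cname='equals')  # passes silently
```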
sqlite_minutils/db.py

Lines changed: 23 additions & 66 deletions
@@ -2877,7 +2877,7 @@ def insert_chunk(
         num_records_processed,
         replace,
         ignore,
-    ):
+    ) -> List[Dict]:
         queries_and_params = self.build_insert_queries_and_params(
             extracts,
             chunk,
@@ -2893,68 +2893,24 @@ def insert_chunk(
             ignore,
         )
 
-        result = None
+        records = []
         for query, params in queries_and_params:
-            try:
-                result = self.db.execute(query, tuple(params))
-            except OperationalError as e:
-                if alter and (" column" in e.args[0]):
-                    # Attempt to add any missing columns, then try again
-                    self.add_missing_columns(chunk)
-                    result = self.db.execute(query, params)
-                elif e.args[0] == "too many SQL variables":
-                    first_half = chunk[: len(chunk) // 2]
-                    second_half = chunk[len(chunk) // 2 :]
-
-                    self.insert_chunk(
-                        alter,
-                        extracts,
-                        first_half,
-                        all_columns,
-                        hash_id,
-                        hash_id_columns,
-                        upsert,
-                        pk,
-                        not_null,
-                        conversions,
-                        num_records_processed,
-                        replace,
-                        ignore,
-                    )
-
-                    self.insert_chunk(
-                        alter,
-                        extracts,
-                        second_half,
-                        all_columns,
-                        hash_id,
-                        hash_id_columns,
-                        upsert,
-                        pk,
-                        not_null,
-                        conversions,
-                        num_records_processed,
-                        replace,
-                        ignore,
-                    )
+            cursor = self.db.execute(query, tuple(params))
+            columns = [c[0] for c in cursor.description]
+            record = dict(zip(columns, cursor.fetchone()))
+            records.append(record)
 
+        # Preserve old self.last_pk functionality
+        if (hash_id or pk) and not upsert:
+            if hash_id:
+                self.last_pk = row[hash_id]
+            elif isinstance(pk, str):
+                self.last_pk = row[pk]
             else:
-                    raise
-        if num_records_processed == 1:
-            rid = self.db.get_last_rowid()
-            if rid is not None:
-                self.last_pk = self.last_rowid = rid
-                # self.last_rowid will be 0 if a "INSERT OR IGNORE" happened
-                if (hash_id or pk) and not upsert:
-                    row = list(self.rows_where("rowid = ?", [rid]))[0]
-                    if hash_id:
-                        self.last_pk = row[hash_id]
-                    elif isinstance(pk, str):
-                        self.last_pk = row[pk]
-                    else:
-                        self.last_pk = tuple(row[p] for p in pk)
-
-            return
+                self.last_pk = tuple(row[p] for p in pk)
+            self.last_rowid = self.last_pk
+
+        return records
 
     def insert(
         self,
@@ -2973,7 +2929,7 @@ def insert(
         conversions: Optional[Union[Dict[str, str], Default]] = DEFAULT,
         columns: Optional[Union[Dict[str, Any], Default]] = DEFAULT,
         strict: Optional[Union[bool, Default]] = DEFAULT,
-    ) -> "Table":
+    ) -> Dict:
         """
         Insert a single record into the table. The table will be created with a schema that matches
         the inserted record if it does not already exist, see :ref:`python_api_creating_tables`.
@@ -3023,7 +2979,7 @@ def insert(
             conversions=conversions,
             columns=columns,
             strict=strict,
-        )
+        )[0]
 
     def insert_all(
         self,
@@ -3046,7 +3002,7 @@ def insert_all(
         upsert=False,
         analyze=False,
         strict=DEFAULT,
-    ) -> "Table":
+    ) -> List[Dict]:
         """
         Like ``.insert()`` but takes a list of records and ensures that the table
         that it creates (if table does not exist) has columns for ALL of that data.
@@ -3105,6 +3061,7 @@ def insert_all(
         self.last_pk = None
         if truncate and self.exists():
             self.db.execute("DELETE FROM [{}];".format(self.name))
+        records = []
         for chunk in chunks(itertools.chain([first_record], records), batch_size):
             chunk = list(chunk)
             num_records_processed += len(chunk)
@@ -3139,7 +3096,7 @@
 
             first = False
 
-            self.insert_chunk(
+            records.extend(self.insert_chunk(
                 alter,
                 extracts,
                 chunk,
@@ -3153,12 +3110,12 @@
                 num_records_processed,
                 replace,
                 ignore,
-            )
+            ))
 
         if analyze:
             self.analyze()
 
-        return self
+        return records
 
     def upsert(
         self,
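The rewritten `insert_chunk` above no longer re-reads rows via `last_rowid`: each statement is executed with a `RETURNING` clause and the affected row is read straight off the cursor. A minimal standalone sketch of that technique with the standard-library `sqlite3` module (the table, columns and values are illustrative; `RETURNING` requires SQLite 3.35 or newer):

``` python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)")

# RETURNING * hands the affected row back on the same cursor, so no follow-up
# SELECT by rowid is needed.
cursor = conn.execute(
    "INSERT INTO users (name, age) VALUES (?, ?) RETURNING *", ("Turkey", 2)
)
columns = [c[0] for c in cursor.description]    # column names of the returned row
record = dict(zip(columns, cursor.fetchone()))  # dict shape, as the changed insert() now returns
print(record)  # {'id': 1, 'name': 'Turkey', 'age': 2}
```

A `conn.commit()` would still be needed to persist the row here, since the sketch leaves sqlite3's default implicit-transaction behaviour in place.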
