
Commit e0a7c6d

Merge pull request #1341 from datajoint/feat/singleton-tables
feat: Add singleton tables (empty primary keys)
2 parents 9775d0a + 5010b85 commit e0a7c6d
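
In short, a table definition may now omit primary-key attributes entirely (an "empty primary key"); such a table holds at most one row. A minimal sketch of the user-facing behavior, based on the tests added in this PR (the schema name and attribute are illustrative):

import datajoint as dj

schema = dj.Schema("my_pipeline")  # illustrative schema name

@schema
class Config(dj.Lookup):
    definition = """
    # global configuration: nothing above the separator, so the primary key is empty
    ---
    setting : varchar(100)
    """

Config.insert1({"setting": "default"})  # no key attributes to supply
print(Config.fetch1()["setting"])       # "default"
Config.insert1({"setting": "other"})    # raises dj.errors.DuplicateError: only one row allowed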

File tree

6 files changed: +155 −18 lines changed


src/datajoint/adapters/postgres.py

Lines changed: 27 additions & 6 deletions

@@ -990,26 +990,47 @@ def json_path_expr(self, column: str, path: str, return_type: str | None = None)
         path : str
             JSON path (e.g., 'field' or 'nested.field').
         return_type : str, optional
-            Return type specification (not used in PostgreSQL jsonb_extract_path_text).
+            Return type specification for casting (e.g., 'float', 'decimal(10,2)').
 
         Returns
         -------
         str
-            PostgreSQL jsonb_extract_path_text() expression.
+            PostgreSQL jsonb_extract_path_text() expression, with optional cast.
 
         Examples
         --------
         >>> adapter.json_path_expr('data', 'field')
         'jsonb_extract_path_text("data", \\'field\\')'
         >>> adapter.json_path_expr('data', 'nested.field')
         'jsonb_extract_path_text("data", \\'nested\\', \\'field\\')'
+        >>> adapter.json_path_expr('data', 'value', 'float')
+        'jsonb_extract_path_text("data", \\'value\\')::float'
         """
         quoted_col = self.quote_identifier(column)
-        # Split path by '.' for nested access
-        path_parts = path.split(".")
+        # Split path by '.' for nested access, handling array notation
+        path_parts = []
+        for part in path.split("."):
+            # Handle array access like field[0]
+            if "[" in part:
+                base, rest = part.split("[", 1)
+                path_parts.append(base)
+                # Extract array indices
+                indices = rest.rstrip("]").split("][")
+                path_parts.extend(indices)
+            else:
+                path_parts.append(part)
         path_args = ", ".join(f"'{part}'" for part in path_parts)
-        # Note: PostgreSQL jsonb_extract_path_text doesn't use return type parameter
-        return f"jsonb_extract_path_text({quoted_col}, {path_args})"
+        expr = f"jsonb_extract_path_text({quoted_col}, {path_args})"
+        # Add cast if return type specified
+        if return_type:
+            # Map DataJoint types to PostgreSQL types
+            pg_type = return_type.lower()
+            if pg_type in ("unsigned", "signed"):
+                pg_type = "integer"
+            elif pg_type == "double":
+                pg_type = "double precision"
+            expr = f"({expr})::{pg_type}"
+        return expr
 
     def translate_expression(self, expr: str) -> str:
         """

src/datajoint/condition.py

Lines changed: 18 additions & 8 deletions

@@ -31,14 +31,17 @@
 JSON_PATTERN = re.compile(r"^(?P<attr>\w+)(\.(?P<path>[\w.*\[\]]+))?(:(?P<type>[\w(,\s)]+))?$")
 
 
-def translate_attribute(key: str) -> tuple[dict | None, str]:
+def translate_attribute(key: str, adapter=None) -> tuple[dict | None, str]:
     """
     Translate an attribute key, handling JSON path notation.
 
     Parameters
     ----------
     key : str
         Attribute name, optionally with JSON path (e.g., ``"attr.path.field"``).
+    adapter : DatabaseAdapter, optional
+        Database adapter for backend-specific SQL generation.
+        If not provided, uses MySQL syntax for backward compatibility.
 
     Returns
     -------
@@ -53,9 +56,14 @@ def translate_attribute(key: str) -> tuple[dict | None, str]:
     if match["path"] is None:
         return match, match["attr"]
     else:
-        return match, "json_value(`{}`, _utf8mb4'$.{}'{})".format(
-            *[((f" returning {v}" if k == "type" else v) if v else "") for k, v in match.items()]
-        )
+        # Use adapter's json_path_expr if available, otherwise fall back to MySQL syntax
+        if adapter is not None:
+            return match, adapter.json_path_expr(match["attr"], match["path"], match["type"])
+        else:
+            # Legacy MySQL syntax for backward compatibility
+            return match, "json_value(`{}`, _utf8mb4'$.{}'{})".format(
+                *[((f" returning {v}" if k == "type" else v) if v else "") for k, v in match.items()]
+            )
 
 
 class PromiscuousOperand:
@@ -306,14 +314,16 @@ def make_condition(
 
     def prep_value(k, v):
         """prepare SQL condition"""
-        key_match, k = translate_attribute(k)
-        if key_match["path"] is None:
+        key_match, k = translate_attribute(k, adapter)
+        is_json_path = key_match is not None and key_match.get("path") is not None
+
+        if not is_json_path:
             k = adapter.quote_identifier(k)
-        if query_expression.heading[key_match["attr"]].json and key_match["path"] is not None and isinstance(v, dict):
+        if is_json_path and isinstance(v, dict):
             return f"{k}='{json.dumps(v)}'"
         if v is None:
             return f"{k} IS NULL"
-        if query_expression.heading[key_match["attr"]].uuid:
+        if key_match is not None and query_expression.heading[key_match["attr"]].uuid:
             if not isinstance(v, uuid.UUID):
                 try:
                     v = uuid.UUID(v)
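
The net effect: translate_attribute() still produces MySQL json_value() syntax when no adapter is passed, and delegates to the adapter's json_path_expr() otherwise. A rough sketch of the no-adapter path ("params" is a made-up attribute name; the printed output is approximate):

from datajoint.condition import translate_attribute

match, sql = translate_attribute("params.threshold:float")
print(sql)
# roughly: json_value(`params`, _utf8mb4'$.threshold' returning float)

match, sql = translate_attribute("duration")  # no JSON path: name returned unchanged
print(sql)                                    # duration

When a backend adapter is supplied (as prep_value now does), the same key is instead rendered through adapter.json_path_expr(), e.g. jsonb_extract_path_text(...) on PostgreSQL.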

src/datajoint/declare.py

Lines changed: 14 additions & 2 deletions

@@ -473,7 +473,19 @@ def declare(
     attribute_sql.extend(job_metadata_sql)
 
     if not primary_key:
-        raise DataJointError("Table must have a primary key")
+        # Singleton table: add hidden sentinel attribute
+        primary_key = ["_singleton"]
+        singleton_comment = ":bool:singleton primary key"
+        sql_type = adapter.core_type_to_sql("bool")
+        singleton_sql = adapter.format_column_definition(
+            name="_singleton",
+            sql_type=sql_type,
+            nullable=False,
+            default="NOT NULL DEFAULT TRUE",
+            comment=singleton_comment,
+        )
+        attribute_sql.insert(0, singleton_sql)
+        column_comments["_singleton"] = singleton_comment
 
     pre_ddl = []  # DDL to run BEFORE CREATE TABLE (e.g., CREATE TYPE for enums)
     post_ddl = []  # DDL to run AFTER CREATE TABLE (e.g., COMMENT ON)
@@ -742,7 +754,7 @@ def compile_index(line: str, index_sql: list[str], adapter) -> None:
     """
 
     def format_attribute(attr):
-        match, attr = translate_attribute(attr)
+        match, attr = translate_attribute(attr, adapter)
        if match is None:
            return attr
        if match["path"] is None:
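
For illustration, here is a rough stand-in (not the real adapter API) showing the shape of the column definition the new branch requests; the actual SQL depends on each backend's core_type_to_sql and format_column_definition implementations:

def core_type_to_sql(core_type: str) -> str:
    # simplified stand-in: map DataJoint's core "bool" to a SQL type
    return {"bool": "boolean"}.get(core_type, core_type)

def format_column_definition(name, sql_type, nullable, default, comment) -> str:
    # simplified stand-in for the adapter method used above
    parts = [f'"{name}"', sql_type]
    parts.append(default if default else ("NULL" if nullable else "NOT NULL"))
    return " ".join(parts) + f"  -- {comment}"

print(format_column_definition(
    name="_singleton",
    sql_type=core_type_to_sql("bool"),
    nullable=False,
    default="NOT NULL DEFAULT TRUE",
    comment=":bool:singleton primary key",
))
# "_singleton" boolean NOT NULL DEFAULT TRUE  -- :bool:singleton primary key

Because _singleton is the sole primary-key column and always defaults to TRUE, a second insert collides on the primary key, which is what enforces the at-most-one-row behavior exercised in the tests below.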

src/datajoint/expression.py

Lines changed: 2 additions & 1 deletion

@@ -457,7 +457,8 @@ def proj(self, *attributes, **named_attributes):
         from other attributes available before the projection.
         Each attribute name can only be used once.
         """
-        named_attributes = {k: translate_attribute(v)[1] for k, v in named_attributes.items()}
+        adapter = self.connection.adapter if hasattr(self, "connection") and self.connection else None
+        named_attributes = {k: translate_attribute(v, adapter)[1] for k, v in named_attributes.items()}
         # new attributes in parentheses are included again with the new name without removing original
         duplication_pattern = re.compile(rf"^\s*\(\s*(?!{'|'.join(CONSTANT_LITERALS)})(?P<name>[a-zA-Z_]\w*)\s*\)\s*$")
         # attributes without parentheses renamed
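
A hypothetical projection showing why proj() needs the adapter: a JSON path inside a renamed attribute must be rendered in the backend's dialect. Session and its json attribute params are made-up names; the key syntax follows JSON_PATTERN from condition.py and is not runnable without a declared table:

subset = Session.proj(threshold="params.threshold:float")
# On MySQL this renders via json_value(...); on PostgreSQL, via jsonb_extract_path_text(...)::float.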

src/datajoint/version.py

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
 # version bump auto managed by Github Actions:
 # label_prs.yaml(prep), release.yaml(bump), post_release.yaml(edit)
 # manually set this version will be eventually overwritten by the above actions
-__version__ = "2.1.0a2"
+__version__ = "2.1.0a5"

tests/integration/test_declare.py

Lines changed: 93 additions & 0 deletions

@@ -368,3 +368,96 @@ class Table_With_Underscores(dj.Manual):
     schema_any(TableNoUnderscores)
     with pytest.raises(dj.DataJointError, match="must be alphanumeric in CamelCase"):
         schema_any(Table_With_Underscores)
+
+
+class TestSingletonTables:
+    """Tests for singleton tables (empty primary keys)."""
+
+    def test_singleton_declaration(self, schema_any):
+        """Singleton table creates correctly with hidden _singleton attribute."""
+
+        @schema_any
+        class Config(dj.Lookup):
+            definition = """
+            # Global configuration
+            ---
+            setting : varchar(100)
+            """
+
+        # Access attributes first to trigger lazy loading from database
+        visible_attrs = Config.heading.attributes
+        all_attrs = Config.heading._attributes
+
+        # Table should exist and have _singleton as hidden PK
+        assert "_singleton" in all_attrs
+        assert "_singleton" not in visible_attrs
+        assert Config.heading.primary_key == []  # Visible PK is empty for singleton
+
+    def test_singleton_insert_and_fetch(self, schema_any):
+        """Insert and fetch work without specifying _singleton."""
+
+        @schema_any
+        class Settings(dj.Lookup):
+            definition = """
+            ---
+            value : int32
+            """
+
+        # Insert without specifying _singleton
+        Settings.insert1({"value": 42})
+
+        # Fetch should work
+        result = Settings.fetch1()
+        assert result["value"] == 42
+        assert "_singleton" not in result  # Hidden attribute excluded
+
+    def test_singleton_uniqueness(self, schema_any):
+        """Second insert raises DuplicateError."""
+
+        @schema_any
+        class SingleValue(dj.Lookup):
+            definition = """
+            ---
+            data : varchar(50)
+            """
+
+        SingleValue.insert1({"data": "first"})
+
+        # Second insert should fail
+        with pytest.raises(dj.errors.DuplicateError):
+            SingleValue.insert1({"data": "second"})
+
+    def test_singleton_with_multiple_attributes(self, schema_any):
+        """Singleton table with multiple secondary attributes."""
+
+        @schema_any
+        class PipelineConfig(dj.Lookup):
+            definition = """
+            # Pipeline configuration singleton
+            ---
+            version : varchar(20)
+            max_workers : int32
+            debug_mode : bool
+            """
+
+        PipelineConfig.insert1({"version": "1.0.0", "max_workers": 4, "debug_mode": False})
+
+        result = PipelineConfig.fetch1()
+        assert result["version"] == "1.0.0"
+        assert result["max_workers"] == 4
+        assert result["debug_mode"] == 0  # bool stored as tinyint
+
+    def test_singleton_describe(self, schema_any):
+        """Describe should show the singleton nature."""
+
+        @schema_any
+        class Metadata(dj.Lookup):
+            definition = """
+            ---
+            info : varchar(255)
+            """
+
+        description = Metadata.describe()
+        # Description should show just the secondary attribute
+        assert "info" in description
+        # _singleton is hidden, implementation detail
