fix: export_serving — Arrow-based copy, rename to analytics.duckdb

Three issues fixed:

1. Cross-connection COPY: DuckDB doesn't support referencing another
   connection's tables as src.serving.table. Replace with Arrow as
   intermediate: src reads to Arrow, dst.register() + CREATE TABLE.

2. Catalog/schema name collision: naming the export file serving.duckdb
   made DuckDB assign catalog name "serving" — same as the schema we
   create inside it. Every serving.table query became ambiguous. Rename
   to analytics.duckdb (catalog "analytics", schema "serving" = no clash).

   SERVING_DUCKDB_PATH values updated: serving.duckdb → analytics.duckdb
   in supervisor, service, bootstrap, dev_run.sh, .env.example, docker-compose.

3. Temp file: use _export.duckdb (not serving.duckdb.tmp) to avoid
   the same catalog collision during the write phase.

Verified: 6 tables exported, serving.* queries work read-only.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Deeman
2026-02-22 12:54:39 +01:00
parent ac8ab47448
commit 9ee7a3d9d3
7 changed files with 21 additions and 16 deletions

View File

@@ -29,12 +29,16 @@ def export_serving() -> None:
assert serving_path, "SERVING_DUCKDB_PATH must be set"
assert os.path.exists(pipeline_path), f"Pipeline DB not found: {pipeline_path}"
tmp_path = serving_path + ".tmp"
# Temp path must not start with "serving" — DuckDB names the catalog after
# the filename stem, so "serving.duckdb.tmp" → catalog "serving", which
# clashes with the schema we create inside it.
tmp_path = os.path.join(os.path.dirname(os.path.abspath(serving_path)), "_export.duckdb")
src = duckdb.connect(pipeline_path, read_only=True)
try:
tables = src.sql(
"SELECT table_name FROM information_schema.tables WHERE table_schema = 'serving' ORDER BY table_name"
"SELECT table_name FROM information_schema.tables"
" WHERE table_schema = 'serving' ORDER BY table_name"
).fetchall()
assert tables, f"No tables found in serving schema of {pipeline_path}"
logger.info(f"Exporting {len(tables)} serving tables: {[t[0] for t in tables]}")
@@ -43,10 +47,11 @@ def export_serving() -> None:
try:
dst.execute("CREATE SCHEMA IF NOT EXISTS serving")
for (table,) in tables:
dst.execute(
f"CREATE OR REPLACE TABLE serving.{table} AS "
f"SELECT * FROM src.serving.{table}",
)
# Read via Arrow so there is no cross-connection catalog ambiguity.
arrow_data = src.sql(f"SELECT * FROM serving.{table}").arrow()
dst.register("_src", arrow_data)
dst.execute(f"CREATE OR REPLACE TABLE serving.{table} AS SELECT * FROM _src")
dst.unregister("_src")
row_count = dst.sql(f"SELECT count(*) FROM serving.{table}").fetchone()[0]
logger.info(f" serving.{table}: {row_count:,} rows")
finally: