feat(extract): convert geonames to JSONL output
- cities_global.jsonl.gz replaces .json.gz (one city object per line)
- Empty placeholder writes a minimal .jsonl.gz (null row, filtered in staging)
- Eliminates the {"rows": [...]} blob wrapper and maximum_object_size workaround
stg_population_geonames: UNION ALL transition (jsonl_rows + blob_rows)
- jsonl_rows: read_json JSONL, explicit columns, no UNNEST
- blob_rows: existing UNNEST(rows) pattern with 40MB size limit retained
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -10,14 +10,14 @@ highest padel investment opportunity (white space markets).
 Requires: GEONAMES_USERNAME env var (free registration at geonames.org)
 
-Landing: {LANDING_DIR}/geonames/{year}/{month}/cities_global.json.gz
-Output: {"rows": [{"geoname_id": 2950159, "city_name": "Berlin",
-                   "country_code": "DE", "population": 3644826,
-                   "lat": 52.524, "lon": 13.411,
-                   "admin1_code": "16", "admin2_code": "00",
-                   "ref_year": 2024}], "count": N}
+Landing: {LANDING_DIR}/geonames/{year}/{month}/cities_global.jsonl.gz
+Output: one JSON object per line, e.g.:
+    {"geoname_id": 2950159, "city_name": "Berlin", "country_code": "DE",
+     "population": 3644826, "lat": 52.524, "lon": 13.411,
+     "admin1_code": "16", "admin2_code": "00", "ref_year": 2024}
+
 """
 
 import gzip
 import io
 import json
 import os
@@ -28,7 +28,7 @@ from pathlib import Path
 import niquests
 
 from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging
-from .utils import get_last_cursor, landing_path, write_gzip_atomic
+from .utils import compress_jsonl_atomic, get_last_cursor, landing_path
 
 logger = setup_logging("padelnomics.extract.geonames")
 
@@ -131,9 +131,12 @@ def extract(
         logger.warning("GEONAMES_USERNAME not set — writing empty placeholder so SQLMesh models can run")
         year, month = year_month.split("/")
         dest_dir = landing_path(landing_dir, "geonames", year, month)
-        dest = dest_dir / "cities_global.json.gz"
+        dest = dest_dir / "cities_global.jsonl.gz"
         if not dest.exists():
-            write_gzip_atomic(dest, b'{"rows": [], "count": 0}')
+            tmp = dest.with_suffix(".gz.tmp")
+            with gzip.open(tmp, "wt") as f:
+                f.write('{"geoname_id":null}\n')  # filtered by WHERE geoname_id IS NOT NULL
+            tmp.rename(dest)
         return {"files_written": 0, "files_skipped": 1, "bytes_written": 0}
 
     last_cursor = get_last_cursor(conn, EXTRACTOR_NAME)
@@ -164,9 +167,12 @@ def extract(
     logger.info("parsed %d global locations (pop ≥1K)", len(rows))
 
     dest_dir = landing_path(landing_dir, "geonames", year, month)
-    dest = dest_dir / "cities_global.json.gz"
-    payload = json.dumps({"rows": rows, "count": len(rows)}).encode()
-    bytes_written = write_gzip_atomic(dest, payload)
+    dest = dest_dir / "cities_global.jsonl.gz"
+    working_path = dest.with_suffix(".working.jsonl")
+    with open(working_path, "w") as f:
+        for row in rows:
+            f.write(json.dumps(row, separators=(",", ":")) + "\n")
+    bytes_written = compress_jsonl_atomic(working_path, dest)
     logger.info("written %s bytes compressed", f"{bytes_written:,}")
 
     return {
Reference in New Issue
Block a user