feat(extract): replace OpenWeatherMap with Open-Meteo weather extractor
Replaced the OWM extractor (8 locations, API key required, 14,600-call backfill over 30+ days) with Open-Meteo (12 locations, no API key, ERA5 reanalysis, full backfill in 12 API calls ~30 seconds). - Rename extract/openweathermap → extract/openmeteo (git mv) - Rewrite api.py: fetch_archive (ERA5, date-range) + fetch_recent (forecast, past_days=10 to cover ERA5 lag); 9 daily variables incl. et0 and VPD - Rewrite execute.py: _split_and_write() unzips parallel arrays into per-day flat JSON; no cursor / rate limiting / call cap needed - Update pipelines.py: --package openmeteo, timeout 120s (was 1200s) - Update fct_weather_daily.sql: flat Open-Meteo field names (temperature_2m_* etc.), remove pressure_afternoon_hpa, add et0_mm + vpd_max_kpa + is_high_vpd - Remove OPENWEATHERMAP_API_KEY from CLAUDE.md env vars table Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
20
extract/openmeteo/pyproject.toml
Normal file
20
extract/openmeteo/pyproject.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Packaging metadata for the Open-Meteo weather extractor package.
[project]
name = "openmeteo"
version = "0.1.0"
description = "Open-Meteo daily weather extractor for coffee-growing regions"
requires-python = ">=3.13"
dependencies = [
    "extract_core",
    "niquests>=3.14.1",
]

# Console entry points: daily incremental run and one-shot historical backfill.
[project.scripts]
extract_weather = "openmeteo.execute:extract_weather"
extract_weather_backfill = "openmeteo.execute:extract_weather_backfill"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# src-layout: the importable package lives under src/openmeteo.
[tool.hatch.build.targets.wheel]
packages = ["src/openmeteo"]
|
||||
1
extract/openmeteo/src/openmeteo/__init__.py
Normal file
1
extract/openmeteo/src/openmeteo/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
116
extract/openmeteo/src/openmeteo/api.py
Normal file
116
extract/openmeteo/src/openmeteo/api.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""Open-Meteo weather API client.
|
||||
|
||||
Two endpoints:
|
||||
|
||||
fetch_archive(session, lat, lon, start_date, end_date) -> dict
|
||||
ERA5 reanalysis data — consistent, scientifically validated.
|
||||
Available from 1940 to ~5 days ago (reanalysis processing lag).
|
||||
Use for historical backfill.
|
||||
|
||||
fetch_recent(session, lat, lon, past_days) -> dict
|
||||
Forecast model blended with recent observations.
|
||||
Covers the last N days + today (fills the ERA5 lag window).
|
||||
Use for daily updates.
|
||||
|
||||
Both return the same structure:
|
||||
{
|
||||
"daily": {
|
||||
"time": ["2020-01-01", "2020-01-02", ...],
|
||||
"temperature_2m_max": [28.5, 27.1, ...],
|
||||
...
|
||||
}
|
||||
}
|
||||
|
||||
No API key required. No rate limits for reasonable usage (~12 calls/day).
|
||||
"""
|
||||
|
||||
import niquests
|
||||
|
||||
# Open-Meteo endpoint roots: the archive host serves ERA5 reanalysis,
# the forecast host serves the blended recent-observation/forecast model.
ARCHIVE_URL = "https://archive-api.open-meteo.com/v1/archive"
FORECAST_URL = "https://api.open-meteo.com/v1/forecast"

# Per-request timeout and a sanity cap on response size.
HTTP_TIMEOUT_SECONDS = 60
MAX_RESPONSE_BYTES = 2_000_000  # a multi-year payload is ~200 KB, so 2 MB is ample

# Daily variables requested for every location, metric units throughout.
# (wind_speed_unit=ms is sent separately so wind arrives in m/s rather than
# Open-Meteo's default km/h.)
_DAILY_VARIABLE_NAMES = (
    "temperature_2m_max",
    "temperature_2m_min",
    "temperature_2m_mean",
    "precipitation_sum",
    "wind_speed_10m_max",
    "relative_humidity_2m_max",
    "cloud_cover_mean",
    "et0_fao_evapotranspiration",   # FAO Penman-Monteith ET — direct crop water demand signal
    "vapour_pressure_deficit_max",  # VPD >1.5 kPa = significant plant water stress
)
DAILY_VARIABLES = ",".join(_DAILY_VARIABLE_NAMES)
|
||||
|
||||
|
||||
def _get(session: niquests.Session, url: str, params: dict) -> dict:
    """Issue a GET, validate the response, and return the parsed JSON body.

    Raises AssertionError on a non-200 status, an oversized body, or a
    response missing the expected 'daily'/'time' structure; raises
    ValueError when Open-Meteo reports an error inside an HTTP 200 body.
    """
    resp = session.get(url, params=params, timeout=HTTP_TIMEOUT_SECONDS)

    assert resp.status_code == 200, (
        f"Open-Meteo returned HTTP {resp.status_code}: {resp.text[:300]}"
    )
    body_size = len(resp.content)
    assert body_size <= MAX_RESPONSE_BYTES, (
        f"Open-Meteo response unexpectedly large: {body_size:,} bytes"
    )

    payload = resp.json()
    assert isinstance(payload, dict), f"Expected dict, got {type(payload)}"

    # Some failures arrive as HTTP 200 with {"error": true, "reason": ...}.
    if payload.get("error"):
        raise ValueError(f"Open-Meteo API error: {payload.get('reason', payload)}")

    assert "daily" in payload, f"Open-Meteo response missing 'daily' key: {list(payload.keys())}"
    assert "time" in payload["daily"], "Open-Meteo 'daily' missing 'time' array"

    return payload
|
||||
|
||||
|
||||
def fetch_archive(
    session: niquests.Session,
    lat: float,
    lon: float,
    start_date: str,
    end_date: str,
) -> dict:
    """Fetch ERA5 reanalysis daily data for a date range (YYYY-MM-DD strings).

    Returns the parsed Open-Meteo response dict (parallel arrays under 'daily').
    Raises AssertionError on malformed arguments or an invalid response;
    ValueError when the API reports an error in an HTTP 200 body.
    """
    # Coordinate bounds catch swapped lat/lon bugs before they become a
    # confusing API error (the previous OWM client validated these too).
    assert -90.0 <= lat <= 90.0, f"lat out of range: {lat}"
    assert -180.0 <= lon <= 180.0, f"lon out of range: {lon}"
    assert start_date and len(start_date) == 10, f"start_date must be YYYY-MM-DD, got {start_date!r}"
    assert end_date and len(end_date) == 10, f"end_date must be YYYY-MM-DD, got {end_date!r}"
    assert start_date <= end_date, f"start_date {start_date} must be <= end_date {end_date}"

    return _get(session, ARCHIVE_URL, {
        "latitude": lat,
        "longitude": lon,
        "start_date": start_date,
        "end_date": end_date,
        "daily": DAILY_VARIABLES,
        "wind_speed_unit": "ms",  # metric m/s instead of the default km/h
        "timezone": "UTC",        # daily buckets aligned to UTC midnight
    })
|
||||
|
||||
|
||||
def fetch_recent(
    session: niquests.Session,
    lat: float,
    lon: float,
    past_days: int = 10,
) -> dict:
    """Fetch recent weather via Open-Meteo forecast model (fills ERA5 lag window).

    past_days=10 captures the ~5-day ERA5 lag plus buffer for missed daily runs.
    Returns the parsed response dict (parallel arrays under 'daily').
    Raises AssertionError on malformed arguments or an invalid response;
    ValueError when the API reports an error in an HTTP 200 body.
    """
    # Coordinate bounds catch swapped lat/lon bugs before they become a
    # confusing API error (the previous OWM client validated these too).
    assert -90.0 <= lat <= 90.0, f"lat out of range: {lat}"
    assert -180.0 <= lon <= 180.0, f"lon out of range: {lon}"
    # 92 is the API's own maximum for past_days.
    assert 1 <= past_days <= 92, f"past_days must be 1-92, got {past_days}"

    return _get(session, FORECAST_URL, {
        "latitude": lat,
        "longitude": lon,
        "daily": DAILY_VARIABLES,
        "wind_speed_unit": "ms",  # metric m/s instead of the default km/h
        "timezone": "UTC",
        "past_days": past_days,
        "forecast_days": 1,       # only today; we never land future forecasts
    })
|
||||
212
extract/openmeteo/src/openmeteo/execute.py
Normal file
212
extract/openmeteo/src/openmeteo/execute.py
Normal file
@@ -0,0 +1,212 @@
|
||||
"""Open-Meteo daily weather extraction for coffee-growing regions.
|
||||
|
||||
Two entry points:
|
||||
|
||||
extract_weather()
|
||||
Daily run: fetches the last 10 days for all 12 locations.
|
||||
10 days covers the ~5-day ERA5 reanalysis lag plus buffer for missed runs.
|
||||
Uses the forecast API (fills the recent window not yet in ERA5 archive).
|
||||
12 API calls total. Completes in ~10 seconds.
|
||||
|
||||
extract_weather_backfill()
|
||||
Historical fill: fetches 2020-01-01 → yesterday for all 12 locations.
|
||||
Uses the archive API. One date-range request per location = 12 total calls.
|
||||
Completes in ~30 seconds. No cursor needed.
|
||||
|
||||
Landing path: LANDING_DIR/weather/{location_id}/{year}/{date}.json.gz
|
||||
|
||||
Each file is a flat JSON object with Open-Meteo variable names:
|
||||
{"date": "2020-01-01", "temperature_2m_max": 28.5, "precipitation_sum": 12.5, ...}
|
||||
|
||||
No API key required. No rate limiting. Fully idempotent (file existence check).
|
||||
"""
|
||||
|
||||
import gzip
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from datetime import date, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
from extract_core import end_run, landing_path, open_state_db, start_run, write_bytes_atomic
|
||||
|
||||
from openmeteo.api import fetch_archive, fetch_recent
|
||||
from openmeteo.locations import LOCATIONS
|
||||
|
||||
# Console logging for extractor runs (cron captures stdout).
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    handlers=[logging.StreamHandler(sys.stdout)],
)
logger = logging.getLogger("Open-Meteo Extractor")

# Landing-zone layout: LANDING_DIR/weather/{location_id}/{year}/{date}.json.gz
LANDING_DIR = Path(os.getenv("LANDING_DIR", "data/landing"))
LANDING_SUBDIR = "weather"

# Run-registry extractor names, one per entry point.
EXTRACTOR_DAILY = "openmeteo_daily"
EXTRACTOR_BACKFILL = "openmeteo_backfill"

# Earliest date the backfill fetches.
BACKFILL_START = date(2020, 1, 1)

# Courtesy pause between per-location calls on the free community API.
SLEEP_BETWEEN_LOCATIONS_SECONDS = 0.5
|
||||
|
||||
|
||||
# ── helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _write_day_file(location_id: str, date_str: str, record: dict) -> int:
    """Write one day's weather record as gzipped JSON.

    Path: LANDING_DIR/weather/{location_id}/{year}/{date}.json.gz
    Returns bytes written, or 0 if the file already exists (idempotent skip).
    """
    # Validate inputs with explicit messages — a bad date_str would otherwise
    # silently create a malformed landing path. Mirrors the checks the old
    # OWM _write_weather_file performed.
    assert location_id, "location_id must not be empty"
    assert date_str and len(date_str) == 10, f"date_str must be YYYY-MM-DD, got {date_str!r}"
    assert isinstance(record, dict) and record, "record must be a non-empty dict"

    year = date_str[:4]
    dest_dir = landing_path(LANDING_DIR, LANDING_SUBDIR, location_id, year)
    local_file = dest_dir / f"{date_str}.json.gz"

    # Past weather is immutable: (location_id, date) never changes once written.
    if local_file.exists():
        return 0

    compressed = gzip.compress(json.dumps(record, separators=(",", ":")).encode("utf-8"))
    bytes_written = write_bytes_atomic(local_file, compressed)
    logger.debug(f"Stored {local_file} ({bytes_written:,} bytes)")
    return bytes_written
|
||||
|
||||
|
||||
def _split_and_write(location_id: str, response: dict) -> tuple[int, int, int]:
    """Explode an Open-Meteo parallel-array response into per-day JSON.gz files.

    response['daily'] maps each variable name to an array aligned index-for-index
    with daily['time']; each index becomes one flat per-day record on disk.

    Returns (files_written, files_skipped, bytes_written).
    """
    daily = response["daily"]
    dates = daily["time"]
    var_names = [name for name in daily if name != "time"]

    written = 0
    skipped = 0
    total_bytes = 0

    for idx, day in enumerate(dates):
        if not day:
            continue

        # One flat dict per day; a short variable array pads with None.
        record = {"date": day}
        for name in var_names:
            series = daily[name]
            record[name] = series[idx] if idx < len(series) else None

        nbytes = _write_day_file(location_id, day, record)
        if nbytes > 0:
            written += 1
            total_bytes += nbytes
        else:
            skipped += 1

    return written, skipped, total_bytes
|
||||
|
||||
|
||||
# ── daily extractor ───────────────────────────────────────────────────────────
|
||||
|
||||
def extract_weather() -> None:
    """Fetch the last 10 days of weather for all 12 locations.

    Uses the Open-Meteo forecast API (past_days=10) to cover the ~5-day ERA5
    lag plus a buffer for missed runs. 12 API calls, ~10 seconds total.
    """
    conn = open_state_db(LANDING_DIR)
    run_id = start_run(conn, EXTRACTOR_DAILY)
    new_files = 0
    old_files = 0
    total_bytes = 0

    try:
        with niquests.Session() as session:
            for loc in LOCATIONS:
                logger.info(f"Fetching recent: {loc['id']} ({loc['country']})")
                payload = fetch_recent(session, loc["lat"], loc["lon"], past_days=10)
                w, s, b = _split_and_write(loc["id"], payload)
                new_files += w
                old_files += s
                total_bytes += b
                time.sleep(SLEEP_BETWEEN_LOCATIONS_SECONDS)

        end_run(
            conn, run_id,
            status="success",
            files_written=new_files,
            files_skipped=old_files,
            bytes_written=total_bytes,
            cursor_value=date.today().isoformat(),
        )
        logger.info(f"Daily weather complete: {new_files} new, {old_files} skipped")
    except Exception as e:
        # Record the failure in the run registry, then let the scheduler see it.
        end_run(conn, run_id, status="failed", error_message=str(e))
        raise
    finally:
        conn.close()
|
||||
|
||||
|
||||
# ── backfill extractor ────────────────────────────────────────────────────────
|
||||
|
||||
def extract_weather_backfill() -> None:
    """Fetch full weather history (2020-01-01 → yesterday) for all 12 locations.

    Uses the Open-Meteo archive API (ERA5 reanalysis): one date-range request
    per location, 12 calls total, ~30 seconds. No cursor needed.

    Idempotent — per-day files already on disk are skipped while splitting the
    response, so re-running at any time only writes what is missing.
    """
    yesterday = (date.today() - timedelta(days=1)).isoformat()
    start_date = BACKFILL_START.isoformat()

    conn = open_state_db(LANDING_DIR)
    run_id = start_run(conn, EXTRACTOR_BACKFILL)
    new_files = 0
    old_files = 0
    total_bytes = 0

    try:
        with niquests.Session() as session:
            for loc in LOCATIONS:
                logger.info(
                    f"Backfill {loc['id']} ({loc['country']}) "
                    f"{start_date} → {yesterday}"
                )
                payload = fetch_archive(
                    session, loc["lat"], loc["lon"],
                    start_date=start_date,
                    end_date=yesterday,
                )
                w, s, b = _split_and_write(loc["id"], payload)
                new_files += w
                old_files += s
                total_bytes += b
                logger.info(f" {loc['id']}: {w} new, {s} already existed")
                time.sleep(SLEEP_BETWEEN_LOCATIONS_SECONDS)

        end_run(
            conn, run_id,
            status="success",
            files_written=new_files,
            files_skipped=old_files,
            bytes_written=total_bytes,
            cursor_value=yesterday,
        )
        logger.info(
            f"Backfill complete: {new_files} new files, "
            f"{old_files} already existed"
        )
    except Exception as e:
        # Record the failure in the run registry, then re-raise for the caller.
        end_run(conn, run_id, status="failed", error_message=str(e))
        raise
    finally:
        conn.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Direct invocation runs the daily extractor; backfill has its own entry point.
    extract_weather()
|
||||
@@ -1,20 +0,0 @@
|
||||
# Packaging metadata for the (removed) OpenWeatherMap extractor package.
[project]
name = "openweathermap"
version = "0.1.0"
description = "OpenWeatherMap daily weather extractor for coffee-growing regions"
requires-python = ">=3.13"
dependencies = [
    "extract_core",
    "niquests>=3.14.1",
]

# Console entry points: daily incremental run and cursor-based backfill.
[project.scripts]
extract_weather = "openweathermap.execute:extract_weather"
extract_weather_backfill = "openweathermap.execute:extract_weather_backfill"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# src-layout: the importable package lives under src/openweathermap.
[tool.hatch.build.targets.wheel]
packages = ["src/openweathermap"]
|
||||
@@ -1,76 +0,0 @@
|
||||
"""Thin client for the OpenWeatherMap One Call API 3.0 — Day Summary endpoint.
|
||||
|
||||
Endpoint: GET https://api.openweathermap.org/data/3.0/onecall/day_summary
|
||||
Docs: https://openweathermap.org/api/one-call-3#history_daily_aggregation
|
||||
|
||||
Returns one JSON object per (lat, lon, date) with daily aggregates:
|
||||
temperature.{min,max,morning,afternoon,evening,night}
|
||||
precipitation.total
|
||||
humidity.afternoon
|
||||
cloud_cover.afternoon
|
||||
wind.max.{speed,direction}
|
||||
pressure.afternoon
|
||||
|
||||
This module contains only the HTTP call and basic response validation.
|
||||
All business logic (file storage, rate limiting, cursor tracking) lives in execute.py.
|
||||
"""
|
||||
|
||||
import niquests
|
||||
|
||||
# One Call 3.0 day-summary endpoint plus request guardrails.
OWM_BASE_URL = "https://api.openweathermap.org/data/3.0/onecall/day_summary"
HTTP_TIMEOUT_SECONDS = 30   # per-request HTTP timeout
MAX_RESPONSE_BYTES = 10_000  # a day summary is ~500 bytes; 10 KB is a generous bound
|
||||
|
||||
|
||||
class RateLimitError(Exception):
    """Signals an HTTP 429 from OWM; the caller decides how to back off and retry."""
|
||||
|
||||
|
||||
def fetch_day_summary(
    session: niquests.Session,
    lat: float,
    lon: float,
    date_str: str,
    api_key: str,
) -> dict:
    """Fetch the OWM One Call 3.0 day summary for one (lat, lon, date).

    date_str must be YYYY-MM-DD. Returns the parsed JSON dict on success.

    Raises RateLimitError on HTTP 429 — caller is responsible for sleeping
    and retrying. Raises AssertionError on any other non-200 status or a
    malformed/oversized response body.
    """
    assert api_key, "api_key must not be empty"
    assert date_str and len(date_str) == 10, f"date_str must be YYYY-MM-DD, got {date_str!r}"
    assert -90.0 <= lat <= 90.0, f"lat out of range: {lat}"
    assert -180.0 <= lon <= 180.0, f"lon out of range: {lon}"

    query = {
        "lat": lat,
        "lon": lon,
        "date": date_str,
        "appid": api_key,
        "units": "metric",
    }
    response = session.get(OWM_BASE_URL, params=query, timeout=HTTP_TIMEOUT_SECONDS)

    # 429 is recoverable — surface it as a distinct exception type.
    if response.status_code == 429:
        raise RateLimitError(f"OWM rate limit hit for lat={lat} lon={lon} date={date_str}")

    assert response.status_code == 200, (
        f"OWM API returned HTTP {response.status_code} for "
        f"lat={lat} lon={lon} date={date_str}: {response.text[:200]}"
    )
    assert len(response.content) <= MAX_RESPONSE_BYTES, (
        f"OWM response unexpectedly large ({len(response.content)} bytes) for {date_str}"
    )

    data = response.json()
    assert isinstance(data, dict), f"Expected dict response, got {type(data)}"
    assert "date" in data, f"OWM response missing 'date' field: {list(data.keys())}"

    return data
|
||||
@@ -1,330 +0,0 @@
|
||||
"""OpenWeatherMap daily weather extraction for coffee-growing regions.
|
||||
|
||||
Two entry points:
|
||||
|
||||
extract_weather()
|
||||
Daily run: fetches yesterday + today for all 8 locations (16 calls max).
|
||||
Yesterday is included to cover the midnight edge case — if the daily job
|
||||
fires just after midnight UTC, today's OWM data may still be partial.
|
||||
Idempotent: skips if the landing file already exists.
|
||||
|
||||
extract_weather_backfill()
|
||||
Historical fill: iterates (date, location) pairs from 2020-01-01 to
|
||||
yesterday. Bounded to MAX_CALLS_PER_BACKFILL_RUN per run; re-run daily
|
||||
to advance. Resumes from cursor on restart.
|
||||
|
||||
Landing path: LANDING_DIR/weather/{location_id}/{year}/{date}.json.gz
|
||||
|
||||
Idempotency: file existence check. Past weather is immutable — (location_id, date)
|
||||
uniquely identifies a file that never changes once written.
|
||||
|
||||
Backfill cursor format: '{location_id}:{date}' (e.g. 'brazil_parana:2022-07-15').
|
||||
Encodes both dimensions so a mid-run crash resumes at the exact (location, date) pair.
|
||||
"""
|
||||
|
||||
import gzip
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from datetime import date, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
from extract_core import end_run, get_last_cursor, landing_path, open_state_db, start_run, write_bytes_atomic
|
||||
|
||||
from openweathermap.api import RateLimitError, fetch_day_summary
|
||||
from openweathermap.locations import LOCATIONS
|
||||
|
||||
# Console logging for extractor runs (cron captures stdout).
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    handlers=[logging.StreamHandler(sys.stdout)],
)
logger = logging.getLogger("OWM Weather Extractor")

# Landing-zone layout: LANDING_DIR/weather/{location_id}/{year}/{date}.json.gz
LANDING_DIR = Path(os.getenv("LANDING_DIR", "data/landing"))
LANDING_SUBDIR = "weather"

# Run-registry extractor names, one per entry point.
EXTRACTOR_DAILY = "owm_weather_daily"
EXTRACTOR_BACKFILL = "owm_weather_backfill"

# Rate limiting: the OWM free tier allows 1000 calls/day (~0.7/s).
# 1.5 s between daily-run calls stays comfortably under the limit;
# backfill uses 2.0 s since it makes many sequential calls.
SLEEP_BETWEEN_CALLS_SECONDS = 1.5
SLEEP_BETWEEN_BACKFILL_CALLS_SECONDS = 2.0

# On HTTP 429: wait 60 s, retry once; if still 429, abort the run.
SLEEP_ON_RATE_LIMIT_SECONDS = 60
MAX_RATE_LIMIT_RETRIES = 1

# Cap backfill at 500 calls per run (~17 min at 2 s/call).
# The 5-year backfill = 14,600 calls → ~30 runs; re-run daily until complete.
MAX_CALLS_PER_BACKFILL_RUN = 500
|
||||
|
||||
|
||||
# ── helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
def _write_weather_file(location_id: str, date_str: str, payload: dict) -> int:
    """Gzip payload JSON and write it atomically to the landing zone.

    Path: LANDING_DIR/weather/{location_id}/{year}/{date}.json.gz
    Returns bytes_written, or 0 when the file already exists (idempotent skip).
    """
    assert location_id, "location_id must not be empty"
    assert date_str and len(date_str) == 10, f"date_str must be YYYY-MM-DD, got {date_str!r}"
    assert isinstance(payload, dict) and payload, "payload must be a non-empty dict"

    target_dir = landing_path(LANDING_DIR, LANDING_SUBDIR, location_id, date_str[:4])
    target = target_dir / f"{date_str}.json.gz"

    # Past weather never changes: an existing file means nothing to do.
    if target.exists():
        logger.debug(f"Already exists, skipping: {target}")
        return 0

    blob = json.dumps(payload, separators=(",", ":")).encode("utf-8")
    nbytes = write_bytes_atomic(target, gzip.compress(blob))
    logger.info(f"Stored {target} ({nbytes:,} bytes)")
    return nbytes
|
||||
|
||||
|
||||
def _fetch_with_retry(session: niquests.Session, loc: dict, date_str: str, api_key: str) -> dict | None:
    """Fetch the OWM day summary, retrying once after a sleep on HTTP 429.

    Returns the JSON dict on success, or None if the rate limit persists
    after the allowed retries.
    """
    retries_left = MAX_RATE_LIMIT_RETRIES
    while True:
        try:
            return fetch_day_summary(session, loc["lat"], loc["lon"], date_str, api_key)
        except RateLimitError:
            if retries_left <= 0:
                logger.error(f"Rate limit persisted after retry for {loc['id']} {date_str}")
                return None
            retries_left -= 1
            logger.warning(
                f"Rate limit hit for {loc['id']} {date_str} — "
                f"sleeping {SLEEP_ON_RATE_LIMIT_SECONDS}s before retry"
            )
            time.sleep(SLEEP_ON_RATE_LIMIT_SECONDS)
|
||||
|
||||
|
||||
def _file_exists(location_id: str, date_str: str) -> bool:
    """True if the landing file for (location_id, date) is already on disk."""
    target = LANDING_DIR / LANDING_SUBDIR / location_id / date_str[:4] / f"{date_str}.json.gz"
    return target.exists()
|
||||
|
||||
|
||||
# ── daily extractor ───────────────────────────────────────────────────────────
|
||||
|
||||
def extract_weather() -> None:
    """Fetch yesterday + today weather for all 8 coffee-growing locations.

    Up to 16 API calls. Yesterday covers the midnight edge case (today's OWM
    data may still be partial just after 00:00 UTC). Days whose files already
    exist are skipped, so re-running costs zero API calls (fully idempotent).
    """
    api_key = os.environ.get("OPENWEATHERMAP_API_KEY", "")
    assert api_key, "OPENWEATHERMAP_API_KEY environment variable must be set"

    today = date.today()
    target_dates = [(today - timedelta(days=1)).isoformat(), today.isoformat()]

    conn = open_state_db(LANDING_DIR)
    run_id = start_run(conn, EXTRACTOR_DAILY)
    written = 0
    skipped = 0
    total_bytes = 0

    try:
        with niquests.Session() as session:
            for day in target_dates:
                for loc in LOCATIONS:
                    # Skip without an API call when the file is already landed.
                    if _file_exists(loc["id"], day):
                        logger.info(f"Already exists: {loc['id']} {day}")
                        skipped += 1
                        continue

                    payload = _fetch_with_retry(session, loc, day, api_key)
                    if payload is None:
                        logger.error(f"Skipping {loc['id']} {day} after persistent rate limit")
                        continue

                    nbytes = _write_weather_file(loc["id"], day, payload)
                    if nbytes > 0:
                        written += 1
                        total_bytes += nbytes
                    else:
                        skipped += 1

                    time.sleep(SLEEP_BETWEEN_CALLS_SECONDS)

        end_run(
            conn, run_id,
            status="success",
            files_written=written,
            files_skipped=skipped,
            bytes_written=total_bytes,
            cursor_value=today.isoformat(),
        )
        logger.info(f"Daily weather complete: {written} new, {skipped} skipped")
    except Exception as e:
        end_run(conn, run_id, status="failed", error_message=str(e))
        raise
    finally:
        conn.close()
|
||||
|
||||
|
||||
# ── backfill extractor ────────────────────────────────────────────────────────
|
||||
|
||||
def extract_weather_backfill() -> None:
    """Fill historical weather data from 2020-01-01 to yesterday.

    Iterates (date, location) pairs in date-ascending, LOCATIONS-list order.
    Bounded to MAX_CALLS_PER_BACKFILL_RUN per run — re-run daily to advance.

    Cursor format: '{location_id}:{date}' (e.g. 'brazil_parana:2022-07-15').
    Encodes both dimensions: on resume, all pairs at or before the cursor are
    skipped (via cursor comparison first, then file-existence check).

    5-year backfill (2020–2025) = 14,600 calls. At 500/run = ~30 runs.

    429 handling: sleep 60s, one retry. If still 429, save cursor and exit
    with status='failed' so the cursor does not advance beyond the last
    successfully written pair. Safe to re-run the next day.
    """
    api_key = os.environ.get("OPENWEATHERMAP_API_KEY", "")
    assert api_key, "OPENWEATHERMAP_API_KEY environment variable must be set"

    start = date(2020, 1, 1)
    end = date.today() - timedelta(days=1)  # never fetch today in backfill

    conn = open_state_db(LANDING_DIR)
    run_id = start_run(conn, EXTRACTOR_BACKFILL)
    files_written = 0
    files_skipped = 0
    bytes_written_total = 0
    calls_made = 0
    # Advanced only after a pair is durably on disk (written or confirmed present),
    # so a crash can never move the cursor past unlanded data.
    last_cursor: str | None = None

    # Load resume cursor from last successful run
    resume_cursor = get_last_cursor(conn, EXTRACTOR_BACKFILL)
    if resume_cursor:
        logger.info(f"Resuming backfill from cursor: {resume_cursor}")
    else:
        logger.info(f"Starting fresh backfill from {start.isoformat()}")

    # Parse cursor into (location_id, date_str) for skip comparison
    resume_location_id: str | None = None
    resume_date_str: str | None = None
    if resume_cursor and ":" in resume_cursor:
        resume_location_id, resume_date_str = resume_cursor.split(":", 1)

    location_ids = [loc["id"] for loc in LOCATIONS]
    # -1 means "no valid resume location": every loc_idx compares greater,
    # so nothing is skipped by the loc_idx half of the cursor comparison.
    resume_loc_idx = -1
    if resume_location_id and resume_location_id in location_ids:
        resume_loc_idx = location_ids.index(resume_location_id)

    try:
        with niquests.Session() as session:
            current = start
            while current <= end:
                date_str = current.isoformat()

                for loc in LOCATIONS:
                    loc_idx = location_ids.index(loc["id"])

                    # Cursor-based skip: (date, loc_idx) <= (resume_date, resume_loc_idx)
                    # This skips everything already processed in previous runs.
                    if resume_date_str:
                        if date_str < resume_date_str:
                            files_skipped += 1
                            continue
                        if date_str == resume_date_str and loc_idx <= resume_loc_idx:
                            files_skipped += 1
                            continue

                    # File-existence check: idempotency guard for files already on disk
                    # (e.g. written by the daily extractor, or a previous partial run)
                    if _file_exists(loc["id"], date_str):
                        files_skipped += 1
                        last_cursor = f"{loc['id']}:{date_str}"
                        continue

                    # Per-run call cap — checked only before a real API call,
                    # so cursor/file skips above are free and don't consume budget.
                    if calls_made >= MAX_CALLS_PER_BACKFILL_RUN:
                        logger.info(
                            f"Reached cap of {MAX_CALLS_PER_BACKFILL_RUN} calls. "
                            f"Re-run to continue from {last_cursor or resume_cursor}"
                        )
                        end_run(
                            conn, run_id,
                            status="success",
                            files_written=files_written,
                            files_skipped=files_skipped,
                            bytes_written=bytes_written_total,
                            cursor_value=last_cursor or resume_cursor,
                        )
                        return

                    data = _fetch_with_retry(session, loc, date_str, api_key)
                    calls_made += 1

                    if data is None:
                        logger.warning(f"Persistent rate limit at {loc['id']} {date_str} — stopping run")
                        end_run(
                            conn, run_id,
                            status="failed",
                            files_written=files_written,
                            files_skipped=files_skipped,
                            bytes_written=bytes_written_total,
                            cursor_value=last_cursor or resume_cursor,
                            error_message="Persistent rate limit — resume from cursor",
                        )
                        return

                    bw = _write_weather_file(loc["id"], date_str, data)
                    if bw > 0:
                        files_written += 1
                        bytes_written_total += bw
                    else:
                        files_skipped += 1

                    last_cursor = f"{loc['id']}:{date_str}"
                    time.sleep(SLEEP_BETWEEN_BACKFILL_CALLS_SECONDS)

                current += timedelta(days=1)

        final_cursor = last_cursor or resume_cursor or end.isoformat()
        logger.info(
            f"Backfill complete: {files_written} written, "
            f"{files_skipped} skipped, {calls_made} API calls"
        )
        end_run(
            conn, run_id,
            status="success",
            files_written=files_written,
            files_skipped=files_skipped,
            bytes_written=bytes_written_total,
            cursor_value=final_cursor,
        )
    except Exception as e:
        end_run(
            conn, run_id,
            status="failed",
            files_written=files_written,
            files_skipped=files_skipped,
            bytes_written=bytes_written_total,
            cursor_value=last_cursor or resume_cursor,
            error_message=str(e),
        )
        raise
    finally:
        conn.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Direct invocation runs the daily extractor; backfill has its own entry point.
    extract_weather()
|
||||
Reference in New Issue
Block a user