Compare commits

...

5 Commits

Author SHA1 Message Date
Deeman
b2ffad055b fix(supervisor): use file path for export_serving (not -m module syntax)
All checks were successful
CI / test (push) Successful in 1m0s
CI / tag (push) Successful in 3s
-m padelnomics.export_serving resolves to web package, not src/padelnomics.
src/padelnomics is not a uv workspace member so it's not importable by name.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-07 18:08:46 +01:00
Deeman
544891611f feat(transform): opportunity score v4 — market validation + population-weighted aggregation
All checks were successful
CI / test (push) Successful in 57s
CI / tag (push) Successful in 2s
Two targeted fixes for inflated country scores (ES 83, SE 77):

1. pseo_country_overview: replace AVG() with population-weighted averages
   for avg_opportunity_score and avg_market_score. Madrid/Barcelona now
   dominate Spain's average instead of hundreds of 30K-town white-space
   towns. Expected ES drop from ~83 to ~55-65.

2. location_profiles: replace dead sports culture component (10 pts,
   tennis data all zeros) with market validation signal.
   Split scored CTE into: market_scored → country_market → scored.
   country_market aggregates AVG(market_score) per country from cities
   with padel courts (market_score > 0), so zero-court locations don't
   dilute the signal. ES (~60/100) → ~6 pts. SE (~35/100) → ~3.5 pts.
   NULL → 0.5 neutral → 5 pts (untested market, not penalised).

Score budget unchanged: 25+20+30+15+10 = 100 pts.
No new models, no new data sources, no cycles.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-07 17:23:11 +01:00
Deeman
b071199895 fix(docker): copy content/ directory into image
All checks were successful
CI / test (push) Successful in 54s
CI / tag (push) Successful in 2s
content/articles/ holds the cornerstone .md source files which
_sync_static_articles() reads on every /admin/articles load.
Without this COPY they were absent from the container.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-07 15:03:44 +01:00
Deeman
af536f22ea refactor: introduce REPO_ROOT in core.py, replace all CWD-relative paths
All checks were successful
CI / test (push) Successful in 56s
CI / tag (push) Successful in 2s
2026-03-07 14:52:38 +01:00
Deeman
c320bef83e refactor: introduce REPO_ROOT in core.py, replace all CWD-relative paths
Defines REPO_ROOT = Path(__file__).parents[3] once in core.py.
Replaces Path(__file__).parent.parent...parent chains and Path("data/...")
CWD-relative references in admin/routes.py, content/__init__.py,
content/routes.py, and worker.py (4x local repo_root variables).

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-07 14:51:34 +01:00
9 changed files with 61 additions and 29 deletions

View File

@@ -26,6 +26,7 @@ RUN mkdir -p /app/data && chown -R appuser:appuser /app
COPY --from=build --chown=appuser:appuser /app .
COPY --from=css-build /app/web/src/padelnomics/static/css/output.css ./web/src/padelnomics/static/css/output.css
COPY --chown=appuser:appuser infra/supervisor/workflows.toml ./infra/supervisor/workflows.toml
COPY --chown=appuser:appuser content/ ./content/
USER appuser
ENV PYTHONUNBUFFERED=1
ENV DATABASE_PATH=/app/data/app.db

View File

@@ -42,7 +42,7 @@ do
# The web app detects the inode change on next query — no restart needed.
DUCKDB_PATH="${DUCKDB_PATH:-/data/padelnomics/lakehouse.duckdb}" \
SERVING_DUCKDB_PATH="${SERVING_DUCKDB_PATH:-/data/padelnomics/analytics.duckdb}" \
uv run python -m padelnomics.export_serving
uv run python src/padelnomics/export_serving.py
) || {
if [ -n "${ALERT_WEBHOOK_URL:-}" ]; then

View File

@@ -16,7 +16,7 @@
-- 10 pts economic context — income PPS normalised to 200 ceiling
-- 10 pts data quality — completeness discount
--
-- Padelnomics Opportunity Score (Marktpotenzial-Score v3, 0–100):
-- Padelnomics Opportunity Score (Marktpotenzial-Score v4, 0–100):
-- "Where should I build a padel court?"
-- Computed for ALL locations — zero-court locations score highest on supply gap.
-- H3 catchment methodology: addressable market and supply gap use a regional
@@ -26,7 +26,9 @@
-- 20 pts economic power — income PPS, normalised to 35,000
-- 30 pts supply gap — inverted catchment venue density; 0 courts = full marks
-- 15 pts catchment gap — distance to nearest padel court
-- 10 pts sports culture — tennis courts within 25km
-- 10 pts market validation — country-level avg market maturity (from market_scored CTE).
-- Replaces sports culture proxy (v3: tennis data was all zeros).
-- ES (~60/100) → ~6 pts, SE (~35/100) → ~3.5 pts, unknown → 5 pts.
--
-- Consumers query directly with WHERE filters:
-- cities API: WHERE country_slug = ? AND city_slug IS NOT NULL
@@ -130,8 +132,8 @@ with_pricing AS (
LEFT JOIN catchment ct
ON b.geoname_id = ct.geoname_id
),
-- Both scores computed from the enriched base
scored AS (
-- Step 1: market score only — needed first so we can aggregate country averages.
market_scored AS (
SELECT *,
-- City-level venue density (from dim_cities exact count, not dim_locations spatial 5km)
CASE WHEN population > 0
@@ -180,8 +182,24 @@ scored AS (
END
, 1)
ELSE 0
END AS market_score,
-- ── Opportunity Score (Marktpotenzial-Score v3, H3 catchment) ──────────
END AS market_score
FROM with_pricing
),
-- Step 2: country-level avg market maturity — used as market validation signal (10 pts).
-- Filter to market_score > 0 (cities with padel courts only) so zero-court locations
-- don't dilute the country signal. ES proven demand → ~60, SE struggling → ~35.
country_market AS (
SELECT
country_code,
ROUND(AVG(market_score), 1) AS country_avg_market_score
FROM market_scored
WHERE market_score > 0
GROUP BY country_code
),
-- Step 3: add opportunity_score using country market validation signal.
scored AS (
SELECT ms.*,
-- ── Opportunity Score (Marktpotenzial-Score v4, H3 catchment) ──────────
ROUND(
-- Addressable market (25 pts): log-scaled catchment population, ceiling 500K
25.0 * LEAST(1.0, LN(GREATEST(catchment_population, 1)) / LN(500000))
@@ -195,10 +213,14 @@ scored AS (
END, 0.0) / 8.0)
-- Catchment gap (15 pts): distance to nearest court
+ 15.0 * COALESCE(LEAST(1.0, nearest_padel_court_km / 30.0), 0.5)
-- Sports culture (10 pts): tennis courts within 25km
+ 10.0 * LEAST(1.0, tennis_courts_within_25km / 10.0)
-- Market validation (10 pts): country-level avg market maturity.
-- Replaces sports culture (v3 tennis data was all zeros = dead code).
-- ES (~60/100): proven demand → ~6 pts. SE (~35/100): struggling → ~3.5 pts.
-- NULL (no courts in country yet): 0.5 neutral → 5 pts (untested, not penalised).
+ 10.0 * COALESCE(cm.country_avg_market_score / 100.0, 0.5)
, 1) AS opportunity_score
FROM with_pricing
FROM market_scored ms
LEFT JOIN country_market cm ON ms.country_code = cm.country_code
)
SELECT
s.geoname_id,

View File

@@ -18,13 +18,14 @@ SELECT
country_slug,
COUNT(*) AS city_count,
SUM(padel_venue_count) AS total_venues,
ROUND(AVG(market_score), 1) AS avg_market_score,
-- Population-weighted: large cities (Madrid, Barcelona) dominate, not hundreds of small towns
ROUND(SUM(market_score * population) / NULLIF(SUM(population), 0), 1) AS avg_market_score,
MAX(market_score) AS top_city_market_score,
-- Top 5 cities by venue count (prominence), then score for internal linking
LIST(city_slug ORDER BY padel_venue_count DESC, market_score DESC NULLS LAST)[1:5] AS top_city_slugs,
LIST(city_name ORDER BY padel_venue_count DESC, market_score DESC NULLS LAST)[1:5] AS top_city_names,
-- Opportunity score aggregates (NULL-safe: cities without geoname_id match excluded from AVG)
ROUND(AVG(opportunity_score), 1) AS avg_opportunity_score,
-- Opportunity score aggregates (population-weighted: saturated megacities dominate, not hundreds of small towns)
ROUND(SUM(opportunity_score * population) / NULLIF(SUM(population), 0), 1) AS avg_opportunity_score,
MAX(opportunity_score) AS top_opportunity_score,
-- Top 5 opportunity cities by population (prominence), then opportunity score
LIST(city_slug ORDER BY population DESC, opportunity_score DESC NULLS LAST)[1:5] AS top_opportunity_slugs,

View File

@@ -27,6 +27,7 @@ from quart import (
from ..auth.routes import role_required
from ..core import (
EMAIL_ADDRESSES,
REPO_ROOT,
config,
count_where,
csrf_protect,
@@ -2182,7 +2183,7 @@ async def scenario_pdf(scenario_id: int):
# Article Management
# =============================================================================
_ARTICLES_DIR = Path(__file__).parent.parent.parent.parent.parent / "data" / "content" / "articles"
_ARTICLES_DIR = REPO_ROOT / "content" / "articles"
_FRONTMATTER_RE = re.compile(r"^---\s*\n(.*?)\n---\s*\n", re.DOTALL)
@@ -2792,7 +2793,7 @@ async def article_new():
(build_dir / f"{article_slug}.html").write_text(body_html)
# Save markdown source
md_dir = Path("data/content/articles")
md_dir = REPO_ROOT / "content" / "articles"
md_dir.mkdir(parents=True, exist_ok=True)
(md_dir / f"{article_slug}.md").write_text(body)
@@ -2863,7 +2864,7 @@ async def article_edit(article_id: int):
build_dir.mkdir(parents=True, exist_ok=True)
(build_dir / f"{article['slug']}.html").write_text(body_html)
md_dir = Path("data/content/articles")
md_dir = REPO_ROOT / "content" / "articles"
md_dir.mkdir(parents=True, exist_ok=True)
(md_dir / f"{article['slug']}.md").write_text(body)
@@ -3054,7 +3055,7 @@ async def _rebuild_article(article_id: int):
)
else:
# Manual article: re-render from markdown file
md_path = Path("data/content/articles") / f"{article['slug']}.md"
md_path = REPO_ROOT / "content" / "articles" / f"{article['slug']}.md"
if not md_path.exists():
return
raw = md_path.read_text()

View File

@@ -17,14 +17,14 @@ import yaml
from jinja2 import ChainableUndefined, Environment
from ..analytics import fetch_analytics
from ..core import slugify, transaction, utcnow_iso
from ..core import REPO_ROOT, slugify, transaction, utcnow_iso
logger = logging.getLogger(__name__)
# ── Constants ────────────────────────────────────────────────────────────────
TEMPLATES_DIR = Path(__file__).parent / "templates"
BUILD_DIR = Path("data/content/_build")
BUILD_DIR = REPO_ROOT / "data" / "content" / "_build"
# Threshold functions per template slug.
# Return True → article should be noindex (insufficient data for quality content).

View File

@@ -9,7 +9,14 @@ from jinja2 import Environment, FileSystemLoader
from markupsafe import Markup
from quart import Blueprint, abort, g, redirect, render_template, request
from ..core import capture_waitlist_email, csrf_protect, feature_gate, fetch_all, fetch_one
from ..core import (
REPO_ROOT,
capture_waitlist_email,
csrf_protect,
feature_gate,
fetch_all,
fetch_one,
)
from ..i18n import get_translations
bp = Blueprint(
@@ -18,7 +25,7 @@ bp = Blueprint(
template_folder=str(Path(__file__).parent / "templates"),
)
BUILD_DIR = Path("data/content/_build")
BUILD_DIR = REPO_ROOT / "data" / "content" / "_build"
RESERVED_PREFIXES = (
"/admin", "/auth", "/planner", "/billing", "/dashboard",

View File

@@ -27,6 +27,9 @@ from quart import g, make_response, render_template, request, session # noqa: E
load_dotenv()
# Repo root: web/src/padelnomics/core.py → 4 levels up
REPO_ROOT = Path(__file__).parents[3]
def _env(key: str, default: str) -> str:
"""Get env var, treating empty string same as unset."""

View File

@@ -11,6 +11,7 @@ from datetime import datetime, timedelta
from .core import (
EMAIL_ADDRESSES,
REPO_ROOT,
config,
execute,
fetch_all,
@@ -710,9 +711,8 @@ async def handle_run_extraction(payload: dict) -> None:
If absent, runs all extractors via the umbrella `extract` entry point.
"""
import subprocess
from pathlib import Path
repo_root = Path(__file__).resolve().parents[4]
repo_root = REPO_ROOT
extractor = payload.get("extractor", "").strip()
if extractor:
cmd_name = f"extract-{extractor.replace('_', '-')}"
@@ -743,9 +743,8 @@ async def handle_run_transform(payload: dict) -> None:
2-hour absolute timeout — same as extraction.
"""
import subprocess
from pathlib import Path
repo_root = Path(__file__).resolve().parents[4]
repo_root = REPO_ROOT
result = await asyncio.to_thread(
subprocess.run,
["uv", "run", "sqlmesh", "-p", "transform/sqlmesh_padelnomics", "plan", "prod", "--auto-apply"],
@@ -769,9 +768,8 @@ async def handle_run_export(payload: dict) -> None:
10-minute absolute timeout.
"""
import subprocess
from pathlib import Path
repo_root = Path(__file__).resolve().parents[4]
repo_root = REPO_ROOT
result = await asyncio.to_thread(
subprocess.run,
["uv", "run", "python", "src/padelnomics/export_serving.py"],
@@ -791,9 +789,8 @@ async def handle_run_export(payload: dict) -> None:
async def handle_run_pipeline(payload: dict) -> None:
"""Run full ELT pipeline: extract → transform → export, stopping on first failure."""
import subprocess
from pathlib import Path
repo_root = Path(__file__).resolve().parents[4]
repo_root = REPO_ROOT
steps = [
(