From 3d99b8c3752a3806122ba097bf9a04946698282f Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 09:53:58 +0100 Subject: [PATCH 01/98] fix(cms): change articles unique constraint to (url_path, language) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit url_path UNIQUE prevented multilingual generation — the second language (e.g. EN after DE) always failed with UNIQUE constraint, leaving tasks in a retry loop and only the first 1-2 articles visible. Migration 0020 recreates the articles table with UNIQUE(url_path, language) and adds a composite index. Adds idx_articles_url_lang for the new lookup pattern used by article_page and generate_articles upsert. Also adds search/country/venue_type filters to the admin Scenarios tab and clarifies what "Published Scenarios" means in the subtitle. Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 41 +++++++++- .../admin/templates/admin/scenarios.html | 47 +++++++++-- .../0020_articles_unique_url_language.py | 81 +++++++++++++++++++ 3 files changed, 160 insertions(+), 9 deletions(-) create mode 100644 web/src/padelnomics/migrations/versions/0020_articles_unique_url_language.py diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 89336cc..1657fe7 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -1395,11 +1395,46 @@ SCENARIO_FORM_FIELDS = [ @bp.route("/scenarios") @role_required("admin") async def scenarios(): - """List published scenarios.""" + """List published scenarios with optional filters.""" + search = request.args.get("search", "").strip() + country_filter = request.args.get("country", "") + venue_filter = request.args.get("venue_type", "") + + wheres = ["1=1"] + params: list = [] + if search: + wheres.append("(title LIKE ? OR location LIKE ? 
OR slug LIKE ?)") + params.extend([f"%{search}%", f"%{search}%", f"%{search}%"]) + if country_filter: + wheres.append("country = ?") + params.append(country_filter) + if venue_filter: + wheres.append("venue_type = ?") + params.append(venue_filter) + + where = " AND ".join(wheres) scenario_list = await fetch_all( - "SELECT * FROM published_scenarios ORDER BY created_at DESC" + f"SELECT * FROM published_scenarios WHERE {where} ORDER BY created_at DESC", + tuple(params), + ) + countries = await fetch_all( + "SELECT DISTINCT country FROM published_scenarios WHERE country != '' ORDER BY country" + ) + venue_types = await fetch_all( + "SELECT DISTINCT venue_type FROM published_scenarios WHERE venue_type != '' ORDER BY venue_type" + ) + total = await fetch_one("SELECT COUNT(*) as cnt FROM published_scenarios") + + return await render_template( + "admin/scenarios.html", + scenarios=scenario_list, + countries=[r["country"] for r in countries], + venue_types=[r["venue_type"] for r in venue_types], + total=total["cnt"] if total else 0, + current_search=search, + current_country=country_filter, + current_venue_type=venue_filter, ) - return await render_template("admin/scenarios.html", scenarios=scenario_list) @bp.route("/scenarios/new", methods=["GET", "POST"]) diff --git a/web/src/padelnomics/admin/templates/admin/scenarios.html b/web/src/padelnomics/admin/templates/admin/scenarios.html index 2ccb481..c32270e 100644 --- a/web/src/padelnomics/admin/templates/admin/scenarios.html +++ b/web/src/padelnomics/admin/templates/admin/scenarios.html @@ -1,20 +1,55 @@ {% extends "admin/base_admin.html" %} {% set admin_page = "scenarios" %} -{% block title %}Published Scenarios - Admin - {{ config.APP_NAME }}{% endblock %} +{% block title %}Scenarios - Admin - {{ config.APP_NAME }}{% endblock %} {% block admin_content %} -
+
-

Published Scenarios

-

{{ scenarios | length }} scenario{{ 's' if scenarios | length != 1 }}

+

Scenarios

+

+ Pre-computed calculator outputs — embedded as cards in articles and PDFs. + Showing {{ scenarios | length }} of {{ total }}. +

+
+
+ + +
+
+ + +
+
+ + +
+
+ + {% if current_search or current_country or current_venue_type %} + Clear + {% endif %} +
+
+
{% if scenarios %} @@ -51,7 +86,7 @@
{% else %} -

No published scenarios yet.

+

No scenarios match the current filters.

{% endif %}
{% endblock %} diff --git a/web/src/padelnomics/migrations/versions/0020_articles_unique_url_language.py b/web/src/padelnomics/migrations/versions/0020_articles_unique_url_language.py new file mode 100644 index 0000000..0cda8d6 --- /dev/null +++ b/web/src/padelnomics/migrations/versions/0020_articles_unique_url_language.py @@ -0,0 +1,81 @@ +"""Change articles unique constraint from url_path alone to (url_path, language). + +Previously url_path was declared UNIQUE, which prevented multiple languages +from sharing the same url_path (e.g. /markets/germany/berlin for both de and en). +""" + + +def up(conn) -> None: + # ── 1. Drop FTS triggers + virtual table ────────────────────────────────── + conn.execute("DROP TRIGGER IF EXISTS articles_ai") + conn.execute("DROP TRIGGER IF EXISTS articles_ad") + conn.execute("DROP TRIGGER IF EXISTS articles_au") + conn.execute("DROP TABLE IF EXISTS articles_fts") + + # ── 2. Recreate articles with UNIQUE(url_path, language) ────────────────── + conn.execute(""" + CREATE TABLE articles_new ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + url_path TEXT NOT NULL, + slug TEXT UNIQUE NOT NULL, + title TEXT NOT NULL, + meta_description TEXT, + country TEXT, + region TEXT, + og_image_url TEXT, + status TEXT NOT NULL DEFAULT 'draft', + published_at TEXT, + template_slug TEXT, + language TEXT NOT NULL DEFAULT 'en', + date_modified TEXT, + seo_head TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT, + UNIQUE(url_path, language) + ) + """) + conn.execute(""" + INSERT INTO articles_new + (id, url_path, slug, title, meta_description, country, region, + og_image_url, status, published_at, template_slug, language, + date_modified, seo_head, created_at, updated_at) + SELECT id, url_path, slug, title, meta_description, country, region, + og_image_url, status, published_at, template_slug, language, + date_modified, seo_head, created_at, updated_at + FROM articles + """) + conn.execute("DROP TABLE articles") + conn.execute("ALTER 
TABLE articles_new RENAME TO articles") + + conn.execute("CREATE INDEX IF NOT EXISTS idx_articles_url_path ON articles(url_path)") + conn.execute("CREATE INDEX IF NOT EXISTS idx_articles_url_lang ON articles(url_path, language)") + conn.execute("CREATE INDEX IF NOT EXISTS idx_articles_slug ON articles(slug)") + conn.execute("CREATE INDEX IF NOT EXISTS idx_articles_status ON articles(status, published_at)") + + # ── 3. Recreate FTS + triggers ───────────────────────────────────────────── + conn.execute(""" + CREATE VIRTUAL TABLE IF NOT EXISTS articles_fts USING fts5( + title, meta_description, country, region, + content='articles', content_rowid='id' + ) + """) + conn.execute(""" + CREATE TRIGGER IF NOT EXISTS articles_ai AFTER INSERT ON articles BEGIN + INSERT INTO articles_fts(rowid, title, meta_description, country, region) + VALUES (new.id, new.title, new.meta_description, new.country, new.region); + END + """) + conn.execute(""" + CREATE TRIGGER IF NOT EXISTS articles_ad AFTER DELETE ON articles BEGIN + INSERT INTO articles_fts(articles_fts, rowid, title, meta_description, country, region) + VALUES ('delete', old.id, old.title, old.meta_description, old.country, old.region); + END + """) + conn.execute(""" + CREATE TRIGGER IF NOT EXISTS articles_au AFTER UPDATE ON articles BEGIN + INSERT INTO articles_fts(articles_fts, rowid, title, meta_description, country, region) + VALUES ('delete', old.id, old.title, old.meta_description, old.country, old.region); + INSERT INTO articles_fts(rowid, title, meta_description, country, region) + VALUES (new.id, new.title, new.meta_description, new.country, new.region); + END + """) From c2bf82917ae28d2d0fddf1b82bf4519594af324a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:10:39 +0100 Subject: [PATCH 02/98] feat(i18n): add Market Score methodology page keys (EN + DE) Subtask 1/6: ~40 mscore_* keys per locale covering page title, meta, section headings, category descriptions, score band interpretations, data 
sources, limitations, CTAs, and 5 FAQ Q&A pairs. DE content written as native German (Du-form), not translated from EN. Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/locales/de.json | 46 ++++++++++++++++++++++++++++- web/src/padelnomics/locales/en.json | 46 ++++++++++++++++++++++++++++- 2 files changed, 90 insertions(+), 2 deletions(-) diff --git a/web/src/padelnomics/locales/de.json b/web/src/padelnomics/locales/de.json index ee73447..f932973 100644 --- a/web/src/padelnomics/locales/de.json +++ b/web/src/padelnomics/locales/de.json @@ -1641,5 +1641,49 @@ "email_business_plan_preheader": "Professioneller Padel-Finanzplan \u2014 jetzt herunterladen", "email_footer_tagline": "Die Planungsplattform f\u00fcr Padel-Unternehmer", - "email_footer_copyright": "\u00a9 {year} {app_name}. Du erh\u00e4ltst diese E-Mail, weil du ein Konto hast oder eine Anfrage gestellt hast." + "email_footer_copyright": "\u00a9 {year} {app_name}. Du erh\u00e4ltst diese E-Mail, weil du ein Konto hast oder eine Anfrage gestellt hast.", + + "footer_market_score": "Market Score", + "mscore_page_title": "Der padelnomics Market Score \u2014 So messen wir Marktpotenzial", + "mscore_meta_desc": "Der padelnomics Market Score bewertet St\u00e4dte von 0 bis 100 nach ihrem Potenzial f\u00fcr Padel-Investitionen. Erfahre, wie Demografie, Wirtschaftskraft, Nachfragesignale und Datenabdeckung einflie\u00dfen.", + "mscore_og_desc": "Ein datengest\u00fctzter Komposit-Score (0\u2013100), der die Attraktivit\u00e4t einer Stadt f\u00fcr Padelanlagen-Investitionen misst. 
Was steckt dahinter \u2014 und was bedeutet er f\u00fcr Deine Planung?", + "mscore_h1": "Der padelnomics Market Score", + "mscore_subtitle": "Ein datengest\u00fctztes Ma\u00df f\u00fcr die Attraktivit\u00e4t einer Stadt als Padel-Investitionsstandort.", + "mscore_what_h2": "Was der Score misst", + "mscore_what_intro": "Der Market Score ist ein Komposit-Index von 0 bis 100, der das Potenzial einer Stadt als Standort f\u00fcr Padelanlagen bewertet. Vier Datenkategorien flie\u00dfen in eine einzige Kennzahl ein \u2014 damit Du schnell einsch\u00e4tzen kannst, welche M\u00e4rkte sich n\u00e4her anzuschauen lohnt.", + "mscore_cat_demo_h3": "Demografie", + "mscore_cat_demo_p": "Bev\u00f6lkerungsgr\u00f6\u00dfe als Indikator f\u00fcr den adressierbaren Markt. Gr\u00f6\u00dfere St\u00e4dte tragen in der Regel mehr Anlagen und h\u00f6here Auslastung.", + "mscore_cat_econ_h3": "Wirtschaftskraft", + "mscore_cat_econ_p": "Regionale Kaufkraft und Einkommensindikatoren. In M\u00e4rkten mit h\u00f6herem verf\u00fcgbarem Einkommen ist die Nachfrage nach Freizeitsportarten wie Padel tendenziell st\u00e4rker.", + "mscore_cat_demand_h3": "Nachfrageindikatoren", + "mscore_cat_demand_p": "Signale aus dem laufenden Betrieb bestehender Anlagen \u2014 Auslastungsraten, Buchungsdaten, Anzahl aktiver Standorte. Wo sich reale Nachfrage bereits messen l\u00e4sst, ist das der st\u00e4rkste Indikator.", + "mscore_cat_data_h3": "Datenqualit\u00e4t", + "mscore_cat_data_p": "Wie umfassend die Datenlage f\u00fcr eine Stadt ist. Ein Score auf Basis unvollst\u00e4ndiger Daten ist weniger belastbar \u2014 wir machen das transparent, damit Du wei\u00dft, wo eigene Recherche sinnvoll ist.", + "mscore_read_h2": "Wie Du den Score liest", + "mscore_band_high_label": "70\u2013100: Starker Markt", + "mscore_band_high_p": "Gro\u00dfe Bev\u00f6lkerung, hohe Wirtschaftskraft und nachgewiesene Nachfrage durch bestehende Anlagen. 
Diese St\u00e4dte haben validierte Padel-M\u00e4rkte mit belastbaren Benchmarks f\u00fcr die Finanzplanung.", + "mscore_band_mid_label": "45\u201369: Solides Mittelfeld", + "mscore_band_mid_p": "Gute Grundlagen mit Wachstumspotenzial. Genug Daten f\u00fcr fundierte Planung, aber weniger Wettbewerb als in den Top-St\u00e4dten. H\u00e4ufig der Sweet Spot f\u00fcr Neueinsteiger.", + "mscore_band_low_label": "Unter 45: Fr\u00fcher Markt", + "mscore_band_low_p": "Weniger validierte Daten oder kleinere Bev\u00f6lkerung. Das hei\u00dft nicht, dass die Stadt unattraktiv ist \u2014 es kann weniger Wettbewerb und bessere Konditionen f\u00fcr Fr\u00fcheinsteiger bedeuten. Rechne mit mehr eigener Recherche vor Ort.", + "mscore_read_note": "Ein niedriger Score bedeutet nicht automatisch eine schlechte Investition. Er kann auf begrenzte Datenlage oder einen noch jungen Markt hinweisen \u2014 weniger Wettbewerb und g\u00fcnstigere Einstiegsbedingungen sind m\u00f6glich.", + "mscore_sources_h2": "Datenquellen", + "mscore_sources_p": "Der Market Score basiert auf Daten europ\u00e4ischer Statistik\u00e4mter (Bev\u00f6lkerung und Wirtschaftsindikatoren), Buchungsplattformen f\u00fcr Padelanlagen (Standortanzahl, Preise, Auslastung) und geografischen Datenbanken (Standortdaten). Die Daten werden monatlich aktualisiert.", + "mscore_limits_h2": "Einschr\u00e4nkungen", + "mscore_limits_p1": "Der Score bildet die verf\u00fcgbare Datenlage ab, nicht die absolute Marktwahrheit. St\u00e4dte, in denen weniger Anlagen auf Buchungsplattformen erfasst sind, k\u00f6nnen bei den Nachfrageindikatoren niedrigere Werte zeigen \u2014 selbst wenn die lokale Nachfrage hoch ist.", + "mscore_limits_p2": "Der Score ber\u00fccksichtigt keine lokalen Faktoren wie Immobilienkosten, Genehmigungszeitr\u00e4ume, Wettbewerbsdynamik oder regulatorische Rahmenbedingungen. 
Diese Aspekte sind entscheidend und erfordern Recherche vor Ort.", + "mscore_limits_p3": "Nutze den Market Score als Ausgangspunkt f\u00fcr die Priorisierung, nicht als finale Investitionsentscheidung. Im Finanzplaner kannst Du Dein konkretes Szenario durchrechnen.", + "mscore_cta_markets": "Stadtbewertungen ansehen", + "mscore_cta_planner": "Dein Investment modellieren", + "mscore_faq_h2": "H\u00e4ufig gestellte Fragen", + "mscore_faq_q1": "Was ist der padelnomics Market Score?", + "mscore_faq_a1": "Ein Komposit-Index von 0 bis 100, der die Attraktivit\u00e4t einer Stadt f\u00fcr Padelanlagen-Investitionen misst. Er kombiniert Demografie, Wirtschaftskraft, Nachfrageindikatoren und Datenqualit\u00e4t in einer vergleichbaren Kennzahl.", + "mscore_faq_q2": "Wie oft wird der Score aktualisiert?", + "mscore_faq_a2": "Monatlich. Neue Daten aus Statistik\u00e4mtern, Buchungsplattformen und Standortdatenbanken werden regelm\u00e4\u00dfig extrahiert und verarbeitet. Der Score spiegelt immer die aktuellsten verf\u00fcgbaren Daten wider.", + "mscore_faq_q3": "Warum hat meine Stadt einen niedrigen Score?", + "mscore_faq_a3": "Meist wegen begrenzter Datenabdeckung oder geringerer Bev\u00f6lkerung. Ein niedriger Score bedeutet nicht, dass die Stadt unattraktiv ist \u2014 sondern dass uns weniger Daten zur Quantifizierung der Chance vorliegen. Eigene Recherche kann die L\u00fccken schlie\u00dfen.", + "mscore_faq_q4": "Kann ich Scores l\u00e4nder\u00fcbergreifend vergleichen?", + "mscore_faq_a4": "Ja. Die Methodik ist f\u00fcr alle M\u00e4rkte einheitlich, sodass ein Score von 72 in Deutschland direkt vergleichbar ist mit einem 72 in Spanien oder Gro\u00dfbritannien.", + "mscore_faq_q5": "Garantiert ein hoher Score eine gute Investition?", + "mscore_faq_a5": "Nein. Der Score misst die Marktattraktivit\u00e4t auf Makroebene. Deine konkrete Investition h\u00e4ngt von Anlagentyp, Baukosten, Mietkonditionen und Dutzenden weiterer Faktoren ab. 
Im Finanzplaner kannst Du Dein Szenario mit echten Zahlen durchrechnen." } diff --git a/web/src/padelnomics/locales/en.json b/web/src/padelnomics/locales/en.json index 70927f4..9b5a0ab 100644 --- a/web/src/padelnomics/locales/en.json +++ b/web/src/padelnomics/locales/en.json @@ -1641,5 +1641,49 @@ "email_business_plan_preheader": "Professional padel facility financial plan \u2014 download now", "email_footer_tagline": "The padel business planning platform", - "email_footer_copyright": "\u00a9 {year} {app_name}. You received this email because you have an account or submitted a request." + "email_footer_copyright": "\u00a9 {year} {app_name}. You received this email because you have an account or submitted a request.", + + "footer_market_score": "Market Score", + "mscore_page_title": "The padelnomics Market Score \u2014 How We Measure Market Potential", + "mscore_meta_desc": "The padelnomics Market Score rates cities from 0 to 100 on their potential for padel investment. Learn how demographics, economic strength, demand signals, and data coverage feed into the score.", + "mscore_og_desc": "A data-driven composite score (0\u2013100) that measures how attractive a city is for padel court investment. See what goes into it and what it means for your planning.", + "mscore_h1": "The padelnomics Market Score", + "mscore_subtitle": "A data-driven measure of how attractive a city is for padel investment.", + "mscore_what_h2": "What It Measures", + "mscore_what_intro": "The Market Score is a composite index from 0 to 100 that evaluates a city\u2019s potential as a location for padel court investment. It combines four categories of data into a single number designed to help you prioritize markets worth investigating further.", + "mscore_cat_demo_h3": "Demographics", + "mscore_cat_demo_p": "Population size as a proxy for the addressable market. 
Larger cities generally support more venues and higher utilization.", + "mscore_cat_econ_h3": "Economic Strength", + "mscore_cat_econ_p": "Regional purchasing power and income indicators. Markets where people have higher disposable income tend to sustain stronger demand for leisure sports like padel.", + "mscore_cat_demand_h3": "Demand Evidence", + "mscore_cat_demand_p": "Signals from existing venue activity \u2014 occupancy rates, booking data, and the number of operating venues. Where real demand is already measurable, it\u2019s the strongest indicator.", + "mscore_cat_data_h3": "Data Completeness", + "mscore_cat_data_p": "How much data we have for that city. A score influenced by incomplete data is less reliable \u2014 we surface this explicitly so you know when to dig deeper on your own.", + "mscore_read_h2": "How To Read the Score", + "mscore_band_high_label": "70\u2013100: Strong market", + "mscore_band_high_p": "Large population, economic power, and proven demand from existing venues. These cities have validated padel markets with reliable benchmarks for financial planning.", + "mscore_band_mid_label": "45\u201369: Solid mid-tier", + "mscore_band_mid_p": "Good fundamentals with room for growth. Enough data to plan with confidence, but less competition than top-tier cities. Often the sweet spot for new entrants.", + "mscore_band_low_label": "Below 45: Early-stage market", + "mscore_band_low_p": "Less validated data or smaller populations. This does not mean a city is a bad investment \u2014 it may mean less competition and first-mover advantage. Expect to do more local research.", + "mscore_read_note": "A lower score does not mean a city is a bad investment. 
It may indicate less available data or a market still developing \u2014 which can mean less competition and better terms for early entrants.", + "mscore_sources_h2": "Data Sources", + "mscore_sources_p": "The Market Score draws on data from European statistical offices (population and economic indicators), court booking platforms (venue counts, pricing, occupancy), and geographic databases (venue locations). Data is refreshed monthly as new extractions run.", + "mscore_limits_h2": "Limitations", + "mscore_limits_p1": "The score reflects available data, not absolute market truth. Cities where fewer venues are tracked on booking platforms may score lower on demand evidence \u2014 even if local demand is strong.", + "mscore_limits_p2": "The score does not account for local factors like real estate costs, permitting timelines, competitive dynamics, or regulatory environment. These matter enormously and require on-the-ground research.", + "mscore_limits_p3": "Use the Market Score as a starting point for prioritization, not a final investment decision. The financial planner is where you model your specific scenario.", + "mscore_cta_markets": "Browse city scores", + "mscore_cta_planner": "Model your investment", + "mscore_faq_h2": "Frequently Asked Questions", + "mscore_faq_q1": "What is the padelnomics Market Score?", + "mscore_faq_a1": "A composite index from 0 to 100 that measures how attractive a city is for padel court investment. It combines demographics, economic strength, demand evidence, and data completeness into a single comparable number.", + "mscore_faq_q2": "How often is the score updated?", + "mscore_faq_a2": "Monthly. New data from statistical offices, booking platforms, and venue databases is extracted and processed on a regular cycle. Scores reflect the most recent available data.", + "mscore_faq_q3": "Why is my city\u2019s score low?", + "mscore_faq_a3": "Usually because of limited data coverage or smaller population. 
A low score doesn\u2019t mean the city is unattractive \u2014 it means we have less data to quantify the opportunity. Local research can fill the gaps.", + "mscore_faq_q4": "Can I compare scores across countries?", + "mscore_faq_a4": "Yes. The methodology is consistent across all markets we track, so a score of 72 in Germany is directly comparable to a 72 in Spain or the UK.", + "mscore_faq_q5": "Does a high score guarantee a good investment?", + "mscore_faq_a5": "No. The score measures market attractiveness at a macro level. Your specific investment depends on venue type, build costs, lease terms, and dozens of other factors. Use the financial planner to model your scenario with real numbers." } From 33aa705ef956a6c3f0cd1f228eecdabfbf767963 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:11:07 +0100 Subject: [PATCH 03/98] feat(routes): add /market-score route + legacy redirect MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Subtask 2/6: Route handler in public blueprint, 301 redirect from /market-score → /en/market-score for bookmarks without lang prefix. 
Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/app.py | 4 ++++ web/src/padelnomics/public/routes.py | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/web/src/padelnomics/app.py b/web/src/padelnomics/app.py index 75c9f78..cbf1c1a 100644 --- a/web/src/padelnomics/app.py +++ b/web/src/padelnomics/app.py @@ -292,6 +292,10 @@ def create_app() -> Quart: async def legacy_suppliers(): return redirect("/en/suppliers", 301) + @app.route("/market-score") + async def legacy_market_score(): + return redirect("/en/market-score", 301) + # ------------------------------------------------------------------------- # Blueprint registration # ------------------------------------------------------------------------- diff --git a/web/src/padelnomics/public/routes.py b/web/src/padelnomics/public/routes.py index f84bb36..94b9eb4 100644 --- a/web/src/padelnomics/public/routes.py +++ b/web/src/padelnomics/public/routes.py @@ -59,6 +59,11 @@ async def about(): return await render_template("about.html") +@bp.route("/market-score") +async def market_score(): + return await render_template("market_score.html") + + @bp.route("/imprint") async def imprint(): lang = g.get("lang", "en") From 39fe025e5b3e8623cc793b3aca3233598e025b72 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:12:03 +0100 Subject: [PATCH 04/98] feat(template): create Market Score methodology page Subtask 3/6: Standalone informational page extending base.html. Sections: hero, what it measures (4-card grid), score bands, data sources, limitations, CTAs, FAQ with details/summary. JSON-LD: WebPage + BreadcrumbList + FAQPage. 
Co-Authored-By: Claude Opus 4.6 --- .../public/templates/market_score.html | 175 ++++++++++++++++++ 1 file changed, 175 insertions(+) create mode 100644 web/src/padelnomics/public/templates/market_score.html diff --git a/web/src/padelnomics/public/templates/market_score.html b/web/src/padelnomics/public/templates/market_score.html new file mode 100644 index 0000000..df98930 --- /dev/null +++ b/web/src/padelnomics/public/templates/market_score.html @@ -0,0 +1,175 @@ +{% extends "base.html" %} + +{% block title %}{{ t.mscore_page_title }}{% endblock %} + +{% block head %} + + + + +{% endblock %} + +{% block content %} +
+
+ + +
+

+ padelnomics + Market Score +

+

{{ t.mscore_subtitle }}

+
+ + +
+

{{ t.mscore_what_h2 }}

+

{{ t.mscore_what_intro }}

+ +
+
+
👥
+

{{ t.mscore_cat_demo_h3 }}

+

{{ t.mscore_cat_demo_p }}

+
+
+
💶
+

{{ t.mscore_cat_econ_h3 }}

+

{{ t.mscore_cat_econ_p }}

+
+
+
📈
+

{{ t.mscore_cat_demand_h3 }}

+

{{ t.mscore_cat_demand_p }}

+
+
+
🔍
+

{{ t.mscore_cat_data_h3 }}

+

{{ t.mscore_cat_data_p }}

+
+
+
+ + +
+

{{ t.mscore_read_h2 }}

+
+
+
+ + {{ t.mscore_band_high_label }} +
+

{{ t.mscore_band_high_p }}

+
+
+
+ + {{ t.mscore_band_mid_label }} +
+

{{ t.mscore_band_mid_p }}

+
+
+
+ + {{ t.mscore_band_low_label }} +
+

{{ t.mscore_band_low_p }}

+
+
+

{{ t.mscore_read_note }}

+
+ + +
+

{{ t.mscore_sources_h2 }}

+

{{ t.mscore_sources_p }}

+
+ + +
+

{{ t.mscore_limits_h2 }}

+
+

{{ t.mscore_limits_p1 }}

+

{{ t.mscore_limits_p2 }}

+

{{ t.mscore_limits_p3 }}

+
+
+ + + + + +
+

{{ t.mscore_faq_h2 }}

+
+ {% for i in range(1, 6) %} +
+ {{ t['mscore_faq_q' ~ i] }} +

{{ t['mscore_faq_a' ~ i] }}

+
+ {% endfor %} +
+
+ +
+
+{% endblock %} From 2a038e48be8360161002fec628e058766f8f7f5c Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:12:29 +0100 Subject: [PATCH 05/98] feat(seo): add Market Score to sitemap and footer Subtask 4/6: Add /market-score to STATIC_PATHS for sitemap generation (both lang variants + hreflang). Add footer link in Product column between Markets and For Suppliers. Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/sitemap.py | 1 + web/src/padelnomics/templates/base.html | 1 + 2 files changed, 2 insertions(+) diff --git a/web/src/padelnomics/sitemap.py b/web/src/padelnomics/sitemap.py index 8a279cd..02e5010 100644 --- a/web/src/padelnomics/sitemap.py +++ b/web/src/padelnomics/sitemap.py @@ -23,6 +23,7 @@ STATIC_PATHS = [ "/imprint", "/suppliers", "/markets", + "/market-score", "/planner/", "/directory/", ] diff --git a/web/src/padelnomics/templates/base.html b/web/src/padelnomics/templates/base.html index a17ce5a..f2a066e 100644 --- a/web/src/padelnomics/templates/base.html +++ b/web/src/padelnomics/templates/base.html @@ -171,6 +171,7 @@
  • {{ t.nav_planner }}
  • {{ t.nav_directory }}
  • {{ t.nav_markets }}
  • +
  • {{ t.footer_market_score }}
  • {{ t.nav_suppliers }}
  • From f76d2889e508be817437a2d458c495408f87a80d Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:12:50 +0100 Subject: [PATCH 06/98] fix(core): add utcnow()/utcnow_iso() helpers, migrate core.py usages Replace deprecated datetime.utcnow() with datetime.now(UTC). - utcnow() -> datetime: for in-memory datetime math - utcnow_iso() -> str: strftime format preserving existing SQLite TEXT format Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/core.py | 38 ++++++++++++++++++++++++++++++------- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/web/src/padelnomics/core.py b/web/src/padelnomics/core.py index 5260ec0..c762d5f 100644 --- a/web/src/padelnomics/core.py +++ b/web/src/padelnomics/core.py @@ -10,7 +10,7 @@ import re import secrets import unicodedata from contextvars import ContextVar -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from functools import wraps from pathlib import Path @@ -88,6 +88,26 @@ class Config: config = Config() + +# ============================================================================= +# Datetime helpers +# ============================================================================= + + +def utcnow() -> datetime: + """Timezone-aware UTC now (replaces deprecated datetime.utcnow()).""" + return datetime.now(UTC) + + +def utcnow_iso() -> str: + """UTC now as naive ISO string for SQLite TEXT columns. + + Produces YYYY-MM-DDTHH:MM:SS (no +00:00 suffix) to match the existing + format stored in the DB so lexicographic SQL comparisons keep working. 
+ """ + return datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S") + + # ============================================================================= # Database # ============================================================================= @@ -528,17 +548,18 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t """ limit = limit or config.RATE_LIMIT_REQUESTS window = window or config.RATE_LIMIT_WINDOW - now = datetime.utcnow() + now = utcnow() window_start = now - timedelta(seconds=window) # Clean old entries and count recent await execute( - "DELETE FROM rate_limits WHERE key = ? AND timestamp < ?", (key, window_start.isoformat()) + "DELETE FROM rate_limits WHERE key = ? AND timestamp < ?", + (key, window_start.strftime("%Y-%m-%dT%H:%M:%S")), ) result = await fetch_one( "SELECT COUNT(*) as count FROM rate_limits WHERE key = ? AND timestamp > ?", - (key, window_start.isoformat()), + (key, window_start.strftime("%Y-%m-%dT%H:%M:%S")), ) count = result["count"] if result else 0 @@ -552,7 +573,10 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t return False, info # Record this request - await execute("INSERT INTO rate_limits (key, timestamp) VALUES (?, ?)", (key, now.isoformat())) + await execute( + "INSERT INTO rate_limits (key, timestamp) VALUES (?, ?)", + (key, now.strftime("%Y-%m-%dT%H:%M:%S")), + ) return True, info @@ -628,7 +652,7 @@ async def soft_delete(table: str, id: int) -> bool: """Mark record as deleted.""" result = await execute( f"UPDATE {table} SET deleted_at = ? WHERE id = ? 
AND deleted_at IS NULL", - (datetime.utcnow().isoformat(), id), + (utcnow_iso(), id), ) return result > 0 @@ -647,7 +671,7 @@ async def hard_delete(table: str, id: int) -> bool: async def purge_deleted(table: str, days: int = 30) -> int: """Purge records deleted more than X days ago.""" - cutoff = (datetime.utcnow() - timedelta(days=days)).isoformat() + cutoff = (utcnow() - timedelta(days=days)).strftime("%Y-%m-%dT%H:%M:%S") return await execute( f"DELETE FROM {table} WHERE deleted_at IS NOT NULL AND deleted_at < ?", (cutoff,) ) From 815edf3cefaaea0f2fd152b30470d63721380c69 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:13:40 +0100 Subject: [PATCH 07/98] feat(seo): link first Market Score mention to methodology page Subtask 5/6: Wrap first "padelnomics Market Score" per language section in anchor to /{language}/market-score. Updated templates: - city-cost-de.md.jinja (DE intro + EN intro) - city-pricing.md.jinja (DE comparison + EN comparison) - country-overview.md.jinja (DE intro + EN intro) Creates hub-and-spoke internal linking from hundreds of city articles to the methodology page. 
Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/content/templates/city-cost-de.md.jinja | 4 ++-- web/src/padelnomics/content/templates/city-pricing.md.jinja | 4 ++-- .../padelnomics/content/templates/country-overview.md.jinja | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/web/src/padelnomics/content/templates/city-cost-de.md.jinja b/web/src/padelnomics/content/templates/city-cost-de.md.jinja index 4efb49b..09ca287 100644 --- a/web/src/padelnomics/content/templates/city-cost-de.md.jinja +++ b/web/src/padelnomics/content/templates/city-cost-de.md.jinja @@ -33,7 +33,7 @@ priority_column: population -{{ city_name }} erreicht einen **padelnomics Market Score von {{ market_score | round(1) }}/100** — damit liegt die Stadt{% if market_score >= 70 %} unter den stärksten Padel-Märkten in {{ country_name_en }}{% elif market_score >= 45 %} im soliden Mittelfeld der Padel-Märkte in {{ country_name_en }}{% else %} in einem frühen Padel-Markt mit Wachstumspotenzial{% endif %}. Aktuell gibt es **{{ padel_venue_count }} Padelanlagen** für {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} Einwohner — das entspricht {{ venues_per_100k | round(1) }} Anlagen pro 100.000 Einwohner. +{{ city_name }} erreicht einen **padelnomics Market Score von {{ market_score | round(1) }}/100** — damit liegt die Stadt{% if market_score >= 70 %} unter den stärksten Padel-Märkten in {{ country_name_en }}{% elif market_score >= 45 %} im soliden Mittelfeld der Padel-Märkte in {{ country_name_en }}{% else %} in einem frühen Padel-Markt mit Wachstumspotenzial{% endif %}. Aktuell gibt es **{{ padel_venue_count }} Padelanlagen** für {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} Einwohner — das entspricht {{ venues_per_100k | round(1) }} Anlagen pro 100.000 Einwohner. 
Die entscheidende Frage für Investoren: Was bringt ein Padel-Investment bei den aktuellen Preisen, Auslastungsraten und Baukosten tatsächlich? Das Finanzmodell unten rechnet mit echten Marktdaten aus {{ city_name }}. @@ -140,7 +140,7 @@ Der padelnomics Market Score of {{ market_score | round(1) }}/100** — placing it{% if market_score >= 70 %} among the strongest padel markets in {{ country_name_en }}{% elif market_score >= 45 %} in the mid-tier of {{ country_name_en }}'s padel markets{% else %} in an early-stage padel market with room for growth{% endif %}. The city currently has **{{ padel_venue_count }} padel venues** serving a population of {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} residents — a density of {{ venues_per_100k | round(1) }} venues per 100,000 people. +{{ city_name }} has a **padelnomics Market Score of {{ market_score | round(1) }}/100** — placing it{% if market_score >= 70 %} among the strongest padel markets in {{ country_name_en }}{% elif market_score >= 45 %} in the mid-tier of {{ country_name_en }}'s padel markets{% else %} in an early-stage padel market with room for growth{% endif %}. The city currently has **{{ padel_venue_count }} padel venues** serving a population of {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} residents — a density of {{ venues_per_100k | round(1) }} venues per 100,000 people. The question investors actually need answered is: given current pricing, occupancy, and build costs, what does the return look like? The financial model below uses real {{ city_name }} market data to give you that answer. 
diff --git a/web/src/padelnomics/content/templates/city-pricing.md.jinja b/web/src/padelnomics/content/templates/city-pricing.md.jinja index 51711cf..13b8b03 100644 --- a/web/src/padelnomics/content/templates/city-pricing.md.jinja +++ b/web/src/padelnomics/content/templates/city-pricing.md.jinja @@ -55,7 +55,7 @@ Die Preisspanne von {{ hourly_rate_p25 | round(0) | int }} bis {{ hourly_rate_p7 ## Wie steht {{ city_name }} im Vergleich da? -{{ city_name }} hat {{ padel_venue_count }} Padelanlagen für {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} Einwohner ({{ venues_per_100k | round(1) }} Anlagen pro 100K Einwohner). {% if market_score >= 65 %}Mit einem Market Score von {{ market_score | round(1) }}/100 gehört {{ city_name }} zu den stärksten Padel-Märkten in {{ country_name_en }} — höhere Auslastung und Preise sind typisch für dichte, etablierte Märkte. {% elif market_score >= 40 %}Ein Market Score von {{ market_score | round(1) }}/100 steht für einen Markt im Aufbau: genug Angebot für marktgerechte Preise, aber Raum für neue Anlagen. {% else %}Ein Market Score von {{ market_score | round(1) }}/100 deutet auf einen Markt in der Frühphase hin, in dem sich Preise und Auslastung mit dem Wachstum des Sports noch deutlich entwickeln können. {% endif %} +{{ city_name }} hat {{ padel_venue_count }} Padelanlagen für {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} Einwohner ({{ venues_per_100k | round(1) }} Anlagen pro 100K Einwohner). {% if market_score >= 65 %}Mit einem padelnomics Market Score von {{ market_score | round(1) }}/100 gehört {{ city_name }} zu den stärksten Padel-Märkten in {{ country_name_en }} — höhere Auslastung und Preise sind typisch für dichte, etablierte Märkte. 
{% elif market_score >= 40 %}Ein Market Score von {{ market_score | round(1) }}/100 steht für einen Markt im Aufbau: genug Angebot für marktgerechte Preise, aber Raum für neue Anlagen. {% else %}Ein Market Score von {{ market_score | round(1) }}/100 deutet auf einen Markt in der Frühphase hin, in dem sich Preise und Auslastung mit dem Wachstum des Sports noch deutlich entwickeln können. {% endif %} Die Anlagendichte von {{ venues_per_100k | round(1) }} pro 100K Einwohner beeinflusst die Preisgestaltung direkt: {% if venues_per_100k >= 3.0 %}Höhere Dichte bedeutet mehr Wettbewerb, was die Preise eher stabilisiert oder senkt.{% elif venues_per_100k >= 1.0 %}Moderate Dichte ermöglicht marktgerechte Preise bei gleichzeitigem Wachstumsspielraum.{% else %}Niedrige Dichte gibt Betreibern mehr Preissetzungsmacht — vorausgesetzt, die Nachfrage ist da.{% endif %} @@ -153,7 +153,7 @@ The P25–P75 price range of {{ hourly_rate_p25 | round(0) | int }} to {{ hourly ## How Does {{ city_name }} Compare? -{{ city_name }} has {{ padel_venue_count }} padel venues for a population of {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} ({{ venues_per_100k | round(1) }} venues per 100K residents). {% if market_score >= 65 %}With a market score of {{ market_score | round(1) }}/100, {{ city_name }} is one of the stronger padel markets in {{ country_name_en }} — higher occupancy and pricing typically follow dense, competitive markets. {% elif market_score >= 40 %}A market score of {{ market_score | round(1) }}/100 reflects a mid-tier market: enough supply to have competitive pricing, but room for new venues to grow. {% else %}A market score of {{ market_score | round(1) }}/100 indicates an early-stage market where pricing and occupancy benchmarks may shift as the sport grows. 
{% endif %} +{{ city_name }} has {{ padel_venue_count }} padel venues for a population of {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} ({{ venues_per_100k | round(1) }} venues per 100K residents). {% if market_score >= 65 %}With a padelnomics Market Score of {{ market_score | round(1) }}/100, {{ city_name }} is one of the stronger padel markets in {{ country_name_en }} — higher occupancy and pricing typically follow dense, competitive markets. {% elif market_score >= 40 %}A market score of {{ market_score | round(1) }}/100 reflects a mid-tier market: enough supply to have competitive pricing, but room for new venues to grow. {% else %}A market score of {{ market_score | round(1) }}/100 indicates an early-stage market where pricing and occupancy benchmarks may shift as the sport grows. {% endif %} Venue density of {{ venues_per_100k | round(1) }} per 100K residents directly influences pricing: {% if venues_per_100k >= 3.0 %}higher density means more competition, which tends to stabilize or compress prices.{% elif venues_per_100k >= 1.0 %}moderate density supports market-rate pricing with room for growth.{% else %}low density gives operators more pricing power — provided demand exists.{% endif %} diff --git a/web/src/padelnomics/content/templates/country-overview.md.jinja b/web/src/padelnomics/content/templates/country-overview.md.jinja index 86b6bcf..d907a39 100644 --- a/web/src/padelnomics/content/templates/country-overview.md.jinja +++ b/web/src/padelnomics/content/templates/country-overview.md.jinja @@ -34,7 +34,7 @@ priority_column: total_venues -In {{ country_name_en }} erfassen wir aktuell **{{ total_venues }} Padelanlagen** in **{{ city_count }} Städten**. 
Der durchschnittliche padelnomics Market Score liegt bei **{{ avg_market_score }}/100**{% if avg_market_score >= 65 %} — ein starker Markt mit breiter Infrastruktur und belastbaren Preisdaten{% elif avg_market_score >= 40 %} — ein wachsender Markt mit guter Abdeckung{% else %} — ein aufstrebender Markt, in dem Früheinsteiger noch Premiumstandorte sichern können{% endif %}. +In {{ country_name_en }} erfassen wir aktuell **{{ total_venues }} Padelanlagen** in **{{ city_count }} Städten**. Der durchschnittliche padelnomics Market Score liegt bei **{{ avg_market_score }}/100**{% if avg_market_score >= 65 %} — ein starker Markt mit breiter Infrastruktur und belastbaren Preisdaten{% elif avg_market_score >= 40 %} — ein wachsender Markt mit guter Abdeckung{% else %} — ein aufstrebender Markt, in dem Früheinsteiger noch Premiumstandorte sichern können{% endif %}. ## Marktlandschaft @@ -124,7 +124,7 @@ Städte mit höherem padelnomics Market Score across tracked cities is **{{ avg_market_score }}/100**{% if avg_market_score >= 65 %} — a strong market with widespread venue penetration and solid pricing data{% elif avg_market_score >= 40 %} — a growing market with healthy city coverage{% else %} — an emerging market where early entrants can still capture prime locations{% endif %}. +{{ country_name_en }} has **{{ total_venues }} padel venues** tracked across **{{ city_count }} cities**. The average padelnomics Market Score across tracked cities is **{{ avg_market_score }}/100**{% if avg_market_score >= 65 %} — a strong market with widespread venue penetration and solid pricing data{% elif avg_market_score >= 40 %} — a growing market with healthy city coverage{% else %} — an emerging market where early entrants can still capture prime locations{% endif %}. 
## Market Landscape From 4033e13e0595009f1b4858a74d8067ad73e90db7 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:14:21 +0100 Subject: [PATCH 08/98] feat(admin): live-poll Articles and Scenarios tabs during generation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add _is_generating() helper — queries tasks table for pending generate_articles tasks - Pass is_generating to article_results partial (both full page and HTMX route) - article_results.html: render invisible hx-trigger="every 3s" div when generating; polling stops naturally once generation completes and div is absent - Add /admin/scenarios/results HTMX partial route with same is_generating logic - Extract scenario table into admin/partials/scenario_results.html partial - scenarios.html: wrap table in #scenario-results div, include partial Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 50 ++++++++++++++++++- .../admin/partials/article_results.html | 7 +++ .../admin/partials/scenario_results.html | 44 ++++++++++++++++ .../admin/templates/admin/scenarios.html | 40 ++------------- 4 files changed, 103 insertions(+), 38 deletions(-) create mode 100644 web/src/padelnomics/admin/templates/admin/partials/scenario_results.html diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 1657fe7..c20a49f 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -1434,6 +1434,42 @@ async def scenarios(): current_search=search, current_country=country_filter, current_venue_type=venue_filter, + is_generating=await _is_generating(), + ) + + +@bp.route("/scenarios/results") +@role_required("admin") +async def scenario_results(): + """HTMX partial for scenario results (used by live polling).""" + search = request.args.get("search", "").strip() + country_filter = request.args.get("country", "") + venue_filter = request.args.get("venue_type", "") + + wheres = ["1=1"] + 
params: list = [] + if search: + wheres.append("(title LIKE ? OR location LIKE ? OR slug LIKE ?)") + params.extend([f"%{search}%", f"%{search}%", f"%{search}%"]) + if country_filter: + wheres.append("country = ?") + params.append(country_filter) + if venue_filter: + wheres.append("venue_type = ?") + params.append(venue_filter) + + where = " AND ".join(wheres) + scenario_list = await fetch_all( + f"SELECT * FROM published_scenarios WHERE {where} ORDER BY created_at DESC LIMIT 500", + tuple(params), + ) + total = await fetch_one("SELECT COUNT(*) as cnt FROM published_scenarios") + + return await render_template( + "admin/partials/scenario_results.html", + scenarios=scenario_list, + total=total["cnt"] if total else 0, + is_generating=await _is_generating(), ) @@ -1683,6 +1719,14 @@ async def _get_article_stats() -> dict: return dict(row) if row else {"total": 0, "live": 0, "scheduled": 0, "draft": 0} +async def _is_generating() -> bool: + """Return True if a generate_articles task is currently pending.""" + row = await fetch_one( + "SELECT COUNT(*) AS cnt FROM tasks WHERE task_type = 'generate_articles' AND status = 'pending'" + ) + return bool(row and row["cnt"] > 0) + + @bp.route("/articles") @role_required("admin") async def articles(): @@ -1712,6 +1756,7 @@ async def articles(): current_template=template_filter, current_language=language_filter, page=page, + is_generating=await _is_generating(), ) @@ -1730,7 +1775,10 @@ async def article_results(): language=language_filter or None, search=search or None, page=page, ) return await render_template( - "admin/partials/article_results.html", articles=article_list, page=page, + "admin/partials/article_results.html", + articles=article_list, + page=page, + is_generating=await _is_generating(), ) diff --git a/web/src/padelnomics/admin/templates/admin/partials/article_results.html b/web/src/padelnomics/admin/templates/admin/partials/article_results.html index 2d95b07..d0ef5d8 100644 --- 
a/web/src/padelnomics/admin/templates/admin/partials/article_results.html +++ b/web/src/padelnomics/admin/templates/admin/partials/article_results.html @@ -1,3 +1,10 @@ +{% if is_generating %} + +{% endif %} {% if articles %}
    diff --git a/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html b/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html new file mode 100644 index 0000000..93dd343 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html @@ -0,0 +1,44 @@ +{% if is_generating %} + +{% endif %} +{% if scenarios %} +
    + + + + + + + + + + + + {% for s in scenarios %} + + + + + + + + + {% endfor %} + +
    TitleSlugLocationConfigCreated
    {{ s.title }}{{ s.slug }}{{ s.location }}, {{ s.country }}{{ s.venue_type | capitalize }} · {{ s.court_config }}{{ s.created_at[:10] }} + Preview + PDF EN + PDF DE + Edit +
    + + +
    +
    +{% else %} +

    No scenarios match the current filters.

    +{% endif %} diff --git a/web/src/padelnomics/admin/templates/admin/scenarios.html b/web/src/padelnomics/admin/templates/admin/scenarios.html index c32270e..6d993fb 100644 --- a/web/src/padelnomics/admin/templates/admin/scenarios.html +++ b/web/src/padelnomics/admin/templates/admin/scenarios.html @@ -51,42 +51,8 @@
    - {% if scenarios %} - - - - - - - - - - - - - {% for s in scenarios %} - - - - - - - - - {% endfor %} - -
    TitleSlugLocationConfigCreated
    {{ s.title }}{{ s.slug }}{{ s.location }}, {{ s.country }}{{ s.venue_type | capitalize }} · {{ s.court_config }}{{ s.created_at[:10] }} - Preview - PDF EN - PDF DE - Edit -
    - - -
    -
    - {% else %} -

    No scenarios match the current filters.

    - {% endif %} +
    + {% include "admin/partials/scenario_results.html" %} +
    {% endblock %} From b7485902e65f1e6d80164ddc708884b047a390b0 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:15:36 +0100 Subject: [PATCH 09/98] test: add Market Score methodology page tests Subtask 6/6: 8 tests covering EN/DE 200 status, legacy 301 redirect, JSON-LD schema types, FAQ sections, OG tags, footer link. Add footer_market_score to i18n parity allowlist (branded term). Co-Authored-By: Claude Opus 4.6 --- web/tests/test_i18n_parity.py | 2 ++ web/tests/test_market_score.py | 59 ++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) create mode 100644 web/tests/test_market_score.py diff --git a/web/tests/test_i18n_parity.py b/web/tests/test_i18n_parity.py index 2c6b466..62a28ab 100644 --- a/web/tests/test_i18n_parity.py +++ b/web/tests/test_i18n_parity.py @@ -57,6 +57,8 @@ _IDENTICAL_VALUE_ALLOWLIST = { # Business plan — Indoor/Outdoor same in DE, financial abbreviations "bp_indoor", "bp_outdoor", "bp_lbl_ebitda", "bp_lbl_irr", "bp_lbl_moic", "bp_lbl_opex", + # Market Score — branded term kept in English in DE + "footer_market_score", } diff --git a/web/tests/test_market_score.py b/web/tests/test_market_score.py new file mode 100644 index 0000000..2adf335 --- /dev/null +++ b/web/tests/test_market_score.py @@ -0,0 +1,59 @@ +"""Tests for the Market Score methodology page.""" + + +async def test_en_returns_200(client): + resp = await client.get("/en/market-score") + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "Market Score" in text + assert "padelnomics" in text + + +async def test_de_returns_200(client): + resp = await client.get("/de/market-score") + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "Market Score" in text + assert "padelnomics" in text + + +async def test_legacy_redirect(client): + resp = await client.get("/market-score") + assert resp.status_code == 301 + assert resp.headers["Location"].endswith("/en/market-score") + + +async def 
test_contains_jsonld(client): + resp = await client.get("/en/market-score") + text = await resp.get_data(as_text=True) + assert '"@type": "WebPage"' in text + assert '"@type": "FAQPage"' in text + assert '"@type": "BreadcrumbList"' in text + + +async def test_contains_faq_section(client): + resp = await client.get("/en/market-score") + text = await resp.get_data(as_text=True) + assert "Frequently Asked Questions" in text + assert " Date: Tue, 24 Feb 2026 10:16:15 +0100 Subject: [PATCH 10/98] docs: update CHANGELOG and PROJECT.md with Market Score page Co-Authored-By: Claude Opus 4.6 --- CHANGELOG.md | 10 ++++++++++ PROJECT.md | 1 + 2 files changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b0031cb..6c69918 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] +### Added +- **Market Score methodology page** — standalone page at `/{lang}/market-score` + explaining the padelnomics Market Score (Zillow Zestimate-style). Reveals four + input categories (demographics, economic strength, demand evidence, data + completeness) and score band interpretations without exposing weights or + formulas. Full JSON-LD (WebPage + FAQPage + BreadcrumbList), OG tags, and + bilingual content (EN professional, DE Du-form). Added to sitemap and footer. + First "padelnomics Market Score" mention in each article template now links + to the methodology page (hub-and-spoke internal linking). 
+ ### Fixed - **Double language prefix in article URLs** — articles were served at `/en/en/markets/italy` (double prefix) because `generate_articles()` stored diff --git a/PROJECT.md b/PROJECT.md index 40e6064..957e102 100644 --- a/PROJECT.md +++ b/PROJECT.md @@ -118,6 +118,7 @@ - [x] Cookie consent banner (functional/A/B categories, 1-year cookie) - [x] Virtual office address on imprint - [x] SEO/GEO admin hub — GSC + Bing + Umami sync, search/funnel/scorecard views, daily background sync +- [x] Market Score methodology page (`/{lang}/market-score`) — Zillow-style explanation of the padelnomics Market Score; EN + DE; JSON-LD (WebPage + FAQPage + BreadcrumbList); hub-and-spoke internal linking from all article templates ### Testing - [x] Playwright visual/E2E test suite — 77 tests across 3 files (visual, e2e flows, quote wizard); single session-scoped server + browser; mocked emails + waitlist mode; ~59s runtime From 5644a1ebf8a20f0f8c333d5deb88adb26af5ec7f Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:22:42 +0100 Subject: [PATCH 11/98] fix: replace datetime.utcnow() with utcnow()/utcnow_iso() across all source files Migrates 15 source files from the deprecated datetime.utcnow() API. Uses utcnow() for in-memory math and utcnow_iso() (strftime format) for SQLite TEXT column writes to preserve lexicographic sort order. Also fixes datetime.utcfromtimestamp() in seo/_bing.py. 
Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 26 +++++++------- web/src/padelnomics/app.py | 4 +-- web/src/padelnomics/auth/routes.py | 14 ++++---- web/src/padelnomics/billing/routes.py | 20 +++++------ web/src/padelnomics/content/__init__.py | 4 +-- web/src/padelnomics/credits.py | 12 +++---- web/src/padelnomics/dashboard/routes.py | 5 ++- web/src/padelnomics/directory/routes.py | 5 ++- web/src/padelnomics/leads/routes.py | 9 ++--- web/src/padelnomics/planner/routes.py | 6 ++-- web/src/padelnomics/scripts/seed_content.py | 4 +-- web/src/padelnomics/scripts/seed_dev_data.py | 18 +++++----- web/src/padelnomics/seo/_bing.py | 20 +++++------ web/src/padelnomics/seo/_gsc.py | 16 ++++----- web/src/padelnomics/seo/_queries.py | 6 ++-- web/src/padelnomics/seo/_umami.py | 14 ++++---- web/src/padelnomics/webhooks.py | 8 ++--- web/src/padelnomics/worker.py | 38 ++++++++++++-------- 18 files changed, 116 insertions(+), 113 deletions(-) diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 89336cc..47913d6 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -29,6 +29,8 @@ from ..core import ( fetch_one, send_email, slugify, + utcnow, + utcnow_iso, ) # Blueprint with its own template folder @@ -64,9 +66,9 @@ def _admin_context(): async def get_dashboard_stats() -> dict: """Get admin dashboard statistics.""" - now = datetime.utcnow() + now = utcnow() today = now.date().isoformat() - week_ago = (now - timedelta(days=7)).isoformat() + week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%S") users_total = await fetch_one("SELECT COUNT(*) as count FROM users WHERE deleted_at IS NULL") users_today = await fetch_one( "SELECT COUNT(*) as count FROM users WHERE created_at >= ? AND deleted_at IS NULL", @@ -211,7 +213,7 @@ async def retry_task(task_id: int) -> bool: SET status = 'pending', run_at = ?, error = NULL WHERE id = ? 
AND status = 'failed' """, - (datetime.utcnow().isoformat(), task_id) + (utcnow_iso(), task_id) ) return result > 0 @@ -522,7 +524,7 @@ async def lead_new(): from ..credits import HEAT_CREDIT_COSTS credit_cost = HEAT_CREDIT_COSTS.get(heat_score, 8) - now = datetime.utcnow().isoformat() + now = utcnow_iso() verified_at = now if status != "pending_verification" else None lead_id = await execute( @@ -567,7 +569,7 @@ async def lead_forward(lead_id: int): await flash("Already forwarded to this supplier.", "warning") return redirect(url_for("admin.lead_detail", lead_id=lead_id)) - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( """INSERT INTO lead_forwards (lead_id, supplier_id, credit_cost, status, created_at) VALUES (?, ?, 0, 'sent', ?)""", @@ -771,7 +773,7 @@ async def supplier_new(): instagram_url = form.get("instagram_url", "").strip() youtube_url = form.get("youtube_url", "").strip() - now = datetime.utcnow().isoformat() + now = utcnow_iso() supplier_id = await execute( """INSERT INTO suppliers (name, slug, country_code, city, region, website, description, category, @@ -865,7 +867,7 @@ async def flag_toggle(): return redirect(url_for("admin.flags")) new_enabled = 0 if row["enabled"] else 1 - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( "UPDATE feature_flags SET enabled = ?, updated_at = ? 
WHERE name = ?", (new_enabled, now, flag_name), @@ -940,7 +942,7 @@ async def get_email_stats() -> dict: total = await fetch_one("SELECT COUNT(*) as cnt FROM email_log") delivered = await fetch_one("SELECT COUNT(*) as cnt FROM email_log WHERE last_event = 'delivered'") bounced = await fetch_one("SELECT COUNT(*) as cnt FROM email_log WHERE last_event = 'bounced'") - today = datetime.utcnow().date().isoformat() + today = utcnow().date().isoformat() sent_today = await fetch_one("SELECT COUNT(*) as cnt FROM email_log WHERE created_at >= ?", (today,)) return { "total": total["cnt"] if total else 0, @@ -1487,7 +1489,7 @@ async def scenario_edit(scenario_id: int): dbl = state.get("dblCourts", 0) sgl = state.get("sglCourts", 0) court_config = f"{dbl} double + {sgl} single" - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( """UPDATE published_scenarios @@ -1740,7 +1742,7 @@ async def article_new(): md_dir.mkdir(parents=True, exist_ok=True) (md_dir / f"{article_slug}.md").write_text(body) - pub_dt = published_at or datetime.utcnow().isoformat() + pub_dt = published_at or utcnow_iso() await execute( """INSERT INTO articles @@ -1800,7 +1802,7 @@ async def article_edit(article_id: int): md_dir.mkdir(parents=True, exist_ok=True) (md_dir / f"{article['slug']}.md").write_text(body) - now = datetime.utcnow().isoformat() + now = utcnow_iso() pub_dt = published_at or article["published_at"] await execute( @@ -1867,7 +1869,7 @@ async def article_publish(article_id: int): return redirect(url_for("admin.articles")) new_status = "published" if article["status"] == "draft" else "draft" - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( "UPDATE articles SET status = ?, updated_at = ? 
WHERE id = ?", (new_status, now, article_id), diff --git a/web/src/padelnomics/app.py b/web/src/padelnomics/app.py index 75c9f78..93f57ce 100644 --- a/web/src/padelnomics/app.py +++ b/web/src/padelnomics/app.py @@ -208,7 +208,7 @@ def create_app() -> Quart: @app.context_processor def inject_globals(): - from datetime import datetime + from .core import utcnow as _utcnow lang = g.get("lang") or _detect_lang() g.lang = lang # ensure g.lang is always set (e.g. for dashboard/billing routes) effective_lang = lang if lang in SUPPORTED_LANGS else "en" @@ -217,7 +217,7 @@ def create_app() -> Quart: "user": g.get("user"), "subscription": g.get("subscription"), "is_admin": "admin" in (g.get("user") or {}).get("roles", []), - "now": datetime.utcnow(), + "now": _utcnow(), "csrf_token": get_csrf_token, "ab_variant": getattr(g, "ab_variant", None), "ab_tag": getattr(g, "ab_tag", None), diff --git a/web/src/padelnomics/auth/routes.py b/web/src/padelnomics/auth/routes.py index 12a30d0..908ca9d 100644 --- a/web/src/padelnomics/auth/routes.py +++ b/web/src/padelnomics/auth/routes.py @@ -18,6 +18,8 @@ from ..core import ( fetch_one, is_disposable_email, is_flag_enabled, + utcnow, + utcnow_iso, ) from ..i18n import SUPPORTED_LANGS, get_translations @@ -64,7 +66,7 @@ async def get_user_by_email(email: str) -> dict | None: async def create_user(email: str) -> int: """Create new user, return ID.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() return await execute( "INSERT INTO users (email, created_at) VALUES (?, ?)", (email.lower(), now) ) @@ -82,10 +84,10 @@ async def update_user(user_id: int, **fields) -> None: async def create_auth_token(user_id: int, token: str, minutes: int = None) -> int: """Create auth token for user.""" minutes = minutes or config.MAGIC_LINK_EXPIRY_MINUTES - expires = datetime.utcnow() + timedelta(minutes=minutes) + expires = utcnow() + timedelta(minutes=minutes) return await execute( "INSERT INTO auth_tokens (user_id, token, expires_at) VALUES (?, 
?, ?)", - (user_id, token, expires.isoformat()), + (user_id, token, expires.strftime("%Y-%m-%dT%H:%M:%S")), ) @@ -98,14 +100,14 @@ async def get_valid_token(token: str) -> dict | None: JOIN users u ON u.id = at.user_id WHERE at.token = ? AND at.expires_at > ? AND at.used_at IS NULL """, - (token, datetime.utcnow().isoformat()), + (token, utcnow_iso()), ) async def mark_token_used(token_id: int) -> None: """Mark token as used.""" await execute( - "UPDATE auth_tokens SET used_at = ? WHERE id = ?", (datetime.utcnow().isoformat(), token_id) + "UPDATE auth_tokens SET used_at = ? WHERE id = ?", (utcnow_iso(), token_id) ) @@ -331,7 +333,7 @@ async def verify(): await mark_token_used(token_data["id"]) # Update last login - await update_user(token_data["user_id"], last_login_at=datetime.utcnow().isoformat()) + await update_user(token_data["user_id"], last_login_at=utcnow_iso()) # Set session session.permanent = True diff --git a/web/src/padelnomics/billing/routes.py b/web/src/padelnomics/billing/routes.py index b22bdd2..c1c521d 100644 --- a/web/src/padelnomics/billing/routes.py +++ b/web/src/padelnomics/billing/routes.py @@ -5,7 +5,7 @@ Payment provider: paddle import json import secrets -from datetime import datetime +from datetime import datetime, timedelta from pathlib import Path from paddle_billing import Client as PaddleClient @@ -14,7 +14,7 @@ from paddle_billing.Notifications import Secret, Verifier from quart import Blueprint, flash, g, jsonify, redirect, render_template, request, session, url_for from ..auth.routes import login_required -from ..core import config, execute, fetch_one, get_paddle_price +from ..core import config, execute, fetch_one, get_paddle_price, utcnow, utcnow_iso from ..i18n import get_translations @@ -69,7 +69,7 @@ async def upsert_subscription( current_period_end: str = None, ) -> int: """Create or update subscription. 
Finds existing by provider_subscription_id.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() existing = await fetch_one( "SELECT id FROM subscriptions WHERE provider_subscription_id = ?", @@ -104,7 +104,7 @@ async def get_subscription_by_provider_id(subscription_id: str) -> dict | None: async def update_subscription_status(provider_subscription_id: str, status: str, **extra) -> None: """Update subscription status by provider subscription ID.""" - extra["updated_at"] = datetime.utcnow().isoformat() + extra["updated_at"] = utcnow_iso() extra["status"] = status sets = ", ".join(f"{k} = ?" for k in extra) values = list(extra.values()) @@ -343,7 +343,7 @@ async def _handle_supplier_subscription_activated(data: dict, custom_data: dict) base_plan, tier = _derive_tier_from_plan(plan) monthly_credits = PLAN_MONTHLY_CREDITS.get(base_plan, 0) - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db_transaction() as db: # Update supplier record — Basic tier also gets is_verified = 1 @@ -392,7 +392,7 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None: """Handle one-time transaction completion (credit packs, sticky boosts, business plan).""" supplier_id = custom_data.get("supplier_id") user_id = custom_data.get("user_id") - now = datetime.utcnow().isoformat() + now = utcnow_iso() items = data.get("items", []) for item in items: @@ -412,10 +412,8 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None: # Sticky boost purchases elif key == "boost_sticky_week" and supplier_id: - from datetime import timedelta - from ..core import transaction as db_transaction - expires = (datetime.utcnow() + timedelta(weeks=1)).isoformat() + expires = (utcnow() + timedelta(weeks=1)).strftime("%Y-%m-%dT%H:%M:%S") country = custom_data.get("sticky_country", "") async with db_transaction() as db: await db.execute( @@ -430,10 +428,8 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None: ) 
elif key == "boost_sticky_month" and supplier_id: - from datetime import timedelta - from ..core import transaction as db_transaction - expires = (datetime.utcnow() + timedelta(days=30)).isoformat() + expires = (utcnow() + timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%S") country = custom_data.get("sticky_country", "") async with db_transaction() as db: await db.execute( diff --git a/web/src/padelnomics/content/__init__.py b/web/src/padelnomics/content/__init__.py index 70395e4..618679b 100644 --- a/web/src/padelnomics/content/__init__.py +++ b/web/src/padelnomics/content/__init__.py @@ -15,7 +15,7 @@ import yaml from jinja2 import ChainableUndefined, Environment from ..analytics import fetch_analytics -from ..core import execute, fetch_one, slugify +from ..core import execute, fetch_one, slugify, utcnow_iso # ── Constants ──────────────────────────────────────────────────────────────── @@ -301,7 +301,7 @@ async def generate_articles( publish_date = start_date published_today = 0 generated = 0 - now_iso = datetime.now(UTC).isoformat() + now_iso = utcnow_iso() for row in rows: for lang in config["languages"]: diff --git a/web/src/padelnomics/credits.py b/web/src/padelnomics/credits.py index ac58f37..77943f1 100644 --- a/web/src/padelnomics/credits.py +++ b/web/src/padelnomics/credits.py @@ -5,9 +5,7 @@ All balance mutations go through this module to keep credit_ledger (source of tr and suppliers.credit_balance (denormalized cache) in sync within a single transaction. """ -from datetime import datetime - -from .core import execute, fetch_all, fetch_one, transaction +from .core import execute, fetch_all, fetch_one, transaction, utcnow_iso # Credit cost per heat tier HEAT_CREDIT_COSTS = {"hot": 35, "warm": 20, "cool": 8} @@ -44,7 +42,7 @@ async def add_credits( note: str = None, ) -> int: """Add credits to a supplier. 
Returns new balance.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with transaction() as db: row = await db.execute_fetchall( "SELECT credit_balance FROM suppliers WHERE id = ?", (supplier_id,) @@ -73,7 +71,7 @@ async def spend_credits( note: str = None, ) -> int: """Spend credits from a supplier. Returns new balance. Raises InsufficientCredits.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with transaction() as db: row = await db.execute_fetchall( "SELECT credit_balance FROM suppliers WHERE id = ?", (supplier_id,) @@ -116,7 +114,7 @@ async def unlock_lead(supplier_id: int, lead_id: int) -> dict: raise ValueError("Lead not found") cost = lead["credit_cost"] or compute_credit_cost(lead) - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with transaction() as db: # Check balance @@ -180,7 +178,7 @@ async def monthly_credit_refill(supplier_id: int) -> int: if not row or not row["monthly_credits"]: return 0 - now = datetime.utcnow().isoformat() + now = utcnow_iso() new_balance = await add_credits( supplier_id, row["monthly_credits"], diff --git a/web/src/padelnomics/dashboard/routes.py b/web/src/padelnomics/dashboard/routes.py index 89fd001..6afb375 100644 --- a/web/src/padelnomics/dashboard/routes.py +++ b/web/src/padelnomics/dashboard/routes.py @@ -1,13 +1,12 @@ """ Dashboard domain: user dashboard and settings. 
""" -from datetime import datetime from pathlib import Path from quart import Blueprint, flash, g, redirect, render_template, request, url_for from ..auth.routes import login_required, update_user -from ..core import csrf_protect, fetch_one, soft_delete +from ..core import csrf_protect, fetch_one, soft_delete, utcnow_iso from ..i18n import get_translations bp = Blueprint( @@ -57,7 +56,7 @@ async def settings(): await update_user( g.user["id"], name=form.get("name", "").strip() or None, - updated_at=datetime.utcnow().isoformat(), + updated_at=utcnow_iso(), ) t = get_translations(g.get("lang") or "en") await flash(t["dash_settings_saved"], "success") diff --git a/web/src/padelnomics/directory/routes.py b/web/src/padelnomics/directory/routes.py index f0d3f59..00caa44 100644 --- a/web/src/padelnomics/directory/routes.py +++ b/web/src/padelnomics/directory/routes.py @@ -2,12 +2,11 @@ Supplier directory: public, searchable listing of padel court suppliers. """ -from datetime import UTC, datetime from pathlib import Path from quart import Blueprint, g, make_response, redirect, render_template, request, url_for -from ..core import csrf_protect, execute, fetch_all, fetch_one +from ..core import csrf_protect, execute, fetch_all, fetch_one, utcnow_iso from ..i18n import get_translations bp = Blueprint( @@ -89,7 +88,7 @@ async def _build_directory_query(q, country, category, region, page, per_page=24 lang = g.get("lang", "en") cat_labels, country_labels, region_labels = get_directory_labels(lang) - now = datetime.now(UTC).isoformat() + now = utcnow_iso() params: list = [] wheres: list[str] = [] diff --git a/web/src/padelnomics/leads/routes.py b/web/src/padelnomics/leads/routes.py index ba73d8c..a814d8e 100644 --- a/web/src/padelnomics/leads/routes.py +++ b/web/src/padelnomics/leads/routes.py @@ -27,6 +27,7 @@ from ..core import ( is_disposable_email, is_plausible_phone, send_email, + utcnow_iso, ) from ..i18n import get_translations @@ -102,7 +103,7 @@ async def suppliers(): 
form.get("court_count", 0), form.get("budget", 0), form.get("message", ""), - datetime.utcnow().isoformat(), + utcnow_iso(), ), ) # Notify admin @@ -147,7 +148,7 @@ async def financing(): form.get("court_count", 0), form.get("budget", 0), form.get("message", ""), - datetime.utcnow().isoformat(), + utcnow_iso(), ), ) await send_email( @@ -375,7 +376,7 @@ async def quote_request(): status, credit_cost, secrets.token_urlsafe(16), - datetime.utcnow().isoformat(), + utcnow_iso(), ), ) @@ -520,7 +521,7 @@ async def verify_quote(): from ..credits import compute_credit_cost credit_cost = compute_credit_cost(dict(lead)) - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( "UPDATE lead_requests SET status = 'new', verified_at = ?, credit_cost = ? WHERE id = ?", (now, credit_cost, lead["id"]), diff --git a/web/src/padelnomics/planner/routes.py b/web/src/padelnomics/planner/routes.py index b5f6247..4a076e5 100644 --- a/web/src/padelnomics/planner/routes.py +++ b/web/src/padelnomics/planner/routes.py @@ -4,7 +4,6 @@ Planner domain: padel court financial planner + scenario management. import json import math -from datetime import datetime from pathlib import Path from quart import Blueprint, Response, g, jsonify, render_template, request @@ -18,6 +17,7 @@ from ..core import ( fetch_all, fetch_one, get_paddle_price, + utcnow_iso, ) from ..i18n import get_translations from .calculator import COUNTRY_CURRENCY, CURRENCY_DEFAULT, calc, validate_state @@ -502,7 +502,7 @@ async def save_scenario(): location = form.get("location", "") scenario_id = form.get("scenario_id") - now = datetime.utcnow().isoformat() + now = utcnow_iso() is_first_save = not scenario_id and (await count_scenarios(g.user["id"])) == 0 @@ -563,7 +563,7 @@ async def get_scenario(scenario_id: int): @login_required @csrf_protect async def delete_scenario(scenario_id: int): - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( "UPDATE scenarios SET deleted_at = ? WHERE id = ? 
AND user_id = ? AND deleted_at IS NULL", (now, scenario_id, g.user["id"]), diff --git a/web/src/padelnomics/scripts/seed_content.py b/web/src/padelnomics/scripts/seed_content.py index 23ee2f7..2b3090d 100644 --- a/web/src/padelnomics/scripts/seed_content.py +++ b/web/src/padelnomics/scripts/seed_content.py @@ -18,7 +18,7 @@ import json import os import sqlite3 import sys -from datetime import date, timedelta +from datetime import UTC, date, datetime, timedelta from pathlib import Path from dotenv import load_dotenv @@ -1390,7 +1390,7 @@ def seed_templates(conn: sqlite3.Connection) -> dict[str, int]: def seed_data_rows(conn: sqlite3.Connection, template_ids: dict[str, int]) -> int: """Insert template_data rows for all cities × languages. Returns count inserted.""" - now = __import__("datetime").datetime.utcnow().isoformat() + now = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S") inserted = 0 en_id = template_ids.get("city-padel-cost-en") diff --git a/web/src/padelnomics/scripts/seed_dev_data.py b/web/src/padelnomics/scripts/seed_dev_data.py index 96dfe37..69b2830 100644 --- a/web/src/padelnomics/scripts/seed_dev_data.py +++ b/web/src/padelnomics/scripts/seed_dev_data.py @@ -10,7 +10,7 @@ Usage: import os import sqlite3 import sys -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from pathlib import Path from dotenv import load_dotenv @@ -292,7 +292,7 @@ def main(): conn.execute("PRAGMA foreign_keys=ON") conn.row_factory = sqlite3.Row - now = datetime.utcnow() + now = datetime.now(UTC) # 1. 
Create dev user print("Creating dev user (dev@localhost)...") @@ -303,7 +303,7 @@ def main(): else: cursor = conn.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", - ("dev@localhost", "Dev User", now.isoformat()), + ("dev@localhost", "Dev User", now.strftime("%Y-%m-%dT%H:%M:%S")), ) dev_user_id = cursor.lastrowid print(f" Created (id={dev_user_id})") @@ -336,7 +336,7 @@ def main(): s["website"], s["description"], s["category"], s["tier"], s["credit_balance"], s["monthly_credits"], s["contact_name"], s["contact_email"], s["years_in_business"], s["project_count"], - s["service_area"], now.isoformat(), + s["service_area"], now.strftime("%Y-%m-%dT%H:%M:%S"), ), ) supplier_ids[s["slug"]] = cursor.lastrowid @@ -349,7 +349,7 @@ def main(): ("courtbuild-spain", "supplier_growth", "maria@courtbuild.example.com", "Maria Garcia"), ("desert-padel-fze", "supplier_pro", "ahmed@desertpadel.example.com", "Ahmed Al-Rashid"), ] - period_end = (now + timedelta(days=30)).isoformat() + period_end = (now + timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%S") for slug, plan, email, name in claimed_suppliers: sid = supplier_ids.get(slug) if not sid: @@ -364,14 +364,14 @@ def main(): else: cursor = conn.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", - (email, name, now.isoformat()), + (email, name, now.strftime("%Y-%m-%dT%H:%M:%S")), ) owner_id = cursor.lastrowid # Claim the supplier conn.execute( "UPDATE suppliers SET claimed_by = ?, claimed_at = ? WHERE id = ? 
AND claimed_by IS NULL", - (owner_id, now.isoformat(), sid), + (owner_id, now.strftime("%Y-%m-%dT%H:%M:%S"), sid), ) # Create billing customer record @@ -382,7 +382,7 @@ def main(): conn.execute( """INSERT INTO billing_customers (user_id, provider_customer_id, created_at) VALUES (?, ?, ?)""", - (owner_id, f"ctm_dev_{slug}", now.isoformat()), + (owner_id, f"ctm_dev_{slug}", now.strftime("%Y-%m-%dT%H:%M:%S")), ) # Create active subscription @@ -396,7 +396,7 @@ def main(): current_period_end, created_at) VALUES (?, ?, 'active', ?, ?, ?)""", (owner_id, plan, f"sub_dev_{slug}", - period_end, now.isoformat()), + period_end, now.strftime("%Y-%m-%dT%H:%M:%S")), ) print(f" {slug} -> owner {email} ({plan})") diff --git a/web/src/padelnomics/seo/_bing.py b/web/src/padelnomics/seo/_bing.py index 5a76446..9a5cb74 100644 --- a/web/src/padelnomics/seo/_bing.py +++ b/web/src/padelnomics/seo/_bing.py @@ -3,12 +3,12 @@ Uses an API key for auth. Fetches query stats and page stats. """ -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from urllib.parse import urlparse import httpx -from ..core import config, execute +from ..core import config, execute, utcnow, utcnow_iso _TIMEOUT_SECONDS = 30 @@ -27,7 +27,7 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) if not config.BING_WEBMASTER_API_KEY or not config.BING_SITE_URL: return 0 # Bing not configured — skip silently - started_at = datetime.utcnow() + started_at = utcnow() try: rows_synced = 0 @@ -48,14 +48,14 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) if not isinstance(entries, list): entries = [] - cutoff = datetime.utcnow() - timedelta(days=days_back) + cutoff = utcnow() - timedelta(days=days_back) for entry in entries: # Bing date format: "/Date(1708905600000)/" (ms since epoch) date_str = entry.get("Date", "") if "/Date(" in date_str: ms = int(date_str.split("(")[1].split(")")[0]) - entry_date = 
datetime.utcfromtimestamp(ms / 1000) + entry_date = datetime.fromtimestamp(ms / 1000, tz=UTC) else: continue @@ -99,7 +99,7 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) date_str = entry.get("Date", "") if "/Date(" in date_str: ms = int(date_str.split("(")[1].split(")")[0]) - entry_date = datetime.utcfromtimestamp(ms / 1000) + entry_date = datetime.fromtimestamp(ms / 1000, tz=UTC) else: continue @@ -122,21 +122,21 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) ) rows_synced += 1 - duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000) + duration_ms = int((utcnow() - started_at).total_seconds() * 1000) await execute( """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('bing', 'success', ?, ?, ?, ?)""", - (rows_synced, started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms), + (rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), ) return rows_synced except Exception as exc: - duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000) + duration_ms = int((utcnow() - started_at).total_seconds() * 1000) await execute( """INSERT INTO seo_sync_log (source, status, rows_synced, error, started_at, completed_at, duration_ms) VALUES ('bing', 'failed', 0, ?, ?, ?, ?)""", - (str(exc), started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms), + (str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), ) raise diff --git a/web/src/padelnomics/seo/_gsc.py b/web/src/padelnomics/seo/_gsc.py index 83fa70e..7c5ee86 100644 --- a/web/src/padelnomics/seo/_gsc.py +++ b/web/src/padelnomics/seo/_gsc.py @@ -9,7 +9,7 @@ from datetime import datetime, timedelta from pathlib import Path from urllib.parse import urlparse -from ..core import config, execute +from ..core import config, execute, utcnow, utcnow_iso # GSC returns max 25K rows per request 
_ROWS_PER_PAGE = 25_000 @@ -95,11 +95,11 @@ async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int: if not config.GSC_SERVICE_ACCOUNT_PATH or not config.GSC_SITE_URL: return 0 # GSC not configured — skip silently - started_at = datetime.utcnow() + started_at = utcnow() # GSC has ~2 day delay; fetch from days_back ago to 2 days ago - end_date = (datetime.utcnow() - timedelta(days=2)).strftime("%Y-%m-%d") - start_date = (datetime.utcnow() - timedelta(days=days_back + 2)).strftime("%Y-%m-%d") + end_date = (utcnow() - timedelta(days=2)).strftime("%Y-%m-%d") + start_date = (utcnow() - timedelta(days=days_back + 2)).strftime("%Y-%m-%d") try: rows = await asyncio.to_thread( @@ -122,21 +122,21 @@ async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int: ) rows_synced += 1 - duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000) + duration_ms = int((utcnow() - started_at).total_seconds() * 1000) await execute( """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('gsc', 'success', ?, ?, ?, ?)""", - (rows_synced, started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms), + (rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), ) return rows_synced except Exception as exc: - duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000) + duration_ms = int((utcnow() - started_at).total_seconds() * 1000) await execute( """INSERT INTO seo_sync_log (source, status, rows_synced, error, started_at, completed_at, duration_ms) VALUES ('gsc', 'failed', 0, ?, ?, ?, ?)""", - (str(exc), started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms), + (str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), ) raise diff --git a/web/src/padelnomics/seo/_queries.py b/web/src/padelnomics/seo/_queries.py index 94434c0..c12820b 100644 --- a/web/src/padelnomics/seo/_queries.py +++ b/web/src/padelnomics/seo/_queries.py @@ 
-4,14 +4,14 @@ All heavy lifting happens in SQL. Functions accept filter parameters and return plain dicts/lists. """ -from datetime import datetime, timedelta +from datetime import timedelta -from ..core import execute, fetch_all, fetch_one +from ..core import execute, fetch_all, fetch_one, utcnow def _date_cutoff(date_range_days: int) -> str: """Return ISO date string for N days ago.""" - return (datetime.utcnow() - timedelta(days=date_range_days)).strftime("%Y-%m-%d") + return (utcnow() - timedelta(days=date_range_days)).strftime("%Y-%m-%d") async def get_search_performance( diff --git a/web/src/padelnomics/seo/_umami.py b/web/src/padelnomics/seo/_umami.py index c35f357..cccbb8a 100644 --- a/web/src/padelnomics/seo/_umami.py +++ b/web/src/padelnomics/seo/_umami.py @@ -8,7 +8,7 @@ from datetime import datetime, timedelta import httpx -from ..core import config, execute +from ..core import config, execute, utcnow, utcnow_iso _TIMEOUT_SECONDS = 15 @@ -21,7 +21,7 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS if not config.UMAMI_API_TOKEN or not config.UMAMI_API_URL: return 0 # Umami not configured — skip silently - started_at = datetime.utcnow() + started_at = utcnow() try: rows_synced = 0 @@ -34,7 +34,7 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS # (Umami's metrics endpoint returns totals for the period, # so we query one day at a time for daily granularity) for day_offset in range(days_back): - day = datetime.utcnow() - timedelta(days=day_offset + 1) + day = utcnow() - timedelta(days=day_offset + 1) metric_date = day.strftime("%Y-%m-%d") start_ms = int(day.replace(hour=0, minute=0, second=0).timestamp() * 1000) end_ms = int(day.replace(hour=23, minute=59, second=59).timestamp() * 1000) @@ -96,21 +96,21 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS (metric_date, page_count, visitors, br, avg_time), ) - duration_ms = int((datetime.utcnow() - 
started_at).total_seconds() * 1000) + duration_ms = int((utcnow() - started_at).total_seconds() * 1000) await execute( """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('umami', 'success', ?, ?, ?, ?)""", - (rows_synced, started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms), + (rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), ) return rows_synced except Exception as exc: - duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000) + duration_ms = int((utcnow() - started_at).total_seconds() * 1000) await execute( """INSERT INTO seo_sync_log (source, status, rows_synced, error, started_at, completed_at, duration_ms) VALUES ('umami', 'failed', 0, ?, ?, ?, ?)""", - (str(exc), started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms), + (str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), ) raise diff --git a/web/src/padelnomics/webhooks.py b/web/src/padelnomics/webhooks.py index 6d3821c..3196adb 100644 --- a/web/src/padelnomics/webhooks.py +++ b/web/src/padelnomics/webhooks.py @@ -5,12 +5,10 @@ NOT behind @role_required: Resend posts here unauthenticated. Verification uses RESEND_WEBHOOK_SECRET via the Resend SDK. 
""" -from datetime import datetime - import resend from quart import Blueprint, jsonify, request -from .core import config, execute +from .core import config, execute, utcnow_iso bp = Blueprint("webhooks", __name__, url_prefix="/webhooks") @@ -67,7 +65,7 @@ async def _handle_delivery_event(event_type: str, data: dict) -> None: return last_event, ts_col = _EVENT_UPDATES[event_type] - now = datetime.utcnow().isoformat() + now = utcnow_iso() if ts_col: await execute( @@ -87,7 +85,7 @@ async def _handle_inbound(data: dict) -> None: if not resend_id: return - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( """INSERT OR IGNORE INTO inbound_emails (resend_id, message_id, in_reply_to, from_addr, to_addr, diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index 0681b25..e8f8b8d 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -7,7 +7,17 @@ import json import traceback from datetime import datetime, timedelta -from .core import EMAIL_ADDRESSES, config, execute, fetch_all, fetch_one, init_db, send_email +from .core import ( + EMAIL_ADDRESSES, + config, + execute, + fetch_all, + fetch_one, + init_db, + send_email, + utcnow, + utcnow_iso, +) from .i18n import get_translations # Task handlers registry @@ -29,7 +39,7 @@ def _email_wrap(body: str, lang: str = "en", preheader: str = "") -> str: preheader: hidden preview text shown in email client list views. 
""" - year = datetime.utcnow().year + year = utcnow().year tagline = _t("email_footer_tagline", lang) copyright_text = _t("email_footer_copyright", lang, year=year, app_name=config.APP_NAME) # Hidden preheader trick: visible text + invisible padding to prevent @@ -132,15 +142,15 @@ async def enqueue(task_name: str, payload: dict = None, run_at: datetime = None) ( task_name, json.dumps(payload or {}), - (run_at or datetime.utcnow()).isoformat(), - datetime.utcnow().isoformat(), + (run_at or utcnow()).strftime("%Y-%m-%dT%H:%M:%S"), + utcnow_iso(), ), ) async def get_pending_tasks(limit: int = 10) -> list[dict]: """Get pending tasks ready to run.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() return await fetch_all( """ SELECT * FROM tasks @@ -156,7 +166,7 @@ async def mark_complete(task_id: int) -> None: """Mark task as completed.""" await execute( "UPDATE tasks SET status = 'complete', completed_at = ? WHERE id = ?", - (datetime.utcnow().isoformat(), task_id), + (utcnow_iso(), task_id), ) @@ -167,7 +177,7 @@ async def mark_failed(task_id: int, error: str, retries: int) -> None: if retries < max_retries: # Exponential backoff: 1min, 5min, 25min delay = timedelta(minutes=5**retries) - run_at = datetime.utcnow() + delay + run_at = utcnow() + delay await execute( """ @@ -385,13 +395,13 @@ async def handle_send_waitlist_confirmation(payload: dict) -> None: @task("cleanup_expired_tokens") async def handle_cleanup_tokens(payload: dict) -> None: """Clean up expired auth tokens.""" - await execute("DELETE FROM auth_tokens WHERE expires_at < ?", (datetime.utcnow().isoformat(),)) + await execute("DELETE FROM auth_tokens WHERE expires_at < ?", (utcnow_iso(),)) @task("cleanup_rate_limits") async def handle_cleanup_rate_limits(payload: dict) -> None: """Clean up old rate limit entries.""" - cutoff = (datetime.utcnow() - timedelta(hours=1)).isoformat() + cutoff = (utcnow() - timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%S") await execute("DELETE FROM rate_limits 
WHERE timestamp < ?", (cutoff,)) @@ -497,7 +507,7 @@ async def handle_send_lead_forward_email(payload: dict) -> None: ) # Update email_sent_at on lead_forward - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( "UPDATE lead_forwards SET email_sent_at = ? WHERE lead_id = ? AND supplier_id = ?", (now, lead_id, supplier_id), @@ -621,7 +631,7 @@ async def handle_generate_business_plan(payload: dict) -> None: file_path.write_bytes(pdf_bytes) # Update record - now = datetime.utcnow().isoformat() + now = utcnow_iso() await execute( "UPDATE business_plan_exports SET status = 'ready', file_path = ?, completed_at = ? WHERE id = ?", (str(file_path), now, export_id), @@ -664,7 +674,7 @@ async def handle_generate_business_plan(payload: dict) -> None: @task("cleanup_old_tasks") async def handle_cleanup_tasks(payload: dict) -> None: """Clean up completed/failed tasks older than 7 days.""" - cutoff = (datetime.utcnow() - timedelta(days=7)).isoformat() + cutoff = (utcnow() - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%S") await execute( "DELETE FROM tasks WHERE status IN ('complete', 'failed') AND created_at < ?", (cutoff,) ) @@ -791,9 +801,7 @@ async def run_scheduler() -> None: await enqueue("cleanup_old_tasks") # Monthly credit refill — run on the 1st of each month - from datetime import datetime - - today = datetime.utcnow() + today = utcnow() this_month = f"{today.year}-{today.month:02d}" if today.day == 1 and last_credit_refill != this_month: await enqueue("refill_monthly_credits") From a05c230ce39b67af6313d75363d6832b1ed54a9a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:24:16 +0100 Subject: [PATCH 12/98] fix(tests): replace datetime.utcnow() with utcnow_iso() in test files Also fixes test_supplier_webhooks.py fromisoformat() comparisons: expires (naive, from DB) now compared against datetime.now(UTC).replace(tzinfo=None) to avoid mixing naive/aware datetimes. 
Co-Authored-By: Claude Sonnet 4.6 --- web/tests/test_content.py | 7 ++++--- web/tests/test_credits.py | 12 ++++++------ web/tests/test_feature_flags.py | 5 +++-- web/tests/test_seo.py | 16 +++++++++------- web/tests/test_supplier_webhooks.py | 14 ++++++++------ 5 files changed, 30 insertions(+), 24 deletions(-) diff --git a/web/tests/test_content.py b/web/tests/test_content.py index 015b3ff..4e5cbcf 100644 --- a/web/tests/test_content.py +++ b/web/tests/test_content.py @@ -9,6 +9,8 @@ import importlib import json import sqlite3 from datetime import date, datetime + +from padelnomics.core import utcnow_iso from pathlib import Path import pytest @@ -70,7 +72,7 @@ async def _create_published_scenario(slug="test-scenario", city="TestCity", coun async def _create_article(slug="test-article", url_path="/test-article", status="published", published_at=None): """Insert an article row, return its id.""" - pub = published_at or datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") + pub = published_at or utcnow_iso() return await execute( """INSERT INTO articles (url_path, slug, title, meta_description, country, region, @@ -936,8 +938,7 @@ class TestRouteRegistration: @pytest.fixture async def admin_client(app, db): """Test client with admin user (has admin role).""" - from datetime import datetime - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", ("admin@test.com", "Admin", now), diff --git a/web/tests/test_credits.py b/web/tests/test_credits.py index 054dfbe..5599a7c 100644 --- a/web/tests/test_credits.py +++ b/web/tests/test_credits.py @@ -3,7 +3,7 @@ Tests for the credit system (credits.py). Pure SQL operations against real in-memory SQLite — no mocking needed. 
""" -from datetime import datetime +from padelnomics.core import utcnow_iso import pytest from padelnomics.credits import ( @@ -24,7 +24,7 @@ from padelnomics.credits import ( @pytest.fixture async def supplier(db): """Supplier with credit_balance=100, monthly_credits=30, tier=growth.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( """INSERT INTO suppliers (name, slug, country_code, region, category, tier, @@ -41,7 +41,7 @@ async def supplier(db): @pytest.fixture async def lead(db): """Lead request with heat_score=warm, credit_cost=20.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( """INSERT INTO lead_requests (lead_type, heat_score, credit_cost, status, created_at) @@ -154,7 +154,7 @@ class TestAlreadyUnlocked: assert await already_unlocked(supplier["id"], lead["id"]) is False async def test_returns_true_after_unlock(self, db, supplier, lead): - now = datetime.utcnow().isoformat() + now = utcnow_iso() await db.execute( """INSERT INTO lead_forwards (lead_id, supplier_id, credit_cost, created_at) VALUES (?, ?, 20, ?)""", @@ -210,7 +210,7 @@ class TestUnlockLead: async def test_raises_insufficient_credits(self, db, lead): """Supplier with only 5 credits tries to unlock a 20-credit lead.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( """INSERT INTO suppliers (name, slug, country_code, region, category, tier, @@ -247,7 +247,7 @@ class TestMonthlyRefill: async def test_noop_when_no_monthly_credits(self, db): """Supplier with monthly_credits=0 gets no refill.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( """INSERT INTO suppliers (name, slug, country_code, region, category, tier, diff --git a/web/tests/test_feature_flags.py b/web/tests/test_feature_flags.py index 4ab8768..b467c6b 100644 --- a/web/tests/test_feature_flags.py +++ b/web/tests/test_feature_flags.py @@ -7,7 +7,8 @@ Integration tests exercise full 
request/response flows via Quart test client. """ import sqlite3 -from datetime import datetime + +from padelnomics.core import utcnow_iso from pathlib import Path from unittest.mock import AsyncMock, patch @@ -30,7 +31,7 @@ def mock_csrf_validation(): @pytest.fixture async def admin_client(app, db): """Test client with an admin-role user session (module-level, follows test_content.py).""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", ("flags_admin@test.com", "Flags Admin", now), diff --git a/web/tests/test_seo.py b/web/tests/test_seo.py index 708ee51..64dc5b6 100644 --- a/web/tests/test_seo.py +++ b/web/tests/test_seo.py @@ -1,6 +1,8 @@ """Tests for the SEO metrics module: queries, sync functions, admin routes.""" -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta + +from padelnomics.core import utcnow_iso from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -21,11 +23,11 @@ from padelnomics import core # ── Fixtures ────────────────────────────────────────────────── def _today(): - return datetime.utcnow().strftime("%Y-%m-%d") + return datetime.now(UTC).strftime("%Y-%m-%d") def _days_ago(n: int) -> str: - return (datetime.utcnow() - timedelta(days=n)).strftime("%Y-%m-%d") + return (datetime.now(UTC) - timedelta(days=n)).strftime("%Y-%m-%d") @pytest.fixture @@ -72,7 +74,7 @@ async def seo_data(db): @pytest.fixture async def articles_data(db, seo_data): """Create articles that match the SEO data URLs.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() pub = _days_ago(10) for title, url, tpl, lang in [ @@ -91,7 +93,7 @@ async def articles_data(db, seo_data): @pytest.fixture async def admin_client(app, db): """Authenticated admin client.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", ("admin@test.com", 
"Admin", now), @@ -258,7 +260,7 @@ class TestSyncStatus: """Tests for get_sync_status().""" async def test_returns_last_sync_per_source(self, db): - now = datetime.utcnow().isoformat() + now = utcnow_iso() await db.execute( """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('gsc', 'success', 100, ?, ?, 500)""", @@ -286,7 +288,7 @@ class TestCleanupOldMetrics: """Tests for cleanup_old_metrics().""" async def test_deletes_old_data(self, db): - old_date = (datetime.utcnow() - timedelta(days=400)).strftime("%Y-%m-%d") + old_date = (datetime.now(UTC) - timedelta(days=400)).strftime("%Y-%m-%d") recent_date = _today() await db.execute( diff --git a/web/tests/test_supplier_webhooks.py b/web/tests/test_supplier_webhooks.py index 2eba57c..2f1d1da 100644 --- a/web/tests/test_supplier_webhooks.py +++ b/web/tests/test_supplier_webhooks.py @@ -5,7 +5,9 @@ POST real webhook payloads to /billing/webhook/paddle and verify DB state. Uses the existing client, db, sign_payload from conftest. 
""" import json -from datetime import datetime +from datetime import UTC, datetime + +from padelnomics.core import utcnow_iso from unittest.mock import AsyncMock, patch import pytest @@ -21,7 +23,7 @@ SIG_HEADER = "Paddle-Signature" @pytest.fixture async def supplier(db): """Supplier with tier=free, credit_balance=0.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( """INSERT INTO suppliers (name, slug, country_code, region, category, tier, @@ -38,7 +40,7 @@ async def supplier(db): @pytest.fixture async def paddle_products(db): """Insert paddle_products rows for all keys the handlers need.""" - now = datetime.utcnow().isoformat() + now = utcnow_iso() products = [ ("credits_25", "pri_credits25", "Credit Pack 25", 999, "one_time"), ("credits_100", "pri_credits100", "Credit Pack 100", 3290, "one_time"), @@ -175,7 +177,7 @@ class TestStickyBoostPurchase: assert boosts[0][1] == "active" # expires_at should be ~7 days from now expires = datetime.fromisoformat(boosts[0][2]) - assert abs((expires - datetime.utcnow()).days - 7) <= 1 + assert abs((expires - datetime.now(UTC).replace(tzinfo=None)).days - 7) <= 1 # Verify sticky_until set on supplier sup = await db.execute_fetchall( @@ -202,7 +204,7 @@ class TestStickyBoostPurchase: assert len(boosts) == 1 assert boosts[0][0] == "sticky_month" expires = datetime.fromisoformat(boosts[0][1]) - assert abs((expires - datetime.utcnow()).days - 30) <= 1 + assert abs((expires - datetime.now(UTC).replace(tzinfo=None)).days - 30) <= 1 async def test_sticky_boost_sets_country(self, client, db, supplier, paddle_products): payload = make_transaction_payload( @@ -387,7 +389,7 @@ class TestBusinessPlanPurchase: self, client, db, supplier, paddle_products, test_user, ): # Need a scenario for the export - now = datetime.utcnow().isoformat() + now = utcnow_iso() async with db.execute( """INSERT INTO scenarios (user_id, name, state_json, created_at) VALUES (?, 'Test Scenario', '{}', ?)""", From 
6bd92c69ced018b5868b35562d65a8b5a2c0a38f Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:24:53 +0100 Subject: [PATCH 13/98] fix(admin): use task_name column (not task_type) in _is_generating query Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index c20a49f..03ef84f 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -1722,7 +1722,7 @@ async def _get_article_stats() -> dict: async def _is_generating() -> bool: """Return True if a generate_articles task is currently pending.""" row = await fetch_one( - "SELECT COUNT(*) AS cnt FROM tasks WHERE task_type = 'generate_articles' AND status = 'pending'" + "SELECT COUNT(*) AS cnt FROM tasks WHERE task_name = 'generate_articles' AND status = 'pending'" ) return bool(row and row["cnt"] > 0) From e33b28025e18c56764fb741a702d55f02b1692f4 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:30:18 +0100 Subject: [PATCH 14/98] fix: use SQLite-compatible space format in utcnow_iso(), fix credits ordering utcnow_iso() now produces 'YYYY-MM-DD HH:MM:SS' (space separator) matching SQLite's datetime('now') so lexicographic comparisons like 'published_at <= datetime(now)' work correctly. Also add `id DESC` tiebreaker to get_ledger() ORDER BY to preserve insertion order when multiple credits are added within the same second. 
Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 4 ++-- web/src/padelnomics/auth/routes.py | 4 ++-- web/src/padelnomics/billing/routes.py | 6 +++--- web/src/padelnomics/content/__init__.py | 2 +- web/src/padelnomics/core.py | 14 +++++++------- web/src/padelnomics/credits.py | 2 +- web/src/padelnomics/leads/routes.py | 1 - web/src/padelnomics/scripts/seed_content.py | 2 +- web/src/padelnomics/scripts/seed_dev_data.py | 14 +++++++------- web/src/padelnomics/seo/_bing.py | 4 ++-- web/src/padelnomics/seo/_gsc.py | 6 +++--- web/src/padelnomics/seo/_umami.py | 6 +++--- web/src/padelnomics/suppliers/routes.py | 2 +- web/src/padelnomics/worker.py | 6 +++--- web/tests/test_content.py | 6 ++---- web/tests/test_credits.py | 3 +-- web/tests/test_feature_flags.py | 10 ++++------ web/tests/test_seo.py | 3 +-- web/tests/test_supervisor.py | 10 +++------- web/tests/test_supplier_webhooks.py | 3 +-- 20 files changed, 48 insertions(+), 60 deletions(-) diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 47913d6..c8ec9d0 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -2,7 +2,7 @@ Admin domain: role-based admin panel for managing users, tasks, etc. """ import json -from datetime import date, datetime, timedelta +from datetime import date, timedelta from pathlib import Path import mistune @@ -68,7 +68,7 @@ async def get_dashboard_stats() -> dict: """Get admin dashboard statistics.""" now = utcnow() today = now.date().isoformat() - week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%S") + week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%d %H:%M:%S") users_total = await fetch_one("SELECT COUNT(*) as count FROM users WHERE deleted_at IS NULL") users_today = await fetch_one( "SELECT COUNT(*) as count FROM users WHERE created_at >= ? 
AND deleted_at IS NULL", diff --git a/web/src/padelnomics/auth/routes.py b/web/src/padelnomics/auth/routes.py index 908ca9d..5c39f73 100644 --- a/web/src/padelnomics/auth/routes.py +++ b/web/src/padelnomics/auth/routes.py @@ -3,7 +3,7 @@ Auth domain: magic link authentication, user management, decorators. """ import secrets -from datetime import datetime, timedelta +from datetime import timedelta from functools import wraps from pathlib import Path @@ -87,7 +87,7 @@ async def create_auth_token(user_id: int, token: str, minutes: int = None) -> in expires = utcnow() + timedelta(minutes=minutes) return await execute( "INSERT INTO auth_tokens (user_id, token, expires_at) VALUES (?, ?, ?)", - (user_id, token, expires.strftime("%Y-%m-%dT%H:%M:%S")), + (user_id, token, expires.strftime("%Y-%m-%d %H:%M:%S")), ) diff --git a/web/src/padelnomics/billing/routes.py b/web/src/padelnomics/billing/routes.py index c1c521d..5cb2f15 100644 --- a/web/src/padelnomics/billing/routes.py +++ b/web/src/padelnomics/billing/routes.py @@ -5,7 +5,7 @@ Payment provider: paddle import json import secrets -from datetime import datetime, timedelta +from datetime import timedelta from pathlib import Path from paddle_billing import Client as PaddleClient @@ -413,7 +413,7 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None: # Sticky boost purchases elif key == "boost_sticky_week" and supplier_id: from ..core import transaction as db_transaction - expires = (utcnow() + timedelta(weeks=1)).strftime("%Y-%m-%dT%H:%M:%S") + expires = (utcnow() + timedelta(weeks=1)).strftime("%Y-%m-%d %H:%M:%S") country = custom_data.get("sticky_country", "") async with db_transaction() as db: await db.execute( @@ -429,7 +429,7 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None: elif key == "boost_sticky_month" and supplier_id: from ..core import transaction as db_transaction - expires = (utcnow() + timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%S") + expires = 
(utcnow() + timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S") country = custom_data.get("sticky_country", "") async with db_transaction() as db: await db.execute( diff --git a/web/src/padelnomics/content/__init__.py b/web/src/padelnomics/content/__init__.py index 618679b..33cbeb9 100644 --- a/web/src/padelnomics/content/__init__.py +++ b/web/src/padelnomics/content/__init__.py @@ -135,7 +135,7 @@ def _validate_table_name(data_table: str) -> None: def _datetimeformat(value: str, fmt: str = "%Y-%m-%d") -> str: """Jinja2 filter: format a date string (or 'now') with strftime.""" - from datetime import UTC, datetime + from datetime import datetime if value == "now": dt = datetime.now(UTC) diff --git a/web/src/padelnomics/core.py b/web/src/padelnomics/core.py index c762d5f..9f0895d 100644 --- a/web/src/padelnomics/core.py +++ b/web/src/padelnomics/core.py @@ -102,10 +102,10 @@ def utcnow() -> datetime: def utcnow_iso() -> str: """UTC now as naive ISO string for SQLite TEXT columns. - Produces YYYY-MM-DDTHH:MM:SS (no +00:00 suffix) to match the existing - format stored in the DB so lexicographic SQL comparisons keep working. + Produces YYYY-MM-DD HH:MM:SS (space separator, no +00:00 suffix) to match + SQLite's native datetime('now') format so lexicographic SQL comparisons work. """ - return datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S") + return datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S") # ============================================================================= @@ -554,12 +554,12 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t # Clean old entries and count recent await execute( "DELETE FROM rate_limits WHERE key = ? AND timestamp < ?", - (key, window_start.strftime("%Y-%m-%dT%H:%M:%S")), + (key, window_start.strftime("%Y-%m-%d %H:%M:%S")), ) result = await fetch_one( "SELECT COUNT(*) as count FROM rate_limits WHERE key = ? 
AND timestamp > ?", - (key, window_start.strftime("%Y-%m-%dT%H:%M:%S")), + (key, window_start.strftime("%Y-%m-%d %H:%M:%S")), ) count = result["count"] if result else 0 @@ -575,7 +575,7 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t # Record this request await execute( "INSERT INTO rate_limits (key, timestamp) VALUES (?, ?)", - (key, now.strftime("%Y-%m-%dT%H:%M:%S")), + (key, now.strftime("%Y-%m-%d %H:%M:%S")), ) return True, info @@ -671,7 +671,7 @@ async def hard_delete(table: str, id: int) -> bool: async def purge_deleted(table: str, days: int = 30) -> int: """Purge records deleted more than X days ago.""" - cutoff = (utcnow() - timedelta(days=days)).strftime("%Y-%m-%dT%H:%M:%S") + cutoff = (utcnow() - timedelta(days=days)).strftime("%Y-%m-%d %H:%M:%S") return await execute( f"DELETE FROM {table} WHERE deleted_at IS NOT NULL AND deleted_at < ?", (cutoff,) ) diff --git a/web/src/padelnomics/credits.py b/web/src/padelnomics/credits.py index 77943f1..fa8c855 100644 --- a/web/src/padelnomics/credits.py +++ b/web/src/padelnomics/credits.py @@ -199,6 +199,6 @@ async def get_ledger(supplier_id: int, limit: int = 50) -> list[dict]: FROM credit_ledger cl LEFT JOIN lead_forwards lf ON cl.reference_id = lf.id AND cl.event_type = 'lead_unlock' WHERE cl.supplier_id = ? - ORDER BY cl.created_at DESC LIMIT ?""", + ORDER BY cl.created_at DESC, cl.id DESC LIMIT ?""", (supplier_id, limit), ) diff --git a/web/src/padelnomics/leads/routes.py b/web/src/padelnomics/leads/routes.py index a814d8e..265fc74 100644 --- a/web/src/padelnomics/leads/routes.py +++ b/web/src/padelnomics/leads/routes.py @@ -4,7 +4,6 @@ Leads domain: capture interest in court suppliers and financing. 
import json import secrets -from datetime import datetime from pathlib import Path from quart import Blueprint, flash, g, jsonify, redirect, render_template, request, session, url_for diff --git a/web/src/padelnomics/scripts/seed_content.py b/web/src/padelnomics/scripts/seed_content.py index 2b3090d..1b53b0a 100644 --- a/web/src/padelnomics/scripts/seed_content.py +++ b/web/src/padelnomics/scripts/seed_content.py @@ -1390,7 +1390,7 @@ def seed_templates(conn: sqlite3.Connection) -> dict[str, int]: def seed_data_rows(conn: sqlite3.Connection, template_ids: dict[str, int]) -> int: """Insert template_data rows for all cities × languages. Returns count inserted.""" - now = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S") + now = datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S") inserted = 0 en_id = template_ids.get("city-padel-cost-en") diff --git a/web/src/padelnomics/scripts/seed_dev_data.py b/web/src/padelnomics/scripts/seed_dev_data.py index 69b2830..9d9a9c2 100644 --- a/web/src/padelnomics/scripts/seed_dev_data.py +++ b/web/src/padelnomics/scripts/seed_dev_data.py @@ -303,7 +303,7 @@ def main(): else: cursor = conn.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", - ("dev@localhost", "Dev User", now.strftime("%Y-%m-%dT%H:%M:%S")), + ("dev@localhost", "Dev User", now.strftime("%Y-%m-%d %H:%M:%S")), ) dev_user_id = cursor.lastrowid print(f" Created (id={dev_user_id})") @@ -336,7 +336,7 @@ def main(): s["website"], s["description"], s["category"], s["tier"], s["credit_balance"], s["monthly_credits"], s["contact_name"], s["contact_email"], s["years_in_business"], s["project_count"], - s["service_area"], now.strftime("%Y-%m-%dT%H:%M:%S"), + s["service_area"], now.strftime("%Y-%m-%d %H:%M:%S"), ), ) supplier_ids[s["slug"]] = cursor.lastrowid @@ -349,7 +349,7 @@ def main(): ("courtbuild-spain", "supplier_growth", "maria@courtbuild.example.com", "Maria Garcia"), ("desert-padel-fze", "supplier_pro", "ahmed@desertpadel.example.com", "Ahmed Al-Rashid"), ] 
- period_end = (now + timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%S") + period_end = (now + timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S") for slug, plan, email, name in claimed_suppliers: sid = supplier_ids.get(slug) if not sid: @@ -364,14 +364,14 @@ def main(): else: cursor = conn.execute( "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", - (email, name, now.strftime("%Y-%m-%dT%H:%M:%S")), + (email, name, now.strftime("%Y-%m-%d %H:%M:%S")), ) owner_id = cursor.lastrowid # Claim the supplier conn.execute( "UPDATE suppliers SET claimed_by = ?, claimed_at = ? WHERE id = ? AND claimed_by IS NULL", - (owner_id, now.strftime("%Y-%m-%dT%H:%M:%S"), sid), + (owner_id, now.strftime("%Y-%m-%d %H:%M:%S"), sid), ) # Create billing customer record @@ -382,7 +382,7 @@ def main(): conn.execute( """INSERT INTO billing_customers (user_id, provider_customer_id, created_at) VALUES (?, ?, ?)""", - (owner_id, f"ctm_dev_{slug}", now.strftime("%Y-%m-%dT%H:%M:%S")), + (owner_id, f"ctm_dev_{slug}", now.strftime("%Y-%m-%d %H:%M:%S")), ) # Create active subscription @@ -396,7 +396,7 @@ def main(): current_period_end, created_at) VALUES (?, ?, 'active', ?, ?, ?)""", (owner_id, plan, f"sub_dev_{slug}", - period_end, now.strftime("%Y-%m-%dT%H:%M:%S")), + period_end, now.strftime("%Y-%m-%d %H:%M:%S")), ) print(f" {slug} -> owner {email} ({plan})") diff --git a/web/src/padelnomics/seo/_bing.py b/web/src/padelnomics/seo/_bing.py index 9a5cb74..df6192a 100644 --- a/web/src/padelnomics/seo/_bing.py +++ b/web/src/padelnomics/seo/_bing.py @@ -127,7 +127,7 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('bing', 'success', ?, ?, ?, ?)""", - (rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), + (rows_synced, started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms), ) return rows_synced @@ -137,6 +137,6 
@@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) """INSERT INTO seo_sync_log (source, status, rows_synced, error, started_at, completed_at, duration_ms) VALUES ('bing', 'failed', 0, ?, ?, ?, ?)""", - (str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), + (str(exc), started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms), ) raise diff --git a/web/src/padelnomics/seo/_gsc.py b/web/src/padelnomics/seo/_gsc.py index 7c5ee86..dbdce33 100644 --- a/web/src/padelnomics/seo/_gsc.py +++ b/web/src/padelnomics/seo/_gsc.py @@ -5,7 +5,7 @@ is synchronous, so sync runs in asyncio.to_thread(). """ import asyncio -from datetime import datetime, timedelta +from datetime import timedelta from pathlib import Path from urllib.parse import urlparse @@ -127,7 +127,7 @@ async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int: """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('gsc', 'success', ?, ?, ?, ?)""", - (rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), + (rows_synced, started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms), ) return rows_synced @@ -137,6 +137,6 @@ async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int: """INSERT INTO seo_sync_log (source, status, rows_synced, error, started_at, completed_at, duration_ms) VALUES ('gsc', 'failed', 0, ?, ?, ?, ?)""", - (str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), + (str(exc), started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms), ) raise diff --git a/web/src/padelnomics/seo/_umami.py b/web/src/padelnomics/seo/_umami.py index cccbb8a..9a3172a 100644 --- a/web/src/padelnomics/seo/_umami.py +++ b/web/src/padelnomics/seo/_umami.py @@ -4,7 +4,7 @@ Uses bearer token auth. Self-hosted instance, no rate limits. Config already exists: UMAMI_API_URL, UMAMI_API_TOKEN, UMAMI_WEBSITE_ID. 
""" -from datetime import datetime, timedelta +from datetime import timedelta import httpx @@ -101,7 +101,7 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms) VALUES ('umami', 'success', ?, ?, ?, ?)""", - (rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), + (rows_synced, started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms), ) return rows_synced @@ -111,6 +111,6 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS """INSERT INTO seo_sync_log (source, status, rows_synced, error, started_at, completed_at, duration_ms) VALUES ('umami', 'failed', 0, ?, ?, ?, ?)""", - (str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms), + (str(exc), started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms), ) raise diff --git a/web/src/padelnomics/suppliers/routes.py b/web/src/padelnomics/suppliers/routes.py index f7d2977..7846887 100644 --- a/web/src/padelnomics/suppliers/routes.py +++ b/web/src/padelnomics/suppliers/routes.py @@ -13,9 +13,9 @@ from ..core import ( config, csrf_protect, execute, + feature_gate, fetch_all, fetch_one, - feature_gate, get_paddle_price, is_flag_enabled, ) diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index e8f8b8d..f7da534 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -142,7 +142,7 @@ async def enqueue(task_name: str, payload: dict = None, run_at: datetime = None) ( task_name, json.dumps(payload or {}), - (run_at or utcnow()).strftime("%Y-%m-%dT%H:%M:%S"), + (run_at or utcnow()).strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), ), ) @@ -401,7 +401,7 @@ async def handle_cleanup_tokens(payload: dict) -> None: @task("cleanup_rate_limits") async def handle_cleanup_rate_limits(payload: dict) -> None: """Clean up old rate limit entries.""" - cutoff = (utcnow() 
- timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%S") + cutoff = (utcnow() - timedelta(hours=1)).strftime("%Y-%m-%d %H:%M:%S") await execute("DELETE FROM rate_limits WHERE timestamp < ?", (cutoff,)) @@ -674,7 +674,7 @@ async def handle_generate_business_plan(payload: dict) -> None: @task("cleanup_old_tasks") async def handle_cleanup_tasks(payload: dict) -> None: """Clean up completed/failed tasks older than 7 days.""" - cutoff = (utcnow() - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%S") + cutoff = (utcnow() - timedelta(days=7)).strftime("%Y-%m-%d %H:%M:%S") await execute( "DELETE FROM tasks WHERE status IN ('complete', 'failed') AND created_at < ?", (cutoff,) ) diff --git a/web/tests/test_content.py b/web/tests/test_content.py index 4e5cbcf..02cc103 100644 --- a/web/tests/test_content.py +++ b/web/tests/test_content.py @@ -8,9 +8,7 @@ sitemap integration, admin CRUD routes, and path collision prevention. import importlib import json import sqlite3 -from datetime import date, datetime - -from padelnomics.core import utcnow_iso +from datetime import date from pathlib import Path import pytest @@ -21,7 +19,7 @@ from padelnomics.content.routes import ( bake_scenario_cards, is_reserved_path, ) -from padelnomics.core import execute, fetch_all, fetch_one, slugify +from padelnomics.core import execute, fetch_all, fetch_one, slugify, utcnow_iso from padelnomics.planner.calculator import calc, validate_state SCHEMA_PATH = Path(__file__).parent.parent / "src" / "padelnomics" / "migrations" / "schema.sql" diff --git a/web/tests/test_credits.py b/web/tests/test_credits.py index 5599a7c..12b9826 100644 --- a/web/tests/test_credits.py +++ b/web/tests/test_credits.py @@ -3,9 +3,8 @@ Tests for the credit system (credits.py). Pure SQL operations against real in-memory SQLite — no mocking needed. 
""" -from padelnomics.core import utcnow_iso - import pytest +from padelnomics.core import utcnow_iso from padelnomics.credits import ( InsufficientCredits, add_credits, diff --git a/web/tests/test_feature_flags.py b/web/tests/test_feature_flags.py index b467c6b..df822bb 100644 --- a/web/tests/test_feature_flags.py +++ b/web/tests/test_feature_flags.py @@ -7,16 +7,13 @@ Integration tests exercise full request/response flows via Quart test client. """ import sqlite3 - -from padelnomics.core import utcnow_iso -from pathlib import Path from unittest.mock import AsyncMock, patch import pytest - -from padelnomics import core +from padelnomics.core import utcnow_iso from padelnomics.migrations.migrate import migrate +from padelnomics import core # ── Fixtures & helpers ──────────────────────────────────────────── @@ -294,8 +291,9 @@ class TestLeadUnlockGate: @pytest.mark.asyncio async def test_route_imports_is_flag_enabled(self): """suppliers/routes.py imports is_flag_enabled (gate is wired up).""" - from padelnomics.suppliers.routes import unlock_lead import inspect + + from padelnomics.suppliers.routes import unlock_lead src = inspect.getsource(unlock_lead) assert "is_flag_enabled" in src assert "lead_unlock" in src diff --git a/web/tests/test_seo.py b/web/tests/test_seo.py index 64dc5b6..06fb56a 100644 --- a/web/tests/test_seo.py +++ b/web/tests/test_seo.py @@ -1,11 +1,10 @@ """Tests for the SEO metrics module: queries, sync functions, admin routes.""" from datetime import UTC, datetime, timedelta - -from padelnomics.core import utcnow_iso from unittest.mock import AsyncMock, MagicMock, patch import pytest +from padelnomics.core import utcnow_iso from padelnomics.seo._queries import ( cleanup_old_metrics, get_article_scorecard, diff --git a/web/tests/test_supervisor.py b/web/tests/test_supervisor.py index 8f6eb3f..8a291db 100644 --- a/web/tests/test_supervisor.py +++ b/web/tests/test_supervisor.py @@ -8,19 +8,16 @@ supervisor.py lives in src/padelnomics/ (not a uv 
workspace package), so we add src/ to sys.path before importing. """ -import sys +# Load supervisor.py directly by path — avoids clashing with the web app's +# 'padelnomics' namespace (which is the installed web package). +import importlib.util as _ilu import textwrap -import tomllib from datetime import UTC, datetime, timedelta from pathlib import Path from unittest.mock import MagicMock, patch import pytest -# Load supervisor.py directly by path — avoids clashing with the web app's -# 'padelnomics' namespace (which is the installed web package). -import importlib.util as _ilu - _SUP_PATH = Path(__file__).parent.parent.parent / "src" / "padelnomics" / "supervisor.py" _spec = _ilu.spec_from_file_location("padelnomics_supervisor", _SUP_PATH) sup = _ilu.module_from_spec(_spec) @@ -32,7 +29,6 @@ from padelnomics_extract.proxy import ( make_sticky_selector, ) - # ── load_workflows ──────────────────────────────────────────────── diff --git a/web/tests/test_supplier_webhooks.py b/web/tests/test_supplier_webhooks.py index 2f1d1da..8322c1c 100644 --- a/web/tests/test_supplier_webhooks.py +++ b/web/tests/test_supplier_webhooks.py @@ -6,12 +6,11 @@ Uses the existing client, db, sign_payload from conftest. 
""" import json from datetime import UTC, datetime - -from padelnomics.core import utcnow_iso from unittest.mock import AsyncMock, patch import pytest from conftest import sign_payload +from padelnomics.core import utcnow_iso WEBHOOK_PATH = "/billing/webhook/paddle" SIG_HEADER = "Paddle-Signature" From d42c4790b49a5e8f0ad95c3da8be9bd798e4e07d Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:30:31 +0100 Subject: [PATCH 15/98] chore: update CHANGELOG for datetime deprecation fix Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b0031cb..9463256 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] ### Fixed +- **`datetime.utcnow()` deprecation warnings** — replaced all 94 occurrences + across 22 files (source + tests) with `utcnow()` / `utcnow_iso()` helpers + from `core.py`. `utcnow_iso()` produces `YYYY-MM-DD HH:MM:SS` (space + separator) matching SQLite's `datetime('now')` format so lexicographic SQL + comparisons stay correct. `datetime.utcfromtimestamp()` in `seo/_bing.py` + also replaced with `datetime.fromtimestamp(ts, tz=UTC)`. Zero deprecation + warnings remain. +- **Credit ledger ordering** — `get_ledger()` now uses `ORDER BY created_at + DESC, id DESC` to preserve insertion order when multiple credits are added + within the same second. 
+ + - **Double language prefix in article URLs** — articles were served at `/en/en/markets/italy` (double prefix) because `generate_articles()` stored `url_path` with the lang prefix baked in, but the blueprint is already mounted From bd796178517c08c5325cc60efa8ddbe9465993d3 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:33:38 +0100 Subject: [PATCH 16/98] feat(admin): replace browser confirm() dialogs with native modal MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add styled <dialog> to base_admin.html — frosted backdrop, rounded card, Cancel / Confirm buttons - Add confirmAction(message, form) JS helper — clones OK button to avoid listener accumulation, calls form.submit() on confirm - Replace all 5 onclick="return confirm()" across templates with type="button" + confirmAction(..., this.closest('form')) · articles.html — Rebuild All · template_detail.html — Regenerate · generate_form.html — Generate Articles · scenario_results.html — Delete scenario · audience_contacts.html — Remove contact Co-Authored-By: Claude Sonnet 4.6 --- .../admin/templates/admin/articles.html | 2 +- .../templates/admin/audience_contacts.html | 2 +- .../admin/templates/admin/base_admin.html | 28 +++++++++++++++++++ .../admin/templates/admin/generate_form.html | 2 +- .../admin/partials/scenario_results.html | 2 +- .../templates/admin/template_detail.html | 2 +- 6 files changed, 33 insertions(+), 5 deletions(-) diff --git a/web/src/padelnomics/admin/templates/admin/articles.html b/web/src/padelnomics/admin/templates/admin/articles.html index f5a4af9..19d9d3f 100644 --- a/web/src/padelnomics/admin/templates/admin/articles.html +++ b/web/src/padelnomics/admin/templates/admin/articles.html @@ -18,7 +18,7 @@ New Article
    - +
    diff --git a/web/src/padelnomics/admin/templates/admin/audience_contacts.html b/web/src/padelnomics/admin/templates/admin/audience_contacts.html index 10ecebe..b48de55 100644 --- a/web/src/padelnomics/admin/templates/admin/audience_contacts.html +++ b/web/src/padelnomics/admin/templates/admin/audience_contacts.html @@ -30,7 +30,7 @@
    - +
    diff --git a/web/src/padelnomics/admin/templates/admin/base_admin.html b/web/src/padelnomics/admin/templates/admin/base_admin.html index a153220..ef3f0d0 100644 --- a/web/src/padelnomics/admin/templates/admin/base_admin.html +++ b/web/src/padelnomics/admin/templates/admin/base_admin.html @@ -27,6 +27,14 @@ .admin-main { flex: 1; padding: 2rem; overflow-y: auto; } + #confirm-dialog { + border: none; border-radius: 12px; padding: 1.5rem; max-width: 380px; width: 90%; + box-shadow: 0 20px 60px rgba(0,0,0,0.15), 0 4px 16px rgba(0,0,0,0.08); + } + #confirm-dialog::backdrop { background: rgba(15,23,42,0.45); backdrop-filter: blur(3px); } + #confirm-dialog p { margin: 0 0 1.25rem; font-size: 0.9375rem; color: #0F172A; line-height: 1.55; } + #confirm-dialog .dialog-actions { display: flex; gap: 0.5rem; justify-content: flex-end; } + @media (max-width: 768px) { .admin-layout { flex-direction: column; } .admin-sidebar { @@ -130,4 +138,24 @@ {% block admin_content %}{% endblock %} + + +

    +
    + + +
    +
    + {% endblock %} diff --git a/web/src/padelnomics/admin/templates/admin/generate_form.html b/web/src/padelnomics/admin/templates/admin/generate_form.html index d59f7ed..1b7c137 100644 --- a/web/src/padelnomics/admin/templates/admin/generate_form.html +++ b/web/src/padelnomics/admin/templates/admin/generate_form.html @@ -45,7 +45,7 @@

    - diff --git a/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html b/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html index 93dd343..ac9860d 100644 --- a/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html +++ b/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html @@ -32,7 +32,7 @@ Edit
    - +
    diff --git a/web/src/padelnomics/admin/templates/admin/template_detail.html b/web/src/padelnomics/admin/templates/admin/template_detail.html index e053c34..d0ca524 100644 --- a/web/src/padelnomics/admin/templates/admin/template_detail.html +++ b/web/src/padelnomics/admin/templates/admin/template_detail.html @@ -15,7 +15,7 @@ Generate Articles
    -
    From c5176d7d17b74598b46ab41ed6cff35fa3982f6a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:34:38 +0100 Subject: [PATCH 17/98] fix(admin): center confirm dialog with fixed position + transform Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/templates/admin/base_admin.html | 1 + 1 file changed, 1 insertion(+) diff --git a/web/src/padelnomics/admin/templates/admin/base_admin.html b/web/src/padelnomics/admin/templates/admin/base_admin.html index ef3f0d0..3ca1820 100644 --- a/web/src/padelnomics/admin/templates/admin/base_admin.html +++ b/web/src/padelnomics/admin/templates/admin/base_admin.html @@ -30,6 +30,7 @@ #confirm-dialog { border: none; border-radius: 12px; padding: 1.5rem; max-width: 380px; width: 90%; box-shadow: 0 20px 60px rgba(0,0,0,0.15), 0 4px 16px rgba(0,0,0,0.08); + position: fixed; top: 50%; left: 50%; transform: translate(-50%, -50%); margin: 0; } #confirm-dialog::backdrop { background: rgba(15,23,42,0.45); backdrop-filter: blur(3px); } #confirm-dialog p { margin: 0 0 1.25rem; font-size: 0.9375rem; color: #0F172A; line-height: 1.55; } From d9de9e4cdae079b3daa4f044b2221b136474f70a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 10:36:01 +0100 Subject: [PATCH 18/98] fix(planner): replace alert() error popups with inline error banner Show API errors and network failures in a red inline div below the export form instead of browser alert() dialogs. Error div is hidden on each new submit attempt so stale messages don't linger. 
Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/planner/templates/export.html | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/web/src/padelnomics/planner/templates/export.html b/web/src/padelnomics/planner/templates/export.html index a2446dd..9d9d9af 100644 --- a/web/src/padelnomics/planner/templates/export.html +++ b/web/src/padelnomics/planner/templates/export.html @@ -71,6 +71,7 @@ + @@ -106,9 +107,16 @@ {% block scripts %} ' - for obj in jsonld_objects - ], - ]) - - # Write HTML to disk - build_dir = BUILD_DIR / lang - build_dir.mkdir(parents=True, exist_ok=True) - (build_dir / f"{article_slug}.html").write_text(body_html) - - # Write markdown source to disk (for admin editing) - md_dir = BUILD_DIR / lang / "md" - md_dir.mkdir(parents=True, exist_ok=True) - (md_dir / f"{article_slug}.md").write_text(body_md) - - # Upsert article in SQLite — keyed by (url_path, language) since - # multiple languages share the same url_path - existing_article = await fetch_one( - "SELECT id FROM articles WHERE url_path = ? AND language = ?", - (url_path, lang), - ) - if existing_article: - await execute( - """UPDATE articles - SET title = ?, meta_description = ?, template_slug = ?, - language = ?, date_modified = ?, updated_at = ?, - seo_head = ? - WHERE url_path = ? 
AND language = ?""", - (title, meta_desc, slug, lang, now_iso, now_iso, seo_head, url_path, lang), + # JSON-LD + breadcrumbs = _build_breadcrumbs(f"/{lang}{url_path}", base_url) + jsonld_objects = build_jsonld( + config["schema_type"], + title=title, + description=meta_desc, + url=full_url, + published_at=publish_dt, + date_modified=now_iso, + language=lang, + breadcrumbs=breadcrumbs, + faq_pairs=faq_pairs, ) - else: - await execute( + + # Build SEO head block + seo_head = "\n".join([ + f'', + *hreflang_links, + f'', + f'', + f'', + '', + *[ + f'' + for obj in jsonld_objects + ], + ]) + + # Write HTML to disk + build_dir = BUILD_DIR / lang + build_dir.mkdir(parents=True, exist_ok=True) + (build_dir / f"{article_slug}.html").write_text(body_html) + + # Write markdown source to disk (for admin editing) + md_dir = BUILD_DIR / lang / "md" + md_dir.mkdir(parents=True, exist_ok=True) + (md_dir / f"{article_slug}.md").write_text(body_md) + + # Upsert article in SQLite — keyed by (url_path, language) + await db.execute( """INSERT INTO articles - (url_path, slug, title, meta_description, country, region, - status, published_at, template_slug, language, date_modified, - seo_head, created_at) - VALUES (?, ?, ?, ?, ?, ?, 'published', ?, ?, ?, ?, ?, ?)""", + (url_path, slug, title, meta_description, country, region, + status, published_at, template_slug, language, date_modified, + seo_head, created_at) + VALUES (?, ?, ?, ?, ?, ?, 'published', ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(url_path, language) DO UPDATE SET + title = excluded.title, + meta_description = excluded.meta_description, + template_slug = excluded.template_slug, + date_modified = excluded.date_modified, + seo_head = excluded.seo_head, + updated_at = excluded.date_modified""", ( url_path, article_slug, title, meta_desc, row.get("country", ""), row.get("region", ""), @@ -463,14 +450,17 @@ async def generate_articles( ), ) - generated += 1 + generated += 1 + if generated % 25 == 0: + logger.info("%s: %d articles written…", slug, generated) - # Stagger dates - published_today += 1 - if published_today >= articles_per_day: - published_today = 0 - publish_date += timedelta(days=1) + # Stagger dates + published_today += 1 + if published_today >= articles_per_day: + published_today = 0 + publish_date += timedelta(days=1) + logger.info("%s: done — %d total", slug, generated) return generated From af20c59ced5fbf96f1114b8577138abe09749f7f Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 16:44:02 +0100 Subject: [PATCH 35/98] feat(content): spinner, batch commits, pre-compiled templates, timing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Spinner: - article_results.html: replace hidden polling div with a visible animated spinner banner; CSS spin keyframe added to input.css Batch commits: - generate_articles() now commits every 200 articles instead of holding one giant transaction; articles appear in the admin UI progressively without waiting for the full run Performance (pre-compiled Jinja templates): - Create one Environment + compile url/title/meta/body templates once before the loop instead of calling _render_pattern() per iteration; eliminates ~4 × N Environment() constructions and re-parses of the same template strings (N = articles, typically 500+) - Reuse url_tmpl for hreflang alt-lang rendering Scenario override passthrough: - Pass just-computed scenario data directly to bake_scenario_cards() via 
scenario_overrides, avoiding a DB SELECT that reads an uncommitted row from a potentially separate connection Timing instrumentation: - Accumulate time spent in calc / render / bake phases per run - Log totals at completion: "done — 500 total | calc=1.2s render=4.3s bake=0.1s" Co-Authored-By: Claude Opus 4.6 --- .../admin/partials/article_results.html | 12 ++- web/src/padelnomics/content/__init__.py | 82 +++++++++++++++---- web/src/padelnomics/static/css/input.css | 15 ++++ 3 files changed, 92 insertions(+), 17 deletions(-) diff --git a/web/src/padelnomics/admin/templates/admin/partials/article_results.html b/web/src/padelnomics/admin/templates/admin/partials/article_results.html index d0ef5d8..ecc551e 100644 --- a/web/src/padelnomics/admin/templates/admin/partials/article_results.html +++ b/web/src/padelnomics/admin/templates/admin/partials/article_results.html @@ -1,9 +1,15 @@ {% if is_generating %} - + hx-swap="innerHTML"> + + + + + Generating articles… + {% endif %} {% if articles %}
    diff --git a/web/src/padelnomics/content/__init__.py b/web/src/padelnomics/content/__init__.py index 87d4d04..b9be927 100644 --- a/web/src/padelnomics/content/__init__.py +++ b/web/src/padelnomics/content/__init__.py @@ -8,6 +8,7 @@ are stored in SQLite (routing / application state). import json import logging import re +import time from datetime import UTC, date, datetime, timedelta from pathlib import Path @@ -16,7 +17,7 @@ import yaml from jinja2 import ChainableUndefined, Environment from ..analytics import fetch_analytics -from ..core import execute, fetch_one, slugify, transaction, utcnow_iso +from ..core import slugify, transaction, utcnow_iso logger = logging.getLogger(__name__) @@ -301,29 +302,48 @@ async def generate_articles( if not rows: return 0 + # Pre-compile all Jinja templates once — avoids creating a new Environment() + # and re-parsing the same template strings on every iteration. + _env = Environment(undefined=ChainableUndefined) + _env.filters["slugify"] = slugify + _env.filters["datetimeformat"] = _datetimeformat + url_tmpl = _env.from_string(config["url_pattern"]) + title_tmpl = _env.from_string(config["title_pattern"]) + meta_tmpl = _env.from_string(config["meta_description_pattern"]) + body_tmpl = _env.from_string(config["body_template"]) + publish_date = start_date published_today = 0 generated = 0 now_iso = utcnow_iso() + # Timing accumulators — logged at end so we can see where time goes. + t_calc = t_render = t_bake = 0.0 + + _BATCH_SIZE = 200 + async with transaction() as db: for row in rows: for lang in config["languages"]: - # Build render context: row data + language - ctx = {**row, "language": lang} + # Build render context, replacing None with 0 so numeric + # Jinja filters (round, int) don't crash. 
+ safe_ctx = {k: (v if v is not None else 0) for k, v in row.items()} + safe_ctx["language"] = lang # Render URL pattern (no lang prefix — blueprint provides /) - url_path = _render_pattern(config["url_pattern"], ctx) + url_path = url_tmpl.render(**safe_ctx) if is_reserved_path(url_path): continue - title = _render_pattern(config["title_pattern"], ctx) - meta_desc = _render_pattern(config["meta_description_pattern"], ctx) + title = title_tmpl.render(**safe_ctx) + meta_desc = meta_tmpl.render(**safe_ctx) article_slug = slug + "-" + lang + "-" + str(row[config["natural_key"]]) # Calculator content type: create scenario scenario_slug = None + scenario_overrides = None if config["content_type"] == "calculator": + t0 = time.perf_counter() # DuckDB lowercases all column names; build a case-insensitive # reverse map so "ratepeak" (stored) matches "ratePeak" (DEFAULTS). _defaults_ci = {k.lower(): k for k in DEFAULTS} @@ -334,6 +354,7 @@ async def generate_articles( } state = validate_state(calc_overrides) d = calc(state, lang=lang) + t_calc += time.perf_counter() - t0 scenario_slug = slug + "-" + str(row[config["natural_key"]]) dbl = state.get("dblCourts", 0) @@ -360,12 +381,36 @@ async def generate_articles( ), ) - ctx["scenario_slug"] = scenario_slug + safe_ctx["scenario_slug"] = scenario_slug + # Pass scenario data directly so bake_scenario_cards skips the + # DB re-fetch (the row was just upserted and may not be visible + # on a separate connection within the same uncommitted transaction). 
+ scenario_overrides = { + scenario_slug: { + "slug": scenario_slug, + "title": city, + "location": city, + "country": country, + "venue_type": state.get("venue", "indoor"), + "ownership": state.get("own", "rent"), + "court_config": court_config, + "state_json": json.dumps(state), + "calc_json": json.dumps(d), + "created_at": now_iso, + } + } # Render body template - body_md = _render_pattern(config["body_template"], ctx) + t0 = time.perf_counter() + body_md = body_tmpl.render(**safe_ctx) body_html = mistune.html(body_md) - body_html = await bake_scenario_cards(body_html, lang=lang) + t_render += time.perf_counter() - t0 + + t0 = time.perf_counter() + body_html = await bake_scenario_cards( + body_html, lang=lang, scenario_overrides=scenario_overrides + ) + t_bake += time.perf_counter() - t0 # Extract FAQ pairs for structured data faq_pairs = _extract_faq_pairs(body_md) @@ -377,16 +422,16 @@ async def generate_articles( 8, 0, 0, ).isoformat() - # Hreflang links + # Hreflang links — reuse compiled url_tmpl with swapped language hreflang_links = [] for alt_lang in config["languages"]: - alt_url = f"/{alt_lang}" + _render_pattern(config["url_pattern"], {**row, "language": alt_lang}) + alt_url = f"/{alt_lang}" + url_tmpl.render(**{**safe_ctx, "language": alt_lang}) hreflang_links.append( f'' ) # x-default points to English (or first language) default_lang = "en" if "en" in config["languages"] else config["languages"][0] - default_url = f"/{default_lang}" + _render_pattern(config["url_pattern"], {**row, "language": default_lang}) + default_url = f"/{default_lang}" + url_tmpl.render(**{**safe_ctx, "language": default_lang}) hreflang_links.append( f'' ) @@ -451,7 +496,13 @@ async def generate_articles( ) generated += 1 - if generated % 25 == 0: + + # Commit every _BATCH_SIZE articles so the admin UI shows progress + # earlier rather than waiting for the full run to complete. 
+ if generated % _BATCH_SIZE == 0: + await db.commit() + logger.info("%s: committed batch — %d articles", slug, generated) + elif generated % 25 == 0: logger.info("%s: %d articles written…", slug, generated) # Stagger dates @@ -460,7 +511,10 @@ async def generate_articles( published_today = 0 publish_date += timedelta(days=1) - logger.info("%s: done — %d total", slug, generated) + logger.info( + "%s: done — %d total | calc=%.1fs render=%.1fs bake=%.1fs", + slug, generated, t_calc, t_render, t_bake, + ) return generated diff --git a/web/src/padelnomics/static/css/input.css b/web/src/padelnomics/static/css/input.css index 539d63e..1acff6b 100644 --- a/web/src/padelnomics/static/css/input.css +++ b/web/src/padelnomics/static/css/input.css @@ -568,4 +568,19 @@ .article-body details > div { @apply px-4 pb-4 text-slate-dark; } + + /* Article generation spinner banner */ + .generating-banner { + @apply flex items-center gap-3 rounded-xl border border-light-gray bg-white text-sm text-slate-dark mb-4; + padding: 0.75rem 1rem; + } + .spinner-icon { + flex-shrink: 0; + animation: spin-icon 0.9s linear infinite; + } +} + +@keyframes spin-icon { + from { transform: rotate(0deg); } + to { transform: rotate(360deg); } } From 3e2757e0a7665fc71bf811f67b437617f57fe9e5 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 16:48:52 +0100 Subject: [PATCH 36/98] feat(ui): dual market score methodology page and translations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Task 7: update market_score.html + en/de translation keys for the Marktreife-Score / Marktpotenzial-Score dual score system. 
Changes: - market_score.html: add Two Scores intro section (blue gradient card), Marktpotenzial-Score component cards (5 components), score bands for opportunity score, extend FAQ loop from 5 → 7 entries, add q6/q7 to JSON-LD FAQPage structured data - en.json: rename existing headings to Marktreife-Score prefix; add 30 new mscore_dual_* / mscore_reife_* / mscore_potenzial_* / mscore_pot_* keys for dual score UI and FAQ q6/q7 - de.json: same 30 new keys in native German (linguistic mediation, not word-for-word translation); update renamed heading keys Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/locales/de.json | 39 +++++++- web/src/padelnomics/locales/en.json | 39 +++++++- .../public/templates/market_score.html | 96 ++++++++++++++++++- 3 files changed, 163 insertions(+), 11 deletions(-) diff --git a/web/src/padelnomics/locales/de.json b/web/src/padelnomics/locales/de.json index 25c5c84..fffb6cd 100644 --- a/web/src/padelnomics/locales/de.json +++ b/web/src/padelnomics/locales/de.json @@ -1628,8 +1628,16 @@ "mscore_og_desc": "Ein datengestützter Komposit-Score (0–100), der die Attraktivität einer Stadt für Padelanlagen-Investitionen misst. Was steckt dahinter — und was bedeutet er für Deine Planung?", "mscore_h1": "Der padelnomics Market Score", "mscore_subtitle": "Ein datengestütztes Maß für die Attraktivität einer Stadt als Padel-Investitionsstandort.", - "mscore_what_h2": "Was der Score misst", - "mscore_what_intro": "Der Market Score ist ein Komposit-Index von 0 bis 100, der das Potenzial einer Stadt als Standort für Padelanlagen-Investitionen bewertet. Vier Datenkategorien fließen in eine einzige Kennzahl ein — damit Du schnell einschätzen kannst, welche Märkte sich genauer anzuschauen lohnen.", + "mscore_dual_h2": "Zwei Scores, zwei Fragen", + "mscore_dual_intro": "Padelnomics veröffentlicht zwei eigenständige Scores für jeden Markt. 
Sie beantworten unterschiedliche Fragen und basieren auf unterschiedlichen Methoden — beide zu kennen ist entscheidend für eine fundierte Investitionsentscheidung.", + "mscore_reife_chip": "Marktreife-Score", + "mscore_reife_question": "Wie etabliert ist dieser Padel-Markt?", + "mscore_reife_desc": "Berechnet für Städte mit mindestens einer Padelanlage. Kombiniert Bevölkerungsgröße, Wirtschaftskraft, Nachfragenachweise aus Buchungsplattformen und Datenvollständigkeit.", + "mscore_potenzial_chip": "Marktpotenzial-Score", + "mscore_potenzial_question": "Wo sollte ich eine Padelanlage bauen?", + "mscore_potenzial_desc": "Berechnet für alle Standorte weltweit, auch dort, wo es noch keine Anlagen gibt. Belohnt Angebotslücken, unterversorgte Einzugsgebiete und Schlägersportkultur — die relevanten Signale für Greenfield-Investitionen.", + "mscore_what_h2": "Marktreife-Score: Was er misst", + "mscore_what_intro": "Der Marktreife-Score ist ein Komposit-Index von 0 bis 100, der bewertet, wie etabliert und attraktiv ein bestehender Padel-Markt ist. Er gilt ausschließlich für Städte mit mindestens einer Padelanlage — vier Datenkategorien fließen in eine einzige Kennzahl ein, damit Du schnell einschätzen kannst, welche Märkte sich genauer anzuschauen lohnen.", "mscore_cat_demo_h3": "Demografie", "mscore_cat_demo_p": "Bevölkerungsgröße als Indikator für den adressierbaren Markt. Größere Städte tragen in der Regel mehr Anlagen und höhere Auslastung.", "mscore_cat_econ_h3": "Wirtschaftskraft", @@ -1638,7 +1646,7 @@ "mscore_cat_demand_p": "Signale aus dem laufenden Betrieb bestehender Anlagen — Auslastungsraten, Buchungsdaten, Anzahl aktiver Standorte. Wo sich reale Nachfrage bereits messen lässt, ist das der stärkste Indikator.", "mscore_cat_data_h3": "Datenqualität", "mscore_cat_data_p": "Wie umfassend die Datenlage für eine Stadt ist. 
Ein Score auf Basis unvollständiger Daten ist weniger belastbar — wir machen das transparent, damit Du weißt, wo eigene Recherche sinnvoll ist.", - "mscore_read_h2": "Wie Du den Score liest", + "mscore_read_h2": "Marktreife-Score: Wie Du ihn liest", "mscore_band_high_label": "70–100: Starker Markt", "mscore_band_high_p": "Große Bevölkerung, hohe Wirtschaftskraft und nachgewiesene Nachfrage durch bestehende Anlagen. Diese Städte haben validierte Padel-Märkte mit belastbaren Benchmarks für die Finanzplanung.", "mscore_band_mid_label": "45–69: Solides Mittelfeld", @@ -1664,5 +1672,28 @@ "mscore_faq_q4": "Kann ich Scores länderübergreifend vergleichen?", "mscore_faq_a4": "Ja. Die Methodik ist für alle Märkte einheitlich, sodass ein Score von 72 in Deutschland direkt vergleichbar ist mit einem 72 in Spanien oder Großbritannien.", "mscore_faq_q5": "Garantiert ein hoher Score eine gute Investition?", - "mscore_faq_a5": "Nein. Der Score misst die Marktattraktivität auf Makroebene. Deine konkrete Investition hängt von Anlagentyp, Baukosten, Mietkonditionen und Dutzenden weiterer Faktoren ab. Im Finanzplaner kannst Du Dein Szenario mit echten Zahlen durchrechnen." + "mscore_faq_a5": "Nein. Der Score misst die Marktattraktivität auf Makroebene. Deine konkrete Investition hängt von Anlagentyp, Baukosten, Mietkonditionen und Dutzenden weiterer Faktoren ab. Im Finanzplaner kannst Du Dein Szenario mit echten Zahlen durchrechnen.", + "mscore_pot_what_h2": "Marktpotenzial-Score: Was er misst", + "mscore_pot_what_intro": "Der Marktpotenzial-Score bewertet Investitionschancen an Standorten mit wenig oder gar keiner bestehenden Padel-Infrastruktur. Er erfasst alle Standorte weltweit — auch solche ohne eine einzige Anlage. 
Konzipiert für Greenfield-Investoren auf der Suche nach unbesetzten Märkten, nicht für den Vergleich bestehender Venues.", + "mscore_pot_cat_market_h3": "Adressierbarer Markt", + "mscore_pot_cat_market_p": "Logarithmisch skalierte Bevölkerungsgröße, begrenzt auf 500.000 Einwohner. Das Potenzial ist bei mittelgroßen Städten am höchsten — groß genug für eine rentable Anlage, aber noch nicht von Großstadt-Betreibern erschlossen.", + "mscore_pot_cat_econ_h3": "Wirtschaftskraft", + "mscore_pot_cat_econ_p": "Kaufkraft auf Länderebene (KKS), normiert auf internationale Benchmarks. Maßgeblich für die Zahlungsbereitschaft bei Platzmieten im Zielbereich von 20–35 €/Std.", + "mscore_pot_cat_gap_h3": "Angebotslücke", + "mscore_pot_cat_gap_p": "Invertierte Anlagendichte: null Plätze pro 100.000 Einwohner ergibt die volle Punktzahl. Das ist das zentrale Signal, das den Marktpotenzial-Score vom Marktreife-Score unterscheidet — der weiße Fleck auf der Karte ist die Chance.", + "mscore_pot_cat_catchment_h3": "Einzugsgebiet-Lücke", + "mscore_pot_cat_catchment_p": "Entfernung zur nächsten bestehenden Padelanlage. Standorte mehr als 30 km vom nächsten Platz entfernt erhalten die volle Punktzahl — echte Versorgungslücken ohne nahe gelegene Alternative.", + "mscore_pot_cat_tennis_h3": "Schlägersportkultur", + "mscore_pot_cat_tennis_p": "Tennisplätze im Umkreis von 25 km als Indikator für etablierte Schlägersportnachfrage. Viele neue Padelanlagen entstehen innerhalb bestehender Tennisvereine oder direkt daneben — ein verlässlicher Frühindikator.", + "mscore_pot_read_h2": "Marktpotenzial-Score: So liest Du ihn", + "mscore_pot_band_high_label": "70–100: Hohes Potenzial", + "mscore_pot_band_high_p": "Unterversorgtes Gebiet mit starker Demografie und wirtschaftlicher Substanz. Geringes Angebot, deutliche Einzugsgebiet-Lücke und nachgewiesene Schlägersportkultur. 
Prioritätsmarkt für Greenfield-Investitionen.", + "mscore_pot_band_mid_label": "45–69: Moderates Potenzial", + "mscore_pot_band_mid_p": "Teilweise bereits vorhandenes Angebot, demografische Einschränkungen oder gemischte Signale. Lohnt sich für eine genauere Prüfung — lokale Faktoren können das Bild erheblich verändern.", + "mscore_pot_band_low_label": "Unter 45: Geringeres Potenzial", + "mscore_pot_band_low_p": "Markt ist bereits gut versorgt, Bevölkerungszahl gering oder wirtschaftliche Kaufkraft begrenzt. Fokussiere Deine Ressourcen auf höher bewertete Standorte, es sei denn, Du hast einen konkreten lokalen Vorteil.", + "mscore_faq_q6": "Was ist der Unterschied zwischen Marktreife-Score und Marktpotenzial-Score?", + "mscore_faq_a6": "Der Marktreife-Score misst, wie etabliert und ausgereift ein bestehender Padel-Markt ist — er gilt nur für Städte mit mindestens einer Anlage. Der Marktpotenzial-Score bewertet Greenfield-Investitionschancen und erfasst alle Standorte weltweit. Er belohnt Angebotslücken und unterversorgte Einzugsgebiete, wo es noch gar keine Anlagen gibt.", + "mscore_faq_q7": "Warum hat mein Ort einen hohen Marktpotenzial-Score, aber keine Padelanlagen?", + "mscore_faq_a7": "Genau darum geht es. Ein hoher Marktpotenzial-Score signalisiert einen unterversorgten Standort: starke Demografie, wirtschaftliche Kaufkraft, kein bestehendes Angebot und Distanz zur nächsten Anlage. Das sind genau die Signale, die auf eine Greenfield-Chance hinweisen — kein Zeichen für einen schwachen Markt." } diff --git a/web/src/padelnomics/locales/en.json b/web/src/padelnomics/locales/en.json index 9b5a0ab..4af2d06 100644 --- a/web/src/padelnomics/locales/en.json +++ b/web/src/padelnomics/locales/en.json @@ -1649,8 +1649,16 @@ "mscore_og_desc": "A data-driven composite score (0\u2013100) that measures how attractive a city is for padel court investment. 
See what goes into it and what it means for your planning.", "mscore_h1": "The padelnomics Market Score", "mscore_subtitle": "A data-driven measure of how attractive a city is for padel investment.", - "mscore_what_h2": "What It Measures", - "mscore_what_intro": "The Market Score is a composite index from 0 to 100 that evaluates a city\u2019s potential as a location for padel court investment. It combines four categories of data into a single number designed to help you prioritize markets worth investigating further.", + "mscore_dual_h2": "Two Scores, Two Questions", + "mscore_dual_intro": "Padelnomics publishes two distinct scores for every market. They answer different questions and are calculated using different methodologies \u2014 knowing both is essential for a well-informed investment decision.", + "mscore_reife_chip": "Marktreife-Score", + "mscore_reife_question": "How established is this padel market?", + "mscore_reife_desc": "Calculated for cities with at least one padel venue. Combines population size, economic power, demand evidence from booking platforms, and data completeness.", + "mscore_potenzial_chip": "Marktpotenzial-Score", + "mscore_potenzial_question": "Where should I build a padel court?", + "mscore_potenzial_desc": "Calculated for all locations globally, including those with zero courts. Rewards supply gaps, underserved catchment areas, and racket sport culture \u2014 the signals that matter for greenfield investors.", + "mscore_what_h2": "Marktreife-Score: What It Measures", + "mscore_what_intro": "The Marktreife-Score is a composite index from 0 to 100 that evaluates how established and attractive an existing padel market is. It only applies to cities with at least one padel venue, combining four categories of data into a single number designed to help you prioritize markets worth investigating further.", "mscore_cat_demo_h3": "Demographics", "mscore_cat_demo_p": "Population size as a proxy for the addressable market. 
Larger cities generally support more venues and higher utilization.", "mscore_cat_econ_h3": "Economic Strength", @@ -1659,7 +1667,7 @@ "mscore_cat_demand_p": "Signals from existing venue activity \u2014 occupancy rates, booking data, and the number of operating venues. Where real demand is already measurable, it\u2019s the strongest indicator.", "mscore_cat_data_h3": "Data Completeness", "mscore_cat_data_p": "How much data we have for that city. A score influenced by incomplete data is less reliable \u2014 we surface this explicitly so you know when to dig deeper on your own.", - "mscore_read_h2": "How To Read the Score", + "mscore_read_h2": "Marktreife-Score: How To Read", "mscore_band_high_label": "70\u2013100: Strong market", "mscore_band_high_p": "Large population, economic power, and proven demand from existing venues. These cities have validated padel markets with reliable benchmarks for financial planning.", "mscore_band_mid_label": "45\u201369: Solid mid-tier", @@ -1685,5 +1693,28 @@ "mscore_faq_q4": "Can I compare scores across countries?", "mscore_faq_a4": "Yes. The methodology is consistent across all markets we track, so a score of 72 in Germany is directly comparable to a 72 in Spain or the UK.", "mscore_faq_q5": "Does a high score guarantee a good investment?", - "mscore_faq_a5": "No. The score measures market attractiveness at a macro level. Your specific investment depends on venue type, build costs, lease terms, and dozens of other factors. Use the financial planner to model your scenario with real numbers." + "mscore_faq_a5": "No. The score measures market attractiveness at a macro level. Your specific investment depends on venue type, build costs, lease terms, and dozens of other factors. 
Use the financial planner to model your scenario with real numbers.", + "mscore_pot_what_h2": "Marktpotenzial-Score: What It Measures", + "mscore_pot_what_intro": "The Marktpotenzial-Score evaluates investment opportunity for locations with little or no existing padel infrastructure. It covers all locations globally, including those with zero courts \u2014 designed for greenfield investors scouting white-space markets, not for benchmarking established venues.", + "mscore_pot_cat_market_h3": "Addressable Market", + "mscore_pot_cat_market_p": "Log-scaled population, capped at 500K. Opportunity peaks in mid-size cities that can support a court but are not yet served by large-city operators.", + "mscore_pot_cat_econ_h3": "Economic Power", + "mscore_pot_cat_econ_p": "Country-level purchasing power (PPS), normalised to international benchmarks. Drives willingness to pay for court fees in the \u20ac20\u201335/hr target range.", + "mscore_pot_cat_gap_h3": "Supply Gap", + "mscore_pot_cat_gap_p": "Inverted venue density: zero courts per 100K residents earns full marks. This is the key signal separating the Marktpotenzial-Score from the Marktreife-Score \u2014 white space is the opportunity.", + "mscore_pot_cat_catchment_h3": "Catchment Gap", + "mscore_pot_cat_catchment_p": "Distance to the nearest existing padel court. Locations more than 30km from any court score maximum points \u2014 they represent genuinely underserved catchment areas with no nearby alternative.", + "mscore_pot_cat_tennis_h3": "Racket Sport Culture", + "mscore_pot_cat_tennis_p": "Tennis courts within 25km as a proxy for established racket sport demand. Many new padel facilities open inside or next to existing tennis clubs, making this a reliable lead indicator.", + "mscore_pot_read_h2": "Marktpotenzial-Score: How To Read", + "mscore_pot_band_high_label": "70\u2013100: High potential", + "mscore_pot_band_high_p": "Underserved area with strong demographics and economic fundamentals. 
Low supply, significant catchment gap, and proven racket sport culture. Priority market for greenfield investment.", + "mscore_pot_band_mid_label": "45\u201369: Moderate potential", + "mscore_pot_band_mid_p": "Some supply already exists, demographic limitations, or mixed signals. Worth investigating further \u2014 local factors may significantly change the picture.", + "mscore_pot_band_low_label": "Below 45: Lower potential", + "mscore_pot_band_low_p": "Market is already well-served, population is small, or economic purchasing power is limited. Focus resources on higher-scoring locations unless you have a specific local advantage.", + "mscore_faq_q6": "What is the difference between the Marktreife-Score and the Marktpotenzial-Score?", + "mscore_faq_a6": "The Marktreife-Score measures how established and mature an existing padel market is \u2014 it only applies to cities with at least one venue. The Marktpotenzial-Score measures greenfield investment opportunity and covers all locations globally, rewarding supply gaps and underserved catchment areas where no courts exist yet.", + "mscore_faq_q7": "Why does my town have a high Marktpotenzial-Score but no padel courts?", + "mscore_faq_a7": "That is exactly the point. A high Marktpotenzial-Score indicates an underserved location: strong demographics, economic purchasing power, no existing supply, and distance from the nearest court. These are precisely the signals that suggest a greenfield opportunity \u2014 not a sign of a weak market." 
} diff --git a/web/src/padelnomics/public/templates/market_score.html b/web/src/padelnomics/public/templates/market_score.html index df98930..34eb5e7 100644 --- a/web/src/padelnomics/public/templates/market_score.html +++ b/web/src/padelnomics/public/templates/market_score.html @@ -56,6 +56,16 @@ "@type": "Question", "name": "{{ t.mscore_faq_q5 }}", "acceptedAnswer": {"@type": "Answer", "text": "{{ t.mscore_faq_a5 }}"} + }, + { + "@type": "Question", + "name": "{{ t.mscore_faq_q6 }}", + "acceptedAnswer": {"@type": "Answer", "text": "{{ t.mscore_faq_a6 }}"} + }, + { + "@type": "Question", + "name": "{{ t.mscore_faq_q7 }}", + "acceptedAnswer": {"@type": "Answer", "text": "{{ t.mscore_faq_a7 }}"} } ] } @@ -77,7 +87,25 @@

    {{ t.mscore_subtitle }}

    - + +
    +

    {{ t.mscore_dual_h2 }}

    +

    {{ t.mscore_dual_intro }}

    +
    +
    +
    {{ t.mscore_reife_chip }}
    +
    {{ t.mscore_reife_question }}
    +

    {{ t.mscore_reife_desc }}

    +
    +
    +
    {{ t.mscore_potenzial_chip }}
    +
    {{ t.mscore_potenzial_question }}
    +

    {{ t.mscore_potenzial_desc }}

    +
    +
    +
    + +

    {{ t.mscore_what_h2 }}

    {{ t.mscore_what_intro }}

    @@ -106,7 +134,7 @@
    - +

    {{ t.mscore_read_h2 }}

    @@ -135,6 +163,68 @@

    {{ t.mscore_read_note }}

    + +
    +

    {{ t.mscore_pot_what_h2 }}

    +

    {{ t.mscore_pot_what_intro }}

    + +
    +
    +
    📊
    +

    {{ t.mscore_pot_cat_market_h3 }}

    +

    {{ t.mscore_pot_cat_market_p }}

    +
    +
    +
    💶
    +

    {{ t.mscore_pot_cat_econ_h3 }}

    +

    {{ t.mscore_pot_cat_econ_p }}

    +
    +
    +
    🎯
    +

    {{ t.mscore_pot_cat_gap_h3 }}

    +

    {{ t.mscore_pot_cat_gap_p }}

    +
    +
    +
    📍
    +

    {{ t.mscore_pot_cat_catchment_h3 }}

    +

    {{ t.mscore_pot_cat_catchment_p }}

    +
    +
    +
    🎾
    +

    {{ t.mscore_pot_cat_tennis_h3 }}

    +

    {{ t.mscore_pot_cat_tennis_p }}

    +
    +
    +
    + + +
    +

    {{ t.mscore_pot_read_h2 }}

    +
    +
    +
    + + {{ t.mscore_pot_band_high_label }} +
    +

    {{ t.mscore_pot_band_high_p }}

    +
    +
    +
    + + {{ t.mscore_pot_band_mid_label }} +
    +

    {{ t.mscore_pot_band_mid_p }}

    +
    +
    +
    + + {{ t.mscore_pot_band_low_label }} +
    +

    {{ t.mscore_pot_band_low_p }}

    +
    +
    +
    +

    {{ t.mscore_sources_h2 }}

    @@ -161,7 +251,7 @@

    {{ t.mscore_faq_h2 }}

    - {% for i in range(1, 6) %} + {% for i in range(1, 8) %}
    {{ t['mscore_faq_q' ~ i] }}

    {{ t['mscore_faq_a' ~ i] }}

    From 1510cad697aa88757990ed3c2fabb175db38b133 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 16:49:25 +0100 Subject: [PATCH 37/98] fix(admin): use form-input class on scenario filter fields MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The search/country/venue-type inputs used class="input" which has no definition in input.css — falls back to the browser's default focus outline. Replaced with form-input to get the consistent focus ring (ring-2 / ring-electric / border-electric) used everywhere else in admin. Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/admin/templates/admin/scenarios.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/web/src/padelnomics/admin/templates/admin/scenarios.html b/web/src/padelnomics/admin/templates/admin/scenarios.html index 6d993fb..3cfa407 100644 --- a/web/src/padelnomics/admin/templates/admin/scenarios.html +++ b/web/src/padelnomics/admin/templates/admin/scenarios.html @@ -22,11 +22,11 @@ + class="form-input w-full">
    - {% for c in countries %} @@ -35,7 +35,7 @@
    - {% for v in venue_types %} From 46e41db0f83f7ef7751f2e0e6ece4046608e4009 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 16:54:30 +0100 Subject: [PATCH 38/98] =?UTF-8?q?fix(i18n):=20polish=20German=20translatio?= =?UTF-8?q?ns=20=E2=80=94=20remove=20English=20calques?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove "Greenfield" (5×), fix "Venues" → "Anlagen", replace "belohnt" (rewards) with idiomatic verb, fix "Einzugsgebiet-Lücke" → "Versorgungslücke", "gemischte Signale" → "unklare Signallage", "Fokussiere" → "Konzentriere", "Distanz" → "Entfernung", "Nachfragenachweise" → "Nachfragesignale". Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/locales/de.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/web/src/padelnomics/locales/de.json b/web/src/padelnomics/locales/de.json index fffb6cd..2b54149 100644 --- a/web/src/padelnomics/locales/de.json +++ b/web/src/padelnomics/locales/de.json @@ -1632,10 +1632,10 @@ "mscore_dual_intro": "Padelnomics veröffentlicht zwei eigenständige Scores für jeden Markt. Sie beantworten unterschiedliche Fragen und basieren auf unterschiedlichen Methoden — beide zu kennen ist entscheidend für eine fundierte Investitionsentscheidung.", "mscore_reife_chip": "Marktreife-Score", "mscore_reife_question": "Wie etabliert ist dieser Padel-Markt?", - "mscore_reife_desc": "Berechnet für Städte mit mindestens einer Padelanlage. Kombiniert Bevölkerungsgröße, Wirtschaftskraft, Nachfragenachweise aus Buchungsplattformen und Datenvollständigkeit.", + "mscore_reife_desc": "Berechnet für Städte mit mindestens einer Padelanlage. 
Kombiniert Bevölkerungsgröße, Wirtschaftskraft, Nachfragesignale aus Buchungsplattformen und Datenvollständigkeit.", "mscore_potenzial_chip": "Marktpotenzial-Score", "mscore_potenzial_question": "Wo sollte ich eine Padelanlage bauen?", - "mscore_potenzial_desc": "Berechnet für alle Standorte weltweit, auch dort, wo es noch keine Anlagen gibt. Belohnt Angebotslücken, unterversorgte Einzugsgebiete und Schlägersportkultur — die relevanten Signale für Greenfield-Investitionen.", + "mscore_potenzial_desc": "Berechnet für alle Standorte weltweit, auch dort, wo es noch keine Anlagen gibt. Angebotslücken, unterversorgte Einzugsgebiete und Schlägersportkultur schlagen positiv zu Buche — die entscheidenden Signale für Erstinvestitionen.", "mscore_what_h2": "Marktreife-Score: Was er misst", "mscore_what_intro": "Der Marktreife-Score ist ein Komposit-Index von 0 bis 100, der bewertet, wie etabliert und attraktiv ein bestehender Padel-Markt ist. Er gilt ausschließlich für Städte mit mindestens einer Padelanlage — vier Datenkategorien fließen in eine einzige Kennzahl ein, damit Du schnell einschätzen kannst, welche Märkte sich genauer anzuschauen lohnen.", "mscore_cat_demo_h3": "Demografie", @@ -1674,26 +1674,26 @@ "mscore_faq_q5": "Garantiert ein hoher Score eine gute Investition?", "mscore_faq_a5": "Nein. Der Score misst die Marktattraktivität auf Makroebene. Deine konkrete Investition hängt von Anlagentyp, Baukosten, Mietkonditionen und Dutzenden weiterer Faktoren ab. Im Finanzplaner kannst Du Dein Szenario mit echten Zahlen durchrechnen.", "mscore_pot_what_h2": "Marktpotenzial-Score: Was er misst", - "mscore_pot_what_intro": "Der Marktpotenzial-Score bewertet Investitionschancen an Standorten mit wenig oder gar keiner bestehenden Padel-Infrastruktur. Er erfasst alle Standorte weltweit — auch solche ohne eine einzige Anlage. 
Konzipiert für Greenfield-Investoren auf der Suche nach unbesetzten Märkten, nicht für den Vergleich bestehender Venues.", + "mscore_pot_what_intro": "Der Marktpotenzial-Score bewertet Investitionschancen an Standorten mit wenig oder gar keiner bestehenden Padel-Infrastruktur. Er erfasst alle Standorte weltweit — auch solche ohne eine einzige Anlage. Gedacht für Erstinvestoren auf der Suche nach unbestellten Märkten, nicht für den Vergleich bereits erschlossener Standorte.", "mscore_pot_cat_market_h3": "Adressierbarer Markt", "mscore_pot_cat_market_p": "Logarithmisch skalierte Bevölkerungsgröße, begrenzt auf 500.000 Einwohner. Das Potenzial ist bei mittelgroßen Städten am höchsten — groß genug für eine rentable Anlage, aber noch nicht von Großstadt-Betreibern erschlossen.", "mscore_pot_cat_econ_h3": "Wirtschaftskraft", "mscore_pot_cat_econ_p": "Kaufkraft auf Länderebene (KKS), normiert auf internationale Benchmarks. Maßgeblich für die Zahlungsbereitschaft bei Platzmieten im Zielbereich von 20–35 €/Std.", "mscore_pot_cat_gap_h3": "Angebotslücke", "mscore_pot_cat_gap_p": "Invertierte Anlagendichte: null Plätze pro 100.000 Einwohner ergibt die volle Punktzahl. Das ist das zentrale Signal, das den Marktpotenzial-Score vom Marktreife-Score unterscheidet — der weiße Fleck auf der Karte ist die Chance.", - "mscore_pot_cat_catchment_h3": "Einzugsgebiet-Lücke", + "mscore_pot_cat_catchment_h3": "Versorgungslücke", "mscore_pot_cat_catchment_p": "Entfernung zur nächsten bestehenden Padelanlage. Standorte mehr als 30 km vom nächsten Platz entfernt erhalten die volle Punktzahl — echte Versorgungslücken ohne nahe gelegene Alternative.", "mscore_pot_cat_tennis_h3": "Schlägersportkultur", "mscore_pot_cat_tennis_p": "Tennisplätze im Umkreis von 25 km als Indikator für etablierte Schlägersportnachfrage. 
Viele neue Padelanlagen entstehen innerhalb bestehender Tennisvereine oder direkt daneben — ein verlässlicher Frühindikator.", "mscore_pot_read_h2": "Marktpotenzial-Score: So liest Du ihn", "mscore_pot_band_high_label": "70–100: Hohes Potenzial", - "mscore_pot_band_high_p": "Unterversorgtes Gebiet mit starker Demografie und wirtschaftlicher Substanz. Geringes Angebot, deutliche Einzugsgebiet-Lücke und nachgewiesene Schlägersportkultur. Prioritätsmarkt für Greenfield-Investitionen.", + "mscore_pot_band_high_p": "Unterversorgtes Gebiet mit solider Bevölkerungsstruktur und Kaufkraft. Geringes Angebot, weit entfernt von der nächsten Anlage, nachgewiesene Schlägersportkultur. Hohe Priorität für Erstinvestoren.", "mscore_pot_band_mid_label": "45–69: Moderates Potenzial", - "mscore_pot_band_mid_p": "Teilweise bereits vorhandenes Angebot, demografische Einschränkungen oder gemischte Signale. Lohnt sich für eine genauere Prüfung — lokale Faktoren können das Bild erheblich verändern.", + "mscore_pot_band_mid_p": "Teilweise bereits vorhandenes Angebot, demografische Einschränkungen oder unklare Signallage. Lohnt sich für eine genauere Prüfung — lokale Faktoren können das Bild erheblich verändern.", "mscore_pot_band_low_label": "Unter 45: Geringeres Potenzial", - "mscore_pot_band_low_p": "Markt ist bereits gut versorgt, Bevölkerungszahl gering oder wirtschaftliche Kaufkraft begrenzt. Fokussiere Deine Ressourcen auf höher bewertete Standorte, es sei denn, Du hast einen konkreten lokalen Vorteil.", + "mscore_pot_band_low_p": "Markt bereits gut versorgt, Bevölkerungszahl gering oder Kaufkraft begrenzt. Konzentriere Dich auf höher bewertete Standorte — es sei denn, Du hast einen konkreten lokalen Vorteil.", "mscore_faq_q6": "Was ist der Unterschied zwischen Marktreife-Score und Marktpotenzial-Score?", - "mscore_faq_a6": "Der Marktreife-Score misst, wie etabliert und ausgereift ein bestehender Padel-Markt ist — er gilt nur für Städte mit mindestens einer Anlage. 
Der Marktpotenzial-Score bewertet Greenfield-Investitionschancen und erfasst alle Standorte weltweit. Er belohnt Angebotslücken und unterversorgte Einzugsgebiete, wo es noch gar keine Anlagen gibt.", + "mscore_faq_a6": "Der Marktreife-Score misst, wie etabliert und ausgereift ein bestehender Padel-Markt ist — er gilt nur für Städte mit mindestens einer Anlage. Der Marktpotenzial-Score bewertet Investitionschancen in noch unbestellten Märkten und erfasst alle Standorte weltweit. Angebotslücken und unterversorgte Einzugsgebiete fließen positiv ein — auch dort, wo es noch gar keine Anlagen gibt.", "mscore_faq_q7": "Warum hat mein Ort einen hohen Marktpotenzial-Score, aber keine Padelanlagen?", - "mscore_faq_a7": "Genau darum geht es. Ein hoher Marktpotenzial-Score signalisiert einen unterversorgten Standort: starke Demografie, wirtschaftliche Kaufkraft, kein bestehendes Angebot und Distanz zur nächsten Anlage. Das sind genau die Signale, die auf eine Greenfield-Chance hinweisen — kein Zeichen für einen schwachen Markt." + "mscore_faq_a7": "Genau darum geht es. Ein hoher Marktpotenzial-Score signalisiert einen unterversorgten Standort: solide Bevölkerungsbasis, wirtschaftliche Kaufkraft, kein bestehendes Angebot und weite Entfernung zur nächsten Anlage. Das sind genau die Signale, die auf eine Pionierchance hinweisen — kein Zeichen für einen schwachen Markt." 
} From 4731a91d0233dfff76dea53c598b587b931b835a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 16:58:28 +0100 Subject: [PATCH 39/98] feat(admin): live search with loading indicator on all admin filter forms MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Scenarios: - Convert from plain GET form to HTMX live search (scenario_results route already existed, just needed wiring) - Replace Filter submit button with JS-reset Clear button - Update is_generating banner to match article_results.html style Users: - Add /admin/users/results HTMX partial route - Extract user table into partials/user_results.html with HTMX pagination - Convert search form to live-search (input delay:300ms) Loading indicator (all 6 forms): - Add hx-indicator pointing to a small arc spinner SVG - Spinner fades in while the debounce + request is in flight - CSS .search-spinner class in input.css (opacity 0 → 1 on htmx-request, spin-icon animation only runs when visible) Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/admin/routes.py | 17 ++++ .../admin/templates/admin/articles.html | 8 +- .../admin/templates/admin/emails.html | 8 +- .../admin/templates/admin/leads.html | 8 +- .../admin/partials/scenario_results.html | 12 ++- .../admin/partials/user_results.html | 60 ++++++++++++++ .../admin/templates/admin/scenarios.html | 22 +++-- .../admin/templates/admin/suppliers.html | 8 +- .../admin/templates/admin/users.html | 81 +++++-------------- web/src/padelnomics/static/css/input.css | 11 +++ 10 files changed, 159 insertions(+), 76 deletions(-) create mode 100644 web/src/padelnomics/admin/templates/admin/partials/user_results.html diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 436ed8e..a819294 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -263,6 +263,23 @@ async def users(): ) +@bp.route("/users/results") +@role_required("admin") +async def 
user_results(): + """HTMX partial for user list (live search).""" + search = request.args.get("search", "").strip() + page = int(request.args.get("page", 1)) + per_page = 50 + offset = (page - 1) * per_page + user_list = await get_users(limit=per_page, offset=offset, search=search or None) + return await render_template( + "admin/partials/user_results.html", + users=user_list, + search=search, + page=page, + ) + + @bp.route("/users/") @role_required("admin") async def user_detail(user_id: int): diff --git a/web/src/padelnomics/admin/templates/admin/articles.html b/web/src/padelnomics/admin/templates/admin/articles.html index 19d9d3f..8bfbe89 100644 --- a/web/src/padelnomics/admin/templates/admin/articles.html +++ b/web/src/padelnomics/admin/templates/admin/articles.html @@ -28,7 +28,8 @@
    + hx-trigger="change, input delay:300ms from:find input" + hx-indicator="#articles-loading">
    @@ -65,6 +66,11 @@
    + +
    diff --git a/web/src/padelnomics/admin/templates/admin/emails.html b/web/src/padelnomics/admin/templates/admin/emails.html index 8a40294..1a3ace9 100644 --- a/web/src/padelnomics/admin/templates/admin/emails.html +++ b/web/src/padelnomics/admin/templates/admin/emails.html @@ -24,7 +24,8 @@
    + hx-trigger="change, input delay:300ms from:find input" + hx-indicator="#emails-loading">
    @@ -51,6 +52,11 @@
    + +
    diff --git a/web/src/padelnomics/admin/templates/admin/leads.html b/web/src/padelnomics/admin/templates/admin/leads.html index 2c958ab..8f2ae57 100644 --- a/web/src/padelnomics/admin/templates/admin/leads.html +++ b/web/src/padelnomics/admin/templates/admin/leads.html @@ -25,7 +25,8 @@
    + hx-trigger="change, input delay:300ms from:find input" + hx-indicator="#leads-loading">
    @@ -57,6 +58,11 @@ {% endfor %}
    + +
    diff --git a/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html b/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html index ac9860d..d742914 100644 --- a/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html +++ b/web/src/padelnomics/admin/templates/admin/partials/scenario_results.html @@ -1,9 +1,15 @@ {% if is_generating %} - + hx-swap="innerHTML"> + + + + + Generating scenarios… + {% endif %} {% if scenarios %} diff --git a/web/src/padelnomics/admin/templates/admin/partials/user_results.html b/web/src/padelnomics/admin/templates/admin/partials/user_results.html new file mode 100644 index 0000000..0f895ab --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/partials/user_results.html @@ -0,0 +1,60 @@ +
    + {% if users %} +
    +
    + + + + + + + + + + + + + {% for u in users %} + + + + + + + + + + {% endfor %} + +
    IDEmailNamePlanJoinedLast Login
    {{ u.id }}{{ u.email }}{{ u.name or '-' }} + {% if u.plan %} + {{ u.plan }} + {% else %} + free + {% endif %} + {{ u.created_at[:10] }}{{ u.last_login_at[:10] if u.last_login_at else 'Never' }} +
    + + +
    +
    + +
    + {% if page > 1 %} + + {% endif %} + Page {{ page }} + {% if users | length == 50 %} + + {% endif %} +
    + {% else %} +

    No users found.

    + {% endif %} + diff --git a/web/src/padelnomics/admin/templates/admin/scenarios.html b/web/src/padelnomics/admin/templates/admin/scenarios.html index 3cfa407..a117a1e 100644 --- a/web/src/padelnomics/admin/templates/admin/scenarios.html +++ b/web/src/padelnomics/admin/templates/admin/scenarios.html @@ -17,13 +17,19 @@ -
    + +
    +
    +
    -
    - - {% if current_search or current_country or current_venue_type %} - Clear - {% endif %} + +
    + +
    diff --git a/web/src/padelnomics/admin/templates/admin/suppliers.html b/web/src/padelnomics/admin/templates/admin/suppliers.html index 11905bf..2a69bba 100644 --- a/web/src/padelnomics/admin/templates/admin/suppliers.html +++ b/web/src/padelnomics/admin/templates/admin/suppliers.html @@ -24,7 +24,8 @@
    + hx-trigger="change, input delay:300ms from:find input" + hx-indicator="#suppliers-loading">
    @@ -52,6 +53,11 @@ {% endfor %}
    + +
    diff --git a/web/src/padelnomics/admin/templates/admin/users.html b/web/src/padelnomics/admin/templates/admin/users.html index f7d661d..58bfba9 100644 --- a/web/src/padelnomics/admin/templates/admin/users.html +++ b/web/src/padelnomics/admin/templates/admin/users.html @@ -9,69 +9,24 @@ ← Dashboard - -
    -
    - - -
    -
    +
    +
    +
    + +
    + +
    +
    - -
    - {% if users %} -
    - - - - - - - - - - - - - - {% for u in users %} - - - - - - - - - - {% endfor %} - -
    IDEmailNamePlanJoinedLast Login
    {{ u.id }}{{ u.email }}{{ u.name or '-' }} - {% if u.plan %} - {{ u.plan }} - {% else %} - free - {% endif %} - {{ u.created_at[:10] }}{{ u.last_login_at[:10] if u.last_login_at else 'Never' }} -
    - - -
    -
    -
    - - -
    - {% if page > 1 %} - ← Previous - {% endif %} - Page {{ page }} - {% if users | length == 50 %} - Next → - {% endif %} -
    - {% else %} -

    No users found.

    - {% endif %} +
    + {% include "admin/partials/user_results.html" %}
    {% endblock %} diff --git a/web/src/padelnomics/static/css/input.css b/web/src/padelnomics/static/css/input.css index 1acff6b..2fa47d3 100644 --- a/web/src/padelnomics/static/css/input.css +++ b/web/src/padelnomics/static/css/input.css @@ -569,6 +569,17 @@ @apply px-4 pb-4 text-slate-dark; } + /* Inline HTMX loading indicator for search forms */ + .search-spinner { + opacity: 0; + flex-shrink: 0; + align-self: center; + } + .search-spinner.htmx-request { + opacity: 1; + animation: spin-icon 0.9s linear infinite; + } + /* Article generation spinner banner */ .generating-banner { @apply flex items-center gap-3 rounded-xl border border-light-gray bg-white text-sm text-slate-dark mb-4; From 6d52a122e502d40c2a559e98667e2fed619f9f98 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 17:04:57 +0100 Subject: [PATCH 40/98] fix(i18n): apply padelnomics wordmark consistently to score names MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Score names always appear as "padelnomics Marktreife-Score" and "padelnomics Marktpotenzial-Score" in headings, chips, intro paragraphs, and FAQ questions/answers — in both EN and DE locales. Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/locales/de.json | 24 ++--- web/src/padelnomics/locales/en.json | 137 ++++++++++++---------------- 2 files changed, 70 insertions(+), 91 deletions(-) diff --git a/web/src/padelnomics/locales/de.json b/web/src/padelnomics/locales/de.json index 2b54149..8dbdd86 100644 --- a/web/src/padelnomics/locales/de.json +++ b/web/src/padelnomics/locales/de.json @@ -1630,14 +1630,14 @@ "mscore_subtitle": "Ein datengestütztes Maß für die Attraktivität einer Stadt als Padel-Investitionsstandort.", "mscore_dual_h2": "Zwei Scores, zwei Fragen", "mscore_dual_intro": "Padelnomics veröffentlicht zwei eigenständige Scores für jeden Markt. 
Sie beantworten unterschiedliche Fragen und basieren auf unterschiedlichen Methoden — beide zu kennen ist entscheidend für eine fundierte Investitionsentscheidung.", - "mscore_reife_chip": "Marktreife-Score", + "mscore_reife_chip": "padelnomics Marktreife-Score", "mscore_reife_question": "Wie etabliert ist dieser Padel-Markt?", "mscore_reife_desc": "Berechnet für Städte mit mindestens einer Padelanlage. Kombiniert Bevölkerungsgröße, Wirtschaftskraft, Nachfragesignale aus Buchungsplattformen und Datenvollständigkeit.", - "mscore_potenzial_chip": "Marktpotenzial-Score", + "mscore_potenzial_chip": "padelnomics Marktpotenzial-Score", "mscore_potenzial_question": "Wo sollte ich eine Padelanlage bauen?", "mscore_potenzial_desc": "Berechnet für alle Standorte weltweit, auch dort, wo es noch keine Anlagen gibt. Angebotslücken, unterversorgte Einzugsgebiete und Schlägersportkultur schlagen positiv zu Buche — die entscheidenden Signale für Erstinvestitionen.", - "mscore_what_h2": "Marktreife-Score: Was er misst", - "mscore_what_intro": "Der Marktreife-Score ist ein Komposit-Index von 0 bis 100, der bewertet, wie etabliert und attraktiv ein bestehender Padel-Markt ist. Er gilt ausschließlich für Städte mit mindestens einer Padelanlage — vier Datenkategorien fließen in eine einzige Kennzahl ein, damit Du schnell einschätzen kannst, welche Märkte sich genauer anzuschauen lohnen.", + "mscore_what_h2": "padelnomics Marktreife-Score: Was er misst", + "mscore_what_intro": "Der padelnomics Marktreife-Score ist ein Komposit-Index von 0 bis 100, der bewertet, wie etabliert und attraktiv ein bestehender Padel-Markt ist. Er gilt ausschließlich für Städte mit mindestens einer Padelanlage — vier Datenkategorien fließen in eine einzige Kennzahl ein, damit Du schnell einschätzen kannst, welche Märkte sich genauer anzuschauen lohnen.", "mscore_cat_demo_h3": "Demografie", "mscore_cat_demo_p": "Bevölkerungsgröße als Indikator für den adressierbaren Markt. 
Größere Städte tragen in der Regel mehr Anlagen und höhere Auslastung.", "mscore_cat_econ_h3": "Wirtschaftskraft", @@ -1646,7 +1646,7 @@ "mscore_cat_demand_p": "Signale aus dem laufenden Betrieb bestehender Anlagen — Auslastungsraten, Buchungsdaten, Anzahl aktiver Standorte. Wo sich reale Nachfrage bereits messen lässt, ist das der stärkste Indikator.", "mscore_cat_data_h3": "Datenqualität", "mscore_cat_data_p": "Wie umfassend die Datenlage für eine Stadt ist. Ein Score auf Basis unvollständiger Daten ist weniger belastbar — wir machen das transparent, damit Du weißt, wo eigene Recherche sinnvoll ist.", - "mscore_read_h2": "Marktreife-Score: Wie Du ihn liest", + "mscore_read_h2": "padelnomics Marktreife-Score: Wie Du ihn liest", "mscore_band_high_label": "70–100: Starker Markt", "mscore_band_high_p": "Große Bevölkerung, hohe Wirtschaftskraft und nachgewiesene Nachfrage durch bestehende Anlagen. Diese Städte haben validierte Padel-Märkte mit belastbaren Benchmarks für die Finanzplanung.", "mscore_band_mid_label": "45–69: Solides Mittelfeld", @@ -1673,8 +1673,8 @@ "mscore_faq_a4": "Ja. Die Methodik ist für alle Märkte einheitlich, sodass ein Score von 72 in Deutschland direkt vergleichbar ist mit einem 72 in Spanien oder Großbritannien.", "mscore_faq_q5": "Garantiert ein hoher Score eine gute Investition?", "mscore_faq_a5": "Nein. Der Score misst die Marktattraktivität auf Makroebene. Deine konkrete Investition hängt von Anlagentyp, Baukosten, Mietkonditionen und Dutzenden weiterer Faktoren ab. Im Finanzplaner kannst Du Dein Szenario mit echten Zahlen durchrechnen.", - "mscore_pot_what_h2": "Marktpotenzial-Score: Was er misst", - "mscore_pot_what_intro": "Der Marktpotenzial-Score bewertet Investitionschancen an Standorten mit wenig oder gar keiner bestehenden Padel-Infrastruktur. Er erfasst alle Standorte weltweit — auch solche ohne eine einzige Anlage. 
Gedacht für Erstinvestoren auf der Suche nach unbestellten Märkten, nicht für den Vergleich bereits erschlossener Standorte.", + "mscore_pot_what_h2": "padelnomics Marktpotenzial-Score: Was er misst", + "mscore_pot_what_intro": "Der padelnomics Marktpotenzial-Score bewertet Investitionschancen an Standorten mit wenig oder gar keiner bestehenden Padel-Infrastruktur. Er erfasst alle Standorte weltweit — auch solche ohne eine einzige Anlage. Gedacht für Erstinvestoren auf der Suche nach unbestellten Märkten, nicht für den Vergleich bereits erschlossener Standorte.", "mscore_pot_cat_market_h3": "Adressierbarer Markt", "mscore_pot_cat_market_p": "Logarithmisch skalierte Bevölkerungsgröße, begrenzt auf 500.000 Einwohner. Das Potenzial ist bei mittelgroßen Städten am höchsten — groß genug für eine rentable Anlage, aber noch nicht von Großstadt-Betreibern erschlossen.", "mscore_pot_cat_econ_h3": "Wirtschaftskraft", @@ -1685,15 +1685,15 @@ "mscore_pot_cat_catchment_p": "Entfernung zur nächsten bestehenden Padelanlage. Standorte mehr als 30 km vom nächsten Platz entfernt erhalten die volle Punktzahl — echte Versorgungslücken ohne nahe gelegene Alternative.", "mscore_pot_cat_tennis_h3": "Schlägersportkultur", "mscore_pot_cat_tennis_p": "Tennisplätze im Umkreis von 25 km als Indikator für etablierte Schlägersportnachfrage. Viele neue Padelanlagen entstehen innerhalb bestehender Tennisvereine oder direkt daneben — ein verlässlicher Frühindikator.", - "mscore_pot_read_h2": "Marktpotenzial-Score: So liest Du ihn", + "mscore_pot_read_h2": "padelnomics Marktpotenzial-Score: So liest Du ihn", "mscore_pot_band_high_label": "70–100: Hohes Potenzial", "mscore_pot_band_high_p": "Unterversorgtes Gebiet mit solider Bevölkerungsstruktur und Kaufkraft. Geringes Angebot, weit entfernt von der nächsten Anlage, nachgewiesene Schlägersportkultur. 
Hohe Priorität für Erstinvestoren.", "mscore_pot_band_mid_label": "45–69: Moderates Potenzial", "mscore_pot_band_mid_p": "Teilweise bereits vorhandenes Angebot, demografische Einschränkungen oder unklare Signallage. Lohnt sich für eine genauere Prüfung — lokale Faktoren können das Bild erheblich verändern.", "mscore_pot_band_low_label": "Unter 45: Geringeres Potenzial", "mscore_pot_band_low_p": "Markt bereits gut versorgt, Bevölkerungszahl gering oder Kaufkraft begrenzt. Konzentriere Dich auf höher bewertete Standorte — es sei denn, Du hast einen konkreten lokalen Vorteil.", - "mscore_faq_q6": "Was ist der Unterschied zwischen Marktreife-Score und Marktpotenzial-Score?", - "mscore_faq_a6": "Der Marktreife-Score misst, wie etabliert und ausgereift ein bestehender Padel-Markt ist — er gilt nur für Städte mit mindestens einer Anlage. Der Marktpotenzial-Score bewertet Investitionschancen in noch unbestellten Märkten und erfasst alle Standorte weltweit. Angebotslücken und unterversorgte Einzugsgebiete fließen positiv ein — auch dort, wo es noch gar keine Anlagen gibt.", - "mscore_faq_q7": "Warum hat mein Ort einen hohen Marktpotenzial-Score, aber keine Padelanlagen?", - "mscore_faq_a7": "Genau darum geht es. Ein hoher Marktpotenzial-Score signalisiert einen unterversorgten Standort: solide Bevölkerungsbasis, wirtschaftliche Kaufkraft, kein bestehendes Angebot und weite Entfernung zur nächsten Anlage. Das sind genau die Signale, die auf eine Pionierchance hinweisen — kein Zeichen für einen schwachen Markt." + "mscore_faq_q6": "Was ist der Unterschied zwischen dem padelnomics Marktreife-Score und dem padelnomics Marktpotenzial-Score?", + "mscore_faq_a6": "Der padelnomics Marktreife-Score misst, wie etabliert und ausgereift ein bestehender Padel-Markt ist — er gilt nur für Städte mit mindestens einer Anlage. Der padelnomics Marktpotenzial-Score bewertet Investitionschancen in noch unbestellten Märkten und erfasst alle Standorte weltweit. 
Angebotslücken und unterversorgte Einzugsgebiete fließen positiv ein — auch dort, wo es noch gar keine Anlagen gibt.", + "mscore_faq_q7": "Warum hat mein Ort einen hohen padelnomics Marktpotenzial-Score, aber keine Padelanlagen?", + "mscore_faq_a7": "Genau darum geht es. Ein hoher padelnomics Marktpotenzial-Score signalisiert einen unterversorgten Standort: solide Bevölkerungsbasis, wirtschaftliche Kaufkraft, kein bestehendes Angebot und weite Entfernung zur nächsten Anlage. Das sind genau die Signale, die auf eine Pionierchance hinweisen — kein Zeichen für einen schwachen Markt." } diff --git a/web/src/padelnomics/locales/en.json b/web/src/padelnomics/locales/en.json index 4af2d06..f0b06ea 100644 --- a/web/src/padelnomics/locales/en.json +++ b/web/src/padelnomics/locales/en.json @@ -1471,7 +1471,6 @@ "sd_flash_valid_email": "Please enter a valid email address.", "sd_flash_claim_error": "This listing has already been claimed or does not exist.", "sd_flash_listing_saved": "Listing saved successfully.", - "bp_indoor": "Indoor", "bp_outdoor": "Outdoor", "bp_own": "Own", @@ -1480,24 +1479,20 @@ "bp_payback_not_reached": "Not reached in 60 months", "bp_months": "{n} months", "bp_years": "{n} years", - "bp_exec_paragraph": "This business plan models a {facility_type} padel facility with {courts} courts ({sqm} m\u00b2). Total investment is {total_capex}, financed with {equity} equity and {loan} debt. The projected IRR is {irr} with a payback period of {payback}.", - + "bp_exec_paragraph": "This business plan models a {facility_type} padel facility with {courts} courts ({sqm} m²). Total investment is {total_capex}, financed with {equity} equity and {loan} debt. 
The projected IRR is {irr} with a payback period of {payback}.", "bp_lbl_scenario": "Scenario", - "bp_lbl_generated_by": "Generated by Padelnomics \u2014 padelnomics.io", - + "bp_lbl_generated_by": "Generated by Padelnomics — padelnomics.io", "bp_lbl_total_investment": "Total Investment", "bp_lbl_equity_required": "Equity Required", "bp_lbl_year3_ebitda": "Year 3 EBITDA", "bp_lbl_irr": "IRR", "bp_lbl_payback_period": "Payback Period", "bp_lbl_year1_revenue": "Year 1 Revenue", - "bp_lbl_item": "Item", "bp_lbl_amount": "Amount", "bp_lbl_notes": "Notes", "bp_lbl_total_capex": "Total CAPEX", - "bp_lbl_capex_stats": "CAPEX per court: {per_court} \u2022 CAPEX per m\u00b2: {per_sqm}", - + "bp_lbl_capex_stats": "CAPEX per court: {per_court} • CAPEX per m²: {per_sqm}", "bp_lbl_equity": "Equity", "bp_lbl_loan": "Loan", "bp_lbl_interest_rate": "Interest Rate", @@ -1505,24 +1500,20 @@ "bp_lbl_monthly_payment": "Monthly Payment", "bp_lbl_annual_debt_service": "Annual Debt Service", "bp_lbl_ltv": "Loan-to-Value", - "bp_lbl_monthly": "Monthly", "bp_lbl_total_monthly_opex": "Total Monthly OPEX", "bp_lbl_annual_opex": "Annual OPEX", - "bp_lbl_weighted_hourly_rate": "Weighted Hourly Rate", "bp_lbl_target_utilization": "Target Utilization", "bp_lbl_gross_monthly_revenue": "Gross Monthly Revenue", "bp_lbl_net_monthly_revenue": "Net Monthly Revenue", "bp_lbl_monthly_ebitda": "Monthly EBITDA", "bp_lbl_monthly_net_cf": "Monthly Net Cash Flow", - "bp_lbl_year": "Year", "bp_lbl_revenue": "Revenue", "bp_lbl_ebitda": "EBITDA", "bp_lbl_debt_service": "Debt Service", "bp_lbl_net_cf": "Net CF", - "bp_lbl_moic": "MOIC", "bp_lbl_cash_on_cash": "Cash-on-Cash (Y3)", "bp_lbl_payback": "Payback", @@ -1530,46 +1521,40 @@ "bp_lbl_ebitda_margin": "EBITDA Margin", "bp_lbl_dscr_y3": "DSCR (Y3)", "bp_lbl_yield_on_cost": "Yield on Cost", - "bp_lbl_month": "Month", "bp_lbl_opex": "OPEX", "bp_lbl_debt": "Debt", "bp_lbl_cumulative": "Cumulative", - - "bp_lbl_disclaimer": "Disclaimer: This business plan is 
generated from user-provided assumptions using the Padelnomics financial model. All projections are estimates and do not constitute financial advice. Actual results may vary significantly based on market conditions, execution, and other factors. Consult with financial advisors before making investment decisions. \u00a9 Padelnomics \u2014 padelnomics.io", - + "bp_lbl_disclaimer": "Disclaimer: This business plan is generated from user-provided assumptions using the Padelnomics financial model. All projections are estimates and do not constitute financial advice. Actual results may vary significantly based on market conditions, execution, and other factors. Consult with financial advisors before making investment decisions. © Padelnomics — padelnomics.io", "email_magic_link_heading": "Sign in to {app_name}", "email_magic_link_body": "Here's your sign-in link. It expires in {expiry_minutes} minutes.", - "email_magic_link_btn": "Sign In \u2192", + "email_magic_link_btn": "Sign In →", "email_magic_link_fallback": "If the button doesn't work, copy and paste this URL into your browser:", "email_magic_link_ignore": "If you didn't request this, you can safely ignore this email.", "email_magic_link_subject": "Your sign-in link for {app_name}", "email_magic_link_preheader": "This link expires in {expiry_minutes} minutes", - "email_quote_verify_heading": "Verify your email to get quotes", "email_quote_verify_greeting": "Hi {first_name},", "email_quote_verify_body": "Thanks for requesting quotes. 
Verify your email to activate your quote request and create your {app_name} account.", "email_quote_verify_project_label": "Your project:", "email_quote_verify_urgency": "Verified requests get prioritized by our supplier network.", - "email_quote_verify_btn": "Verify & Activate \u2192", + "email_quote_verify_btn": "Verify & Activate →", "email_quote_verify_expires": "This link expires in 60 minutes.", "email_quote_verify_fallback": "If the button doesn't work, copy and paste this URL into your browser:", "email_quote_verify_ignore": "If you didn't request this, you can safely ignore this email.", - "email_quote_verify_subject": "Verify your email \u2014 suppliers are ready to quote", + "email_quote_verify_subject": "Verify your email — suppliers are ready to quote", "email_quote_verify_preheader": "One click to activate your quote request", "email_quote_verify_preheader_courts": "One click to activate your {court_count}-court project", - "email_welcome_heading": "Welcome to {app_name}", "email_welcome_greeting": "Hi {first_name},", - "email_welcome_body": "You now have access to the financial planner, market data, and supplier directory \u2014 everything you need to plan your padel business.", + "email_welcome_body": "You now have access to the financial planner, market data, and supplier directory — everything you need to plan your padel business.", "email_welcome_quickstart_heading": "Quick start:", - "email_welcome_link_planner": "Financial Planner \u2014 model your investment", - "email_welcome_link_markets": "Market Data \u2014 explore padel demand by city", - "email_welcome_link_quotes": "Get Quotes \u2014 connect with verified suppliers", - "email_welcome_btn": "Start Planning \u2192", - "email_welcome_subject": "You're in \u2014 here's how to start planning", + "email_welcome_link_planner": "Financial Planner — model your investment", + "email_welcome_link_markets": "Market Data — explore padel demand by city", + "email_welcome_link_quotes": "Get Quotes — 
connect with verified suppliers", + "email_welcome_btn": "Start Planning →", + "email_welcome_subject": "You're in — here's how to start planning", "email_welcome_preheader": "Your padel business planning toolkit is ready", - "email_waitlist_supplier_heading": "You're on the Supplier Waitlist", "email_waitlist_supplier_body": "Thanks for your interest in the {plan_name} plan. We're building a platform to connect you with qualified leads from padel entrepreneurs actively planning projects.", "email_waitlist_supplier_perks_intro": "As an early waitlist member, you'll get:", @@ -1577,20 +1562,19 @@ "email_waitlist_supplier_perk_2": "Exclusive launch pricing (locked in)", "email_waitlist_supplier_perk_3": "Dedicated onboarding call", "email_waitlist_supplier_meanwhile": "In the meantime, explore our free resources:", - "email_waitlist_supplier_link_planner": "Financial Planning Tool \u2014 model your padel facility", - "email_waitlist_supplier_link_directory": "Supplier Directory \u2014 browse verified suppliers", - "email_waitlist_supplier_subject": "You're in \u2014 {plan_name} early access is coming", + "email_waitlist_supplier_link_planner": "Financial Planning Tool — model your padel facility", + "email_waitlist_supplier_link_directory": "Supplier Directory — browse verified suppliers", + "email_waitlist_supplier_subject": "You're in — {plan_name} early access is coming", "email_waitlist_supplier_preheader": "Exclusive launch pricing + priority onboarding", "email_waitlist_general_heading": "You're on the Waitlist", - "email_waitlist_general_body": "Thanks for joining. We're building the planning platform for padel entrepreneurs \u2014 financial modelling, market data, and supplier connections in one place.", + "email_waitlist_general_body": "Thanks for joining. 
We're building the planning platform for padel entrepreneurs — financial modelling, market data, and supplier connections in one place.", "email_waitlist_general_perks_intro": "As an early waitlist member, you'll get:", "email_waitlist_general_perk_1": "Early access before public launch", "email_waitlist_general_perk_2": "Exclusive launch pricing", "email_waitlist_general_perk_3": "Priority onboarding and support", "email_waitlist_general_outro": "We'll be in touch soon.", - "email_waitlist_general_subject": "You're on the list \u2014 we'll notify you at launch", + "email_waitlist_general_subject": "You're on the list — we'll notify you at launch", "email_waitlist_general_preheader": "Early access + exclusive launch pricing", - "email_lead_forward_heading": "New Project Lead", "email_lead_forward_urgency": "This lead was just unlocked. Suppliers who respond within 24 hours are 3x more likely to win the project.", "email_lead_forward_section_brief": "Project Brief", @@ -1607,22 +1591,20 @@ "email_lead_forward_lbl_phone": "Phone", "email_lead_forward_lbl_company": "Company", "email_lead_forward_lbl_role": "Role", - "email_lead_forward_btn": "View in Lead Feed \u2192", + "email_lead_forward_btn": "View in Lead Feed →", "email_lead_forward_reply_direct": "or reply directly to {contact_email}", "email_lead_forward_preheader_suffix": "contact details inside", - "email_lead_matched_heading": "A supplier wants to discuss your project", "email_lead_matched_greeting": "Hi {first_name},", - "email_lead_matched_body": "Great news \u2014 a verified supplier has been matched with your padel project. They have your project brief and contact details.", + "email_lead_matched_body": "Great news — a verified supplier has been matched with your padel project. 
They have your project brief and contact details.", "email_lead_matched_context": "You submitted a quote request for a {facility_type} facility with {court_count} courts in {country}.", "email_lead_matched_next_heading": "What happens next", - "email_lead_matched_next_body": "The supplier has received your project brief and contact details. Most suppliers respond within 24\u201348 hours via email or phone.", + "email_lead_matched_next_body": "The supplier has received your project brief and contact details. Most suppliers respond within 24–48 hours via email or phone.", "email_lead_matched_tip": "Tip: Responding quickly to supplier outreach increases your chance of getting competitive quotes.", - "email_lead_matched_btn": "View Your Dashboard \u2192", + "email_lead_matched_btn": "View Your Dashboard →", "email_lead_matched_note": "You'll receive this notification each time a new supplier unlocks your project details.", "email_lead_matched_subject": "{first_name}, a supplier wants to discuss your project", - "email_lead_matched_preheader": "They'll reach out to you directly \u2014 here's what to expect", - + "email_lead_matched_preheader": "They'll reach out to you directly — here's what to expect", "email_enquiry_heading": "New enquiry from {contact_name}", "email_enquiry_body": "You have a new enquiry via your {supplier_name} directory listing.", "email_enquiry_lbl_from": "From", @@ -1631,54 +1613,51 @@ "email_enquiry_reply": "Reply directly to {contact_email} to connect.", "email_enquiry_subject": "New enquiry from {contact_name} via your directory listing", "email_enquiry_preheader": "Reply to connect with this potential client", - "email_business_plan_heading": "Your business plan is ready", "email_business_plan_body": "Your padel business plan PDF has been generated and is ready for download.", "email_business_plan_includes": "Your plan includes investment breakdown, revenue projections, and break-even analysis.", - "email_business_plan_btn": "Download PDF 
\u2192", - "email_business_plan_quote_cta": "Ready for the next step? Get quotes from suppliers \u2192", + "email_business_plan_btn": "Download PDF →", + "email_business_plan_quote_cta": "Ready for the next step? Get quotes from suppliers →", "email_business_plan_subject": "Your business plan PDF is ready to download", - "email_business_plan_preheader": "Professional padel facility financial plan \u2014 download now", - + "email_business_plan_preheader": "Professional padel facility financial plan — download now", "email_footer_tagline": "The padel business planning platform", - "email_footer_copyright": "\u00a9 {year} {app_name}. You received this email because you have an account or submitted a request.", - + "email_footer_copyright": "© {year} {app_name}. You received this email because you have an account or submitted a request.", "footer_market_score": "Market Score", - "mscore_page_title": "The padelnomics Market Score \u2014 How We Measure Market Potential", + "mscore_page_title": "The padelnomics Market Score — How We Measure Market Potential", "mscore_meta_desc": "The padelnomics Market Score rates cities from 0 to 100 on their potential for padel investment. Learn how demographics, economic strength, demand signals, and data coverage feed into the score.", - "mscore_og_desc": "A data-driven composite score (0\u2013100) that measures how attractive a city is for padel court investment. See what goes into it and what it means for your planning.", + "mscore_og_desc": "A data-driven composite score (0–100) that measures how attractive a city is for padel court investment. See what goes into it and what it means for your planning.", "mscore_h1": "The padelnomics Market Score", "mscore_subtitle": "A data-driven measure of how attractive a city is for padel investment.", "mscore_dual_h2": "Two Scores, Two Questions", - "mscore_dual_intro": "Padelnomics publishes two distinct scores for every market. 
They answer different questions and are calculated using different methodologies \u2014 knowing both is essential for a well-informed investment decision.", - "mscore_reife_chip": "Marktreife-Score", + "mscore_dual_intro": "Padelnomics publishes two distinct scores for every market. They answer different questions and are calculated using different methodologies — knowing both is essential for a well-informed investment decision.", + "mscore_reife_chip": "padelnomics Marktreife-Score", "mscore_reife_question": "How established is this padel market?", "mscore_reife_desc": "Calculated for cities with at least one padel venue. Combines population size, economic power, demand evidence from booking platforms, and data completeness.", - "mscore_potenzial_chip": "Marktpotenzial-Score", + "mscore_potenzial_chip": "padelnomics Marktpotenzial-Score", "mscore_potenzial_question": "Where should I build a padel court?", - "mscore_potenzial_desc": "Calculated for all locations globally, including those with zero courts. Rewards supply gaps, underserved catchment areas, and racket sport culture \u2014 the signals that matter for greenfield investors.", - "mscore_what_h2": "Marktreife-Score: What It Measures", - "mscore_what_intro": "The Marktreife-Score is a composite index from 0 to 100 that evaluates how established and attractive an existing padel market is. It only applies to cities with at least one padel venue, combining four categories of data into a single number designed to help you prioritize markets worth investigating further.", + "mscore_potenzial_desc": "Calculated for all locations globally, including those with zero courts. 
Rewards supply gaps, underserved catchment areas, and racket sport culture — the signals that matter for greenfield investors.", + "mscore_what_h2": "padelnomics Marktreife-Score: What It Measures", + "mscore_what_intro": "The padelnomics Marktreife-Score is a composite index from 0 to 100 that evaluates how established and attractive an existing padel market is. It only applies to cities with at least one padel venue, combining four categories of data into a single number designed to help you prioritize markets worth investigating further.", "mscore_cat_demo_h3": "Demographics", "mscore_cat_demo_p": "Population size as a proxy for the addressable market. Larger cities generally support more venues and higher utilization.", "mscore_cat_econ_h3": "Economic Strength", "mscore_cat_econ_p": "Regional purchasing power and income indicators. Markets where people have higher disposable income tend to sustain stronger demand for leisure sports like padel.", "mscore_cat_demand_h3": "Demand Evidence", - "mscore_cat_demand_p": "Signals from existing venue activity \u2014 occupancy rates, booking data, and the number of operating venues. Where real demand is already measurable, it\u2019s the strongest indicator.", + "mscore_cat_demand_p": "Signals from existing venue activity — occupancy rates, booking data, and the number of operating venues. Where real demand is already measurable, it’s the strongest indicator.", "mscore_cat_data_h3": "Data Completeness", - "mscore_cat_data_p": "How much data we have for that city. A score influenced by incomplete data is less reliable \u2014 we surface this explicitly so you know when to dig deeper on your own.", - "mscore_read_h2": "Marktreife-Score: How To Read", - "mscore_band_high_label": "70\u2013100: Strong market", + "mscore_cat_data_p": "How much data we have for that city. 
A score influenced by incomplete data is less reliable — we surface this explicitly so you know when to dig deeper on your own.", + "mscore_read_h2": "padelnomics Marktreife-Score: How To Read", + "mscore_band_high_label": "70–100: Strong market", "mscore_band_high_p": "Large population, economic power, and proven demand from existing venues. These cities have validated padel markets with reliable benchmarks for financial planning.", - "mscore_band_mid_label": "45\u201369: Solid mid-tier", + "mscore_band_mid_label": "45–69: Solid mid-tier", "mscore_band_mid_p": "Good fundamentals with room for growth. Enough data to plan with confidence, but less competition than top-tier cities. Often the sweet spot for new entrants.", "mscore_band_low_label": "Below 45: Early-stage market", - "mscore_band_low_p": "Less validated data or smaller populations. This does not mean a city is a bad investment \u2014 it may mean less competition and first-mover advantage. Expect to do more local research.", - "mscore_read_note": "A lower score does not mean a city is a bad investment. It may indicate less available data or a market still developing \u2014 which can mean less competition and better terms for early entrants.", + "mscore_band_low_p": "Less validated data or smaller populations. This does not mean a city is a bad investment — it may mean less competition and first-mover advantage. Expect to do more local research.", + "mscore_read_note": "A lower score does not mean a city is a bad investment. It may indicate less available data or a market still developing — which can mean less competition and better terms for early entrants.", "mscore_sources_h2": "Data Sources", "mscore_sources_p": "The Market Score draws on data from European statistical offices (population and economic indicators), court booking platforms (venue counts, pricing, occupancy), and geographic databases (venue locations). 
Data is refreshed monthly as new extractions run.", "mscore_limits_h2": "Limitations", - "mscore_limits_p1": "The score reflects available data, not absolute market truth. Cities where fewer venues are tracked on booking platforms may score lower on demand evidence \u2014 even if local demand is strong.", + "mscore_limits_p1": "The score reflects available data, not absolute market truth. Cities where fewer venues are tracked on booking platforms may score lower on demand evidence — even if local demand is strong.", "mscore_limits_p2": "The score does not account for local factors like real estate costs, permitting timelines, competitive dynamics, or regulatory environment. These matter enormously and require on-the-ground research.", "mscore_limits_p3": "Use the Market Score as a starting point for prioritization, not a final investment decision. The financial planner is where you model your specific scenario.", "mscore_cta_markets": "Browse city scores", @@ -1688,33 +1667,33 @@ "mscore_faq_a1": "A composite index from 0 to 100 that measures how attractive a city is for padel court investment. It combines demographics, economic strength, demand evidence, and data completeness into a single comparable number.", "mscore_faq_q2": "How often is the score updated?", "mscore_faq_a2": "Monthly. New data from statistical offices, booking platforms, and venue databases is extracted and processed on a regular cycle. Scores reflect the most recent available data.", - "mscore_faq_q3": "Why is my city\u2019s score low?", - "mscore_faq_a3": "Usually because of limited data coverage or smaller population. A low score doesn\u2019t mean the city is unattractive \u2014 it means we have less data to quantify the opportunity. Local research can fill the gaps.", + "mscore_faq_q3": "Why is my city’s score low?", + "mscore_faq_a3": "Usually because of limited data coverage or smaller population. 
A low score doesn’t mean the city is unattractive — it means we have less data to quantify the opportunity. Local research can fill the gaps.", "mscore_faq_q4": "Can I compare scores across countries?", "mscore_faq_a4": "Yes. The methodology is consistent across all markets we track, so a score of 72 in Germany is directly comparable to a 72 in Spain or the UK.", "mscore_faq_q5": "Does a high score guarantee a good investment?", "mscore_faq_a5": "No. The score measures market attractiveness at a macro level. Your specific investment depends on venue type, build costs, lease terms, and dozens of other factors. Use the financial planner to model your scenario with real numbers.", - "mscore_pot_what_h2": "Marktpotenzial-Score: What It Measures", - "mscore_pot_what_intro": "The Marktpotenzial-Score evaluates investment opportunity for locations with little or no existing padel infrastructure. It covers all locations globally, including those with zero courts \u2014 designed for greenfield investors scouting white-space markets, not for benchmarking established venues.", + "mscore_pot_what_h2": "padelnomics Marktpotenzial-Score: What It Measures", + "mscore_pot_what_intro": "The padelnomics Marktpotenzial-Score evaluates investment opportunity for locations with little or no existing padel infrastructure. It covers all locations globally, including those with zero courts — designed for greenfield investors scouting white-space markets, not for benchmarking established venues.", "mscore_pot_cat_market_h3": "Addressable Market", "mscore_pot_cat_market_p": "Log-scaled population, capped at 500K. Opportunity peaks in mid-size cities that can support a court but are not yet served by large-city operators.", "mscore_pot_cat_econ_h3": "Economic Power", - "mscore_pot_cat_econ_p": "Country-level purchasing power (PPS), normalised to international benchmarks. 
Drives willingness to pay for court fees in the \u20ac20\u201335/hr target range.", + "mscore_pot_cat_econ_p": "Country-level purchasing power (PPS), normalised to international benchmarks. Drives willingness to pay for court fees in the €20–35/hr target range.", "mscore_pot_cat_gap_h3": "Supply Gap", - "mscore_pot_cat_gap_p": "Inverted venue density: zero courts per 100K residents earns full marks. This is the key signal separating the Marktpotenzial-Score from the Marktreife-Score \u2014 white space is the opportunity.", + "mscore_pot_cat_gap_p": "Inverted venue density: zero courts per 100K residents earns full marks. This is the key signal separating the Marktpotenzial-Score from the Marktreife-Score — white space is the opportunity.", "mscore_pot_cat_catchment_h3": "Catchment Gap", - "mscore_pot_cat_catchment_p": "Distance to the nearest existing padel court. Locations more than 30km from any court score maximum points \u2014 they represent genuinely underserved catchment areas with no nearby alternative.", + "mscore_pot_cat_catchment_p": "Distance to the nearest existing padel court. Locations more than 30km from any court score maximum points — they represent genuinely underserved catchment areas with no nearby alternative.", "mscore_pot_cat_tennis_h3": "Racket Sport Culture", "mscore_pot_cat_tennis_p": "Tennis courts within 25km as a proxy for established racket sport demand. Many new padel facilities open inside or next to existing tennis clubs, making this a reliable lead indicator.", - "mscore_pot_read_h2": "Marktpotenzial-Score: How To Read", - "mscore_pot_band_high_label": "70\u2013100: High potential", + "mscore_pot_read_h2": "padelnomics Marktpotenzial-Score: How To Read", + "mscore_pot_band_high_label": "70–100: High potential", "mscore_pot_band_high_p": "Underserved area with strong demographics and economic fundamentals. Low supply, significant catchment gap, and proven racket sport culture. 
Priority market for greenfield investment.", - "mscore_pot_band_mid_label": "45\u201369: Moderate potential", - "mscore_pot_band_mid_p": "Some supply already exists, demographic limitations, or mixed signals. Worth investigating further \u2014 local factors may significantly change the picture.", + "mscore_pot_band_mid_label": "45–69: Moderate potential", + "mscore_pot_band_mid_p": "Some supply already exists, demographic limitations, or mixed signals. Worth investigating further — local factors may significantly change the picture.", "mscore_pot_band_low_label": "Below 45: Lower potential", "mscore_pot_band_low_p": "Market is already well-served, population is small, or economic purchasing power is limited. Focus resources on higher-scoring locations unless you have a specific local advantage.", - "mscore_faq_q6": "What is the difference between the Marktreife-Score and the Marktpotenzial-Score?", - "mscore_faq_a6": "The Marktreife-Score measures how established and mature an existing padel market is \u2014 it only applies to cities with at least one venue. The Marktpotenzial-Score measures greenfield investment opportunity and covers all locations globally, rewarding supply gaps and underserved catchment areas where no courts exist yet.", - "mscore_faq_q7": "Why does my town have a high Marktpotenzial-Score but no padel courts?", - "mscore_faq_a7": "That is exactly the point. A high Marktpotenzial-Score indicates an underserved location: strong demographics, economic purchasing power, no existing supply, and distance from the nearest court. These are precisely the signals that suggest a greenfield opportunity \u2014 not a sign of a weak market." + "mscore_faq_q6": "What is the difference between the padelnomics Marktreife-Score and the padelnomics Marktpotenzial-Score?", + "mscore_faq_a6": "The padelnomics Marktreife-Score measures how established and mature an existing padel market is — it only applies to cities with at least one venue. 
The padelnomics Marktpotenzial-Score measures greenfield investment opportunity and covers all locations globally, rewarding supply gaps and underserved catchment areas where no courts exist yet.", + "mscore_faq_q7": "Why does my town have a high padelnomics Marktpotenzial-Score but no padel courts?", + "mscore_faq_a7": "That is exactly the point. A high padelnomics Marktpotenzial-Score indicates an underserved location: strong demographics, economic purchasing power, no existing supply, and distance from the nearest court. These are precisely the signals that suggest a greenfield opportunity — not a sign of a weak market." } From caec0c4410084bfe34f6705e15d5e1d257b5efb3 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 17:08:20 +0100 Subject: [PATCH 41/98] feat(ui): apply wordmark span to score h2 headings, add TM to chips MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 4 section h2 headings now render "padelnomics" in Bricolage Grotesque bold (same styled span as h1), matching the existing "padelnomics Market Score" wordmark pattern - i18n h2 keys now contain only the suffix (e.g. 
"Marktreife-Score: What It Measures") since "padelnomics" is hardcoded in template - Chip labels (primary score identification) get ™ suffix in both EN + DE Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/locales/de.json | 12 ++++++------ web/src/padelnomics/locales/en.json | 12 ++++++------ .../padelnomics/public/templates/market_score.html | 8 ++++---- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/web/src/padelnomics/locales/de.json b/web/src/padelnomics/locales/de.json index 8dbdd86..8a3c5bb 100644 --- a/web/src/padelnomics/locales/de.json +++ b/web/src/padelnomics/locales/de.json @@ -1630,13 +1630,13 @@ "mscore_subtitle": "Ein datengestütztes Maß für die Attraktivität einer Stadt als Padel-Investitionsstandort.", "mscore_dual_h2": "Zwei Scores, zwei Fragen", "mscore_dual_intro": "Padelnomics veröffentlicht zwei eigenständige Scores für jeden Markt. Sie beantworten unterschiedliche Fragen und basieren auf unterschiedlichen Methoden — beide zu kennen ist entscheidend für eine fundierte Investitionsentscheidung.", - "mscore_reife_chip": "padelnomics Marktreife-Score", + "mscore_reife_chip": "padelnomics Marktreife-Score™", "mscore_reife_question": "Wie etabliert ist dieser Padel-Markt?", "mscore_reife_desc": "Berechnet für Städte mit mindestens einer Padelanlage. Kombiniert Bevölkerungsgröße, Wirtschaftskraft, Nachfragesignale aus Buchungsplattformen und Datenvollständigkeit.", - "mscore_potenzial_chip": "padelnomics Marktpotenzial-Score", + "mscore_potenzial_chip": "padelnomics Marktpotenzial-Score™", "mscore_potenzial_question": "Wo sollte ich eine Padelanlage bauen?", "mscore_potenzial_desc": "Berechnet für alle Standorte weltweit, auch dort, wo es noch keine Anlagen gibt. 
Angebotslücken, unterversorgte Einzugsgebiete und Schlägersportkultur schlagen positiv zu Buche — die entscheidenden Signale für Erstinvestitionen.", - "mscore_what_h2": "padelnomics Marktreife-Score: Was er misst", + "mscore_what_h2": "Marktreife-Score: Was er misst", "mscore_what_intro": "Der padelnomics Marktreife-Score ist ein Komposit-Index von 0 bis 100, der bewertet, wie etabliert und attraktiv ein bestehender Padel-Markt ist. Er gilt ausschließlich für Städte mit mindestens einer Padelanlage — vier Datenkategorien fließen in eine einzige Kennzahl ein, damit Du schnell einschätzen kannst, welche Märkte sich genauer anzuschauen lohnen.", "mscore_cat_demo_h3": "Demografie", "mscore_cat_demo_p": "Bevölkerungsgröße als Indikator für den adressierbaren Markt. Größere Städte tragen in der Regel mehr Anlagen und höhere Auslastung.", @@ -1646,7 +1646,7 @@ "mscore_cat_demand_p": "Signale aus dem laufenden Betrieb bestehender Anlagen — Auslastungsraten, Buchungsdaten, Anzahl aktiver Standorte. Wo sich reale Nachfrage bereits messen lässt, ist das der stärkste Indikator.", "mscore_cat_data_h3": "Datenqualität", "mscore_cat_data_p": "Wie umfassend die Datenlage für eine Stadt ist. Ein Score auf Basis unvollständiger Daten ist weniger belastbar — wir machen das transparent, damit Du weißt, wo eigene Recherche sinnvoll ist.", - "mscore_read_h2": "padelnomics Marktreife-Score: Wie Du ihn liest", + "mscore_read_h2": "Marktreife-Score: Wie Du ihn liest", "mscore_band_high_label": "70–100: Starker Markt", "mscore_band_high_p": "Große Bevölkerung, hohe Wirtschaftskraft und nachgewiesene Nachfrage durch bestehende Anlagen. Diese Städte haben validierte Padel-Märkte mit belastbaren Benchmarks für die Finanzplanung.", "mscore_band_mid_label": "45–69: Solides Mittelfeld", @@ -1673,7 +1673,7 @@ "mscore_faq_a4": "Ja. 
Die Methodik ist für alle Märkte einheitlich, sodass ein Score von 72 in Deutschland direkt vergleichbar ist mit einem 72 in Spanien oder Großbritannien.", "mscore_faq_q5": "Garantiert ein hoher Score eine gute Investition?", "mscore_faq_a5": "Nein. Der Score misst die Marktattraktivität auf Makroebene. Deine konkrete Investition hängt von Anlagentyp, Baukosten, Mietkonditionen und Dutzenden weiterer Faktoren ab. Im Finanzplaner kannst Du Dein Szenario mit echten Zahlen durchrechnen.", - "mscore_pot_what_h2": "padelnomics Marktpotenzial-Score: Was er misst", + "mscore_pot_what_h2": "Marktpotenzial-Score: Was er misst", "mscore_pot_what_intro": "Der padelnomics Marktpotenzial-Score bewertet Investitionschancen an Standorten mit wenig oder gar keiner bestehenden Padel-Infrastruktur. Er erfasst alle Standorte weltweit — auch solche ohne eine einzige Anlage. Gedacht für Erstinvestoren auf der Suche nach unbestellten Märkten, nicht für den Vergleich bereits erschlossener Standorte.", "mscore_pot_cat_market_h3": "Adressierbarer Markt", "mscore_pot_cat_market_p": "Logarithmisch skalierte Bevölkerungsgröße, begrenzt auf 500.000 Einwohner. Das Potenzial ist bei mittelgroßen Städten am höchsten — groß genug für eine rentable Anlage, aber noch nicht von Großstadt-Betreibern erschlossen.", @@ -1685,7 +1685,7 @@ "mscore_pot_cat_catchment_p": "Entfernung zur nächsten bestehenden Padelanlage. Standorte mehr als 30 km vom nächsten Platz entfernt erhalten die volle Punktzahl — echte Versorgungslücken ohne nahe gelegene Alternative.", "mscore_pot_cat_tennis_h3": "Schlägersportkultur", "mscore_pot_cat_tennis_p": "Tennisplätze im Umkreis von 25 km als Indikator für etablierte Schlägersportnachfrage. 
Viele neue Padelanlagen entstehen innerhalb bestehender Tennisvereine oder direkt daneben — ein verlässlicher Frühindikator.", - "mscore_pot_read_h2": "padelnomics Marktpotenzial-Score: So liest Du ihn", + "mscore_pot_read_h2": "Marktpotenzial-Score: So liest Du ihn", "mscore_pot_band_high_label": "70–100: Hohes Potenzial", "mscore_pot_band_high_p": "Unterversorgtes Gebiet mit solider Bevölkerungsstruktur und Kaufkraft. Geringes Angebot, weit entfernt von der nächsten Anlage, nachgewiesene Schlägersportkultur. Hohe Priorität für Erstinvestoren.", "mscore_pot_band_mid_label": "45–69: Moderates Potenzial", diff --git a/web/src/padelnomics/locales/en.json b/web/src/padelnomics/locales/en.json index f0b06ea..6167b98 100644 --- a/web/src/padelnomics/locales/en.json +++ b/web/src/padelnomics/locales/en.json @@ -1630,13 +1630,13 @@ "mscore_subtitle": "A data-driven measure of how attractive a city is for padel investment.", "mscore_dual_h2": "Two Scores, Two Questions", "mscore_dual_intro": "Padelnomics publishes two distinct scores for every market. They answer different questions and are calculated using different methodologies — knowing both is essential for a well-informed investment decision.", - "mscore_reife_chip": "padelnomics Marktreife-Score", + "mscore_reife_chip": "padelnomics Marktreife-Score™", "mscore_reife_question": "How established is this padel market?", "mscore_reife_desc": "Calculated for cities with at least one padel venue. Combines population size, economic power, demand evidence from booking platforms, and data completeness.", - "mscore_potenzial_chip": "padelnomics Marktpotenzial-Score", + "mscore_potenzial_chip": "padelnomics Marktpotenzial-Score™", "mscore_potenzial_question": "Where should I build a padel court?", "mscore_potenzial_desc": "Calculated for all locations globally, including those with zero courts. 
Rewards supply gaps, underserved catchment areas, and racket sport culture — the signals that matter for greenfield investors.", - "mscore_what_h2": "padelnomics Marktreife-Score: What It Measures", + "mscore_what_h2": "Marktreife-Score: What It Measures", "mscore_what_intro": "The padelnomics Marktreife-Score is a composite index from 0 to 100 that evaluates how established and attractive an existing padel market is. It only applies to cities with at least one padel venue, combining four categories of data into a single number designed to help you prioritize markets worth investigating further.", "mscore_cat_demo_h3": "Demographics", "mscore_cat_demo_p": "Population size as a proxy for the addressable market. Larger cities generally support more venues and higher utilization.", @@ -1646,7 +1646,7 @@ "mscore_cat_demand_p": "Signals from existing venue activity — occupancy rates, booking data, and the number of operating venues. Where real demand is already measurable, it’s the strongest indicator.", "mscore_cat_data_h3": "Data Completeness", "mscore_cat_data_p": "How much data we have for that city. A score influenced by incomplete data is less reliable — we surface this explicitly so you know when to dig deeper on your own.", - "mscore_read_h2": "padelnomics Marktreife-Score: How To Read", + "mscore_read_h2": "Marktreife-Score: How To Read", "mscore_band_high_label": "70–100: Strong market", "mscore_band_high_p": "Large population, economic power, and proven demand from existing venues. These cities have validated padel markets with reliable benchmarks for financial planning.", "mscore_band_mid_label": "45–69: Solid mid-tier", @@ -1673,7 +1673,7 @@ "mscore_faq_a4": "Yes. The methodology is consistent across all markets we track, so a score of 72 in Germany is directly comparable to a 72 in Spain or the UK.", "mscore_faq_q5": "Does a high score guarantee a good investment?", "mscore_faq_a5": "No. The score measures market attractiveness at a macro level. 
Your specific investment depends on venue type, build costs, lease terms, and dozens of other factors. Use the financial planner to model your scenario with real numbers.", - "mscore_pot_what_h2": "padelnomics Marktpotenzial-Score: What It Measures", + "mscore_pot_what_h2": "Marktpotenzial-Score: What It Measures", "mscore_pot_what_intro": "The padelnomics Marktpotenzial-Score evaluates investment opportunity for locations with little or no existing padel infrastructure. It covers all locations globally, including those with zero courts — designed for greenfield investors scouting white-space markets, not for benchmarking established venues.", "mscore_pot_cat_market_h3": "Addressable Market", "mscore_pot_cat_market_p": "Log-scaled population, capped at 500K. Opportunity peaks in mid-size cities that can support a court but are not yet served by large-city operators.", @@ -1685,7 +1685,7 @@ "mscore_pot_cat_catchment_p": "Distance to the nearest existing padel court. Locations more than 30km from any court score maximum points — they represent genuinely underserved catchment areas with no nearby alternative.", "mscore_pot_cat_tennis_h3": "Racket Sport Culture", "mscore_pot_cat_tennis_p": "Tennis courts within 25km as a proxy for established racket sport demand. Many new padel facilities open inside or next to existing tennis clubs, making this a reliable lead indicator.", - "mscore_pot_read_h2": "padelnomics Marktpotenzial-Score: How To Read", + "mscore_pot_read_h2": "Marktpotenzial-Score: How To Read", "mscore_pot_band_high_label": "70–100: High potential", "mscore_pot_band_high_p": "Underserved area with strong demographics and economic fundamentals. Low supply, significant catchment gap, and proven racket sport culture. 
Priority market for greenfield investment.", "mscore_pot_band_mid_label": "45–69: Moderate potential", diff --git a/web/src/padelnomics/public/templates/market_score.html b/web/src/padelnomics/public/templates/market_score.html index 34eb5e7..ad875b0 100644 --- a/web/src/padelnomics/public/templates/market_score.html +++ b/web/src/padelnomics/public/templates/market_score.html @@ -107,7 +107,7 @@
    -

    {{ t.mscore_what_h2 }}

    +

    padelnomics {{ t.mscore_what_h2 }}

    {{ t.mscore_what_intro }}

    @@ -136,7 +136,7 @@
    -

    {{ t.mscore_read_h2 }}

    +

    padelnomics {{ t.mscore_read_h2 }}

    @@ -165,7 +165,7 @@
    -

    {{ t.mscore_pot_what_h2 }}

    +

    padelnomics {{ t.mscore_pot_what_h2 }}

    {{ t.mscore_pot_what_intro }}

    @@ -199,7 +199,7 @@
    -

    {{ t.mscore_pot_read_h2 }}

    +

    padelnomics {{ t.mscore_pot_read_h2 }}

    From 165eaf48bfb1b87525ab3588d14d0be58f23f08e Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 17:09:49 +0100 Subject: [PATCH 42/98] fix(admin): live search not firing on text input + spinner always visible MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit hx-trigger bug: "from:find input" in hx-trigger attaches the event listener to the first found in the form — which is the hidden CSRF token input. Typing in the visible search field never fires the listener on that element. Result: only Enter (form submit) triggered HTMX. Fix: drop "from:find input" so the listener is on the form itself, where input/change events from all children bubble naturally. Spinner visibility bug: .search-spinner { opacity: 0 } relied on our compiled output.css. HTMX ships its own built-in CSS for .htmx-indicator (opacity:0 → opacity:1 on htmx-request). Using class="htmx-indicator search-spinner" delegates hide/show to HTMX's own stylesheet with no dependency on whether output.css has been rebuilt. Our .search-spinner only handles positioning and the spin animation. Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/admin/templates/admin/articles.html | 4 ++-- web/src/padelnomics/admin/templates/admin/emails.html | 4 ++-- web/src/padelnomics/admin/templates/admin/leads.html | 4 ++-- web/src/padelnomics/admin/templates/admin/scenarios.html | 4 ++-- web/src/padelnomics/admin/templates/admin/suppliers.html | 4 ++-- web/src/padelnomics/admin/templates/admin/users.html | 4 ++-- web/src/padelnomics/static/css/input.css | 6 +++--- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/web/src/padelnomics/admin/templates/admin/articles.html b/web/src/padelnomics/admin/templates/admin/articles.html index 8bfbe89..137b295 100644 --- a/web/src/padelnomics/admin/templates/admin/articles.html +++ b/web/src/padelnomics/admin/templates/admin/articles.html @@ -28,7 +28,7 @@
    @@ -67,7 +67,7 @@
    -
    -
    -
    @@ -53,7 +53,7 @@
    -
    -
    -
    pSEO
    + + + pSEO Engine + +
    Email
    diff --git a/web/src/padelnomics/admin/templates/admin/pseo_dashboard.html b/web/src/padelnomics/admin/templates/admin/pseo_dashboard.html new file mode 100644 index 0000000..212883b --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/pseo_dashboard.html @@ -0,0 +1,195 @@ +{% extends "admin/base_admin.html" %} +{% set admin_page = "pseo" %} + +{% block title %}pSEO Engine - {{ config.APP_NAME }}{% endblock %} + +{% block admin_head %} + +{% endblock %} + +{% block admin_content %} +
    +
    +

    pSEO Engine

    +

    Operational dashboard for programmatic SEO

    +
    +
    All Jobs +
    + + +
    +
    +

    Total Articles

    +

    {{ total_articles }}

    +

    {{ total_published }} published

    +
    +
    +

    Templates

    +

    {{ total_templates }}

    +
    +
    +

    Stale Templates

    +

    + {{ stale_count }} +

    +

    data newer than articles

    +
    +
    +

    Health Checks

    +

    +

    see Health section below

    +
    +
    + + +
    +
    + Templates + Click "Gaps" to load missing articles per template +
    +
    + + + + + + + + + + + + + {% for r in template_rows %} + {% set t = r.template %} + {% set stats = r.stats %} + {% set fr = r.freshness %} + + + + + + + + + + + + {% endfor %} + +
    TemplateData rowsArticles ENArticles DEFreshnessActions
    + {{ t.name }}
    + {{ t.slug }} +
    {{ fr.row_count if fr.row_count is not none else '—' }}{{ stats.by_language.get('en', {}).get('total', 0) }}{{ stats.by_language.get('de', {}).get('total', 0) }} + {% set status = fr.status | default('no_data') %} + + {% if status == 'fresh' %}🟢 Fresh + {% elif status == 'stale' %}🟡 Stale + {% elif status == 'no_articles' %}🟣 No articles + {% else %}⚪ No data + {% endif %} + + + + + + + +
    +
    + +
    +
    +
    +
    + + +{% if jobs %} +
    +
    + Recent Generation Jobs + View all → +
    +
    + + + + + + + + + + + {% for job in jobs %} + + + + + + + {% endfor %} + +
    JobStatusProgressStarted
    + #{{ job.id }} + {% if job.payload %} + — {{ (job.payload | fromjson).get('template_slug', '') }} + {% endif %} + + {% if job.status == 'complete' %} + Complete + {% elif job.status == 'failed' %} + Failed + {% elif job.status == 'pending' %} + Running + {% else %} + {{ job.status }} + {% endif %} + + {% if job.progress_total and job.progress_total > 0 %} +
    +
    +
    +
    + {{ job.progress_current }}/{{ job.progress_total }} +
    + {% else %} + — + {% endif %} +
    {{ job.created_at | default('') | truncate(16, True, '') }}
    +
    +
    +{% endif %} + + +
    +
    +

    Loading health checks…

    +
    +
    +{% endblock %} diff --git a/web/src/padelnomics/admin/templates/admin/pseo_gaps.html b/web/src/padelnomics/admin/templates/admin/pseo_gaps.html new file mode 100644 index 0000000..779ff87 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/pseo_gaps.html @@ -0,0 +1,43 @@ +{# HTMX partial — rendered inside the gaps panel for one template. + Loaded via GET /admin/pseo/gaps/. #} + +{% if not gaps %} +

    ✓ No gaps — all {{ template.name }} rows have articles.

    +{% else %} +
    + {{ gaps | length }} missing row{{ 's' if gaps | length != 1 else '' }} +
    + + +
    +
    +
    + + + + + + {% for key in (gaps[0].keys() | list | reject('equalto', '_natural_key') | reject('equalto', '_missing_languages') | list)[:4] %} + + {% endfor %} + + + + {% for gap in gaps[:100] %} + + + + {% for key in (gap.keys() | list | reject('equalto', '_natural_key') | reject('equalto', '_missing_languages') | list)[:4] %} + + {% endfor %} + + {% endfor %} + {% if gaps | length > 100 %} + + + + {% endif %} + +
    {{ template.natural_key }}Missing languages{{ key }}
    {{ gap._natural_key }}{{ gap._missing_languages | join(', ') }}{{ gap[key] | truncate(30) if gap[key] is string else gap[key] }}
    … and {{ gaps | length - 100 }} more rows
    +
    +{% endif %} diff --git a/web/src/padelnomics/admin/templates/admin/pseo_health.html b/web/src/padelnomics/admin/templates/admin/pseo_health.html new file mode 100644 index 0000000..2d2335b --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/pseo_health.html @@ -0,0 +1,99 @@ +{# HTMX partial — loaded by pseo_dashboard.html and /admin/pseo/health directly. + When loaded via HTMX (hx-swap="outerHTML"), renders a full card. + When loaded standalone (full page), also works since it just outputs HTML. #} + +
    +
    + Health Checks + {{ health.counts.total }} issue{{ 's' if health.counts.total != 1 else '' }} +
    + + {% if health.counts.total == 0 %} +

    ✓ No issues found — all articles are healthy.

    + {% else %} + + + {% if health.hreflang_orphans %} +
    + + ⚠ Hreflang orphans ({{ health.counts.hreflang_orphans }}) + — articles missing a sibling language + +
    + + + + {% for o in health.hreflang_orphans[:50] %} + + + + + + + {% endfor %} + {% if health.hreflang_orphans | length > 50 %} + + {% endif %} + +
    TemplateURL pathPresentMissing
    {{ o.template_slug }}{{ o.url_path }}{{ o.present_languages | join(', ') }}{{ o.missing_languages | join(', ') }}
    … and {{ health.hreflang_orphans | length - 50 }} more
    +
    +
    + {% endif %} + + + {% if health.missing_build_files %} +
    + + ❌ Missing build files ({{ health.counts.missing_build_files }}) + — published articles with no HTML on disk + +
    + + + + {% for m in health.missing_build_files[:50] %} + + + + + + + {% endfor %} + {% if health.missing_build_files | length > 50 %} + + {% endif %} + +
    SlugLanguageURL pathExpected path
    {{ m.slug }}{{ m.language }}{{ m.url_path }}{{ m.expected_path }}
    … and {{ health.missing_build_files | length - 50 }} more
    +
    +
    + {% endif %} + + + {% if health.broken_scenario_refs %} +
    + + ❌ Broken scenario refs ({{ health.counts.broken_scenario_refs }}) + — [scenario:slug] markers referencing deleted scenarios + +
    + + + + {% for b in health.broken_scenario_refs[:50] %} + + + + + + {% endfor %} + {% if health.broken_scenario_refs | length > 50 %} + + {% endif %} + +
    SlugLanguageBroken refs
    {{ b.slug }}{{ b.language }}{{ b.broken_scenario_refs | join(', ') }}
    … and {{ health.broken_scenario_refs | length - 50 }} more
    +
    +
    + {% endif %} + + {% endif %} +
    diff --git a/web/src/padelnomics/admin/templates/admin/pseo_job_status.html b/web/src/padelnomics/admin/templates/admin/pseo_job_status.html new file mode 100644 index 0000000..e039860 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/pseo_job_status.html @@ -0,0 +1,45 @@ +{# HTMX partial — replaces the entire for a job row while it's running. + Stops polling once the job is complete or failed (hx-trigger="every 2s" only applies + while this partial keeps returning a polling trigger). #} + +{% set pct = [((job.progress_current / job.progress_total) * 100) | int, 100] | min if job.progress_total else 0 %} + + + #{{ job.id }} + —{# payload not re-fetched in status endpoint — static display #} + + {% if job.status == 'complete' %} + Complete + {% elif job.status == 'failed' %} + Failed + {% else %} + Running… + {% endif %} + + + {% if job.progress_total and job.progress_total > 0 %} +
    +
    +
    +
    + {{ job.progress_current }}/{{ job.progress_total }} +
    + {% else %}—{% endif %} + + {{ job.created_at | default('') | truncate(19, True, '') }} + {{ job.completed_at | default('') | truncate(19, True, '') }} + + {% if job.error %} +
    + Error +
    {{ job.error[:500] }}
    +
    + {% else %}—{% endif %} + + diff --git a/web/src/padelnomics/admin/templates/admin/pseo_jobs.html b/web/src/padelnomics/admin/templates/admin/pseo_jobs.html new file mode 100644 index 0000000..2cb12d3 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/pseo_jobs.html @@ -0,0 +1,95 @@ +{% extends "admin/base_admin.html" %} +{% set admin_page = "pseo" %} + +{% block title %}pSEO Jobs - {{ config.APP_NAME }}{% endblock %} + +{% block admin_head %} + +{% endblock %} + +{% block admin_content %} +
    +
    +

    Generation Jobs

    +

    Recent article generation runs

    +
    + ← pSEO Engine +
    + +{% if not jobs %} +
    +

    No generation jobs found. Use the pSEO Engine dashboard to generate articles.

    +
    +{% else %} +
    +
    + + + + + + + + + + + + + + {% for job in jobs %} + + + + + + + + + + {% endfor %} + +
    #TemplateStatusProgressStartedCompletedError
    #{{ job.id }} + {% if job.payload %} + {% set payload = job.payload | fromjson %} + {{ payload.get('template_slug', '—') }} + {% else %}—{% endif %} + + {% if job.status == 'complete' %} + Complete + {% elif job.status == 'failed' %} + Failed + {% elif job.status == 'pending' %} + {# Poll live status for running jobs #} +
    + Running… +
    + {% else %} + {{ job.status }} + {% endif %} +
    + {% if job.progress_total and job.progress_total > 0 %} +
    +
    +
    +
    + {{ job.progress_current }}/{{ job.progress_total }} +
    + {% else %}—{% endif %} +
    {{ job.created_at | default('') | truncate(19, True, '') }}{{ job.completed_at | default('') | truncate(19, True, '') }} + {% if job.error %} +
    + Error +
    {{ job.error[:500] }}
    +
    + {% else %}—{% endif %} +
    +
    +
    +{% endif %} +{% endblock %} diff --git a/web/src/padelnomics/app.py b/web/src/padelnomics/app.py index 3413d81..5b29b64 100644 --- a/web/src/padelnomics/app.py +++ b/web/src/padelnomics/app.py @@ -1,6 +1,7 @@ """ Padelnomics - Application factory and entry point. """ +import json import time from pathlib import Path @@ -105,6 +106,7 @@ def create_app() -> Quart: app.jinja_env.filters["fmt_n"] = _fmt_n app.jinja_env.filters["tformat"] = _tformat # translate with placeholders: {{ t.key | tformat(count=n) }} app.jinja_env.filters["country_name"] = get_country_name # {{ article.country | country_name(lang) }} + app.jinja_env.filters["fromjson"] = json.loads # {{ job.payload | fromjson }} # Session config app.config["SESSION_COOKIE_SECURE"] = not config.DEBUG From 9107ba9bb852ffc75420e78c54e23dfbdb7474e2 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 19:34:15 +0100 Subject: [PATCH 53/98] perf: fix N+1 queries in templates(), handle_refill_monthly_credits() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit templates() in admin: - Replace per-template SELECT COUNT(*) articles queries with a single GROUP BY query before the loop — O(n) SQLite calls → O(1) - Replace per-template SELECT * LIMIT 501 (for count) with a new count_template_data() that runs SELECT COUNT(*) — cheaper per call - Add count_template_data() to content/__init__.py handle_refill_monthly_credits() in worker: - Replace N×3 per-supplier queries (fetch supplier, insert ledger, update balance) with 2 bulk SQL statements: 1. INSERT INTO credit_ledger SELECT ... for all eligible suppliers 2. 
UPDATE suppliers SET credit_balance = credit_balance + monthly_credits - Wrap in single transaction() for atomicity - Log total suppliers updated at INFO level audiences() in admin: - Add LIMIT 20 guard + comment explaining why one API call per audience is unavoidable (no bulk contacts endpoint in Resend) Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/admin/routes.py | 27 ++++++++------- web/src/padelnomics/content/__init__.py | 9 +++++ web/src/padelnomics/worker.py | 46 +++++++++++++++++++------ 3 files changed, 58 insertions(+), 24 deletions(-) diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 996133b..138ed69 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -1181,9 +1181,11 @@ async def email_compose(): @role_required("admin") async def audiences(): """List Resend audiences with local cache + API contact counts.""" - audience_list = await fetch_all("SELECT * FROM resend_audiences ORDER BY name") + # Cap at 20 — Resend free plan limit is 3 audiences, paid is more but still + # small. One API call per audience is unavoidable (no bulk contacts endpoint). 
+ audience_list = await fetch_all("SELECT * FROM resend_audiences ORDER BY name LIMIT 20") - # Enrich with contact count from API (best-effort) + # Enrich with contact count from API (best-effort, one call per audience) for a in audience_list: a["contact_count"] = None if config.RESEND_API_KEY and a.get("audience_id"): @@ -1263,21 +1265,20 @@ async def audience_contact_remove(audience_id: str): @role_required("admin") async def templates(): """List content templates scanned from disk.""" - from ..content import discover_templates, fetch_template_data + from ..content import count_template_data, discover_templates template_list = discover_templates() - # Attach DuckDB row counts - for t in template_list: - count_rows = await fetch_template_data(t["data_table"], limit=501) - t["data_count"] = len(count_rows) + # Single query: article counts for all templates — avoids N SQLite round-trips + counts_raw = await fetch_all( + "SELECT template_slug, COUNT(*) as cnt FROM articles GROUP BY template_slug" + ) + article_counts = {r["template_slug"]: r["cnt"] for r in counts_raw} - # Count generated articles for this template - row = await fetch_one( - "SELECT COUNT(*) as cnt FROM articles WHERE template_slug = ?", - (t["slug"],), - ) - t["generated_count"] = row["cnt"] if row else 0 + # One DuckDB COUNT(*) per template (N queries, but cheap vs SELECT * LIMIT 501) + for t in template_list: + t["data_count"] = await count_template_data(t["data_table"]) + t["generated_count"] = article_counts.get(t["slug"], 0) return await render_template("admin/templates.html", templates=template_list) diff --git a/web/src/padelnomics/content/__init__.py b/web/src/padelnomics/content/__init__.py index e86ad7e..469a0ad 100644 --- a/web/src/padelnomics/content/__init__.py +++ b/web/src/padelnomics/content/__init__.py @@ -128,6 +128,15 @@ async def fetch_template_data( ) +async def count_template_data(data_table: str) -> int: + """Return the row count of a DuckDB serving table. 
Returns 0 if unavailable.""" + assert "." in data_table, "data_table must be schema-qualified" + _validate_table_name(data_table) + + rows = await fetch_analytics(f"SELECT COUNT(*) AS cnt FROM {data_table}") + return rows[0]["cnt"] if rows else 0 + + def _validate_table_name(data_table: str) -> None: """Guard against SQL injection in table names.""" assert re.match(r"^[a-z_][a-z0-9_.]*$", data_table), ( diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index 4c9762b..4561b2d 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -18,6 +18,7 @@ from .core import ( init_db, send_email, setup_logging, + transaction, utcnow, utcnow_iso, ) @@ -589,18 +590,41 @@ async def handle_send_supplier_enquiry_email(payload: dict) -> None: @task("refill_monthly_credits") async def handle_refill_monthly_credits(payload: dict) -> None: - """Refill monthly credits for all claimed suppliers with a paid tier.""" - from .credits import monthly_credit_refill + """Refill monthly credits for all claimed suppliers with a paid tier. - suppliers = await fetch_all( - "SELECT id FROM suppliers WHERE tier IN ('growth', 'pro') AND claimed_by IS NOT NULL" - ) - for s in suppliers: - try: - await monthly_credit_refill(s["id"]) - logger.info("Refilled credits for supplier %s", s["id"]) - except Exception as e: - logger.error("Failed to refill credits for supplier %s: %s", s["id"], e) + Uses two bulk SQL statements instead of N×3 per-supplier queries: + 1. INSERT INTO credit_ledger SELECT ... for all eligible suppliers at once + 2. 
UPDATE suppliers SET credit_balance = credit_balance + monthly_credits + """ + now = utcnow_iso() + async with transaction() as db: + # Batch-insert ledger rows for all eligible suppliers in one statement + await db.execute( + """INSERT INTO credit_ledger + (supplier_id, delta, balance_after, event_type, note, created_at) + SELECT id, + monthly_credits, + credit_balance + monthly_credits, + 'monthly_allocation', + 'Monthly refill (' || tier || ' plan)', + ? + FROM suppliers + WHERE tier IN ('growth', 'pro') + AND claimed_by IS NOT NULL + AND monthly_credits > 0""", + (now,), + ) + # Update balances and refill timestamps in one statement + result = await db.execute( + """UPDATE suppliers + SET credit_balance = credit_balance + monthly_credits, + last_credit_refill = ? + WHERE tier IN ('growth', 'pro') + AND claimed_by IS NOT NULL + AND monthly_credits > 0""", + (now,), + ) + logger.info("Monthly credit refill complete — %d suppliers updated", result.rowcount) @task("generate_business_plan") From dd9ffd6d2796a11a44e2887e2dd7d4902a79e03a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 19:35:12 +0100 Subject: [PATCH 54/98] style: add units to variable names, name busy_timeout constant MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - core.py: rename RATE_LIMIT_WINDOW → RATE_LIMIT_WINDOW_SECONDS (env var name RATE_LIMIT_WINDOW is unchanged — only the Python attribute) - core.py: extract _BUSY_TIMEOUT_MS = 5000 local constant so the PRAGMA value is no longer a bare magic number - worker.py: rename poll_interval → poll_interval_seconds Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/core.py | 7 ++++--- web/src/padelnomics/worker.py | 6 +++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/web/src/padelnomics/core.py b/web/src/padelnomics/core.py index f23ba24..4117f90 100644 --- a/web/src/padelnomics/core.py +++ b/web/src/padelnomics/core.py @@ -77,7 +77,7 @@ class Config: WAITLIST_MODE: bool = 
os.getenv("WAITLIST_MODE", "false").lower() == "true" RATE_LIMIT_REQUESTS: int = int(os.getenv("RATE_LIMIT_REQUESTS", "100")) - RATE_LIMIT_WINDOW: int = int(os.getenv("RATE_LIMIT_WINDOW", "60")) + RATE_LIMIT_WINDOW_SECONDS: int = int(os.getenv("RATE_LIMIT_WINDOW", "60")) PLAN_FEATURES: dict = { "free": ["basic"], @@ -149,7 +149,8 @@ async def init_db(path: str = None) -> None: await _db.execute("PRAGMA journal_mode=WAL") await _db.execute("PRAGMA foreign_keys=ON") - await _db.execute("PRAGMA busy_timeout=5000") + _BUSY_TIMEOUT_MS = 5000 + await _db.execute(f"PRAGMA busy_timeout={_BUSY_TIMEOUT_MS}") await _db.execute("PRAGMA synchronous=NORMAL") await _db.execute("PRAGMA cache_size=-64000") await _db.execute("PRAGMA temp_store=MEMORY") @@ -573,7 +574,7 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t Uses SQLite for storage - no Redis needed. """ limit = limit or config.RATE_LIMIT_REQUESTS - window = window or config.RATE_LIMIT_WINDOW + window = window or config.RATE_LIMIT_WINDOW_SECONDS now = utcnow() window_start = now - timedelta(seconds=window) diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index 4561b2d..d50d6ae 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -786,7 +786,7 @@ async def process_task(task: dict) -> None: logger.error("Failed: %s (id=%s): %s", task_name, task_id, e) -async def run_worker(poll_interval: float = 1.0) -> None: +async def run_worker(poll_interval_seconds: float = 1.0) -> None: """Main worker loop.""" setup_logging() logger.info("Starting...") @@ -803,11 +803,11 @@ async def run_worker(poll_interval: float = 1.0) -> None: await process_task(task) if not tasks: - await asyncio.sleep(poll_interval) + await asyncio.sleep(poll_interval_seconds) except Exception as e: logger.error("Error: %s", e) - await asyncio.sleep(poll_interval * 5) + await asyncio.sleep(poll_interval_seconds * 5) async def run_scheduler() -> None: From 
ad48f23cfc80386d0452d40546f2df06b589b00c Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 20:42:11 +0100 Subject: [PATCH 55/98] fix: add precondition assertions in extract pipeline MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Assert landing_dir.is_dir() and year_month format (YYYY/MM) at the entry point of each extract function — turning silent wrong-path bugs into immediate AssertionError with a descriptive message. Files changed: - playtomic_availability.py: assert in _load_tenant_ids(), extract(), extract_recheck() - eurostat.py: assert in extract() Co-Authored-By: Claude Opus 4.6 --- .../padelnomics_extract/src/padelnomics_extract/eurostat.py | 2 ++ .../src/padelnomics_extract/playtomic_availability.py | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/eurostat.py b/extract/padelnomics_extract/src/padelnomics_extract/eurostat.py index 6e45cd8..c7dcfe0 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/eurostat.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/eurostat.py @@ -180,6 +180,8 @@ def extract( session: niquests.Session, ) -> dict: """Fetch all Eurostat datasets. 
Returns run metrics.""" + assert landing_dir.is_dir(), f"landing_dir must exist: {landing_dir}" + assert "/" in year_month and len(year_month) == 7, f"year_month must be YYYY/MM: {year_month!r}" year, month = year_month.split("/") files_written = 0 files_skipped = 0 diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index 4c73b80..cb1e41c 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -59,6 +59,7 @@ _thread_local = threading.local() def _load_tenant_ids(landing_dir: Path) -> list[str]: """Read tenant IDs from the most recent tenants.json.gz file.""" + assert landing_dir.is_dir(), f"landing_dir must exist: {landing_dir}" playtomic_dir = landing_dir / "playtomic" if not playtomic_dir.exists(): return [] @@ -243,6 +244,8 @@ def extract( session: niquests.Session, ) -> dict: """Fetch next-day availability for all known Playtomic venues.""" + assert landing_dir.is_dir(), f"landing_dir must exist: {landing_dir}" + assert "/" in year_month and len(year_month) == 7, f"year_month must be YYYY/MM: {year_month!r}" tenant_ids = _load_tenant_ids(landing_dir) if not tenant_ids: logger.warning("No tenant IDs found — run extract-playtomic-tenants first") @@ -385,6 +388,8 @@ def extract_recheck( session: niquests.Session, ) -> dict: """Re-query venues with slots starting soon for accurate occupancy data.""" + assert landing_dir.is_dir(), f"landing_dir must exist: {landing_dir}" + assert "/" in year_month and len(year_month) == 7, f"year_month must be YYYY/MM: {year_month!r}" now = datetime.now(UTC) target_date = now.strftime("%Y-%m-%d") From 83d148477d28f6c9e0782f98cb98f80fd4c5177f Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 20:42:52 +0100 Subject: [PATCH 56/98] refactor: extract shared _query_scenarios() to remove 
duplication scenarios() and scenario_results() both built the same WHERE clause and ran the same filtered query. Extracted into _query_scenarios(search, country, venue_type) -> (rows, total). Each handler is now ~10 lines of param parsing + render_template. Co-Authored-By: Claude Opus 4.6 --- web/src/padelnomics/admin/routes.py | 65 ++++++++++++----------------- 1 file changed, 27 insertions(+), 38 deletions(-) diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 138ed69..464d983 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -1417,6 +1417,29 @@ SCENARIO_FORM_FIELDS = [ ] +async def _query_scenarios(search: str, country: str, venue_type: str) -> tuple[list, int]: + """Execute filtered scenario query. Returns (rows, total_count).""" + wheres = ["1=1"] + params: list = [] + if search: + wheres.append("(title LIKE ? OR location LIKE ? OR slug LIKE ?)") + params.extend([f"%{search}%", f"%{search}%", f"%{search}%"]) + if country: + wheres.append("country = ?") + params.append(country) + if venue_type: + wheres.append("venue_type = ?") + params.append(venue_type) + + where = " AND ".join(wheres) + rows = await fetch_all( + f"SELECT * FROM published_scenarios WHERE {where} ORDER BY created_at DESC LIMIT 500", + tuple(params), + ) + total_row = await fetch_one("SELECT COUNT(*) as cnt FROM published_scenarios") + return rows, (total_row["cnt"] if total_row else 0) + + @bp.route("/scenarios") @role_required("admin") async def scenarios(): @@ -1425,37 +1448,20 @@ async def scenarios(): country_filter = request.args.get("country", "") venue_filter = request.args.get("venue_type", "") - wheres = ["1=1"] - params: list = [] - if search: - wheres.append("(title LIKE ? OR location LIKE ? 
OR slug LIKE ?)") - params.extend([f"%{search}%", f"%{search}%", f"%{search}%"]) - if country_filter: - wheres.append("country = ?") - params.append(country_filter) - if venue_filter: - wheres.append("venue_type = ?") - params.append(venue_filter) - - where = " AND ".join(wheres) - scenario_list = await fetch_all( - f"SELECT * FROM published_scenarios WHERE {where} ORDER BY created_at DESC LIMIT 500", - tuple(params), - ) + scenario_list, total = await _query_scenarios(search, country_filter, venue_filter) countries = await fetch_all( "SELECT DISTINCT country FROM published_scenarios WHERE country != '' ORDER BY country" ) venue_types = await fetch_all( "SELECT DISTINCT venue_type FROM published_scenarios WHERE venue_type != '' ORDER BY venue_type" ) - total = await fetch_one("SELECT COUNT(*) as cnt FROM published_scenarios") return await render_template( "admin/scenarios.html", scenarios=scenario_list, countries=[r["country"] for r in countries], venue_types=[r["venue_type"] for r in venue_types], - total=total["cnt"] if total else 0, + total=total, current_search=search, current_country=country_filter, current_venue_type=venue_filter, @@ -1471,29 +1477,12 @@ async def scenario_results(): country_filter = request.args.get("country", "") venue_filter = request.args.get("venue_type", "") - wheres = ["1=1"] - params: list = [] - if search: - wheres.append("(title LIKE ? OR location LIKE ? 
OR slug LIKE ?)") - params.extend([f"%{search}%", f"%{search}%", f"%{search}%"]) - if country_filter: - wheres.append("country = ?") - params.append(country_filter) - if venue_filter: - wheres.append("venue_type = ?") - params.append(venue_filter) - - where = " AND ".join(wheres) - scenario_list = await fetch_all( - f"SELECT * FROM published_scenarios WHERE {where} ORDER BY created_at DESC LIMIT 500", - tuple(params), - ) - total = await fetch_one("SELECT COUNT(*) as cnt FROM published_scenarios") + scenario_list, total = await _query_scenarios(search, country_filter, venue_filter) return await render_template( "admin/partials/scenario_results.html", scenarios=scenario_list, - total=total["cnt"] if total else 0, + total=total, is_generating=await _is_generating(), ) From ee49862d9181650bf3d6e2cbf2fe030c9cb58c6a Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 20:50:03 +0100 Subject: [PATCH 57/98] test(pseo): add 45 tests for health checks + pSEO Engine admin routes Covers content/health.py (get_template_stats, get_template_freshness, get_content_gaps, check_hreflang_orphans, check_missing_build_files, check_broken_scenario_refs, get_all_health_issues) and all 6 routes in admin/pseo_routes.py (dashboard, health partial, gaps partial, generate gaps, jobs list, job status polling). Also fixes two bugs found while writing tests: - check_hreflang_orphans: was grouping by url_path, but EN/DE articles have different paths. Now extracts natural key from slug pattern "{template_slug}-{lang}-{nk}" and groups by nk. - pseo_job_status.html + pseo_jobs.html: | default('') | truncate() fails when completed_at is None (default() only handles undefined, not None). Fixed to (value or '') | truncate(). 
Co-Authored-By: Claude Sonnet 4.6 --- .../templates/admin/pseo_job_status.html | 4 +- .../admin/templates/admin/pseo_jobs.html | 4 +- web/src/padelnomics/content/health.py | 51 +- web/tests/test_pseo.py | 765 ++++++++++++++++++ 4 files changed, 804 insertions(+), 20 deletions(-) create mode 100644 web/tests/test_pseo.py diff --git a/web/src/padelnomics/admin/templates/admin/pseo_job_status.html b/web/src/padelnomics/admin/templates/admin/pseo_job_status.html index e039860..e55bd2b 100644 --- a/web/src/padelnomics/admin/templates/admin/pseo_job_status.html +++ b/web/src/padelnomics/admin/templates/admin/pseo_job_status.html @@ -32,8 +32,8 @@
    {% else %}—{% endif %} - {{ job.created_at | default('') | truncate(19, True, '') }} - {{ job.completed_at | default('') | truncate(19, True, '') }} + {{ (job.created_at or '') | truncate(19, True, '') }} + {{ (job.completed_at or '') | truncate(19, True, '') }} {% if job.error %}
    diff --git a/web/src/padelnomics/admin/templates/admin/pseo_jobs.html b/web/src/padelnomics/admin/templates/admin/pseo_jobs.html index 2cb12d3..b761c5a 100644 --- a/web/src/padelnomics/admin/templates/admin/pseo_jobs.html +++ b/web/src/padelnomics/admin/templates/admin/pseo_jobs.html @@ -75,8 +75,8 @@
    {% else %}—{% endif %} - {{ job.created_at | default('') | truncate(19, True, '') }} - {{ job.completed_at | default('') | truncate(19, True, '') }} + {{ (job.created_at or '') | truncate(19, True, '') }} + {{ (job.completed_at or '') | truncate(19, True, '') }} {% if job.error %}
    diff --git a/web/src/padelnomics/content/health.py b/web/src/padelnomics/content/health.py index 13a6f34..b5da7fc 100644 --- a/web/src/padelnomics/content/health.py +++ b/web/src/padelnomics/content/health.py @@ -235,10 +235,14 @@ async def check_hreflang_orphans(templates: list[dict]) -> list[dict]: For example: city-cost-de generates EN + DE. If the EN article exists but DE is absent, that article is an hreflang orphan. + Orphan detection is based on the slug pattern "{template_slug}-{lang}-{natural_key}". + Articles are grouped by natural key; if any expected language is missing, the group + is an orphan. + Returns list of dicts: { "template_slug": str, - "url_path": str, + "url_path": str, # url_path of one present article for context "present_languages": list[str], "missing_languages": list[str], } @@ -250,24 +254,39 @@ async def check_hreflang_orphans(templates: list[dict]) -> list[dict]: continue # Single-language template — no orphans possible. rows = await fetch_all( - """SELECT url_path, - GROUP_CONCAT(language) as langs, - COUNT(DISTINCT language) as lang_count - FROM articles - WHERE template_slug = ? AND status = 'published' - GROUP BY url_path - HAVING COUNT(DISTINCT language) < ?""", - (t["slug"], len(expected)), + "SELECT slug, language, url_path FROM articles" + " WHERE template_slug = ? AND status = 'published'", + (t["slug"],), ) + + # Group by natural key extracted from slug pattern: + # "{template_slug}-{lang}-{natural_key}" → strip template prefix, then lang prefix. 
+ slug_prefix = t["slug"] + "-" + by_nk: dict[str, dict] = {} # nk → {"langs": set, "url_path": str} for r in rows: - present = set(r["langs"].split(",")) + slug = r["slug"] + lang = r["language"] + if not slug.startswith(slug_prefix): + continue + rest = slug[len(slug_prefix):] # "{lang}-{natural_key}" + lang_prefix = lang + "-" + if not rest.startswith(lang_prefix): + continue + nk = rest[len(lang_prefix):] + if nk not in by_nk: + by_nk[nk] = {"langs": set(), "url_path": r["url_path"]} + by_nk[nk]["langs"].add(lang) + + for nk, info in by_nk.items(): + present = info["langs"] missing = sorted(expected - present) - orphans.append({ - "template_slug": t["slug"], - "url_path": r["url_path"], - "present_languages": sorted(present), - "missing_languages": missing, - }) + if missing: + orphans.append({ + "template_slug": t["slug"], + "url_path": info["url_path"], + "present_languages": sorted(present), + "missing_languages": missing, + }) return orphans diff --git a/web/tests/test_pseo.py b/web/tests/test_pseo.py new file mode 100644 index 0000000..45627eb --- /dev/null +++ b/web/tests/test_pseo.py @@ -0,0 +1,765 @@ +""" +Tests for the pSEO Engine: health checks, content gaps, freshness, and admin routes. 
+ +Covers: + - content/health.py: get_template_stats, get_template_freshness, get_content_gaps, + check_hreflang_orphans, check_missing_build_files, check_broken_scenario_refs, + get_all_health_issues + - admin/pseo_routes.py: all 6 routes (dashboard, health, gaps, generate, jobs, job status) +""" +import json +from unittest.mock import patch + +import pytest +from padelnomics.content.health import ( + check_broken_scenario_refs, + check_hreflang_orphans, + check_missing_build_files, + get_all_health_issues, + get_content_gaps, + get_template_freshness, + get_template_stats, +) +from padelnomics.core import execute, utcnow_iso + +from padelnomics import core + +# ── Fixtures ────────────────────────────────────────────────────────────────── + + +@pytest.fixture +async def admin_client(app, db): + """Authenticated admin test client.""" + now = utcnow_iso() + async with db.execute( + "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", + ("pseo-admin@test.com", "pSEO Admin", now), + ) as cursor: + admin_id = cursor.lastrowid + await db.execute( + "INSERT INTO user_roles (user_id, role) VALUES (?, 'admin')", (admin_id,) + ) + await db.commit() + + async with app.test_client() as c: + async with c.session_transaction() as sess: + sess["user_id"] = admin_id + yield c + + +# ── DB helpers ──────────────────────────────────────────────────────────────── + + +async def _insert_article( + slug, + url_path, + status="published", + language="en", + template_slug="city-cost-de", + created_at=None, +): + """Insert a minimal article row and return its id.""" + ts = created_at or utcnow_iso() + return await execute( + """INSERT INTO articles + (url_path, slug, title, meta_description, country, region, + status, published_at, language, template_slug, created_at, updated_at) + VALUES (?, ?, ?, ?, 'DE', 'Europe', ?, ?, ?, ?, ?, ?)""", + ( + url_path, + slug, + f"Title {slug}", + f"Desc {slug}", + status, + ts if status == "published" else None, + language, + 
template_slug, + ts, + ts, + ), + ) + + +async def _insert_scenario(slug="test-scenario"): + """Insert a minimal published_scenario row.""" + from padelnomics.planner.calculator import calc, validate_state + + state = validate_state({"dblCourts": 2}) + d = calc(state) + return await execute( + """INSERT INTO published_scenarios + (slug, title, subtitle, location, country, venue_type, ownership, + court_config, state_json, calc_json) + VALUES (?, ?, '', 'TestCity', 'TC', 'indoor', 'rent', '2 double', ?, ?)""", + (slug, f"Scenario {slug}", json.dumps(state), json.dumps(d)), + ) + + +async def _insert_task(status="pending", progress_current=0, progress_total=0): + """Insert a generate_articles task row and return its id.""" + now = utcnow_iso() + async with core._db.execute( + """INSERT INTO tasks + (task_name, payload, status, run_at, progress_current, progress_total, created_at) + VALUES ('generate_articles', '{}', ?, ?, ?, ?, ?)""", + (status, now, progress_current, progress_total, now), + ) as cursor: + task_id = cursor.lastrowid + await core._db.commit() + return task_id + + +# ── DuckDB mock rows ────────────────────────────────────────────────────────── + +_DUCKDB_ROWS = [ + {"city_slug": "berlin", "city": "Berlin", "country": "DE"}, + {"city_slug": "munich", "city": "Munich", "country": "DE"}, + {"city_slug": "hamburg", "city": "Hamburg", "country": "DE"}, +] + + +async def _mock_fetch_duckdb(query, params=None): + return _DUCKDB_ROWS + + +# ════════════════════════════════════════════════════════════════════════════ +# get_template_stats() +# ════════════════════════════════════════════════════════════════════════════ + + +class TestGetTemplateStats: + async def test_empty_db_returns_zeros(self, db): + stats = await get_template_stats("city-cost-de") + assert stats["total"] == 0 + assert stats["published"] == 0 + assert stats["draft"] == 0 + assert stats["by_language"] == {} + + async def test_counts_per_status(self, db): + await 
_insert_article("city-cost-de-en-berlin", "/en/markets/germany/berlin", + status="published", language="en") + await _insert_article("city-cost-de-en-munich", "/en/markets/germany/munich", + status="draft", language="en") + await _insert_article("city-cost-de-de-berlin", "/de/markets/germany/berlin", + status="published", language="de") + + stats = await get_template_stats("city-cost-de") + + assert stats["total"] == 3 + assert stats["published"] == 2 + assert stats["draft"] == 1 + assert stats["by_language"]["en"]["total"] == 2 + assert stats["by_language"]["de"]["total"] == 1 + + async def test_ignores_other_templates(self, db): + await _insert_article("other-en-berlin", "/en/other/berlin", template_slug="other") + stats = await get_template_stats("city-cost-de") + assert stats["total"] == 0 + + +# ════════════════════════════════════════════════════════════════════════════ +# get_template_freshness() +# ════════════════════════════════════════════════════════════════════════════ + +_SAMPLE_TEMPLATES = [ + { + "slug": "city-cost-de", + "name": "City Cost DE", + "data_table": "serving.pseo_city_costs_de", + "languages": ["en", "de"], + } +] + + +class TestGetTemplateFreshness: + async def test_no_meta_file_returns_no_data(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + monkeypatch.setattr(health_mod, "_read_serving_meta", lambda: {}) + + result = await get_template_freshness(_SAMPLE_TEMPLATES) + assert len(result) == 1 + assert result[0]["status"] == "no_data" + + async def test_meta_present_no_articles_returns_no_articles(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + monkeypatch.setattr(health_mod, "_read_serving_meta", lambda: { + "exported_at_utc": "2026-01-15T10:00:00+00:00", + "tables": {"pseo_city_costs_de": {"row_count": 100}}, + }) + + result = await get_template_freshness(_SAMPLE_TEMPLATES) + assert result[0]["status"] == "no_articles" + assert result[0]["row_count"] == 100 + + async def 
test_article_older_than_export_returns_stale(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + # Article created Jan 10, data exported Jan 15 → stale + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + status="published", language="en", created_at="2026-01-10T08:00:00", + ) + monkeypatch.setattr(health_mod, "_read_serving_meta", lambda: { + "exported_at_utc": "2026-01-15T10:00:00+00:00", + "tables": {"pseo_city_costs_de": {"row_count": 100}}, + }) + + result = await get_template_freshness(_SAMPLE_TEMPLATES) + assert result[0]["status"] == "stale" + + async def test_article_newer_than_export_returns_fresh(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + # Data exported Jan 10, article updated Jan 15 → fresh + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + status="published", language="en", created_at="2026-01-15T12:00:00", + ) + monkeypatch.setattr(health_mod, "_read_serving_meta", lambda: { + "exported_at_utc": "2026-01-10T10:00:00+00:00", + "tables": {}, + }) + + result = await get_template_freshness(_SAMPLE_TEMPLATES) + assert result[0]["status"] == "fresh" + + +# ════════════════════════════════════════════════════════════════════════════ +# get_content_gaps() +# ════════════════════════════════════════════════════════════════════════════ + + +class TestGetContentGaps: + async def test_no_articles_returns_all_duckdb_rows(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + monkeypatch.setattr(health_mod, "fetch_analytics", _mock_fetch_duckdb) + + gaps = await get_content_gaps( + template_slug="city-cost-de", + data_table="serving.pseo_city_costs_de", + natural_key="city_slug", + languages=["en"], + ) + assert len(gaps) == len(_DUCKDB_ROWS) + assert all(g["_missing_languages"] == ["en"] for g in gaps) + + async def test_existing_article_excluded_from_gaps(self, db, monkeypatch): + import padelnomics.content.health 
as health_mod + + monkeypatch.setattr(health_mod, "fetch_analytics", _mock_fetch_duckdb) + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", language="en", + ) + + gaps = await get_content_gaps( + template_slug="city-cost-de", + data_table="serving.pseo_city_costs_de", + natural_key="city_slug", + languages=["en"], + ) + gap_keys = {g["_natural_key"] for g in gaps} + assert "berlin" not in gap_keys + assert "munich" in gap_keys + assert "hamburg" in gap_keys + + async def test_partial_language_gap_detected(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + monkeypatch.setattr(health_mod, "fetch_analytics", _mock_fetch_duckdb) + # EN exists for berlin, DE is missing → berlin has a gap for "de" + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", language="en", + ) + + gaps = await get_content_gaps( + template_slug="city-cost-de", + data_table="serving.pseo_city_costs_de", + natural_key="city_slug", + languages=["en", "de"], + ) + berlin = next((g for g in gaps if g["_natural_key"] == "berlin"), None) + assert berlin is not None + assert berlin["_missing_languages"] == ["de"] + + async def test_no_gaps_when_all_articles_exist(self, db, monkeypatch): + import padelnomics.content.health as health_mod + + monkeypatch.setattr(health_mod, "fetch_analytics", _mock_fetch_duckdb) + for key in ("berlin", "munich", "hamburg"): + await _insert_article( + f"city-cost-de-en-{key}", f"/en/markets/germany/{key}", language="en", + ) + + gaps = await get_content_gaps( + template_slug="city-cost-de", + data_table="serving.pseo_city_costs_de", + natural_key="city_slug", + languages=["en"], + ) + assert gaps == [] + + +# ════════════════════════════════════════════════════════════════════════════ +# check_hreflang_orphans() +# ════════════════════════════════════════════════════════════════════════════ + + +class TestCheckHreflangOrphans: + async def test_single_lang_template_no_orphans(self, db): + 
templates = [{"slug": "city-cost-de", "name": "City Cost DE", "languages": ["en"]}] + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + orphans = await check_hreflang_orphans(templates) + assert orphans == [] + + async def test_bilingual_both_present_no_orphans(self, db): + templates = [{"slug": "city-cost-de", "name": "City Cost DE", "languages": ["en", "de"]}] + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + await _insert_article( + "city-cost-de-de-berlin", "/de/markets/germany/berlin", + language="de", status="published", + ) + orphans = await check_hreflang_orphans(templates) + assert orphans == [] + + async def test_missing_de_sibling_detected(self, db): + templates = [{"slug": "city-cost-de", "name": "City Cost DE", "languages": ["en", "de"]}] + # Only EN for berlin — DE is missing + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + orphans = await check_hreflang_orphans(templates) + assert len(orphans) == 1 + assert orphans[0]["template_slug"] == "city-cost-de" + assert "de" in orphans[0]["missing_languages"] + assert "en" in orphans[0]["present_languages"] + + async def test_draft_articles_not_counted(self, db): + templates = [{"slug": "city-cost-de", "name": "City Cost DE", "languages": ["en", "de"]}] + # Draft articles should be ignored + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="draft", + ) + orphans = await check_hreflang_orphans(templates) + assert orphans == [] + + +# ════════════════════════════════════════════════════════════════════════════ +# check_missing_build_files() +# ════════════════════════════════════════════════════════════════════════════ + + +class TestCheckMissingBuildFiles: + async def test_no_articles_returns_empty(self, db, tmp_path): + result = 
await check_missing_build_files(build_dir=tmp_path) + assert result == [] + + async def test_build_file_present_not_reported(self, db, tmp_path): + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + build_file = tmp_path / "en" / "city-cost-de-en-berlin.html" + build_file.parent.mkdir(parents=True) + build_file.write_text("
<html><head><title>
    Berlin
    </title></head><body></body></html>
    ") + + result = await check_missing_build_files(build_dir=tmp_path) + assert result == [] + + async def test_missing_build_file_reported(self, db, tmp_path): + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + # No build file created + result = await check_missing_build_files(build_dir=tmp_path) + assert len(result) == 1 + assert result[0]["slug"] == "city-cost-de-en-berlin" + assert result[0]["language"] == "en" + + async def test_draft_articles_ignored(self, db, tmp_path): + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="draft", + ) + result = await check_missing_build_files(build_dir=tmp_path) + assert result == [] + + +# ════════════════════════════════════════════════════════════════════════════ +# check_broken_scenario_refs() +# ════════════════════════════════════════════════════════════════════════════ + + +class TestCheckBrokenScenarioRefs: + async def test_no_markdown_files_returns_empty(self, db, tmp_path): + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + result = await check_broken_scenario_refs(build_dir=tmp_path) + assert result == [] + + async def test_valid_scenario_ref_not_reported(self, db, tmp_path): + await _insert_scenario("berlin-scenario") + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + md_dir = tmp_path / "en" / "md" + md_dir.mkdir(parents=True) + (md_dir / "city-cost-de-en-berlin.md").write_text( + "# Berlin\n\n[scenario:berlin-scenario:capex]\n" + ) + result = await check_broken_scenario_refs(build_dir=tmp_path) + assert result == [] + + async def test_missing_scenario_ref_reported(self, db, tmp_path): + # No scenario in DB, but markdown references one + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", 
status="published", + ) + md_dir = tmp_path / "en" / "md" + md_dir.mkdir(parents=True) + (md_dir / "city-cost-de-en-berlin.md").write_text( + "# Berlin\n\n[scenario:ghost-scenario:capex]\n" + ) + result = await check_broken_scenario_refs(build_dir=tmp_path) + assert len(result) == 1 + assert "ghost-scenario" in result[0]["broken_scenario_refs"] + + async def test_no_template_slug_articles_ignored(self, db, tmp_path): + # Legacy article (no template_slug) should not be checked + await execute( + """INSERT INTO articles + (url_path, slug, title, status, language, created_at) + VALUES ('/en/legacy', 'legacy', 'Legacy', 'published', 'en', ?)""", + (utcnow_iso(),), + ) + md_dir = tmp_path / "en" / "md" + md_dir.mkdir(parents=True) + (md_dir / "legacy.md").write_text("# Legacy\n\n[scenario:ghost]\n") + + result = await check_broken_scenario_refs(build_dir=tmp_path) + assert result == [] + + +# ════════════════════════════════════════════════════════════════════════════ +# get_all_health_issues() +# ════════════════════════════════════════════════════════════════════════════ + + +class TestGetAllHealthIssues: + async def test_clean_state_returns_zero_counts(self, db, tmp_path): + templates = [{"slug": "city-cost-de", "name": "City Cost DE", "languages": ["en"]}] + result = await get_all_health_issues(templates, build_dir=tmp_path) + + assert result["counts"]["total"] == 0 + assert result["counts"]["hreflang_orphans"] == 0 + assert result["counts"]["missing_build_files"] == 0 + assert result["counts"]["broken_scenario_refs"] == 0 + assert "hreflang_orphans" in result + assert "missing_build_files" in result + assert "broken_scenario_refs" in result + + async def test_orphan_counted_in_total(self, db, tmp_path): + templates = [{"slug": "city-cost-de", "name": "City Cost DE", "languages": ["en", "de"]}] + # EN article with no DE sibling → orphan + await _insert_article( + "city-cost-de-en-berlin", "/en/markets/germany/berlin", + language="en", status="published", + ) + 
result = await get_all_health_issues(templates, build_dir=tmp_path) + assert result["counts"]["hreflang_orphans"] == 1 + assert result["counts"]["total"] >= 1 + + +# ════════════════════════════════════════════════════════════════════════════ +# pSEO Route tests +# ════════════════════════════════════════════════════════════════════════════ + +# Mock objects for route tests — avoids needing a live DuckDB +_MOCK_TEMPLATE_CFG = { + "slug": "city-cost-de", + "name": "City Cost DE", + "data_table": "serving.pseo_city_costs_de", + "natural_key": "city_slug", + "languages": ["en", "de"], + "url_pattern": "/markets/{country}/{city_slug}", +} +_MOCK_TEMPLATES = [_MOCK_TEMPLATE_CFG] + + +def _discover_mock(): + return _MOCK_TEMPLATES + + +def _load_template_mock(slug): + if slug == "city-cost-de": + return _MOCK_TEMPLATE_CFG + raise FileNotFoundError(f"Template {slug!r} not found") + + +async def _freshness_mock(templates): + return [ + { + "slug": t["slug"], + "name": t["name"], + "data_table": t["data_table"], + "status": "fresh", + "exported_at_utc": None, + "last_generated": None, + "row_count": 100, + } + for t in templates + ] + + +async def _stats_mock(slug): + return { + "total": 10, "published": 8, "draft": 2, "scheduled": 0, + "by_language": { + "en": {"total": 5, "published": 4, "draft": 1, "scheduled": 0}, + "de": {"total": 5, "published": 4, "draft": 1, "scheduled": 0}, + }, + } + + +async def _health_mock(templates, build_dir=None): + return { + "hreflang_orphans": [], + "missing_build_files": [], + "broken_scenario_refs": [], + "counts": {"hreflang_orphans": 0, "missing_build_files": 0, + "broken_scenario_refs": 0, "total": 0}, + } + + +async def _gaps_empty_mock(template_slug, data_table, natural_key, languages, limit=200): + return [] + + +async def _gaps_two_mock(template_slug, data_table, natural_key, languages, limit=200): + return [ + {"city_slug": "munich", "_natural_key": "munich", "_missing_languages": ["en"]}, + {"city_slug": "hamburg", 
"_natural_key": "hamburg", "_missing_languages": ["de"]}, + ] + + +class TestPseoRoutes: + """Tests for all pSEO Engine admin blueprint routes.""" + + # -- Access control -------------------------------------------------------- + + async def test_dashboard_requires_admin(self, client, db): + resp = await client.get("/admin/pseo/") + assert resp.status_code in (302, 403) + + async def test_health_requires_admin(self, client, db): + resp = await client.get("/admin/pseo/health") + assert resp.status_code in (302, 403) + + async def test_gaps_requires_admin(self, client, db): + resp = await client.get("/admin/pseo/gaps/city-cost-de") + assert resp.status_code in (302, 403) + + async def test_jobs_requires_admin(self, client, db): + resp = await client.get("/admin/pseo/jobs") + assert resp.status_code in (302, 403) + + # -- Dashboard ------------------------------------------------------------- + + async def test_dashboard_renders(self, admin_client, db): + with ( + patch("padelnomics.admin.pseo_routes.discover_templates", _discover_mock), + patch("padelnomics.admin.pseo_routes.get_template_freshness", _freshness_mock), + patch("padelnomics.admin.pseo_routes.get_template_stats", _stats_mock), + ): + resp = await admin_client.get("/admin/pseo/") + + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "pSEO Engine" in text + + async def test_dashboard_shows_template_name(self, admin_client, db): + with ( + patch("padelnomics.admin.pseo_routes.discover_templates", _discover_mock), + patch("padelnomics.admin.pseo_routes.get_template_freshness", _freshness_mock), + patch("padelnomics.admin.pseo_routes.get_template_stats", _stats_mock), + ): + resp = await admin_client.get("/admin/pseo/") + + text = await resp.get_data(as_text=True) + assert "City Cost DE" in text + + # -- Health HTMX partial --------------------------------------------------- + + async def test_health_partial_renders(self, admin_client, db): + with ( + 
patch("padelnomics.admin.pseo_routes.discover_templates", _discover_mock), + patch("padelnomics.admin.pseo_routes.get_all_health_issues", _health_mock), + ): + resp = await admin_client.get("/admin/pseo/health") + + assert resp.status_code == 200 + + # -- Content gaps HTMX partial --------------------------------------------- + + async def test_gaps_unknown_template_returns_404(self, admin_client, db): + def _raise(slug): + raise FileNotFoundError("not found") + + with patch("padelnomics.admin.pseo_routes.load_template", _raise): + resp = await admin_client.get("/admin/pseo/gaps/no-such-template") + + assert resp.status_code == 404 + + async def test_gaps_partial_renders(self, admin_client, db): + with ( + patch("padelnomics.admin.pseo_routes.load_template", _load_template_mock), + patch("padelnomics.admin.pseo_routes.get_content_gaps", _gaps_two_mock), + ): + resp = await admin_client.get("/admin/pseo/gaps/city-cost-de") + + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + # Should show gap count or row content + assert "munich" in text or "missing" in text.lower() + + async def test_gaps_empty_shows_no_gaps_message(self, admin_client, db): + with ( + patch("padelnomics.admin.pseo_routes.load_template", _load_template_mock), + patch("padelnomics.admin.pseo_routes.get_content_gaps", _gaps_empty_mock), + ): + resp = await admin_client.get("/admin/pseo/gaps/city-cost-de") + + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "No gaps" in text or "all" in text.lower() + + # -- Generate gaps POST ---------------------------------------------------- + + async def test_generate_gaps_redirects(self, admin_client, db): + async with admin_client.session_transaction() as sess: + sess["csrf_token"] = "test" + + with ( + patch("padelnomics.admin.pseo_routes.load_template", _load_template_mock), + patch("padelnomics.admin.pseo_routes.get_content_gaps", _gaps_two_mock), + ): + resp = await admin_client.post( + 
"/admin/pseo/gaps/city-cost-de/generate", + form={"csrf_token": "test"}, + ) + + assert resp.status_code == 302 + + async def test_generate_gaps_enqueues_task(self, admin_client, db): + async with admin_client.session_transaction() as sess: + sess["csrf_token"] = "test" + + with ( + patch("padelnomics.admin.pseo_routes.load_template", _load_template_mock), + patch("padelnomics.admin.pseo_routes.get_content_gaps", _gaps_two_mock), + ): + await admin_client.post( + "/admin/pseo/gaps/city-cost-de/generate", + form={"csrf_token": "test"}, + ) + + tasks = await core.fetch_all( + "SELECT task_name FROM tasks WHERE task_name = 'generate_articles'" + ) + assert len(tasks) == 1 + + async def test_generate_gaps_no_gaps_redirects_without_task(self, admin_client, db): + async with admin_client.session_transaction() as sess: + sess["csrf_token"] = "test" + + with ( + patch("padelnomics.admin.pseo_routes.load_template", _load_template_mock), + patch("padelnomics.admin.pseo_routes.get_content_gaps", _gaps_empty_mock), + ): + resp = await admin_client.post( + "/admin/pseo/gaps/city-cost-de/generate", + form={"csrf_token": "test"}, + ) + + assert resp.status_code == 302 + tasks = await core.fetch_all( + "SELECT task_name FROM tasks WHERE task_name = 'generate_articles'" + ) + assert len(tasks) == 0 + + # -- Jobs list ------------------------------------------------------------- + + async def test_jobs_renders_empty(self, admin_client, db): + resp = await admin_client.get("/admin/pseo/jobs") + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "Generation Jobs" in text + + async def test_jobs_shows_task_row(self, admin_client, db): + await _insert_task(status="complete", progress_current=20, progress_total=20) + + resp = await admin_client.get("/admin/pseo/jobs") + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "Complete" in text + + # -- Job status HTMX polled ------------------------------------------------ + + 
async def test_job_status_not_found_returns_404(self, admin_client, db): + resp = await admin_client.get("/admin/pseo/jobs/9999/status") + assert resp.status_code == 404 + + async def test_job_status_renders_pending(self, admin_client, db): + job_id = await _insert_task( + status="pending", progress_current=5, progress_total=20 + ) + + resp = await admin_client.get(f"/admin/pseo/jobs/{job_id}/status") + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "Running" in text + + async def test_job_status_renders_complete(self, admin_client, db): + job_id = await _insert_task( + status="complete", progress_current=20, progress_total=20 + ) + + resp = await admin_client.get(f"/admin/pseo/jobs/{job_id}/status") + assert resp.status_code == 200 + text = await resp.get_data(as_text=True) + assert "Complete" in text + + async def test_job_status_complete_no_htmx_poll_trigger(self, admin_client, db): + """A completed job should not include hx-trigger="every 2s" (stops HTMX polling).""" + job_id = await _insert_task( + status="complete", progress_current=20, progress_total=20 + ) + + resp = await admin_client.get(f"/admin/pseo/jobs/{job_id}/status") + text = await resp.get_data(as_text=True) + assert "every 2s" not in text + + async def test_job_status_pending_includes_htmx_poll_trigger(self, admin_client, db): + """A pending job should include hx-trigger="every 2s" (keeps HTMX polling).""" + job_id = await _insert_task( + status="pending", progress_current=0, progress_total=20 + ) + + resp = await admin_client.get(f"/admin/pseo/jobs/{job_id}/status") + text = await resp.get_data(as_text=True) + assert "every 2s" in text From 44c0dd0b8d0afeaf3b038947c7bd3e6af974975c Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 20:50:43 +0100 Subject: [PATCH 58/98] refactor: minor TigerStyle cleanups MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - export_serving.py: move `import re` to module level — was 
imported inside a loop body on every iteration - sitemap.py: add comment documenting that the in-memory TTL cache is process-local (valid for single-worker deployment, Dockerfile --workers 1) - playtomic_availability.py: use `or "10"` fallback for CIRCUIT_BREAKER_THRESHOLD env var to handle empty-string case Co-Authored-By: Claude Opus 4.6 --- .../src/padelnomics_extract/playtomic_availability.py | 2 +- src/padelnomics/export_serving.py | 2 +- web/src/padelnomics/sitemap.py | 3 +++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index cb1e41c..6372191 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -42,7 +42,7 @@ MAX_VENUES_PER_RUN = 20_000 MAX_RETRIES_PER_VENUE = 2 MAX_WORKERS = int(os.environ.get("EXTRACT_WORKERS", "1")) RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "90")) -CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD", "10")) +CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10") # Parallel mode submits futures in batches so the circuit breaker can stop # new submissions after it opens. Already-inflight futures in the current diff --git a/src/padelnomics/export_serving.py b/src/padelnomics/export_serving.py index 03d9384..9d79df6 100644 --- a/src/padelnomics/export_serving.py +++ b/src/padelnomics/export_serving.py @@ -26,6 +26,7 @@ Usage: import logging import os +import re import duckdb @@ -60,7 +61,6 @@ def export_serving() -> None: for view_name, view_sql in view_rows: # Pattern: ... FROM "local".sqlmesh__serving.serving__name__hash; # Strip the "local". 
prefix to get schema.table - import re match = re.search(r'FROM\s+"local"\.(sqlmesh__serving\.\S+)', view_sql) assert match, f"Cannot parse view definition for {view_name}: {view_sql[:200]}" physical_tables.append((view_name, match.group(1))) diff --git a/web/src/padelnomics/sitemap.py b/web/src/padelnomics/sitemap.py index 02e5010..6103093 100644 --- a/web/src/padelnomics/sitemap.py +++ b/web/src/padelnomics/sitemap.py @@ -6,6 +6,9 @@ from quart import Response from .core import fetch_all +# Process-local cache — valid for the current single-Hypercorn-worker deployment +# (Dockerfile: `--workers 1`). If worker count increases, replace with a +# DB-backed cache (e.g. a single-row SQLite table with an expires_at column). _cache_xml: str = "" _cache_timestamp: float = 0.0 CACHE_TTL_SECONDS: int = 3600 # 1 hour From a9b14b8f738b0ee7cc7a3734ce5902f7736a8201 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 20:51:29 +0100 Subject: [PATCH 59/98] docs: update CHANGELOG + PROJECT.md for pSEO Engine Records all Phase 1 deliverables: content gaps, data freshness, health checks, generation job monitoring, 45 tests, bug fixes. Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 10 ++++++++++ PROJECT.md | 2 ++ 2 files changed, 12 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8184e00..0e4a58b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
## [Unreleased] ### Added +- **pSEO Engine admin tab** (`/admin/pseo`) — operational visibility for the programmatic SEO system: + - **Content gap detection** — queries DuckDB serving tables vs SQLite articles to find rows with no matching article per language; per-template HTMX-loaded gap list + - **Data freshness signals** — compares `_serving_meta.json` export timestamp vs `MAX(updated_at)` in articles; per-template status: 🟢 Fresh / 🟡 Stale / 🟣 No articles / ⚫ No data + - **Article health checks** (HTMX partial) — hreflang orphans (EN exists, DE missing), missing HTML build files, broken `[scenario:slug]` references in article markdown + - **Generation job monitoring** — live progress bars polling every 2s while jobs run; stops polling on completion; error drilldown via `
    `; dedicated `/admin/pseo/jobs` list page + - **`_serving_meta.json`** — written by `export_serving.py` after atomic rename; records `exported_at_utc` and per-table row counts; drives freshness signals in pSEO Engine dashboard + - **Progress tracking columns** on `tasks` table (migration 0021): `progress_current`, `progress_total`, `error_log`; `generate_articles()` writes progress every 50 articles and on completion + - 45 new tests covering all health functions + pSEO routes (access control, rendering, gap detection, generate-gaps POST, job status HTMX polling) + + - **Dual market score system** — split the single market score into two branded scores: - **padelnomics Marktreife-Score™** (market maturity): existing score, refined — only for cities with ≥1 padel venue. Adds ×0.85 saturation discount when `venues_per_100k > 8`. diff --git a/PROJECT.md b/PROJECT.md index 2bb85a1..c1bf877 100644 --- a/PROJECT.md +++ b/PROJECT.md @@ -107,6 +107,7 @@ - [x] Task queue management (list, retry, delete) - [x] Lead funnel stats on admin dashboard - [x] Email hub (`/admin/emails`) — sent log, inbox, compose, audiences, delivery event tracking via Resend webhooks +- [x] **pSEO Engine tab** (`/admin/pseo`) — content gap detection, data freshness signals, article health checks (hreflang orphans, missing build files, broken scenario refs), generation job monitoring with live progress bars ### SEO & Legal - [x] Sitemap (both language variants, `` on all entries) @@ -136,6 +137,7 @@ ## In Progress 🔄 - [ ] **Dual market score system** — Marktreife-Score + Marktpotenzial-Score + expanded data pipeline (merging to master) +- [ ] **pSEO Engine** — implemented (worktree `pseo-engine`), pending merge to master --- From ec15012d00b400659ac8004143075cdb13895d3b Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 20:55:38 +0100 Subject: [PATCH 60/98] test: update mock_fetch_analytics to handle COUNT(*) queries count_template_data() uses fetch_analytics with a COUNT(*) query. 
The pseo_env test fixture's mock returned TEST_ROWS for any unrecognized query, causing a KeyError on rows[0]["cnt"]. Add a COUNT(*) branch that returns [{cnt: len(TEST_ROWS)}]. Co-Authored-By: Claude Opus 4.6 --- web/tests/test_content.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/web/tests/test_content.py b/web/tests/test_content.py index 02cc103..06a1633 100644 --- a/web/tests/test_content.py +++ b/web/tests/test_content.py @@ -134,6 +134,8 @@ def pseo_env(tmp_path, monkeypatch): async def mock_fetch_analytics(query, params=None): if "information_schema" in query: return TEST_COLUMNS + if "COUNT(*)" in query.upper(): + return [{"cnt": len(TEST_ROWS)}] if "WHERE" in query and params: # preview_article: filter by natural key value return [r for r in TEST_ROWS if params[0] in r.values()] From 78ffbc313f05ed851fd172365be87d7024677cfe Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 21:17:00 +0100 Subject: [PATCH 61/98] feat(extract): parallel DAG scheduler + proxy rotation for tenants MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - all.py: replace sequential loop with graphlib.TopologicalSorter + ThreadPoolExecutor - EXTRACTORS dict declares (func, [deps]) — self-documenting dependency graph - 8 extractors run in parallel immediately; availability starts as soon as tenants finishes (not after all others complete) - max_workers=len(EXTRACTORS) — all I/O-bound, no CPU contention - playtomic_tenants.py: add proxy rotation via make_round_robin_cycler - no throttle when PROXY_URLS set (IP rotation removes per-IP rate concern) - keeps 2s throttle for direct runs - _shared.py: add optional proxy_url param to run_extractor() - any extractor can opt in to proxy support via the shared session - overpass_tennis.py: fix query timeout (out body → out center, timeout 180 → 300) - out center returns centroids only, not full geometry — fits within server limits - playtomic_availability.py: fix 
CIRCUIT_BREAKER_THRESHOLD empty string crash - int(os.environ.get(..., "10")) → int(os.environ.get(...) or "10") Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/_shared.py | 7 ++ .../src/padelnomics_extract/all.py | 90 ++++++++++++++----- .../padelnomics_extract/overpass_tennis.py | 4 +- .../padelnomics_extract/playtomic_tenants.py | 19 +++- 4 files changed, 95 insertions(+), 25 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/_shared.py b/extract/padelnomics_extract/src/padelnomics_extract/_shared.py index 4df4355..be4ad1b 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/_shared.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/_shared.py @@ -41,12 +41,17 @@ def setup_logging(name: str) -> logging.Logger: def run_extractor( extractor_name: str, func, + proxy_url: str | None = None, ) -> None: """Boilerplate wrapper: open state DB, start run, call func, end run. func signature: func(landing_dir, year_month, conn, session) -> dict The dict must contain: files_written, files_skipped, bytes_written. Optional: cursor_value. + + proxy_url: if set, configure the session proxy before calling func. + Extractors that manage their own proxy logic (e.g. playtomic_availability) + ignore the shared session and are unaffected. 
""" LANDING_DIR.mkdir(parents=True, exist_ok=True) conn = open_state_db(LANDING_DIR) @@ -58,6 +63,8 @@ def run_extractor( try: with niquests.Session() as session: session.headers["User-Agent"] = USER_AGENT + if proxy_url: + session.proxies = {"http": proxy_url, "https": proxy_url} result = func(LANDING_DIR, year_month, conn, session) assert isinstance(result, dict), f"extractor must return a dict, got {type(result)}" diff --git a/extract/padelnomics_extract/src/padelnomics_extract/all.py b/extract/padelnomics_extract/src/padelnomics_extract/all.py index 15c153f..8b93c94 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/all.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/all.py @@ -1,9 +1,20 @@ -"""Run all extractors sequentially. +"""Run all extractors with dependency-aware parallel execution. Entry point for the combined `uv run extract` command. -Each extractor gets its own state tracking row in .state.sqlite. + +Extractors are declared as a dict mapping name → (func, [dependencies]). +A graphlib.TopologicalSorter schedules them: tasks with no unmet dependencies +run immediately in parallel; each completion may unlock new tasks. 
+ +Current dependency graph: + - All 8 non-availability extractors have no dependencies (run in parallel) + - playtomic_availability depends on playtomic_tenants (starts as soon as + tenants finishes, even if other extractors are still running) """ +from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait +from graphlib import TopologicalSorter + from ._shared import run_extractor, setup_logging from .census_usa import EXTRACTOR_NAME as CENSUS_USA_NAME from .census_usa import extract as extract_census_usa @@ -26,31 +37,68 @@ from .playtomic_tenants import extract as extract_tenants logger = setup_logging("padelnomics.extract") -EXTRACTORS = [ - (OVERPASS_NAME, extract_overpass), - (OVERPASS_TENNIS_NAME, extract_overpass_tennis), - (EUROSTAT_NAME, extract_eurostat), - (EUROSTAT_CITY_LABELS_NAME, extract_eurostat_city_labels), - (CENSUS_USA_NAME, extract_census_usa), - (ONS_UK_NAME, extract_ons_uk), - (GEONAMES_NAME, extract_geonames), - (TENANTS_NAME, extract_tenants), - (AVAILABILITY_NAME, extract_availability), -] +# Declarative: name → (func, [dependency names]) +# Add new extractors here; the scheduler handles ordering and parallelism. +EXTRACTORS: dict[str, tuple] = { + OVERPASS_NAME: (extract_overpass, []), + OVERPASS_TENNIS_NAME: (extract_overpass_tennis, []), + EUROSTAT_NAME: (extract_eurostat, []), + EUROSTAT_CITY_LABELS_NAME: (extract_eurostat_city_labels, []), + CENSUS_USA_NAME: (extract_census_usa, []), + ONS_UK_NAME: (extract_ons_uk, []), + GEONAMES_NAME: (extract_geonames, []), + TENANTS_NAME: (extract_tenants, []), + AVAILABILITY_NAME: (extract_availability, [TENANTS_NAME]), +} + + +def _run_safe(name: str) -> bool: + """Run one extractor, return True on success.""" + func, _ = EXTRACTORS[name] + try: + run_extractor(name, func) + return True + except Exception: + logger.exception("Extractor %s failed", name) + return False def main() -> None: - """Run all extractors. 
Each gets its own state row.""" + """Run all extractors respecting declared dependencies, maximally parallel.""" logger.info("Running %d extractors", len(EXTRACTORS)) - for i, (name, func) in enumerate(EXTRACTORS, 1): - logger.info("[%d/%d] %s", i, len(EXTRACTORS), name) - try: - run_extractor(name, func) - except Exception: - logger.exception("Extractor %s failed — continuing with next", name) + graph = {name: set(deps) for name, (_, deps) in EXTRACTORS.items()} + ts = TopologicalSorter(graph) + ts.prepare() - logger.info("All extractors complete") + failed: list[str] = [] + with ThreadPoolExecutor(max_workers=len(EXTRACTORS)) as pool: + futures: dict = {} + + # Submit all initially ready tasks (no dependencies) + for name in ts.get_ready(): + futures[pool.submit(_run_safe, name)] = name + + # Process completions and submit newly-unblocked tasks + while futures: + done_set, _ = wait(futures, return_when=FIRST_COMPLETED) + for f in done_set: + name = futures.pop(f) + ok = f.result() + if ok: + logger.info("done: %s", name) + else: + failed.append(name) + logger.warning("FAILED: %s", name) + ts.done(name) + + for ready in ts.get_ready(): + futures[pool.submit(_run_safe, ready)] = ready + + if failed: + logger.warning("Completed with %d failure(s): %s", len(failed), ", ".join(failed)) + else: + logger.info("All %d extractors complete", len(EXTRACTORS)) if __name__ == "__main__": diff --git a/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py b/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py index 79c75e7..d0a6748 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py @@ -27,13 +27,13 @@ OVERPASS_URL = "https://overpass-api.de/api/interpreter" TENNIS_OVERPASS_TIMEOUT_SECONDS = OVERPASS_TIMEOUT_SECONDS * 3 OVERPASS_QUERY = ( - "[out:json][timeout:180];\n" + "[out:json][timeout:300];\n" "(\n" ' node["sport"="tennis"];\n' ' 
way["sport"="tennis"];\n' ' relation["sport"="tennis"];\n' ");\n" - "out body;" + "out center;" ) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py index a80636a..699ace2 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py @@ -10,7 +10,8 @@ API notes (discovered 2026-02): - `size=100` is the maximum effective page size - ~14K venues globally as of Feb 2026 -Rate: 1 req / 2 s (see docs/data-sources-inventory.md §1.2). +Rate: 1 req / 2 s when running direct (see docs/data-sources-inventory.md §1.2). + No throttle when PROXY_URLS is set — IP rotation removes per-IP rate concern. Landing: {LANDING_DIR}/playtomic/{year}/{month}/tenants.json.gz """ @@ -23,6 +24,7 @@ from pathlib import Path import niquests from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging +from .proxy import load_proxy_urls, make_round_robin_cycler from .utils import landing_path, write_gzip_atomic logger = setup_logging("padelnomics.extract.playtomic_tenants") @@ -46,10 +48,22 @@ def extract( dest_dir = landing_path(landing_dir, "playtomic", year, month) dest = dest_dir / "tenants.json.gz" + proxy_urls = load_proxy_urls() + cycler = make_round_robin_cycler(proxy_urls) if proxy_urls else None + if cycler: + logger.info("proxy rotation enabled (%d proxies, no throttle)", len(proxy_urls)) + else: + logger.info("no proxies configured — throttle %ds per page", THROTTLE_SECONDS) + all_tenants: list[dict] = [] seen_ids: set[str] = set() for page in range(MAX_PAGES): + if cycler: + proxy = cycler["next_proxy"]() + if proxy: + session.proxies = {"http": proxy, "https": proxy} + params = { "sport_ids": "PADEL", "size": PAGE_SIZE, @@ -82,7 +96,8 @@ def extract( if len(tenants) < PAGE_SIZE: break - time.sleep(THROTTLE_SECONDS) + if not cycler: + time.sleep(THROTTLE_SECONDS) 
payload = json.dumps({"tenants": all_tenants, "count": len(all_tenants)}).encode() bytes_written = write_gzip_atomic(dest, payload) From aa7a8bad99aaee23e4b35f77176806d7b3c57006 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 21:24:37 +0100 Subject: [PATCH 62/98] test: sync i18n tests to current translation values MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - wiz_summary_label DE: "Aktuelle Werte" → "Aktuelle Zusammenfassung" - add mscore_reife_chip + mscore_potenzial_chip to identical-value allowlist (branded product names) Co-Authored-By: Claude Sonnet 4.6 --- web/tests/test_i18n_parity.py | 2 ++ web/tests/test_i18n_tips.py | 2 +- web/tests/test_planner_routes.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/web/tests/test_i18n_parity.py b/web/tests/test_i18n_parity.py index 62a28ab..a12c155 100644 --- a/web/tests/test_i18n_parity.py +++ b/web/tests/test_i18n_parity.py @@ -59,6 +59,8 @@ _IDENTICAL_VALUE_ALLOWLIST = { "bp_lbl_ebitda", "bp_lbl_irr", "bp_lbl_moic", "bp_lbl_opex", # Market Score — branded term kept in English in DE "footer_market_score", + # Market Score chip labels — branded product names, same in DE + "mscore_reife_chip", "mscore_potenzial_chip", } diff --git a/web/tests/test_i18n_tips.py b/web/tests/test_i18n_tips.py index fb24f81..f93d8bb 100644 --- a/web/tests/test_i18n_tips.py +++ b/web/tests/test_i18n_tips.py @@ -108,4 +108,4 @@ def test_wiz_summary_label_english_value(): def test_wiz_summary_label_german_value(): - assert DE["wiz_summary_label"] == "Aktuelle Werte" + assert DE["wiz_summary_label"] == "Aktuelle Zusammenfassung" diff --git a/web/tests/test_planner_routes.py b/web/tests/test_planner_routes.py index 0688080..501a1c8 100644 --- a/web/tests/test_planner_routes.py +++ b/web/tests/test_planner_routes.py @@ -120,4 +120,4 @@ class TestWizSummaryLabel: async def test_german_summary_caption_in_response(self, client): resp = await 
client.post("/de/planner/calculate", form={"activeTab": "capex"}) body = (await resp.get_data()).decode() - assert "Aktuelle Werte" in body + assert "Aktuelle Zusammenfassung" in body From fda7da7d59ddacd1266d25a37f1d60da7310fe94 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 21:26:19 +0100 Subject: [PATCH 63/98] chore: replace hypercorn with granian (Rust ASGI server) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Granian is ~3-5x faster than Hypercorn in benchmarks. No code changes needed — Quart is standard ASGI so any ASGI server works. - web/pyproject.toml: hypercorn → granian>=1.6.0 (installed: 2.7.1) - Dockerfile CMD: hypercorn → granian --interface asgi - core.py setup_logging(): silence granian loggers instead of hypercorn's Co-Authored-By: Claude Sonnet 4.6 --- Dockerfile | 2 +- uv.lock | 83 ++++++++++++++++++++++++++++++++++++- web/pyproject.toml | 2 +- web/src/padelnomics/core.py | 5 +-- 4 files changed, 85 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6448efc..0fd0862 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,4 +29,4 @@ USER appuser ENV PYTHONUNBUFFERED=1 ENV DATABASE_PATH=/app/data/app.db EXPOSE 5000 -CMD ["hypercorn", "padelnomics.app:app", "--bind", "0.0.0.0:5000", "--workers", "1"] +CMD ["granian", "--interface", "asgi", "--host", "0.0.0.0", "--port", "5000", "--workers", "1", "padelnomics.app:app"] diff --git a/uv.lock b/uv.lock index d8b72d8..6775a7c 100644 --- a/uv.lock +++ b/uv.lock @@ -641,6 +641,85 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, ] +[[package]] +name = "granian" +version = "2.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, 
+] +sdist = { url = "https://files.pythonhosted.org/packages/e5/e5/c3a745a2c60cba6e67c5607fe6e18883fd2b7800fd7215511c526fab3872/granian-2.7.1.tar.gz", hash = "sha256:cc79292b24895db9441d32c3a9f11a4e19805d566bc77f9deb7ef18daac62e16", size = 128508, upload-time = "2026-02-08T20:02:31.53Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/fd/44b8027007de2558d09ff7ee688229ad5d4f368bb166589a2547926057e4/granian-2.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bbcdea802c5a594d204b807de6829a7d4b723c397087857ca4d3a3cf2ac1d16e", size = 6447686, upload-time = "2026-02-08T20:00:41.829Z" }, + { url = "https://files.pythonhosted.org/packages/e8/b6/db0b26c9226490fb42d51fa70fd08e8daf5ad9747d60d2dc143dd2517b3d/granian-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1abc6dfe5d5fb1f2e863200ee9edf749ed82ff9c1361c21483b214a91654879", size = 6154446, upload-time = "2026-02-08T20:00:44.1Z" }, + { url = "https://files.pythonhosted.org/packages/2b/1b/44d8acdfda1a1af2c4fa8ba215912bd78318b59f195c5b7831dab69a7719/granian-2.7.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:edf7cbab2c54a3dd10c0f8a737b133cc605b6309acdfe3aa060bc954d7ae13c5", size = 7144519, upload-time = "2026-02-08T20:00:45.504Z" }, + { url = "https://files.pythonhosted.org/packages/be/ac/6e142e3a26c3fe90d7e6592256ed4940e696f4430933d597e4014b5ee441/granian-2.7.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5240510754712cc802ad5a71507f10efdb83a043dbccd351662897f58916a76a", size = 6353689, upload-time = "2026-02-08T20:00:46.766Z" }, + { url = "https://files.pythonhosted.org/packages/37/49/1836d259060ceae6cf1dc7d0c424864786ac028c93aaeed07f6ea9dfcafc/granian-2.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2c445c13fa6fc7235f95c28f2d203369d0c516aba15ba24faad08ca0a095bd0", size = 6906248, upload-time = "2026-02-08T20:00:48.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/84/0d18018b05652991c8502da2cbab6b9b8c234926870d0458d2d7c5124a65/granian-2.7.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71776d7319906cfc78f723cc38f927ffaf58bcb9b1707fe5d88c3662827aa1f7", size = 6974742, upload-time = "2026-02-08T20:00:49.636Z" }, + { url = "https://files.pythonhosted.org/packages/19/83/f9c3685681aa4b41feb73def9ef63800b6f639629e9b083a0c279583fb92/granian-2.7.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab6da78f0fcecf9a9177db2d716e50214b540cb1ea77dafc88e35184ca901266", size = 7030837, upload-time = "2026-02-08T20:00:51.464Z" }, + { url = "https://files.pythonhosted.org/packages/30/62/c445c0c96552f11dee49d002d4af32adbeca19b7e8064a1d106952810345/granian-2.7.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:025218f8ccc5907bc8277b0df9a60927a5862ee607606cfc970cc404d5346af6", size = 7313823, upload-time = "2026-02-08T20:00:53.787Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bc/c9d1dce0b2d11bf76aadd06608d3b01a2b697c030c5ea01474d15e36e2af/granian-2.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98ab412772f2c66260a3535da4101ccc6dd20de30e74a87b32fd7abc729cc14f", size = 7014570, upload-time = "2026-02-08T20:00:55.085Z" }, + { url = "https://files.pythonhosted.org/packages/e3/51/2abe731a4ec42038a0ea24695bd6fd79d4b340797115bd1af40c21cfd1a3/granian-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:ba65410b56d951d9aa2e8b0b0f7796431052c43eca2bb8a526a743d2f8aa539f", size = 4058148, upload-time = "2026-02-08T20:00:56.678Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2d/4a29e3b654ad38b0a7b1fb477a20a1d03b36a40060d15bd98f43654aac3a/granian-2.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d603c53a8d7e6243a5c4b9749116143f4a6184033777451ba376b038905ac57f", size = 6390662, upload-time = "2026-02-08T20:00:57.999Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/38/bf86291a04d1d4fd7b469b0134224cdb0cafa4e7cc8de5744f79d045ff5c/granian-2.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:df3e8f617dc3e21e3a4e543678993e855fb1d008f1207c646d27efd45e45161b", size = 6126936, upload-time = "2026-02-08T20:00:59.352Z" }, + { url = "https://files.pythonhosted.org/packages/fe/69/09eea196a4f9883dad20d4acd645be35242c0004ba4a698f73f9e0fe8291/granian-2.7.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1007f1b58e4ace682d424789dd34b63526a482ba3efc01ca18098b65420d6d", size = 7120523, upload-time = "2026-02-08T20:01:00.731Z" }, + { url = "https://files.pythonhosted.org/packages/9b/89/db6b3504a41e222a1d94417995f73fa17a27dc2fc664c29295dfc34bd64b/granian-2.7.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76b1751c5d5dcc93803e37baf68396dba22d809001037faec4b2df8fdc52af7e", size = 6420419, upload-time = "2026-02-08T20:01:02.189Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ef/5d6712ad81e85841d4fd5436f5cbfcdb3ac3ddeb9e75953fd6b323bfff64/granian-2.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a02a150c6a1ba8a7123634a22c0352a116ea2211e634479e9f64409db72d4489", size = 6895176, upload-time = "2026-02-08T20:01:03.748Z" }, + { url = "https://files.pythonhosted.org/packages/90/f5/bd0fafc93f01f345ad1ecc70fbb459e452c777fe8b4958020399332b7f03/granian-2.7.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:24f09f5dbb9105498e521733e5993135fb276e346ce8f04cead2f4113ca51bba", size = 7002315, upload-time = "2026-02-08T20:01:05.071Z" }, + { url = "https://files.pythonhosted.org/packages/af/ff/b17d357d4f1eff19ff45257ea924bb571d4cf2caefccdc8aca8c0b1a3c7a/granian-2.7.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc435c5d1881554bf7eb4e2fe8d2ad7e5052a0bacc7195c477bfc97544c7bf46", size = 7018969, upload-time = "2026-02-08T20:01:06.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/68/e0e24673e943fbb2540a7cd68dd3ea10a4cd9db6f538de9cec26b1c54133/granian-2.7.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:68a136b5d7ade34f3ee5ee743b2bdd55d6c1f0249c6bfdc8e038c6d0846de61e", size = 7274801, upload-time = "2026-02-08T20:01:08.071Z" }, + { url = "https://files.pythonhosted.org/packages/42/ee/cda1e8eb3e7025d82b6594814fc2f95ce252f638691240e4bc523924e204/granian-2.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93100bd3185e653c482c2996e11a7ece58ea28e355ef335bb0a30e4851c3ae8c", size = 7032826, upload-time = "2026-02-08T20:01:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/ca/48/2c89fa53f5cdbc8495f55d587f3fa24f9ff984a8c572dd8930aa991e4301/granian-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:6cab79a863ccf6f18aa8b5e9261865d87c28574cd85174e8bb1bab873220077d", size = 4076284, upload-time = "2026-02-08T20:01:10.862Z" }, + { url = "https://files.pythonhosted.org/packages/53/ce/e8ae26e248daaa8e782c0e6bce1350759da262f8aa637b8a0036c5455376/granian-2.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:b4f0c807fefedfa58d07c2751cc40471765387d331e70ea7ebd2a2ff5d492ca0", size = 6384691, upload-time = "2026-02-08T20:01:12.389Z" }, + { url = "https://files.pythonhosted.org/packages/c2/5f/32f933dac26835ad2f8bc9b4f5762be8f8340318a9bbeca75b32fa6f6195/granian-2.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:78ce501ec337b7db52ba1773c0acf0abd72b3fac71b6b747fe4ae6f38cca0a6b", size = 6128567, upload-time = "2026-02-08T20:01:14.64Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/432b73f713ebb102e1585f5abec9cb2284d76f4d16df73c24f2e4dcc9cbd/granian-2.7.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2079c9c29b65404283ef61ced11905c8491e4bc68a4e3b56c684fe2dab8cf8c2", size = 7129893, upload-time = "2026-02-08T20:01:16.526Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/5e/fdd4e42c800804cc277f12a3eba51747d100739b8beb0c1a909837670d86/granian-2.7.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a68bf02c93c2137c68e2acd1dc68e871f49ce2e61b042fec9a145104daf3d5b", size = 6428486, upload-time = "2026-02-08T20:01:18.024Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/7a5632e1a206e11ac3470f9ef79b2aadce67d1dfc5cdf75a5fd9795ae0fa/granian-2.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3f44b244600103a3ad6358937a42370b8cc518b7754c740620be681272e0bd", size = 6888218, upload-time = "2026-02-08T20:01:20.393Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ef/379b77fc6f8909ffc4d9397135b122d93446f303f52e428aca1120d79b08/granian-2.7.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:7681e76c61af0dd1e135139f5fa9561ec16fdbac19d0a9fbf4617079b822bf21", size = 7007452, upload-time = "2026-02-08T20:01:21.864Z" }, + { url = "https://files.pythonhosted.org/packages/4e/49/6849f1f784186f41551ceba040e4402d7daa7a9c5c89e0b4c0fb7df5d73e/granian-2.7.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bc56766803ec0f958f4f2e3be9f4cb2385f9d6970e34ade6ff5c0ba751a3ce9c", size = 7024506, upload-time = "2026-02-08T20:01:23.24Z" }, + { url = "https://files.pythonhosted.org/packages/52/85/dcbc5b860697e1ebf9fa4206d3fba931a2ea2547fb8d2638ad392f4d5a90/granian-2.7.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a20eaf1b756981caa8c0d6c19c5467e03386aadb07f854b88243218c9db9513b", size = 7289505, upload-time = "2026-02-08T20:01:24.59Z" }, + { url = "https://files.pythonhosted.org/packages/0d/0f/3ddd893a4582943ab21c59853b7a6adae837130445ad64964cd73ea77ce4/granian-2.7.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:af842b07f14d7433774627c16fb0fbcdc9e60587d2d684636d2eba446c343297", size = 7022894, upload-time = "2026-02-08T20:01:25.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/1e/52173568f8da3a2d50f48eabe1cc19d857586e0878009477ed0c196ebebd/granian-2.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:861d69fc3504c891f152585c2109d1eaf791c35392b13ed22c72fb199dc50dfa", size = 4093077, upload-time = "2026-02-08T20:01:27.735Z" }, + { url = "https://files.pythonhosted.org/packages/33/a8/3e0ea25a85a05618363ac9f90eb4e504ccc00e48c64f30cd37ef7046097f/granian-2.7.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:33ed73fe753fcae51a647555614fc67013558a654d323115ab0fbf60aca6c47a", size = 6354066, upload-time = "2026-02-08T20:01:29.268Z" }, + { url = "https://files.pythonhosted.org/packages/46/8d/a8965de519507ba5dfa13af4760b3c1b334e46bf3283eab55f171693de0a/granian-2.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:de9367e2dca2923bf12b52f004ab975ed0de45c8dedddd87993ed9fffabfb0ce", size = 6049800, upload-time = "2026-02-08T20:01:30.989Z" }, + { url = "https://files.pythonhosted.org/packages/21/f6/ff76aab55b5a7bdbd20f4f73486fcb5a09440f4fd56bd3dc6266e65dee9a/granian-2.7.1-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:38088f6bd4780b280aae8abf15c2205bdf9066def927f8c9690c13a966519286", size = 6219241, upload-time = "2026-02-08T20:01:32.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/3a/7aaf34391df169d54bcc3bfc32919b58de9b8a9e28e66b4f3276b910ef68/granian-2.7.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ebbc04483ada6e1a8a89f055de0b4cad2f90b3cbc94a1ae08fc2b140d905f4b", size = 7114695, upload-time = "2026-02-08T20:01:33.748Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d3/540a9f816884abf4da62d2e411455968a1ee8e4685243d3dd7fee1cf375f/granian-2.7.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2acc29a3eb9b1b9708355abd5438c216caff4ba4536bc77e46b19e44fb1b37ea", size = 6775127, upload-time = "2026-02-08T20:01:35.925Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/62/d133c36fdab4552db665d6bb2d53ac4834e41a97d8d0244f1aacc03e188f/granian-2.7.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:bec493af655645e58e6d89c7e37eb7751e9bf827506286e765d79a5c4ff10a3f", size = 6847644, upload-time = "2026-02-08T20:01:37.282Z" }, + { url = "https://files.pythonhosted.org/packages/21/4a/619d699acd3cd37de048ab606a85021f5edf42bd54c7f081d20dccd48041/granian-2.7.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:dc51944736d5683b255b7cd33581daf8bc44ae1dab31240e1969eca13d1e75cf", size = 7011427, upload-time = "2026-02-08T20:01:38.858Z" }, + { url = "https://files.pythonhosted.org/packages/91/25/389eea98109e4b85e443fae384b30ff67167f27f4df6fb43d26cd151d0dc/granian-2.7.1-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:f787bbcb06ca605ff4161a04078591b2269b628165214ab913084e7fdb5ab9d8", size = 7261453, upload-time = "2026-02-08T20:01:40.355Z" }, + { url = "https://files.pythonhosted.org/packages/ac/00/75180d71994b87c0b56385c1b60c93b73b8822ed8edba2c63f72b0f836b6/granian-2.7.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:52163a3b609489bcb614e45811e2a66a6780b1459bbbc29504de13c23a115112", size = 7039030, upload-time = "2026-02-08T20:01:41.758Z" }, + { url = "https://files.pythonhosted.org/packages/d3/11/a913af3c65debb5e5d577d3cb5ac988313c05c19fca789e167375ee432df/granian-2.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b2cd2118353db7f06fee0aefdada9e109434e030ac2fdc8f691b669787680d2e", size = 4066745, upload-time = "2026-02-08T20:01:43.161Z" }, + { url = "https://files.pythonhosted.org/packages/41/c1/cc5c0abc5c573a8832c584f52c98f7882119fe81d52a49285800e25d993f/granian-2.7.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:a677972bb9050ec15896452f2c299b56f15e01212c1185d9373b92348fd88930", size = 6397999, upload-time = "2026-02-08T20:01:44.515Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/77/5248e8cf1c25f080959c0a4e4a8039107b0b2bf67a9fc8904cfe57614a24/granian-2.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ee4b404425a135274ab69513fdc1883ce954beef22113058e6e2a25d89926e68", size = 6108572, upload-time = "2026-02-08T20:01:45.919Z" }, + { url = "https://files.pythonhosted.org/packages/cb/a0/fa0b961d7c9b1c2f046a58b85ffe1e7bc5d3a7fcc8c947bdd6fd397a312c/granian-2.7.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c4eaf3b0c1602a2ef75a8e418bb6d2867994e7ac246ea6833f7b812289d038c", size = 7101910, upload-time = "2026-02-08T20:01:47.773Z" }, + { url = "https://files.pythonhosted.org/packages/ca/70/edd388b12ebecde4edbbf4d62cd78ed6e5ae0f6b834e88de2fe06e6f948e/granian-2.7.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d593d80f568b2025a227a9b0bf664db94c9423069b27c120e288a2350507a4d8", size = 6399861, upload-time = "2026-02-08T20:01:49.594Z" }, + { url = "https://files.pythonhosted.org/packages/74/18/6e8962f1be1a578841e9c68bb8f3a416c30880003c3180a1e6b852ad1717/granian-2.7.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab94be605aaf90968fd04fb527f1b2790f6815dd0e9690586adb4a9be1f25010", size = 6951789, upload-time = "2026-02-08T20:01:51.115Z" }, + { url = "https://files.pythonhosted.org/packages/eb/47/9f07664d847653115b196f70594016de8fd7629e5aa1645d6d20f771cf14/granian-2.7.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:392f6cc3eb7a5039a815a823c3f468161b4eb179d061450c0ec843cef0eb1b54", size = 6983541, upload-time = "2026-02-08T20:01:52.693Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c6/08b9203a4f897a31810bb18344b5ecaf26eb34135916c257c14ec762eb51/granian-2.7.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:ff063c417ee16fadca3c534e2059a6cf47e1df2607f1c6012be4ea6486b814f5", size = 7032652, upload-time = "2026-02-08T20:01:54.336Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/69/c7a5c595313432a5373e6014980a77d8f028f24f31b68406af97ace94fe6/granian-2.7.1-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:1a59ed88c40896db96a973e49a5ba2a2f84d7569c1da8cf11c685d11bffc2ef1", size = 7254611, upload-time = "2026-02-08T20:01:55.74Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/fe283eeb7a2f525472bd6ef2b0c6b7fb95d4369902b75d8e7e252628e62e/granian-2.7.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:773ff347d4541634e8c50b82b532eefa68c0043cda100bd44712b88565a5495b", size = 7110307, upload-time = "2026-02-08T20:01:57.117Z" }, + { url = "https://files.pythonhosted.org/packages/61/ea/b6901c64cac1fc3b455acdba279d80454fe963eca314ebfaf4e2eec9933c/granian-2.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:089f8a0d6d6a215f6773aa9dfdb56ec349d28840203517e7a7933485b1a1f404", size = 4122834, upload-time = "2026-02-08T20:01:58.682Z" }, + { url = "https://files.pythonhosted.org/packages/5c/41/bd76745d2fd2e2735390037324cb2d2b2f934473d77fb27f176494f5b2f3/granian-2.7.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:e25c7dedd9325e11bda1d9692f25314791d24ae39b8206fb858f18a57087f2ee", size = 6376497, upload-time = "2026-02-08T20:02:00.117Z" }, + { url = "https://files.pythonhosted.org/packages/40/ea/bdb388e3e24308e92c370674d225e819eee6740dd440d6450860039b934a/granian-2.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:387c6032d46191deaf18819f15988e98d0f5c85eef09efb28c4c4b7b8b0dc2d2", size = 6092395, upload-time = "2026-02-08T20:02:01.75Z" }, + { url = "https://files.pythonhosted.org/packages/31/9c/438da7d5c66ed2c9df1c5946485e464fd52a420217212e0c9b5bb90f8e93/granian-2.7.1-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d1b5b47a34ab0f47f8bd447894412b4d9bdcb2011fbb9d1b8f7890c8442d233", size = 6226387, upload-time = "2026-02-08T20:02:03.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/1a/f317272d59618a846a0c7ea019ab0352d947e8afdae40faea580b98600c7/granian-2.7.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b7f65cd1d46c8ee454b0f29743340bebc170c1da2af83bd759fb02d69c24c7e9", size = 7123367, upload-time = "2026-02-08T20:02:04.721Z" }, + { url = "https://files.pythonhosted.org/packages/d0/63/0c0c0005798c808082ae72b6bc3ccc1282d1b078375b060c5477aabbe407/granian-2.7.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d12b93e6467fc079b38e104154d5e5625a5e7c6a1776a59039c1e5fb57e0fe3", size = 6709311, upload-time = "2026-02-08T20:02:06.266Z" }, + { url = "https://files.pythonhosted.org/packages/e6/27/73655570644b3e727b22e3cf4239eebe90c18d1d3c868fc3d71e4d50dd46/granian-2.7.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:ff4aba223bfeca0c6bc8f64ef03d87d04aff36515b2fd91108e5c9f55e67a5ee", size = 6802243, upload-time = "2026-02-08T20:02:07.757Z" }, + { url = "https://files.pythonhosted.org/packages/23/00/2b9655d05f14bee4cd4080f3a18f0f0f4e7014158d7323a1cb0d31ed61cb/granian-2.7.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:2cc036b6f7db04ba6750aa86dff17c7930b7f295e4bfc5f35e9231d9f42e8094", size = 6978785, upload-time = "2026-02-08T20:02:09.269Z" }, + { url = "https://files.pythonhosted.org/packages/c9/e4/deff2560260ddc9a99315ecb345c93485b0b102708838e7c42837c7a6535/granian-2.7.1-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:f2834f178ddbe25f077b28eba3b0e3e3814b17a0fc61fe44c17c270eef37ff54", size = 7303589, upload-time = "2026-02-08T20:02:10.81Z" }, + { url = "https://files.pythonhosted.org/packages/d1/52/7fefaf4f1317883e7a5f25a92bca43f914b47d4762ad8f38f48e7e85b2a8/granian-2.7.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:874d4eedc527f9c59dd192e263be8047b86759e71ac9552283d010bcea93993b", size = 6984251, upload-time = "2026-02-08T20:02:12.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/c1/d6aa049cdbe15b9ffe7964b01cc50efc8ccc067c3a50da7bc5ced1eaf6a4/granian-2.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:d787d9bf1744c275fa60775629e910305aa6395a88a32eea25b0008652ed9fe9", size = 4051984, upload-time = "2026-02-08T20:02:14.325Z" }, + { url = "https://files.pythonhosted.org/packages/d7/72/36d03ed914f70c79583542a60cedfeb7bc2ab992ee75ca5725612c1191a4/granian-2.7.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:97ebda3ae49c181b25b603d32ace5a8d83880c9c52550d3b66a4bf09f3c1b809", size = 6411236, upload-time = "2026-02-08T20:02:16.136Z" }, + { url = "https://files.pythonhosted.org/packages/f7/79/6d734663ea31a1935ae0d835ba12883cdfe63376593918de84ddf1aa26c2/granian-2.7.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8eeb97b4cc403956cdd782da83d30eddbfb90415e850520b6627d207cf06d8db", size = 6120207, upload-time = "2026-02-08T20:02:18.502Z" }, + { url = "https://files.pythonhosted.org/packages/86/40/c6bf30ae2f9feb305b454a2a2118e40bec9dac94cc5c23a9d68f2d054f14/granian-2.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8272952e6c094cdb24e42c9123eb780e789fe28e3f49a80cfce3df1b080ae2d", size = 6926893, upload-time = "2026-02-08T20:02:20.951Z" }, + { url = "https://files.pythonhosted.org/packages/76/00/b2567a14dd68ae1fee1085d60f9ddaa6e93b155c86893804ed2303228f37/granian-2.7.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32a4414b3ac17eef25d3bc33e2ed4f85150ebed3ef40028d4192bd0a842358c0", size = 7031580, upload-time = "2026-02-08T20:02:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/91/54/4c4aff8f153c3340d0aa26afbeb3db03bc9d7d914905c47705328c2514a8/granian-2.7.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:99b24f1241d142bdcb33c5744e7503b358fbcd899c44e4f48464b2bcaca2bd0f", size = 7097067, upload-time = "2026-02-08T20:02:25.489Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/82/c1fce66ebeb3d681d4405eee78b9159b230558f8bc99e44456541c03fe7b/granian-2.7.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:f4b715653cc9765c1aea629802c862e030482ff847f5d4c03d5f401830ff617c", size = 7336016, upload-time = "2026-02-08T20:02:26.969Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/6bd215ec3567bcc36defb7cb30a3c03f73f2f56f8a8a34148a24008f94b6/granian-2.7.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:47ef955d06c1cdff1aeb3d4d0ada415359a034295d0f162d7c0a0f98d76d4d6c", size = 7004178, upload-time = "2026-02-08T20:02:28.753Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/c2480b4b4123e22b41bf82fc49e7a3b28cd2274dfa445959a1805f9a603d/granian-2.7.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a9bb46143d77161065cfe1d662f9a758bb17c7e3a2fde178f0a5aaac3fb3a65b", size = 4081455, upload-time = "2026-02-08T20:02:30.271Z" }, +] + [[package]] name = "greenlet" version = "3.3.1" @@ -1297,8 +1376,8 @@ dependencies = [ { name = "duckdb" }, { name = "google-api-python-client" }, { name = "google-auth" }, + { name = "granian" }, { name = "httpx" }, - { name = "hypercorn" }, { name = "itsdangerous" }, { name = "jinja2" }, { name = "mistune" }, @@ -1318,8 +1397,8 @@ requires-dist = [ { name = "duckdb", specifier = ">=1.0.0" }, { name = "google-api-python-client", specifier = ">=2.100.0" }, { name = "google-auth", specifier = ">=2.23.0" }, + { name = "granian", specifier = ">=1.6.0" }, { name = "httpx", specifier = ">=0.27.0" }, - { name = "hypercorn", specifier = ">=0.17.0" }, { name = "itsdangerous", specifier = ">=2.1.0" }, { name = "jinja2", specifier = ">=3.1.0" }, { name = "mistune", specifier = ">=3.0.0" }, diff --git a/web/pyproject.toml b/web/pyproject.toml index d33fdc7..6062f02 100644 --- a/web/pyproject.toml +++ b/web/pyproject.toml @@ -10,7 +10,7 @@ dependencies = [ "python-dotenv>=1.0.0", "itsdangerous>=2.1.0", "jinja2>=3.1.0", - "hypercorn>=0.17.0", + 
"granian>=1.6.0", "paddle-python-sdk>=1.13.0", "mistune>=3.0.0", "resend>=2.22.0", diff --git a/web/src/padelnomics/core.py b/web/src/padelnomics/core.py index 4117f90..2853dc0 100644 --- a/web/src/padelnomics/core.py +++ b/web/src/padelnomics/core.py @@ -104,9 +104,8 @@ def setup_logging() -> None: format="%(asctime)s %(levelname)-8s %(name)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S", ) - logging.getLogger("hypercorn").setLevel(logging.WARNING) - logging.getLogger("hypercorn.error").setLevel(logging.WARNING) - logging.getLogger("hypercorn.access").setLevel(logging.WARNING) + logging.getLogger("granian").setLevel(logging.WARNING) + logging.getLogger("granian.access").setLevel(logging.WARNING) logging.getLogger("asyncio").setLevel(logging.WARNING) logging.getLogger("aiosqlite").setLevel(logging.WARNING) From c95d66982bc1fc44a9e4ea0e3d3385fe67bc5581 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 21:40:23 +0100 Subject: [PATCH 64/98] fix(logging): restore hypercorn logger silencing (still used by Quart dev server) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Quart depends on Hypercorn and uses it in app.run() → run_task(). Removing the silencing caused hypercorn.error noise in dev logs. Keep both granian and hypercorn logger config. 
Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/core.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/web/src/padelnomics/core.py b/web/src/padelnomics/core.py index 2853dc0..b7b42c9 100644 --- a/web/src/padelnomics/core.py +++ b/web/src/padelnomics/core.py @@ -106,6 +106,9 @@ def setup_logging() -> None: ) logging.getLogger("granian").setLevel(logging.WARNING) logging.getLogger("granian.access").setLevel(logging.WARNING) + logging.getLogger("hypercorn").setLevel(logging.WARNING) + logging.getLogger("hypercorn.error").setLevel(logging.WARNING) + logging.getLogger("hypercorn.access").setLevel(logging.WARNING) logging.getLogger("asyncio").setLevel(logging.WARNING) logging.getLogger("aiosqlite").setLevel(logging.WARNING) From 77d4c02db370bacbeb1683fae656662d90e52c42 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 21:42:30 +0100 Subject: [PATCH 65/98] chore: run dev server with granian --reload for dev/prod parity Replaces `python -m padelnomics.app` (Quart's built-in Hypercorn-based dev runner) with granian directly. Adds granian[reload] extra which pulls in watchfiles for file-change detection. 
Co-Authored-By: Claude Sonnet 4.6 --- uv.lock | 96 +++++++++++++++++++++++++++++++++++++++++- web/pyproject.toml | 2 +- web/scripts/dev_run.sh | 2 +- 3 files changed, 96 insertions(+), 4 deletions(-) diff --git a/uv.lock b/uv.lock index 6775a7c..483b7ff 100644 --- a/uv.lock +++ b/uv.lock @@ -720,6 +720,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/14/c2480b4b4123e22b41bf82fc49e7a3b28cd2274dfa445959a1805f9a603d/granian-2.7.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a9bb46143d77161065cfe1d662f9a758bb17c7e3a2fde178f0a5aaac3fb3a65b", size = 4081455, upload-time = "2026-02-08T20:02:30.271Z" }, ] +[package.optional-dependencies] +reload = [ + { name = "watchfiles" }, +] + [[package]] name = "greenlet" version = "3.3.1" @@ -1376,7 +1381,7 @@ dependencies = [ { name = "duckdb" }, { name = "google-api-python-client" }, { name = "google-auth" }, - { name = "granian" }, + { name = "granian", extra = ["reload"] }, { name = "httpx" }, { name = "itsdangerous" }, { name = "jinja2" }, @@ -1397,7 +1402,7 @@ requires-dist = [ { name = "duckdb", specifier = ">=1.0.0" }, { name = "google-api-python-client", specifier = ">=2.100.0" }, { name = "google-auth", specifier = ">=2.23.0" }, - { name = "granian", specifier = ">=1.6.0" }, + { name = "granian", extras = ["reload"], specifier = ">=1.6.0" }, { name = "httpx", specifier = ">=0.27.0" }, { name = "itsdangerous", specifier = ">=2.1.0" }, { name = "jinja2", specifier = ">=3.1.0" }, @@ -2722,6 +2727,93 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/d9/e81c8de18b3edd22e1884ed6b8cfc2ce260addb110fd519781ea54274e38/wassima-2.0.5-py3-none-any.whl", hash = "sha256:e60b567b26b87c83ff310a191d9c584113f13c0bcea0564f92e7630b17da319b", size = 138778, upload-time = "2026-02-07T16:52:32.844Z" }, ] +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = 
"2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { 
url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + [[package]] name = "wcwidth" version = "0.6.0" diff --git a/web/pyproject.toml b/web/pyproject.toml index 6062f02..4dc2e30 100644 --- a/web/pyproject.toml +++ b/web/pyproject.toml @@ -10,7 +10,7 @@ dependencies = [ "python-dotenv>=1.0.0", "itsdangerous>=2.1.0", "jinja2>=3.1.0", - "granian>=1.6.0", + "granian[reload]>=1.6.0", 
"paddle-python-sdk>=1.13.0", "mistune>=3.0.0", "resend>=2.22.0", diff --git a/web/scripts/dev_run.sh b/web/scripts/dev_run.sh index 4820af0..1afdaab 100755 --- a/web/scripts/dev_run.sh +++ b/web/scripts/dev_run.sh @@ -165,7 +165,7 @@ echo "" echo "Press Ctrl-C to stop all processes." echo "" -run_with_label "$COLOR_APP" "app " uv run python -u -m padelnomics.app +run_with_label "$COLOR_APP" "app " uv run granian --interface asgi --host 127.0.0.1 --port 5000 --reload padelnomics.app:app run_with_label "$COLOR_WORKER" "worker" uv run python -u -m padelnomics.worker run_with_label "$COLOR_CSS" "css " make css-watch From 19dd9843af6a34c57c7f1151b0822c5ea20b8a4b Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 21:49:14 +0100 Subject: [PATCH 66/98] fix(dev): scope granian --reload-paths to web/src to stop DB WAL triggering reloads Co-Authored-By: Claude Sonnet 4.6 --- web/scripts/dev_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/scripts/dev_run.sh b/web/scripts/dev_run.sh index 1afdaab..785db87 100755 --- a/web/scripts/dev_run.sh +++ b/web/scripts/dev_run.sh @@ -165,7 +165,7 @@ echo "" echo "Press Ctrl-C to stop all processes." 
echo "" -run_with_label "$COLOR_APP" "app " uv run granian --interface asgi --host 127.0.0.1 --port 5000 --reload padelnomics.app:app +run_with_label "$COLOR_APP" "app " uv run granian --interface asgi --host 127.0.0.1 --port 5000 --reload --reload-paths web/src padelnomics.app:app run_with_label "$COLOR_WORKER" "worker" uv run python -u -m padelnomics.worker run_with_label "$COLOR_CSS" "css " make css-watch From 79d1b0e672be10d6a3a09f876d1c66642e712b12 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 22:15:11 +0100 Subject: [PATCH 67/98] feat(extract): tiered proxy with circuit breaker + proxy provider research MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - playtomic_tenants.py: simplify proxy cycler call (cycler() instead of cycler["next_proxy"]()) — matches refactored proxy API - docs/proxy-provider-inventory.md: proxy provider comparison table for Playtomic scraping (~14k req/day, residential IPs, pay-per-GB) - .env.*.sops: updated encrypted secrets (re-encrypted) Co-Authored-By: Claude Sonnet 4.6 --- .env.dev.sops | 104 +-- .env.prod.sops | 120 +-- docs/proxy-provider-inventory.md | 735 ++++++++++++++++++ .../padelnomics_extract/playtomic_tenants.py | 2 +- 4 files changed, 849 insertions(+), 112 deletions(-) create mode 100644 docs/proxy-provider-inventory.md diff --git a/.env.dev.sops b/.env.dev.sops index 3f9210e..893ba07 100644 --- a/.env.dev.sops +++ b/.env.dev.sops @@ -1,77 +1,77 @@ -#ENC[AES256_GCM,data:OODjUg==,iv:E1PqA4jzCrltGb9T8tiB2wrkLmzefekOVJt3jXze6bI=,tag:t+nmaZ+gsR0Qr3XeJ/szMg==,type:comment] -APP_NAME=ENC[AES256_GCM,data:MIW1LUcXGSRAXQo=,iv:WlHAjnFaoo5HgsyqGnvbiuMfvZIYXeA+ssw43fsP3TM=,tag:BOGuyhHj5+T1Tg7TBbCPHQ==,type:str] -SECRET_KEY=ENC[AES256_GCM,data:F43Bn1FzhYyzdGINTQA90Uw2aZWJ7REhVnM5Tc6KFS8=,iv:ZAbWVSCrVDKqVzPkNfAvH6p8iRISZSTD4U68JVZwL7Q=,tag:B5tmroCp3EJ+QDnGXgbXOg==,type:str] 
-BASE_URL=ENC[AES256_GCM,data:wmh0BoYwd3GutNhk7uQZKO4MAXin,iv:uZ4FivDic5KL7fQvrH1rhN4bTmXWqnHfzMJq7/4o+5w=,tag:NFfkEGkS1vxjODo/zB5Mcg==,type:str] -DEBUG=ENC[AES256_GCM,data:aqfTbw==,iv:g7jWtA4OD+b7QW4Sor7dX/6pJxif7d2SiU/An21QSfY=,tag:sjSyQQNtk0rLaOhYzNbAKw==,type:str] -#ENC[AES256_GCM,data:dSj2wIiLuhNq9FvDEhTL3F6LBJ4ITNsaKYO1YRmLZLDS3+C0jk7Hx5kPjcor4T9kst5tOE1KMH7HKn+0CHeWf+pdidAWikd1xA==,iv:hbnREdnOSgivoTI9/GwUOpRg1yROIgjbTpiR1R9u3AI=,tag:am/IFpc8a8Du0Wx8X7u9nA==,type:comment] -ADMIN_EMAILS=ENC[AES256_GCM,data:yS0kOYrM5VRhAAkxqg==,iv:eCqHA4kcgwt9T03Umq8MYiLYeB+Zh3pL9R7adcSZkYw=,tag:k3O0EbevHeWgG1JQAh30hw==,type:str] -#ENC[AES256_GCM,data:FDydGDKV+yuB,iv:3Q2JHDLqOWS/R+91Kx+5l7IST9DRngBd7Nd2Oau0kFw=,tag:JmavycCv5EeW7nDCum9SsA==,type:comment] -DATABASE_PATH=ENC[AES256_GCM,data:2INhi9u4WHoC6eA=,iv:JvkmUtrd5oxw0ZGQJAhI4kj1MtPqxRi5keGIzgCHcPQ=,tag:T7Yb1QKkVR3U7Ww5puKinA==,type:str] -#ENC[AES256_GCM,data:c3ikDZY=,iv:ssaKWl8+ddTa48pst3cz2n5ywMqMdrUN69jlLGRbgJU=,tag:7Dk53YiaBzYCyrrddUHb1A==,type:comment] -MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:gCw=,iv:u+tNE3Jc0GD1JJB7gX4MFelHb0JhqOiFAjP5cvsS1eU=,tag:hQJPei0T/Hz8eUnUmGxnyQ==,type:str] -SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:NT0=,iv:wzqXr3Stux9WrJBF89iYtmZMuoog2jr5ScTd+W3T9E4=,tag:Ei/FtIMAmBChc1Q6GS01SA==,type:str] -#ENC[AES256_GCM,data:0gHTU0jfkx3K/aeVQ4ab,iv:MhpRgozzMhtlDw05jDdPvNB9linCCep8WWPXFCxESI0=,tag:yxQncJ6uBHPGM2PsgsqA2w==,type:comment] -#ENC[AES256_GCM,data:N1R78wzjduwR6vj2/2io04RHvDz4bdMZQah2eG6iuQfe76dk4yVA2is6vIAO0wGHQbKOFj2XCFnex9h6LK7yfVB2TNNxndTLDOemlg==,iv:ifFHqlb2oY4flnWX3sDYKEFP3cufi5pfuF1H0zjZGRQ=,tag:ZJC6zkPE5/29GCpRK6G7QA==,type:comment] +#ENC[AES256_GCM,data:ErezNQ==,iv:GtNs5EgH0f1gI5cDTApW6Vvw2x90NI2rAGXBJpMmLgQ=,tag:EmNenJvfpuNP3HeYvD8UsA==,type:comment] +APP_NAME=ENC[AES256_GCM,data:+vZp0sqaYRWXFlI=,iv:kvBQt4XxEHI7E1v//c9Z/rqjvaJXTgx5cvHzHqzaI3c=,tag:EaJb6qZXvzzJEapM96j1+g==,type:str] 
+SECRET_KEY=ENC[AES256_GCM,data:UCK7JrrYd5oHSHdT4MyB+qg/L/Si2LrZxOEEkjJUV/s=,iv:tyZjyOe+7PBPa83jKMfSRrjCuKsqSk2StJLBteE91qc=,tag:Lmz32nb9Xv4RcY7akFjXLw==,type:str] +BASE_URL=ENC[AES256_GCM,data:T8gU8YF64yAH7umlYGh63Ju4nQOm,iv:woziZtqBwtv57Mohdc7QJwt5r7BGYkNq9yo6LxUus4Y=,tag:RVDFUzblyENvi6Uv7Agp5g==,type:str] +DEBUG=ENC[AES256_GCM,data:1Hv6qw==,iv:3Y8a1DCCaNgGsuaBVmUrjEqW1nUG+I8Dqi4XXgIWcNc=,tag:5Hw12JrPTc5T4xeV3CaEeg==,type:str] +#ENC[AES256_GCM,data:zATUJ7DT3TrnC+wYuxoo5H4Cnzvx+qtKBubK5JdlZIku71vaESrQ40LtzRf7bzE+nBWrlbLzoZA1hThR1xQUAF3YQFp6tMnECg==,iv:4d/TqqrUf/SMMfP65cRlHePm1E7uUiElXGAO1dVpQvs=,tag:oQVHdf+8uAPYo7qEBoAuvQ==,type:comment] +ADMIN_EMAILS=ENC[AES256_GCM,data:1fSFoO+nZ4PfdnoQfA==,iv:dyBsQa38WLH7YhhY03l+i4iaNmXXi/YauyT0CO/xtaY=,tag:otWDrofdNcFigotMcv96tg==,type:str] +#ENC[AES256_GCM,data:d8rqleReUHUt,iv:5f4bzvxWzty2PIkrAGb6iw+Nb2evkfPFFoH1F7kdFaY=,tag:GpqLD6QJcGGyEyUWbcPsQg==,type:comment] +DATABASE_PATH=ENC[AES256_GCM,data:m6BUOfuhyTcqZPI=,iv:t31+AEMO+Pa7jKjUPjVhe3DUe3yhY02xOF81tn7+Zkw=,tag:F8N72pVRqfJda6p5exHvjg==,type:str] +#ENC[AES256_GCM,data:zqiXDW4=,iv:wT9f8UaixLuj54yqtDX+SFRDCqhtCpuGeVWj6kXL9mc=,tag:eAdQVPZTURDT81ty9LjptQ==,type:comment] +MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:rzo=,iv:0zOl1jc3LwGxqF+UVHVAqBp1Sj7asyxc0RuG6OhTl5c=,tag:MhP/tAt2nzGyaY4haSNUwg==,type:str] +SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:CKI=,iv:RpK3WEPqZOQ9UgXNiH87DyD4mTRfJLUVt6Rka+rITQI=,tag:tXpGVO88TbovPWcGnogexA==,type:str] +#ENC[AES256_GCM,data:4F7//y4HtysenIogVqS6,iv:dzJEBKbprd9Sc5uKtpj7+eYxMhWHuqeRnfmJt3l61eM=,tag:SDOu+zmhdRwZVtV8Tjy0Lw==,type:comment] +#ENC[AES256_GCM,data:7jnSYJ2+MHYKp6H904+2BVSHBwhVqeJsLubNgud5vzC+pSG8/Aaq8l5Jne+73QbDYUJPDIW83yc4Elgd72SDh4zLeq7bSoY59mNM3Q==,iv:jDHERGIEAfCWAD6TrYrV6wM8dmlx2xxoItMA1PSv6mM=,tag:1jQjhbkr7dnTAfLGPDPPzg==,type:comment] # 
-#ENC[AES256_GCM,data:tuw7/hEPqtY59Y+L3Nq/Y6op26+U05GzZjQpGpwskwXJk5fauks66lPr+P9vlgEJXPOzuF1ZmB+M9td780mXavW49PKVh5NnjP/GbQ8t0Gc=,iv:wj6MCF/ujeaIoYC+pcgeW+slt1CBYPK5Lj4+LyHfehg=,tag:9EJJ46Exxd2On2SdCba+vQ==,type:comment] -#ENC[AES256_GCM,data:KApVqBUmJYwUpHp6BPBkzzVR+BPe8h/u79lODjPLvQoCT9XIfJ3ulZngRUUIDCJr6+yM4y98qxQJf7bhs4z7CK6oB1L/,iv:FAuFeIOD/Rx81L97cAqYIPZ3bhyrqOReQ5xeOjyN/m8=,tag:Z69JIWO4e/Q8+BQ5ThslVg==,type:comment] -#ENC[AES256_GCM,data:80vv1OoLoWwS+J9cbZ1Nt+UeF6Fc4D8LUHLX5BiEmBQAXh8ti3EZZnwi2QZb0wN+Ma1zwsM=,iv:6xnMzedX62XPU3GGpoTie8RZDFlzvrpFMc4u+fCbKYw=,tag:V7HtIqw1+ltW4dbvqobjQA==,type:comment] -#ENC[AES256_GCM,data:irTPd03a7pDOi2WOsL43GB3c/luAhlm0u6b3Wj/oLKnlzmmUV++aoyDum32wGLI4qH9Sy5XJS44=,iv:HH3uPdh0ZBdvX2bhH5w22TCsn67HaJa+/f5bk8NJNvA=,tag:f4xcS4URJS1EAipjcpFn2A==,type:comment] -#ENC[AES256_GCM,data:uW8E3Yhcepw3FJ3En5N8CLpN70a1ovXZJyS/gSqaWsMtScbTEqlMZ39R0S8hiS3j1y7ST7GaNXburXx0lv8=,iv:Xk8GhHPk7zVnMi4PbazefyxumHSv/XwpO/lk2SCxYEc=,tag:r5IpNFQ0wxQ05+UfOaenig==,type:comment] -#ENC[AES256_GCM,data:Ep/iNZ9JUQTFhk63ukNzs4YltDYEZ1b4jvi3yBpT2yfnSuVvrz1xSc+oU5WbHI8SeGbpaTgn5eA=,iv:vry5r45x5xNcIr1oyCbVAkFxFq2vpvvOEzqlvWI/bQc=,tag:MjTSas4nqwHg7J2ZfxlkfQ==,type:comment] -#ENC[AES256_GCM,data:OwkE2btYFBq2akgxnlHA3lVUJ4eXI4YVjIxlbK4lbrn6cd9U6xz7DhrDuFbOxMo1VtbyxBQ6dCBM5TODPHMFRBCIyuuXQi17,iv:emVmqGfwrpkF5HoQ0OD0UhiN076RezDEgsmBm2e5FZg=,tag:Hj+KHO9yKGqv2W5XkkHCZg==,type:comment] +#ENC[AES256_GCM,data:P4C+IQhLXzs9SBKhIWgdKvXpMn/dnE0qeaMaQZsg3BYxNVFQYvJL46miAwvIV+eRNtdWZMXdYve6iOJ3fsx0H+hC2xl7HAOLZZrSZP/GUDs=,iv:9Em6JDy8MKSbX51g/5S7R2BdeRwtXh/8RkgNdIwNudw=,tag:hDJNBhfudzrDHWAQ8VwZAA==,type:comment] +#ENC[AES256_GCM,data:FYpzYV8avmJuvjRf07b9tDCAgwfE0AKBcG/Vd8NzKaq1W75mzE17N5MRY7xvwIrHEPpyvgm9lZfZAiWVhS6Nw2V8E/b/,iv:o680eJd30GT03Xnjcsm+P1bH+d6XKYCnpSkt1r9hRXk=,tag:VH1mGp19gxU7df7fLs3RTg==,type:comment] 
+#ENC[AES256_GCM,data:NSK0Ph74ABD9TD+TmJ+J1xKo59DDVGWno+I96XSsaBX9rKGLDkEI1SKyqsrwW9ReuZYy/5E=,iv:Y8QSzOMMiLVSuUUVCPbMaACzsHu1Cd8j8rqoAN99INg=,tag:Mixnrsfjq/Q+TOkGbWx1fw==,type:comment] +#ENC[AES256_GCM,data:ayv+LfzbDxOZF39CMYGWGR1r4LS5vmSO3p27Jm/p+9W+z73FmsYJeBY/k1sRYz5+RRFupJW/L18=,iv:DwtFFLL2zqUNxvEFg5tQEz0eq5quflqcDOCR6Y3IhVM=,tag:EpMqnrZUwkv4M+gw8YQu8w==,type:comment] +#ENC[AES256_GCM,data:9/K3pVWOl0cmXDg9VUcJjoX2RrFuLnqYGxK+hM9FXdUC4qjB9fZtaWNlYNIVqzNtjihHtpB2ex9Q5+9JCQI=,iv:iRt4qxShtLbe4dYBE2jY1KgeCEM4S3IrnqEpatkpUdk=,tag:DII9n35Qnx2n7qNlYvQuPg==,type:comment] +#ENC[AES256_GCM,data:pAL6rk14PeuF4kQs/dNret8ZtWgRsDfx5YBcEZtUPJSk7qHaDFFpZmG0Ok7mVRL1OIyoQXVOlIs=,iv:JgJ408wpCkET9xDv7yBOP+iN/bOg7huyc4xFcw6BWo8=,tag:1Esx3pxQBX6AHU3t7B58ug==,type:comment] +#ENC[AES256_GCM,data:VnpXVaOfq//CGtKnUtq6kLqm+qXPLQS8y30HaZ8YNbOMZEwUV21xvSC8B347148qHt1rHhGM2QFp/kKS8IxoLngMq9QWj3No,iv:v0eQpUn51pdGDSpg2A6WGX0plFYil0K4okICi0OJwGQ=,tag:BJnMZ3Vmx5Qw7MMLsFlsZg==,type:comment] # -#ENC[AES256_GCM,data:1rm7G2bjcRWiQBcHI3v6/iVxnASJKchxrEot8YznFxoaGcEQNyQQXapZUFRR02pT9FT+vy6mCWxDyqRdyrtdlGjIL+N5R0ukNcI=,iv:dAFaoyjPikp3gOCIph6UWVrAR3ascxMKr+weOCAqO0Y=,tag:a8aDQifsAp/6+uemGMwryg==,type:comment] +#ENC[AES256_GCM,data:k3uooGSCEetsRz85KBWdAo1k+DZMWbVc3V/PumOEhdfxHp2I0vRApdIfqimF7cJmW+uE5/aSK1zHMqc4EUgq8A5N3gO2LVmMkhA=,iv:0d1oROXKygJ3xnwQ+TZcjK02PpCEA3ydL6reRdAp6ZI=,tag:C8rWINR7+41JHxqmFGEyLQ==,type:comment] RESEND_API_KEY= -EMAIL_FROM=ENC[AES256_GCM,data:aKRFxdRPnDMs5ch7uYx2l2D5kwg=,iv:cxmNdrdXZnRjI0oDXUrz028mI7KnSUXK/pUz71NkUUA=,tag:UjKqpg6oq1t2WIdaXGHzaA==,type:str] -LEADS_EMAIL=ENC[AES256_GCM,data:BJ9r1b4E2Ck2b162f6K2P6z2rXs=,iv:NmrlbVJU7pfBf7pXc4No+69AyvqgF0j7vZEkSvYV6SE=,tag:fI3oyLbePaqi9TqkiJXZtA==,type:str] +EMAIL_FROM=ENC[AES256_GCM,data:k4dZes/5yKevE9ywxItbe5s2fU0=,iv:NmGOq9a6/CfcEKzaGkz2knYuz8Eeluaf600kzYImYRg=,tag:oh8dvUP9MtuJFx75O5YOTw==,type:str] 
+LEADS_EMAIL=ENC[AES256_GCM,data:hlI9HMFloMmLEvm9deqz/eAR6ak=,iv:+ACxp0Q0uGB75BymlWP6to0f+njE5ybUTOIkxrNIn/E=,tag:XizFAGcWmGhbMMARt5PLxw==,type:str] RESEND_WEBHOOK_SECRET= -#ENC[AES256_GCM,data:6T9lVhn+1mcyCxBYy/dsPaGp0zy8+XQRhRCFKxlos4QOIiLBjrYuI11v2oSO80Nn6AfGNms40ewi4RpHtt7Afawb3A==,iv:f9gfD7cM3WJGdvtFQ3Svi8Cui2yadmMXfuF6IAIn/LI=,tag:3jt5lFeI0P827LqQWUGzOA==,type:comment] -#ENC[AES256_GCM,data:ja0Rgj3IVihRU618EIroO3bJg9sWFOd3Ua88HLP9yrzEZ7Ty8Havd+2vroD/TGeidqjiFmpMlXH2R3v1jLrviRbkWqBlbBO4se4G,iv:IfuAx8HcJByEcLaqegqZZZVNfO9H8LnuPzIcXHYRBDY=,tag:FumQcnMmcKHI81FjYrT8Mw==,type:comment] -#ENC[AES256_GCM,data:eHDEBuHGW7rKMPW1NM9b47rBS9BMmtmrwICbijYIVdogMvqJCMEk8zfT/lc/bnXFiamFYfJhhHDNEEOBg69ZdZ7M+mWPCRps6cXP6A==,iv:7SDIHytbnp/v6zXG0j4PbkIIzjeDVqp4BKthmTIqF1M=,tag:+fPDhe9NIe2Qmqp91KuRjw==,type:comment] -#ENC[AES256_GCM,data:VrBNRBK97VxVTcwPZItM14tr5IbWQ2jGTLh1Hca4TPHGWPo75fxUa8MFWSCUcwXIKjMm,iv:FRa+BpQFtyx5BuNt609duHng+QgMbQavvOzDpPb0Ta8=,tag:+OiUXm7Vh+xhdfQ33KEhSQ==,type:comment] +#ENC[AES256_GCM,data:PyHRX/kgXwa24gyol3okcZ0KP29OS0T39/6bx7mbFQ7wfsnIDaUWja0/3F5tTDycvsy465M4Fe6onAQpKiyzf9CuNw==,iv:ZE3vSHGW+rD/dy2olWvTYBvQI/1SZImAAPW66HOns+I=,tag:r+LHnyBSX4PpH7OF21rPyA==,type:comment] +#ENC[AES256_GCM,data:J0vWvAfeSQlhthf5eWm8USRUAfkVfgIqmAnt3GsC8r692x8HLAX3NbZ9FN7pz5SgiofS/pNjqb/no0TINyUjsQL10jwRXPtPDzr3,iv:i3SYbUl2Xsi6Ua8Hgg9u2f4y9U9n3QrclRmiripgQ7c=,tag:D2cXlCE7wkHDUnPf5hNdgQ==,type:comment] +#ENC[AES256_GCM,data:sUC7z1bskxOjA8d9lWwxNfxelL22yYNuha/qbIUq9tT9XbiuDczKXNc2sNmP6QJdscSuXcX0dizk8qfmsAYwOm4IBc20JUmeYC6oPw==,iv:X4ghkHGgslkYSup814U86/mOiWljJBd7dc/4ZCdoyTo=,tag:z+cr+vaI3dENwJJivpnViw==,type:comment] +#ENC[AES256_GCM,data:bmLZN2890ZS2BMdXfFbPiou/RErEwEc0IHLhO2mVZyBljHxauIU36sIUdor0xIHsvNUH,iv:i4GIqVbNJ/NdX39AbzdB3JGs9ExvNxilguuvbzzkCVU=,tag:0D9vRXuno8P+LEWWg6S6CA==,type:comment] PADDLE_API_KEY= PADDLE_CLIENT_TOKEN= PADDLE_WEBHOOK_SECRET= PADDLE_NOTIFICATION_SETTING_ID= 
-PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:Xm4mm3DBjg==,iv:6k4ZSP7GQF4LwVG0OAMBdInAfaf3hhJLjmIztNfdCWg=,tag:/LJeoFbovsvzP7GBEiLsTA==,type:str] -#ENC[AES256_GCM,data:1c0sgo2l+OssNz8Rgm8/DI5ormrHXNHKDTKx6bWmv/2CchefqxsRq1uS7uR5c86eTxfp023LGhy947i7,iv:PAEdaE/pgfy3jxuod1PJ6VcyIYOKmlKkPmudSs5xbxw=,tag:KYnQmbim+fWsnW6nlEnhGA==,type:comment] -UMAMI_API_URL=ENC[AES256_GCM,data:yj4OYkvZIDrgjgkdM3a8xyzvoGJOk75wUhAgNQ==,iv:JmQMdU9XZ5ABfvk0w3XP/WkdcR1KrgXTPVI39+drhO8=,tag:jW1MUL5Nv+z8KP9FjmYFcg==,type:str] +PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:QUmkNRMhtA==,iv:ynAcdJvkBhDC01fhXiPNzVF0rJ/06kbPfn1fJDyuwfE=,tag:JBaSLOK1pgrSd1RnouarlQ==,type:str] +#ENC[AES256_GCM,data:HdyY6Znpwyz45cIncl5UQ0q/COW9nTpDJtRpFIh/axiHSoJVb8xZMPMGZs+ZBlsdY2Fk6DLbk/QgQKou,iv:0dF92m4g6xOFl2tyI/XzN6iZ7Akjw0GmQoxtfi2UHAg=,tag:1FRSwRfSMjWHLdU+Z8nq8w==,type:comment] +UMAMI_API_URL=ENC[AES256_GCM,data:CpNizZHSgMxaXoI58ZqaHf8anQSPnlaScHqtYA==,iv:xcIXxNjo8/kPdB5RAlyHTvHfo51cEgxlKD0bum7JMOU=,tag:nFOLENvS9QnneD9X6IyE4Q==,type:str] UMAMI_API_TOKEN= -#ENC[AES256_GCM,data:5nypMO9DFup4c3p0xXM=,iv:GdbOATtVOmFZKvDfv4gdNDDdq4WmB+/yk/D/xgC7Uos=,tag:TWfgxyuzlzvzxJRMF0L0cA==,type:comment] -RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:o/R0,iv:TGPynY3rpH5fL8zQvI9EAWg/LFv7earnaCbGhkkN2FE=,tag:lpnRZGaONv1TZdwJg17AIg==,type:str] -RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:2dU=,iv:6wSl6GAWAVxfVC/LB0uqhB+8gtHhkrFybASXdQseuVk=,tag:gVjomO7E837h/plD/8948Q==,type:str] -#ENC[AES256_GCM,data:7CoH3Rv284IHofVG9bruk5NYF9S4oeGqupZFQ38OpfCde1p9ks7WGicFmfrMQDNTLG1YoR3TD/u/XQ+19SoP+XTcSlhoYwo2mx19ETWnww4=,iv:BvgKjGm1YB97I8EQG74uaoDk1QraWxTVtZoiNGkdx6U=,tag:vbS882eevc0rzOA9pGtzng==,type:comment] +#ENC[AES256_GCM,data:aQSM5Lzgkz9NUUIugBA=,iv:2XKv8zYRiQJOObbPMZPF0KYjTKPfNwm3UDFuwhWhD3U=,tag:53tLhXhELygfPr4fpT+75A==,type:comment] +RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:rvuL,iv:yj7Jz/srP/mM5sq9ZVprXJonRz8yIYFObMEb9BH/gts=,tag:UPTPgf7o7XD6VRX8SM6Z3A==,type:str] 
+RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:Ko0=,iv:HqyLf2FGYGvs39pHNAHVylRDmwGD7ZE5tzoVrJH5lWU=,tag:a6DUeRYQQgzo/onCCglk4w==,type:str] +#ENC[AES256_GCM,data:VjeIPAbAtb9UChZpMNB7JgylhPjJWf5TN2yirhXbvy0GwQz1ggfHOwMCfKT3gdQ0sS6538IVVrM7g7+t2LQU7UtwwXA/m/g7t7OOsMMLdus=,iv:h465KTCd4djaAYXNwTIoEYb0Jv+n++ggH1LSg9lGWsA=,tag:5kbWbQ/MKQnRD4dDaanVOw==,type:comment] LITESTREAM_R2_BUCKET= LITESTREAM_R2_ACCESS_KEY_ID= LITESTREAM_R2_SECRET_ACCESS_KEY= LITESTREAM_R2_ENDPOINT= -#ENC[AES256_GCM,data:1BAAp6TJWo4w47vsPtE=,iv:mMZHNSUPmFYK+jLv7DJ4QqZ+wu4mm+QJJxNKxVSiXaM=,tag:rqg9uSb26e3YmL2KIGhsHg==,type:comment] -DUCKDB_PATH=ENC[AES256_GCM,data:QupAI4arYuggsPTVJ2E9BGt3Yhco,iv:nuXFZUleLIIVf5bXXKuQ174psZZjR8AgwNo677A1Kx0=,tag:+9bn+OTtBCmxZqPt0cYCzg==,type:str] -SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:okf9kDEFoPYreiBk1rEoB/6dAFUr,iv:iE+AvfUPX2jxKKBQG9iVMu4LkDcNTxLB9sHY+bmLrso=,tag:GeVRiwbScPrUSsZ1WtfN1g==,type:str] -LANDING_DIR=ENC[AES256_GCM,data:cMnL22EkJGT+jAgM,iv:nItexm3BrtgUVU7TBjHKxBIMuXnuEoAvZcEFFGTmhSI=,tag:E+uXdhTGjKRpjCtRXgcUCA==,type:str] -#ENC[AES256_GCM,data:Cwg5GJ0vOj3LfvtUHVoePytBmvCqJZbqJ5AWhdHcbVFpFVVDYo6GQARMlaLvlEgTe+4mxiR1BgDn6jTokWvynBxgVvQbNLFUINY=,iv:p0fjB0/TQRwdtSxTqFZPtAz9CFRvNjYwxqFLDw9wiko=,tag:EyUfEr0z4PtwvWSo9LbLkA==,type:comment] -REPO_DIR=ENC[AES256_GCM,data:/A==,iv:gTBC1pW2sn/8ZwU2UWErJSecIGmnl5voru0T/klGHuw=,tag:NlFer7SSK9KyAFhrzwv/DQ==,type:str] -WORKFLOWS_PATH=ENC[AES256_GCM,data:lu7JrtxM1TGqY4EErXD13hHMxr4XJg51h2uzXAAFxQ==,iv:j2azHovy7Cu6CREn+cw5hNZkknA5m++lukJskWi7OXU=,tag:d+FSm+hx06SfpD9o7pwr8Q==,type:str] +#ENC[AES256_GCM,data:m6v0sC7LAUHYcgslJ6U=,iv:M8WIwU9a+1DHgzHzUIbufU7f1M5EWCmMrND0ewn/4Zk=,tag:IvA37+vuLIhazIdyD9OKzA==,type:comment] +DUCKDB_PATH=ENC[AES256_GCM,data:f5cPVJ4hPitrUiGSADEbka7D80i9,iv:9w2lPLwCxGga0spvsvG5b8HAwb+UC3NiPMChhbPsTmI=,tag:D981doEKe/3QuJ5+8PefYA==,type:str] +SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:jMbf6irWdBtvv2Fkt03IgrHf6HEg,iv:qlNdxAhNWAjlxN47CmSLsXS+edD/aCZajV2jMoyuvSw=,tag:AA/7hMNULpQYlP3aQ66MYQ==,type:str] 
+LANDING_DIR=ENC[AES256_GCM,data:0uX40uNxOg+wPToC,iv:ZegMiav+MbEQUhsxY55MH0OLJZpQl6eS3mekg1fyA28=,tag:EWiXChA2hts0y9R96FviFA==,type:str] +#ENC[AES256_GCM,data:iFa0hLjQSPP8Wn0mbEFVK8coTnFnI0gqnvGNC42sTJVy+b6DHL6It0mzIgO1mImRdsS0+QFPqRx8z990WtDg2QpRaguyY6R4xe0=,iv:Qe4g7sxTRJCWjC004lO1kNKuKnw+P+eU3mwU3PQ6kG4=,tag:U4T9Fwq44UXOdcKmvTcNYQ==,type:comment] +REPO_DIR=ENC[AES256_GCM,data:qw==,iv:gZKl9jg2mLHEPzAmLFhM+i3+hkEtA1UaKip7W1/fmaA=,tag:FBJSEpk70ws5rlYT/KvcLA==,type:str] +WORKFLOWS_PATH=ENC[AES256_GCM,data:2PhIF4TwbOC1VxJf5x412tfGAYhuicOWR/ArUY0dIg==,iv:CgCfmTRqat9eGKdCb7n2eYBq31SultpMx5EOo/5brGk=,tag:d7PTWh/kS1hI7WxXaZR1bQ==,type:str] ALERT_WEBHOOK_URL= NTFY_TOKEN= -#ENC[AES256_GCM,data:cz9SOffDAaXZRjw=,iv:7D7YZAyEk5CNlTiL1+KnbPbqfRYMxdtSk40LWErVOfg=,tag:JRHq+gaWSl2h71biklMMwg==,type:comment] -PROXY_URLS= -EXTRACT_WORKERS=ENC[AES256_GCM,data:sA==,iv:cZ3Ga4VPJfTlKrKeHIXadW7kZI9RqkkriKHAxGT5mqw=,tag:4kTYW7QlCwTkFWHxsl59zQ==,type:str] -RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:RV8=,iv:Jrom3R7f1NYbEL+lzypeUmifhGaayY3+uzjRuBxzoxs=,tag:pjYanguedj9RMnDGKiF7Yw==,type:str] +#ENC[AES256_GCM,data:uuuY0NpFnt4Ihe8=,iv:qhOHBKuz3RlzcFWESucb7LxN54T2QKS/hQ7p+PwQbK8=,tag:Q9AGw0s6t7nrtoFbZKbQxQ==,type:comment] +PROXY_URLS=ENC[AES256_GCM,data:tioCCvPFz4kZDY+KWz9VVpF4GFlWizq62opU0UWWRWJpbuMntNxyITxxp/VSLt+nMKBgL2RsD69qeCjWIktO2B4Ys5qEPznPTjDQx0LkSqFHjeMifFH+4yLh7JJtNMm9es3txYFj63QaT3LOnh/yXoM+VeSKl8BlW8n+H0yVf+0WWD4lkTDeY4wag4AjxL12/0M5FRsSyLG9tUYiPE/K6dL0eqprZ2Zwp367lTIcnF9mVXE9lRJXm8/wKe479JED/Vbcndp1TKZc9C6T8SvlnY9JQSxmhUOr9JMSofXug7OrBr8OEIFcjt10LfUJabsV462BuuCxsG9rX9Jjxytn83oIcTMIL2A2W2e8pLY2w7AND03q/KVzU7tGa55QVdMsNB/ZC6lwwZQpEHxpfr1g/62NliBtsOdD/XSzOmJDOghnYbnDuEp6C/E/IyFDcdOL8SnUv2uVzkyZZGIOs2eSaXT5TxKO7qWpMWwELAK+EGaB9aBT1YvD9nLP/DJ9JlGP8O76pwu+v7cR/upz3la4OvU5LC8WGzi6e7Fh1TlFVVWIzq6wjnD/Rai6L9gzhixCriNDbmZr9YLl2XeLNHZ8cCROm9nWxMBTAjoIk0efzEmcpY0JY6V/uAQbfmJ+YWo=,iv:Rm5NFF4XILpH+GU37PQwWvNJbQnK+Q9lS9LFtCSgMZc=,tag:DIcae33nOPI/6gag8VxJ8w==,type:str] 
+EXTRACT_WORKERS=ENC[AES256_GCM,data:Pg==,iv:TtnwhOjQuffZ1j/jqPcKWaKZKPWuuTN+pTgohUieFVA=,tag:O+cLuOWVi6SPlpOAgk+ZYg==,type:str] +RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:v8c=,iv:1OLd9YKt7usSJjfMEv/nQVBdn1Yomw4v7IK6Tg1PBTM=,tag:26UlNnOpCbzjrM/9d2QWbw==,type:str] PROXY_URLS_FALLBACK= CIRCUIT_BREAKER_THRESHOLD= -#ENC[AES256_GCM,data:cPEkskKyxJENcKo0sFupC9R9qq5bQp7sYT9TdQy06FfHPoDQuWw+JuZ5Bs6pOli4NNQBrPX9fdvRwL1TsqmfajHxrSfGgr3l,iv:wF3I5Y8UzYrrJrIeokXn0P2E51eXnqVFdo8lXpKHZGk=,tag:oTPO6+UjI3ZBY5gOf9RLsQ==,type:comment] +#ENC[AES256_GCM,data:pA90yPzfo293IovEhFv6+z/EOj2xuNPkOOx0u2Im9b5tPrrGks8hNbmcJabL6qOQe5+YwIccdSsTscJ+DocigNrIZcMY/+0h,iv:3+tiv1qAVX+Ex4Y49qTj094B8nxqhPEshTYhimlC1xE=,tag:gxcDM6Yyb6pUPO15NkqxCQ==,type:comment] GSC_SERVICE_ACCOUNT_PATH= GSC_SITE_URL= BING_WEBMASTER_API_KEY= BING_SITE_URL= -#ENC[AES256_GCM,data:CHMw0ywPH0adeYkcwSndr3JdWyw8f9AdQXQYG62SRCg=,iv:V73eodJud6Z7U8FvuTM52s/2nqNkFz9eneYpPC8OzJM=,tag:obnBC2ERQ19xj8gI8jOosQ==,type:comment] -GEONAMES_USERNAME=ENC[AES256_GCM,data:mLFHQ76lAa2Ygtc=,iv:XWAkwQ075Ph+8qSNmD36PyvPomROENvb3SebVEuVoZg=,tag:cOyW28h6h8nhTbaBZqhmZQ==,type:str] +#ENC[AES256_GCM,data:ObwV9+nxI3OjibZaWomt41y0b+bdHgcdH3r3xddgMyU=,iv:LwdgMBeoUid9GGRwmn/HG8HvpdDkwFnH6eV5Tr2w3hs=,tag:MxfkT6hQ2hptiim1VKtNaw==,type:comment] +GEONAMES_USERNAME=ENC[AES256_GCM,data:62LndEyH65LKGXI=,iv:BBm1OOxgJa2fkuCyneTgcHg3ArYfkA35NOOd2yT9MW8=,tag:MzRGfuDOnA9pbrvXerOWRA==,type:str] CENSUS_API_KEY= -sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB2L2dtZFVnU3BlODk3WXRQ\nOGxSaTBZSXpBc1lpcUkxSHI5dmlyYUo1K0NFClpycmREMTQxNnIyWnpHWVZOWmFx\nUTNRZDFzcFArMVAvckNBbXJMRVBudHcKLS0tIFRhWWQrNkdVTzlucG5nOVJXUVc2\nREFpWHdpV081TFZON1R2ZDlGNHVsWjQKSgmj4hrVEvrIizGmTpgj93ct1a3lUYXl\nBbuPUT8k+Hj5UmP+SoZqNS3kh2a1Nvr17K6e4PjfRAcEfM0UgNA7Jw==\n-----END AGE ENCRYPTED FILE-----\n +sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED 
FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBrOW1qbkdDUlZobGRqbDFM\nS0NRTmdSazY5S08yOURBZ1lWeDBuNi95aTI4ClVRTWlnWXplWjVuNVE2dmRzMk1p\nRk5ZZHlpT2RVcjJqb2FpTEdOZXlKbTgKLS0tIHN6UU1tazd0cmNscVUzTmcwNE5L\nNE5uSEtiY3hwSzE1bm90b1VNZzYwK28KJlCJWkMx7dMoBtJi252GwERp6kSkuVFu\nJBwnSJ2KC5/zzjcfnLprgc/b3s4RsbLYDTkcPcuF08X9F3R8uqScdQ==\n-----END AGE ENCRYPTED FILE-----\n sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a -sops_lastmodified=2026-02-24T15:38:25Z -sops_mac=ENC[AES256_GCM,data:eZOqrSiA4f6mYUaYPS6TD6vL2ON1DsLchIjzSE7bcGpZuaTspItlkBNUR6bsiPnhZ+RCv0xfFMvWallLJe4Y/8ftlQCeq2fGLJ30ZlktgrBocXw5ZYUcJz99NjAXf5gvXoq7Bn5DPnX81ju2a2D8YIhGCZ4YzzE+ae0b44MK9zg=,iv:BG/M2ugqhacqm7dEaHmH+v3dTbu7aKojRjsQBSy/8vE=,tag:1zSJgTCzBUKVsahvdriQrg==,type:str] +sops_lastmodified=2026-02-24T20:32:28Z +sops_mac=ENC[AES256_GCM,data:hQspZvYHHTsf1vFtNUvhiyRSkfynxNRUjGCjdK9GPGC30aXIzhg2si/M8wmk3VyhTMOuSFVtNgiiefvChO0N4hwqDkp2llEnkwy/uvtnCTLKNF8I90GS4ZOXNdYN5bTAS/0EF3gWOTPKl+EhJQ4lAvb3k+PY4fI7bZIkWf7sSbo=,iv:JpsFyMTpI0lfncfzPvt0snAqyNfCGCrSiQinIuDivUM=,tag:DBAIePZdHq3lfTn3AycBgQ==,type:str] sops_unencrypted_suffix=_unencrypted sops_version=3.12.1 diff --git a/.env.prod.sops b/.env.prod.sops index 0c1afbc..384f87c 100644 --- a/.env.prod.sops +++ b/.env.prod.sops @@ -1,62 +1,64 @@ -#ENC[AES256_GCM,data:ucMhtQ==,iv:sGhlYo+lSjTp5nwwZTSgMqT64ut4T80hx5CVT+g82lY=,tag:RMnG4i6LpK1Y5Bw4gFPqjw==,type:comment] -APP_NAME=ENC[AES256_GCM,data:qruXhXS0DfkEbgM=,iv:tlyiDXsNaIj5vHBaO0dE7mVi2c/IBsLBpsEgibS8DyA=,tag:qnWKM1VcaO3JsIfHz7qo/w==,type:str] -SECRET_KEY=ENC[AES256_GCM,data:9bXIuM5FOXgp,iv:/df/NFPVVNpCtWSWdxfn5UkEeRmbtmJ1coMHvG6c414=,tag:wOtxv/ftApPR07ywoIvO4w==,type:str] -BASE_URL=ENC[AES256_GCM,data:C7l7voU3GttiIRbAZ3/dhHootWm7wQ==,iv:HyDI4yfZkBuRuvUUao//mu6nkfW/lyKDdXS472pJuK8=,tag:IXC5Xpe1YcEOCCTsnBuNDg==,type:str] -DEBUG=ENC[AES256_GCM,data:Aq7nwAo=,iv:px2NR94oiodO8FbCa+VMNptNR51sHavOmiQBQ667pVg=,tag:vHIwuM+sg2Lpvw+ZUMZVRw==,type:str] 
-#ENC[AES256_GCM,data:Bf/QsEOoDh1gfJIHwA==,iv:nSoT2Bnk9y2VxoL2opvUrSBImRstydELaSk2IO9NYPE=,tag:YAqhk8/qpW4UsMsjdS+RAw==,type:comment] -ADMIN_EMAILS=ENC[AES256_GCM,data:CDh9bd1OesYs,iv:C6Dwn2h6BYXc21VBFYplpfhwNj8TPevSRvkPCarL7eo=,tag:BibcNChGFh1b2jCF67Nn0A==,type:str] -#ENC[AES256_GCM,data:56tQlB/WNuue,iv:lh7+zHQuoAC9jDEgI6/g41H/9gj544nDOwAYcFWjpQs=,tag:XIVb5LJKxlSQz5qPidwJQw==,type:comment] -DATABASE_PATH=ENC[AES256_GCM,data:itpuoRhwDXFgEIg=,iv:2/UkQmyyzd3jaUgcxbMCmsflN9ubY+T/y3U9DIj0+3I=,tag:mLONYoV1YRldPS4fjmWo0A==,type:str] -#ENC[AES256_GCM,data:3AMoLpE=,iv:w3+deBCRpccgTepZ7/j36pkzUIWAmaM3KuJVpeN1qiA=,tag:V9cfSO0NOKy1WLVQ4OSQYA==,type:comment] -MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:QZ8=,iv:0KWT8VtITdDyBTM41wK4Xe29vbwbVXq5JI+Bk1C0zLk=,tag:y3DG33dd3YLhCC3RHfvJJw==,type:str] -SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:J0Y=,iv:CTSBzRzrn5EHO7eNQsZH55vPx1l00WfaQ2tQOQPbNxY=,tag:jvP5sAHd/MkGtzQ5tJc3jw==,type:str] -#ENC[AES256_GCM,data:4AzExSrkvf6tPpvTuvhD,iv:WZBGtFORarHjnBVZbyIzeeY4qFePnrEfUVFIGshiytI=,tag:grHkOyLwjcti7sHvzSdVSg==,type:comment] -RESEND_API_KEY=ENC[AES256_GCM,data:7s9gnzHzzYNy,iv:+2c0tLkLGwx15iLBzlCK+NO0XLLOoar90KRXqN/HwuQ=,tag:2gYY2jxVRYvt5MpBCWb3Jg==,type:str] -EMAIL_FROM=ENC[AES256_GCM,data:66oS65zkYQJ4BXh9clfF861fwsR09gVgRT0hwZegpnURwA==,iv:9fTh7YU+DzIP6Cj+unxuzJa0mnHsHcwNfvhI6zffjC0=,tag:ZLaPF45Ns0F3xNynXW42uw==,type:str] -LEADS_EMAIL=ENC[AES256_GCM,data:SkoYDLDQOCWLHGPFd6HQ1cCU/VHuU9jK9FSjNMHKxnCbXw==,iv:kt4tkw2BJF+Fz5oquje3OvMosqculK8Kxm6h30LWxj0=,tag:PSy1qSkCV+Z/z8dYGLV3BA==,type:str] -RESEND_WEBHOOK_SECRET=ENC[AES256_GCM,data:M0BNhZKQvfdT,iv:XIGForPlTQl3NO+hUp90D7sW+wZz4CmCWvPKHedL9MA=,tag:wbxHr0YBKKMOt8tGYG0dVQ==,type:str] -#ENC[AES256_GCM,data:XU7LmjtgBw==,iv:mTo7c7tQ4bCrUpRjfpkl/eTMv9qgVVwG2BwDJjDENng=,tag:pJlSVRi3sj1CgapPKDmcMw==,type:comment] -PADDLE_API_KEY=ENC[AES256_GCM,data:F/VGqmpu5Pja,iv:RIIaP3LsvnQ3pPQ4OpXlzz2N3vYnQEyrwFrukpHY3qs=,tag:MZJYLxddceccoAPgmW2tgA==,type:str] 
-PADDLE_CLIENT_TOKEN=ENC[AES256_GCM,data:sMUzWxIx835F,iv:mXc20kRmnofJf4Th2O4sjJoyTAlKHUbmbkd3tl19VkA=,tag:a3/N0KT3v1bo/o65sTuB+A==,type:str] -PADDLE_WEBHOOK_SECRET=ENC[AES256_GCM,data:AQ5VbLqVyoVr,iv:tqloKV/Vfo3P05QZDsc8p0bfnwcylsQgVjhF17AyE7U=,tag:7gpQEQVcxOu8YgN/518u/w==,type:str] -PADDLE_NOTIFICATION_SETTING_ID=ENC[AES256_GCM,data:ZBhOIvcGpGWY,iv:ByQcuU88DjAUEs4x++8+3E80vyiDWqbA6VR4bG5oZuU=,tag:TXnesiQ7K+baSOH6XlzBPA==,type:str] -PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:8Ayb93UcbMDSAg==,iv:DydRKcY3zHa30+L6g/2ooDZbtyMHy3yJ1ETRssqDkFs=,tag:1nqgd26CAeB6HQEGHQr16g==,type:str] -#ENC[AES256_GCM,data:c1C8AUiw,iv:vU1muGR79S+rr5dTQbzDEYZ5WZdpB2zaHEcEvPIgPYU=,tag:vr07Boz2lZtBscvlHGt10g==,type:comment] -UMAMI_API_URL=ENC[AES256_GCM,data:uZMJ+vSyXZkgJPRwY67HTzKCA5qA9vMp4lmCBw==,iv:/SuiWslYHayfB4eaJ4rOtqv/CFBt2GbtOe/83ZYvCxM=,tag:y0aJ/GVhZigxAHOklmLg9Q==,type:str] -UMAMI_API_TOKEN=ENC[AES256_GCM,data:aYqT3Xwytvrl,iv:MRei8ZxgohwsbDyP2xruEDdiZQaGA41IlSeb4oqr93A=,tag:0VnkdreND5HggCA0LS+sRg==,type:str] -#ENC[AES256_GCM,data:3QFdBeiJqY3UfkClvPs=,iv:2OUB0JZbwSwFOiNo4GO5fTF6WwSD++Gy9iy6EoH8VGM=,tag:mEszf5Zgvv4bUGE7Iqhf+g==,type:comment] -RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:gPCG,iv:AC3g0YDWdQRexRbod3m8UXNKzy/qn0C4LOy5kNCC3cc=,tag:pX9JwjdauMQkkcddYvk9Dg==,type:str] -RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:S2Y=,iv:PzZDhpC06Vh8qZ2/ImgIIp8ENUhEIHkyzTi3Ob+PWw0=,tag:5hpdFo0nJaSUoA63inq3jg==,type:str] -#ENC[AES256_GCM,data:DS4SYOlYWDPAzQe9TbFKC4hSfPgt,iv:HI0pGyQnnBIo6Ufb5QlT6539QVgLNd3Q1E0nVZT2YNw=,tag:VbcykF5tPDULflTGSBw9Bg==,type:comment] -LITESTREAM_R2_BUCKET=ENC[AES256_GCM,data:N/buXMd8Bmen,iv:AFvbKwo+oipuFOB4noaks//IQ92I7gvalUgLYJmp3h0=,tag:6nljZEU+01I2A6bYmHf3XA==,type:str] -LITESTREAM_R2_ACCESS_KEY_ID=ENC[AES256_GCM,data:AJujAQyPg+7i,iv:x3sC5WeoUtnJGN01J7p+8W5q8QCbI7g8kA6+njdgsHA=,tag:WUAjGE8rTizRppwl4zR3Xw==,type:str] -LITESTREAM_R2_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:0uAA0+aYhDPS,iv:h3+1Tb7Z8JU+N5MsNOYy9oKfMZirFL1j8q0sCwol3bE=,tag:kUcuX79/CkFx8vXRq9LzaA==,type:str] 
-LITESTREAM_R2_ENDPOINT=ENC[AES256_GCM,data:LevPHY6+xQzE,iv:YKSlo15j2JOpaDFE3fodV2xzrFPma23LoRtbmrcAwKk=,tag:lRKpLojIj91NP3rQj0lujg==,type:str] -#ENC[AES256_GCM,data:hCAkcPu59qPCG+gdRZc=,iv:fsXe8zekkwZsVyKhD15gJUy+nrmlkE3y5GTSxnrsSBI=,tag:QzuZ3hjg0RP5ydk9LLqZtQ==,type:comment] -DUCKDB_PATH=ENC[AES256_GCM,data:7ovwg981vYaSxbPLhliEGiE+f/8V,iv:8+sT3EGhN3qPAGXehVXFibxGqebShA4+fpV8PMH89RY=,tag:Ed6JR6X661riZ19K1Rka4g==,type:str] -SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:gxDrCU7+J4hvzJGXPjxsvjZdJx7R,iv:ig+32NjlalAebrl24/V2L9cvtAGxCFLci/b9nBVsmrk=,tag:oJVU7BX7qMp++tGGYNxzqg==,type:str] -LANDING_DIR=ENC[AES256_GCM,data:Pz7vkNm/xxdO+kn9,iv:QleME3pY9gwgDmW6Vly1LVRMWhdkD04BXyzO3gFX3YM=,tag:QOpMsiXccD7xPsNJ/uuHjg==,type:str] -#ENC[AES256_GCM,data:AQyf+O/gIVE40EU=,iv:uJwMEr44+W4hjRUMPoIhofJfWvXxJDQStRLyjGDdw2w=,tag:Ry07aeuhb6d4CXhpcFkv6A==,type:comment] -REPO_DIR=ENC[AES256_GCM,data:yKOuO4XPNcPM14ZnKgLxpg==,iv:JJIkIHqDw7xessJZtwdxhp56UT9f7KtAHT1Hyi7Zd3A=,tag:PYMuH418TtcMuLBwrxDGsA==,type:str] -WORKFLOWS_PATH=ENC[AES256_GCM,data:tunk/tbrbvhMHONwRHV1A2l2da50L4n4CeJly3fz9g==,iv:WvxDglP2QRpgOl+RelF2a+JuPvjwusvet4xZs2s+tp8=,tag:CJuzy6A0vUbXd2e5+BXNEQ==,type:str] -ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:oHSBbRbq9tCueTxoEJiTmKVJoBIERiV75rsHhUts2Jo=,iv:qm7O2GJ3Rlp3LE1PdQXZPzlO0lropGEe5Wr+28F05Cs=,tag:5Ha/2D4G3NjiqkCE1NjrEw==,type:str] -NTFY_TOKEN=ENC[AES256_GCM,data:jIWqTX+iEzUvjJbSKJIZ7ZzZn0YbM7+4RU1W16D6j2U=,iv:R6be5ijLBLUNZjxHO2YkbgjjQDZdjD8gGRJedcFRI60=,tag:K2GL+O5TjdnIgC5l7pG0tg==,type:str] -SUPERVISOR_GIT_PULL=ENC[AES256_GCM,data:HQ==,iv:ZBSitlE8BIhM63+f3niABpM5kUmd07cg66icuVlGzc4=,tag:vxMBFQiuXmaKfeQtkx8oAQ==,type:str] -#ENC[AES256_GCM,data:5IbMzndnVQPyc8g=,iv:jrHRyj1c+AUJ5Jb8Omb3aliG0j0q850wIjA9tIqKbzI=,tag:n1if0sch0E1mBOp3qr2+zg==,type:comment] -PROXY_URLS=ENC[AES256_GCM,data:g8/iWdCPfTVt,iv:ZGvbYkfZk64Y2CC4vuGCj7TpRJGsSOP/psjz2pgKzow=,tag:GYvlKB1xn328bmpBN24W4g==,type:str] 
-EXTRACT_WORKERS=ENC[AES256_GCM,data:GQ==,iv:mUT35A3XBGaBox2PImgeWZyQx1AMQcPTnS4NJi1QnlI=,tag:dG/ZF2xlGsEJBBvfyG0hZg==,type:str] -RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:vi4=,iv:2oPRzyrFgIwOGjEH13P/7VQACA3xqDOz8+O7cDUnPwI=,tag:IFh7z+ZF+zjoADH0gFrPXg==,type:str] -#ENC[AES256_GCM,data:cG6Bl04wlNIwNTjP5TSykDI=,iv:qOrJNlej+elCvc7paLRL50opAD7zSeHYmIwAeFuH11s=,tag:P1dTLqa/DSMjIFvDTZZqhQ==,type:comment] -GSC_SERVICE_ACCOUNT_PATH=ENC[AES256_GCM,data:wqtE1aoqcK6v,iv:QejEYIbxDcYDiFC9Wdes81cYx2NL3b9o0v0XVRh7G1w=,tag:d3u6Rf8ptOhM/4pwxtg+6A==,type:str] -GSC_SITE_URL=ENC[AES256_GCM,data:25zMmaOduZznbTQwO29huAo7xFq3Ow==,iv:ahq19rDzdx5PB/5YyHxZc7EPAeHya7loe3cdGm5ot+M=,tag:aVmC7IKkqYo8lAH1T1XDQA==,type:str] -BING_WEBMASTER_API_KEY=ENC[AES256_GCM,data:pKZGODRwoo4D,iv:tPsrHU1B10lUkVJza2hub0lGAQ15xFZVePZ8RPI1XWo=,tag:zdWhYljkhnKAKRObOfII2A==,type:str] -BING_SITE_URL=ENC[AES256_GCM,data:VyIcIcxveu+dWz3zY6h/JVLjPw7ysw==,iv:Wj1TU7r9Izfyexp4WdoByRP+l01ZWml1mNgp6ys28EQ=,tag:fbC8Q7yFGeZBdiiDBADLwQ==,type:str] -#ENC[AES256_GCM,data:CDAjB0UL7OjgRPMmu97Z5HHjE4o4idn4Pb3N8/y8KHc=,iv:5ELBfYuFLZblCNMjPpZ10UxQqp9CzAIZQt7iSQwdR54=,tag:NguW5ISLMS5xpXSWfpJaIA==,type:comment] -GEONAMES_USERNAME=ENC[AES256_GCM,data:eaQPCcEreqBdHcw=,iv:CKD9cnL2BOn/yJM5EQs0Y044bAN3d4I6bRyTqhIQkns=,tag:82w/yCiGfKsV8zhpINL29g==,type:str] -CENSUS_API_KEY= -sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBWdVBTa1owM0JrbFJXcmg2\nZVNzRHJuK0MzY3gwbVdGZW1oWFh4VU84a3hjCmZWQnAybjVFSituRVE2eWt3QkpI\ndzAzMEpXeld6UEFraEZLUjJGSEordGMKLS0tIGtEbGd3ak82UnJiRjFDQXJvYkVO\nL0xYVW5Ya0U0QUYzckI2MWZyLzU0OUkKK7Q+mN6ew8pdpN7Z3zMQhWm/Lgkzu8Hi\n8i74oE6TfyKFQkhaCu4jOcBfYWTytMe38ZYLI0ApS5AeIsr/ZVtWGA==\n-----END AGE ENCRYPTED FILE-----\n +#ENC[AES256_GCM,data:Wav/NA==,iv:aL+BhiA8jZhSxEbxDAYuC6t/AOJhTmTL1e4CxZKMFWA=,tag:dm1w5IiHOqWU5rGshI+Lfg==,type:comment] +APP_NAME=ENC[AES256_GCM,data:POhoUbLUGvXG9b4=,iv:+O4EWtOIvkENCYVyKR2i43MzAfcN/qYmuKLzAW5cBMs=,tag:9uVonTXvK9is4wysYgx83Q==,type:str] 
+SECRET_KEY=ENC[AES256_GCM,data:Sc9tp5z2+m3k,iv:T68rsh79CCuWd4UPzoDhNmxt6PcDF3wYcUC1/kzJFgU=,tag:Qk/NEsVc1ozZC+gDFbw7VA==,type:str] +BASE_URL=ENC[AES256_GCM,data:CVWA4DYxDj8ALfxeFjryj/15y8sqsA==,iv:Q90DnZyg5lnyClvoRtkANc7Pw7Kf0b/IjGCI6X0EC34=,tag:UUnnGJQWhtXMsxo/9WtoPg==,type:str] +DEBUG=ENC[AES256_GCM,data:gp1pqrY=,iv:un/NxyBzL2bsVcYfw5p6YEFDS7PQ58tsgzd/edjKxvw=,tag:+/2hHRk4ePIQWewrAwQwzQ==,type:str] +#ENC[AES256_GCM,data:IBz3HceUcQ2ousTJJw==,iv:d2VdDCd3YM35u2wLtJi/wAwIGD4gwAyL6lYEaGAjREQ=,tag:+9tArSTm/sZhOW23hT/XmQ==,type:comment] +ADMIN_EMAILS=ENC[AES256_GCM,data:IcQNF/E1F557,iv:BZS+H2aujJk/iJ8KNphdKfbNfEPTCCF/YMS+XylA4LY=,tag:jVNngz8DBmY/y0wyI9upPQ==,type:str] +#ENC[AES256_GCM,data:Mh0RRWSB8xzw,iv:NP2x+wqF64GZz9/VauC0CX+0yJHntHlS5TLMTH3jIIU=,tag:2BIIEevah5ZXmQb1MdH24g==,type:comment] +DATABASE_PATH=ENC[AES256_GCM,data:m0MbpPzsUpsNTl4=,iv:W2kV+ZFFjs2mrifZALMLtvYcFsDpEuJ9302zNMEeLQA=,tag:NJEe/EZeisS/dRZE0xsdSA==,type:str] +#ENC[AES256_GCM,data:cab2fnw=,iv:0O2xG1Kd72eBOzuPsENcEN2yglgGketYJ4x6eoBlBFg=,tag:e7UDXywCT8I8i+HoJTDnXQ==,type:comment] +MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:7bU=,iv:Vov5Eqkg17sOr0dMuKH2Yu7GEh+NrA5eQisR1S6l0x0=,tag:Dzv7DynR9bw2vGUYBLyTLw==,type:str] +SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:vUE=,iv:NOGW2PmabHJ4Jg4qqGAthVJm215oP6ZXDxotmGcnaxU=,tag:6HUzPr5vC/H4xTjYDBsZeg==,type:str] +#ENC[AES256_GCM,data:hkqR+T2LqHF0P6Rk9ct0,iv:muiERgZJgUzNCkraL6fpJW3Juq57UwPkcHYBoqHzPZo=,tag:hL79NkHIjiK2APU4u8TYgA==,type:comment] +RESEND_API_KEY=ENC[AES256_GCM,data:c22My3N0oeaD,iv:5HN/Ya+UdlEH47wF5Bt0yrm+QSWICSVnjllAJbICgbk=,tag:blThzzKKFgSCAIhFjMPiwg==,type:str] +EMAIL_FROM=ENC[AES256_GCM,data:/xYHYA1ldi1vbp+SB4OpY6yiLf81aBsKk3I81jWZUYG23w==,iv:C3bvthAepEZuceWKRPxijtxbUPI+REmByVgMDwTt56s=,tag:Ue9dOmZmcwk1LJ6C5/38Jg==,type:str] +LEADS_EMAIL=ENC[AES256_GCM,data:OBxQ0O2S0pz+37t6QfGkeyfEeP4Uj6iiFRl44JTIPW6Ptg==,iv:Tat5MDknzUQxx9aRsiOdOYfZhFSyMeiK8GPdSaBLCKo=,tag:zeDlnndu2ltPUGw3cLxSnQ==,type:str] 
+RESEND_WEBHOOK_SECRET=ENC[AES256_GCM,data:IkwtdB8OVNoc,iv:HtDFBAeysVW+lF40+Q41OkuKeT+IqMj5QjN3WiWJSUs=,tag:ag8SkdpBJ2kgMQgQjf9ytA==,type:str] +#ENC[AES256_GCM,data:YYm/fBT+tQ==,iv:SfHfEjAUDcx43txowHIOYW8lbESf1aA3G3kfduvwWCc=,tag:kU30Pw6OpOxkwJnDLP3rMw==,type:comment] +PADDLE_API_KEY=ENC[AES256_GCM,data:TiTjORwV6MOp,iv:Jl6P4e9M9EHhzAPUqZp2vP9Y5jk4Qk8lBf+ZhR7iUOQ=,tag:FeJ1xhBScZz5mbU80c2c0Q==,type:str] +PADDLE_CLIENT_TOKEN=ENC[AES256_GCM,data:zuOECQEwof2g,iv:2wywplC0WoFjeahjBhItLCQJuzZjPnc30eiBt+dKPIw=,tag:8e3lQt4/VRJLCIIrDtBKtQ==,type:str] +PADDLE_WEBHOOK_SECRET=ENC[AES256_GCM,data:51gkXZ/DqOZf,iv:r4hTwBeLvq2gH+ctltQUe2ErCxaQtiMjZ6+W/Bh6A3M=,tag:CY/X8ZgVgj7W7MAANz5jtw==,type:str] +PADDLE_NOTIFICATION_SETTING_ID=ENC[AES256_GCM,data:F/A9e6nLY0FE,iv:wt0hB0vIf6WR+Hxp5lTjabIBpcIyRqE7S/h8NW6RT48=,tag:CiUqrb5vH9XiiJ3Di+DEQQ==,type:str] +PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:oyN5vq6WU9+GPQ==,iv:5Vp6AawTjCaet/Vw3ElfOiQs6m78QC46+rMCfA0KCZk=,tag:TRQxuYr6gs6EMPWnI7k5mQ==,type:str] +#ENC[AES256_GCM,data:ETizP+U6,iv:bzs+KIw54HXbCdQncCJJ9NAUiMo4xdE8nAxHxZtl5HM=,tag:5Hq5QuWQxqlfC1gISfVs9w==,type:comment] +UMAMI_API_URL=ENC[AES256_GCM,data:3I67uzhmagf8RPwRD7HiXLCnCjG10em98fVsuA==,iv:qACYc8jKe/816rql5COoUL351e58qCzVi1cdGA5JfZ0=,tag:VwmpCJittpYaN5S6grCKMw==,type:str] +UMAMI_API_TOKEN=ENC[AES256_GCM,data:MXlJOXEh45OT,iv:9nsj2IXpc/EHEaPrj8RkKZC5aH+ky2x6geeWoWuPtJM=,tag:nuTxbjhUenue2krhB5ZNuQ==,type:str] +#ENC[AES256_GCM,data:I3LUWXInlHVEt5HfDpY=,iv:OVGbuQUc1wB3phmwAONAx0DDudF7EReZ/JFwjrNTHU0=,tag:S7nu/WpB5wKU7K7LLwCgAQ==,type:comment] +RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:1mFf,iv:YFckwytvkxgrLKGGOpKyZKE61t3QpkxLAbXR2Th03R0=,tag:zo3vU1BlwcE778jcf2ugBw==,type:str] +RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:dQ0=,iv:hD+YjNfATvzmVc/cpHs4z/601ezfoxRuXkllqShcio0=,tag:Zfw/fZoEmxat9WjPtx+qQw==,type:str] +#ENC[AES256_GCM,data:ZWwLbf5Y3Y0OobFHq45HGcGn6MsG,iv:Y/H/ZafbT+2YArnBe/mM1KkHFuuUDQsAf3JQgonIDUU=,tag:QW0MvN4e3mDsXGHdkaAF7Q==,type:comment] 
+LITESTREAM_R2_BUCKET=ENC[AES256_GCM,data:avxiVQAIebLG,iv:2CZ1MGjGScdgNgm0nVexurhJFC6UQEmi2ldVR/RxBr4=,tag:oZT+tGt+/EGQhpH8jiwHmw==,type:str] +LITESTREAM_R2_ACCESS_KEY_ID=ENC[AES256_GCM,data:7Mn+GggjzjbN,iv:vmUfPamn8pJR/vFtnO2rhD2+ALHUiZ8ahPTGzncsyVM=,tag:v1Tp97ZsiOJocqDoXCb58g==,type:str] +LITESTREAM_R2_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:CN+aCZHaIANh,iv:X34JZ/OC5AalWZVAlDoN0G1Kch2z1Q3Uo18PJa1NTE8=,tag:wAqiEz6UCHcaISCOzOgtgg==,type:str] +LITESTREAM_R2_ENDPOINT=ENC[AES256_GCM,data:C8UWFesY4/M+,iv:6ti84Kr0aDxs1S7Y17uNb78j2EzjovAki6N6cAm7agY=,tag:R9Q2Ug+HXpXYm/gTF/be1g==,type:str] +#ENC[AES256_GCM,data:qq7NU6KOZT/mwuTunBs=,iv:5R65DWMFkHdoeKsBiuDQFJqLdoH+nwTsU/DbWD+4yWM=,tag:Daf2m6DEi5pAJ+NQSXFD+g==,type:comment] +DUCKDB_PATH=ENC[AES256_GCM,data:nvlaHUuTDDHl2vx3CMVCRXra6QuJ,iv:2M/eptAN/wNZgZTC0kye4dF0rWUGkjdK+hwAOuyHU1s=,tag:9rExFDqVe2e9KWCTdgnQjw==,type:str] +SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:ZdbRf/E5WVf9CfJ/mA7CM/C80RRW,iv:0dG5+oloGs8tOno4Lu8gJFVQ/ilTQVhqiZn7jis8u1E=,tag:kmphDZt1Y/D8qfbLJQCLwg==,type:str] +LANDING_DIR=ENC[AES256_GCM,data:NQQbZyOeMGCMxQOo,iv:E7+B45xAqG71yt4gc4aa3YA1KTJWDD7OcY9SieMDNd4=,tag:c2S1aDMD8zDQfmFew5sM/g==,type:str] +#ENC[AES256_GCM,data:BASJNxsOqfahZJ4=,iv:Fxwpluhe/V1xVMqO4rWwRamKEIHGfUrGMsx1XmKZv+4=,tag:2MthdZr4YMkVREbSydQC9g==,type:comment] +REPO_DIR=ENC[AES256_GCM,data:FmiIf4/s9qcF1KFriNfZIg==,iv:ur2jpVrM0rHYT55Ir3Wvb7InWr7J3Bxdt3s/D8w222w=,tag:Yk+BUcS2Ae2KzJzLQklcFA==,type:str] +WORKFLOWS_PATH=ENC[AES256_GCM,data:JAKGfDMe0wptVhisI7YC9GMBdslurVuaSG8L9aXI7A==,iv:s9P3m14zWUs3REQvypfQ3j/n/Klt4hPzwGqbGDOEOR8=,tag:wTDLzQ/KUjde7zDpG7rYiQ==,type:str] +ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:3roAiuwKnSNzSjU9d5wIPRJAW7e9l7vtxc7Lj5Sf9Uo=,iv:vC738fioTDsWrVB///9GkKV7Jc67hlvuvcfnSAzwpuQ=,tag:v29o37UGSoA6/S7VxwENaQ==,type:str] +NTFY_TOKEN=ENC[AES256_GCM,data:hQerDktsRYZaBaunhS0koSVtehopZ1Cp/p+9v/FbIZU=,iv:b204WBPs3GYfgfCWr1mi58QN+iLO2Ekl0jrRQWcKoAY=,tag:+7h3on0TNiJUqg7EREZRYw==,type:str] 
+SUPERVISOR_GIT_PULL=ENC[AES256_GCM,data:AQ==,iv:/gJIcFRMd2jU97paHwXKzREQBOeJ2vq9l5ux9gDewO0=,tag:cMa4+fgk4oqUqc+DfBkSSQ==,type:str] +#ENC[AES256_GCM,data:u1BQCX8MCMcj1zU=,iv:Dtk4K4eZ50w3zbZKlZxrvytsWlYFteXFv+0QBYJJ59U=,tag:8iveGLmKeOrVk2bMVG7D7w==,type:comment] +PROXY_URLS=ENC[AES256_GCM,data:KH3171qwQp4rkbP1gJXYUqicmZuS5q+7xQ3Hv/Pb2MG1Sp/RbQwpEMR7X2ZiQYO6WUaIcZW/87qAEAShFe/w+qPHACSi//f7zwxv5vHNAhBaxMeU4tPudh9XGAvfyKonNdrU1rrPWIyQPVZ4ZKOUWJsetOy6P1Xly3PLpk8xh4jfLUmKuHEbuQcJInRqhcJdjSecLcAvaDmElZCDhr6dhq2PVQdciS/0LkDLj1xkZ09UrfKbS0fQD8//SM7VX+mPUNjIVK+4kytTbccrK3w79HEgFUnWa4q5PDnzYUFzexKu8pJEQwEl5gkBoHZo8lDoNNP2qed/pdEE9aMxNOSDJsoidW/vIhSxk1GmYwLuzLXAMcLdvujjrgLrt1lvZEyuWwGyRDhsYSp6SA80U48B3UrgFK3xXzwf1YuZHpcCN3Zjb82KLgXJIUzK+LA9kDiGMdzyEtNT1q6+RdCHS7I4XXgcFPaTJL6J/avUqwU31YpvSMM0LItJeKM9I5FL5BbC/IDhE1ax26Px27Hos710+lI3eUu2RViw82d650gqSu2IFfGbGv9ECsTOyUR8no427iqyB/003DAkgxjbSCs6oJIayKHJNULWAMks2mx35gGQathVLdmysfSfLYizP9I=,iv:IZTwqbh76CRXYPVYyZ6PlSjKof+dE1XHvomV8nA6xWM=,tag:de07uKNUmsGOX/2trFqWfw==,type:str] +EXTRACT_WORKERS=ENC[AES256_GCM,data:nQ==,iv:rUlSbl5+wdpY9OATciWw7u/Wniyi+ctJDyQ2jzdLI1U=,tag:NaVN4gJo2M5dK0Hq9/H67w==,type:str] +RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:kFU=,iv:2yLZn5eC8UXCfOPNa+1fOnRnaY3JhMlh9AdBRAyrCZU=,tag:Dalqeod45g0mn4Mn1YEMvQ==,type:str] +#ENC[AES256_GCM,data:GK/IYt5yyMbVLGx/P8Hzyfc=,iv:mzIaCOzunFypa+eJhf/gAWxZDixH6bxauOZjo6ARVEQ=,tag:r1650Kgs7frUDAVJqpQ3Yw==,type:comment] +GSC_SERVICE_ACCOUNT_PATH=ENC[AES256_GCM,data:ZgsW82cm38rR,iv:Qz4oSAKQ2U3dBgy1GtDl56GGzCqnmvldA9ByiQLjH94=,tag:qxlF/5CvhNd4fOPajwrPvg==,type:str] +GSC_SITE_URL=ENC[AES256_GCM,data:nz5OC1nciCmds6bl257oo9IX3SA7kA==,iv:+LV0kDG4xfwL3E3oGEsUB1h3W7ZpRijv3LipWq+GHg8=,tag:zUmIKWfHd6KwD+cMZb0ZKw==,type:str] +BING_WEBMASTER_API_KEY=ENC[AES256_GCM,data:3h4h5BIApJdX,iv:lG/eNi0HwBF+FL5gyhDUiqHrLdMe+s8/PbXNqInR/i8=,tag:dvXNTf/Gn0RNE+A0U2fzPQ==,type:str] 
+BING_SITE_URL=ENC[AES256_GCM,data:ij5VYDk61wpIF21DdG1B6oEwN4yzUg==,iv:gJWQIYMCpjdVV+BoWxlpYy5uyM4M9WJQuHgud4Z7djY=,tag:rQwlCHjLSiLFjrATleiHVQ==,type:str] +#ENC[AES256_GCM,data:CJ83yX864ts1yvmP7pQXIbDqXbmwu6o9tAm4AMBH0A4=,iv:GgOY+GkLnS+mMu/iQSe4EFyMjC/tGnSR25GvvYS/gdA=,tag:sHqmi1Z+PMflJo/rHbTdUA==,type:comment] +GEONAMES_USERNAME=ENC[AES256_GCM,data:d9jN+L/93b2wDrY=,iv:IsmVx459v4vw/XREahd3SN88U8yCKQwaVaTty7b2big=,tag:OgO3lzdCt/Yg0S2Z/t9Row==,type:str] +CENSUS_API_KEY=ENC[AES256_GCM,data:KmceIQqzlvJ3l7yPCksP1ecT0KFqo5/owrRx34/u3Md4WUTj2RGn3g==,iv:1YVk6/OUZNxXOkzZ5qZXj6RNhy4mfEnus+uBnRcRLKE=,tag:rP1y2XEvQwim1rq/GjMPNw==,type:str] +PROXY_URLS_FALLBACK=ENC[AES256_GCM,data:hB9OdyCSGXVz6U1wPuSElzW/2rzHinqPBFFiJv9ugiXYlwj1uviGe0SHZ68AQHB050s/1ku3jDnzPwdM,iv:d1e9RLg5LLCPGKeZS1lkmh44Tt9GEW+ZC2YBMU5rnxA=,tag:whOwZYndrq7VB8flwLmW3w==,type:str] +CIRCUIT_BREAKER_THRESHOLD=ENC[AES256_GCM,data:kF8=,iv:6d/79VZFtIQtxKcvtVKF5/69KIBr7bIWpTPKt+vt6fU=,tag:yQStmU9Gjhs5CJw/r9b9rA==,type:str] +sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBhRU1MWm44bGpuSlRyVUpT\nU0syT2x5eU9lSnR6Nk9DejFNOUJYaFUyeENvClJpU0d6dTR6emoyRDIvaUhnTDNU\nL0hpc0w5QWI0K1hKa1p3NTVCU3Jyd0kKLS0tIDBOUkdjNTI1a0ZLMzFpcjV2QmZS\nNHltZ2hIcDFRVkVmSEQ1bnFKdk9YdW8KqJSaCtueN7XdOBfgzOfJYDHUga2rgWXF\n5XbTf4WE8HMqtWWgAq6ZQLFGSbJAhKmPlscjyp/VflFZvKvVqm3qbw==\n-----END AGE ENCRYPTED FILE-----\n sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a -sops_lastmodified=2026-02-24T15:04:59Z -sops_mac=ENC[AES256_GCM,data:BsSyxkjwN8SNSC8fE3iNZPYIAPgrnbMYLGN/waGNkzH11VfcGAw5vQaPR6Il1PBMrx8gJ9daxRbvTW/DkY+G8VTpF3HWB3IoJPRewTLNUdkDSCxwhGuzfTzT1f7FKeNNVxsCEhJZGMYbDiYzcnbU1owgcHivBfCYl0DF0VM8cS0=,iv:aV5Af5nrhaI9NE2ouGnr20s6mRD9VPHLNcdfola9Ybw=,tag:YD08kkuFLksllK4Q9cfYfQ==,type:str] +sops_lastmodified=2026-02-24T20:32:28Z 
+sops_mac=ENC[AES256_GCM,data:htupj3WWPJ9KAxkgnsFyX8xnnfBeSna3nEjMA/RoFaDLBxkfFhXJOqiGTGzYAnsoc6KKxSdLy1Raa9wRqmmM0hmSqptBr/9axGthNmTg4m8UAgCzCG/ZJSL+hlTvmnL7Y2p3ryvk9w8Tw8jchdbFWgP5C6wmA1YAMOVBW+BfHxo=,iv:5NUe020D/1j8ISDFhZGyW8pobsKQtga2DJCmIV7yyIA=,tag:TlcQaU6z9ktkVJEduyoWtA==,type:str] sops_unencrypted_suffix=_unencrypted sops_version=3.12.1 diff --git a/docs/proxy-provider-inventory.md b/docs/proxy-provider-inventory.md new file mode 100644 index 0000000..17d94ef --- /dev/null +++ b/docs/proxy-provider-inventory.md @@ -0,0 +1,735 @@ +# Proxy Provider Inventory + +Compiled: 2026-02-24 +Purpose: Identify proxy providers for distributed web extraction — rotating IPs to avoid per-IP rate limits when scraping Playtomic and similar APIs at scale (14k+ venues/day). + +Use case requirements: +- ~14,000 HTTP requests/day to `api.playtomic.io` (availability + tenant endpoints) +- Need IP rotation to stay under per-IP soft bans (observed ~1,000 req/IP/day limit) +- Serial or low-concurrency (1–8 workers), not high-throughput +- Residential IPs strongly preferred — Playtomic's API likely blocks datacenter ranges +- HTTP/HTTPS proxies; SOCKS5 a bonus +- Pay-per-GB preferred over per-IP (light usage: ~1–5 GB/month estimated) +- Budget: €20–100/month target + +**Bandwidth estimate:** A Playtomic availability API response for one venue is roughly 5–30 KB of JSON. At 14,000 requests/day with ~15 KB average (request + response + headers, pre-compression), that is ~200 MB/day or roughly **6 GB/month**. At the low end (small venues, cached pages) it could be 1–2 GB/month; at the high end (large venue JSON, retries) up to 10 GB/month. Budget for 5–10 GB/month when choosing a plan. + +--- + +## Provider Comparison Table + +| Provider | Pool Size | Res. 
Price (GB) | ISP/Static Price | Min Spend | Sticky | City-level | SOCKS5 | Score | +|----------|-----------|-----------------|------------------|-----------|--------|------------|--------|-------| +| Nimbleway | Millions (proprietary) | $8.00–$14.00 PAYG | — | $300/mo | 30 min | Yes | Yes | 2 | +| Froxy | 10M+ | $12.00–$1.95/GB (tier) | — | $1.99 trial; $60/mo entry | 10–60 min | Yes | Yes (SOCKS4/5) | 3 | +| LunaProxy | 200M+ | $0.65–$0.77 PAYG | $3/IP/week static | $7 (10 GB bundle) | 90 min | Yes | Yes | ⛔ REMOVED | +| Proxy-seller | 20M+ res; 400+ ISP | $0.70–$7.00 | $1–$2/IP/mo ISP | ~$1 (1 GB) | 24 hr res; static ISP | Yes | Yes | 3 | +| ProxyScrape | 48M+ | $4.50–$4.85/GB | — | $2 (1 GB test) | 120 min | Yes | Yes | 3 | +| Bright Data | 150M+ | $2.80–$10.50 PAYG | $2.80/IP/mo | $500/mo sub; PAYG no min | 30 min | Yes | Yes | 3 | +| Oxylabs | 175M+ | $3.75–$8.00 PAYG | $1.15–$1.60/IP/mo | $99/mo | 30 min | Yes | Yes | 3 | +| Decodo (Smartproxy) | 115M+ | $7–$12.50/GB | — | $80/mo sub | 24 hr | Yes | Yes | 3 | +| IPRoyal | 34M+ | $1.75–$7.00 | — | No min (PAYG) | 7 days | Yes | Yes | 5 | +| Rayobyte | 40M+ | $3.20–$9.00 | Unlimited BW plans | No stated min | 60 min | Yes | No | 3 | +| NetNut | 85M+ | $3.54–$15.00 | $17.50+/GB static | $99/mo | Indefinite | Yes | No | 3 | +| SOAX | 191M+ | $3.60–$4.00 | — | $90/mo sub; $1.99 trial | 60 min | Yes | Yes | 3 | +| Webshare | — | $2.00–$3.00 | $0.30/IP/mo | ~$7/mo | Limited | Yes | Yes | 4 | +| Geonode | 10M+ | $0.50–$3.00 | — | No min (PAYG) | 24 hr | Yes | Yes | 4 | +| PacketStream | 45–72M+ | $1.00 flat | — | $50 deposit | Supported | Yes | Yes | 4 | +| Proxy-Cheap | 6M+ | $2.99 | — | No min | Supported | Yes | Yes | 3 | +| ProxyEmpire | — | $1.50–$3.50 | — | $1.97 trial | Unlimited rollover | Yes | Yes | 4 | +| Infatica | 20M+ | $4.00 PAYG | — | $4 trial (1 GB) | Supported | Yes | No | 3 | +| Shifter | 31M+ | Port-based $299+/mo | — | $299/mo | — | Yes | No | 1 | +| Storm Proxies | 20M+ | Port-based $19+/mo | — 
| $19/mo | 5 min | USA/EU only | No | 2 | +| DataImpulse | 90M+ | $1.00 (never expires) | — | No min | 30 min | Yes | Yes | 5 | +| Evomi | 5–10M+ | $0.99 PAYG / $0.49 sub ($49.99/mo) | — | Money-back guarantee (<1 GB) | Supported | Yes | Yes | 4 | +| 922Proxy | 200M+ claimed | $0.77 | $0.16/IP/day | No min | Supported | Yes | Yes | 3 | + +**Score (1–5):** fit for Playtomic extraction — rotating residential, pay-per-GB, no large minimum, city/country targeting, low total cost at 5–10 GB/month. + +--- + +## 1. Residential Rotating Proxies + +### 1.1 Bright Data + +| Field | Value | +|-------|-------| +| URL | https://brightdata.com | +| Types | Rotating residential, ISP, datacenter, mobile | +| Pool Size | 150M+ residential IPs | +| Countries | 195+ countries; country + city + ZIP + ASN + carrier targeting | +| Pricing (residential PAYG) | $10.50/GB; promo ~$5.25/GB when available | +| Pricing (Starter subscription) | ~$2.80–$3.50/GB for 141 GB ($499/month) | +| Sticky Sessions | Up to 30 minutes | +| Auth Method | Username:password; API key; browser extension | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | Starter plan: $500/month; PAYG: no minimum | +| Trial | 7-day free trial for registered companies; $5 credit on account creation | +| ToS scraping | Commercial scraping explicitly supported; most permissive ToS in market | + +Bright Data is the industry benchmark — 150M+ IPs, 195+ countries, the deepest targeting options (ZIP-level, ASN, carrier), and a platform that includes scraping browsers, CAPTCHA solvers, and managed datasets beyond raw proxies. Their IP quality is the highest in the market; IPs are very unlikely to be pre-flagged by Playtomic. + +The economics are the problem for small-volume use. The Starter plan requires $500/month — 50–100x the actual data cost at 5–10 GB/month. The PAYG rate of $10.50/GB (or $5.25/GB at promo) is usable but the promo is time-limited. The $5 trial credit on account creation allows a no-risk integration test. 
+ +Choose Bright Data if: (a) extraction scales to 100+ GB/month and justifies the Starter plan, (b) Playtomic begins aggressively blocking mid-tier residential IPs and only Bright Data's IP quality works, or (c) you need the managed scraping infrastructure (Web Unlocker API) rather than raw proxies. + +--- + +### 1.2 Oxylabs + +| Field | Value | +|-------|-------| +| URL | https://oxylabs.io | +| Types | Rotating residential, ISP (static residential), datacenter, mobile | +| Pool Size | 175M+ residential IPs | +| Countries | 195+ countries; country + state + city targeting | +| Pricing (residential PAYG) | $8.00/GB | +| Pricing (Micro plan) | $99/month (13 GB = $7.62/GB) | +| Pricing (Starter) | $150/month (40 GB = $3.75/GB) | +| Pricing (ISP proxies) | $1.15–$1.60/IP/month with unlimited bandwidth | +| Sticky Sessions | Up to 30 minutes (residential); indefinite (ISP) | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | $99/month (Micro) or PAYG at $8.00/GB | +| Trial | Free trial for qualified businesses | +| ToS scraping | Full commercial scraping support; dedicated scraping APIs available | + +Oxylabs has the largest claimed pool at 175M+ IPs and is consistently strong on reliability. The ISP proxy tier is a notable option: 10 static ISP IPs at $1.60/IP = $16/month with unlimited bandwidth. If Playtomic's rate limits reset daily and you distribute 14,000 requests across 10 IPs (1,400 req/IP/day), this may exceed the soft limit. Expanding to 20 IPs at $32/month brings each IP to 700 req/day — safely under the ~1,000/IP threshold. + +The $99/month minimum for the residential rotating plan makes it expensive for small volumes. Like Bright Data, Oxylabs is better justified at scale. The ISP tier is the most interesting option at this budget level — worth a direct test. 
+ +--- + +### 1.3 Decodo (formerly Smartproxy) + +| Field | Value | +|-------|-------| +| URL | https://decodo.com | +| Types | Rotating residential, ISP, datacenter, mobile | +| Pool Size | 115M+ ethically sourced residential IPs | +| Countries | 195+ locations; country + state + city + ZIP + ASN | +| Pricing (PAYG) | $12.50/GB (1 GB) | +| Pricing (Micro) | $80/month (8 GB = $10.00/GB) | +| Pricing (Starter) | $225/month (25 GB = $9.00/GB) | +| Pricing (Enterprise) | As low as $1.50/GB at 1,000 GB/month | +| Sticky Sessions | Up to 24 hours (1–1,440 minutes configurable) | +| Auth Method | Username:password; whitelist IP | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | $80/month (Micro) or PAYG at $12.50/GB | +| Trial | 3-day free trial | +| ToS scraping | Explicitly supported; scraping use case documented | +| Performance | 99.86% success rate, 0.63s avg response time (Proxyway 2025 — top tier) | + +Decodo (rebranded from Smartproxy in 2025) is the top performer in Proxyway's 2025 benchmark: 99.86% success rate and 0.63-second response time, winning Proxyway's "Best Value Provider" award for the fourth consecutive year. The 24-hour sticky sessions are the longest among major providers and the 115M+ pool with ZIP+ASN targeting is excellent. + +The difficulty is pricing at small volumes. $12.50/GB PAYG for 6 GB = $75/month. The Micro plan at $80/month forces purchasing 8 GB regardless — at 6 GB actual usage, 25% is wasted each month. Decodo is the right choice if performance is the primary requirement and/or volume scales to 25+ GB/month where the Starter plan's $9.00/GB becomes competitive. 
+ +--- + +### 1.4 IPRoyal + +| Field | Value | +|-------|-------| +| URL | https://iproyal.com | +| Types | Rotating residential, static residential (Pawns), datacenter, mobile/4G, ISP | +| Pool Size | 34M+ residential IPs | +| Countries | 195+ countries; country + state/region + city targeting | +| Pricing (rotating residential PAYG) | $7.00/GB (1 GB) → $4.90/GB (50 GB) | +| Pricing (sticky residential) | From $1.75/GB; up to 7-day sessions | +| Sticky Sessions | Up to 7 days per IP | +| Auth Method | Username:password; API key | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | No minimum; PAYG only | +| Trial | No free trial; small PAYG purchases from $1.75 | +| ToS scraping | Web scraping and data collection explicitly permitted | +| Performance | 99.56% success rate, 1.06s avg response time (Proxyway 2025) | + +IPRoyal stands out for the combination of no-minimum PAYG billing, non-expiring traffic, and industry-unique 7-day sticky sessions. For the Playtomic extraction pipeline, the sticky residential tier at $1.75/GB is exceptional value — lower than almost any other provider's rotating residential rate, with sessions that hold an IP for up to a full week. + +The 34M IP pool is smaller than Bright Data or Oxylabs but very adequate for 14,000 requests/day. The 99.56% success rate and 1.06s response time are strong for a mid-tier provider. SOCKS5 support and city/country targeting complete a well-rounded offering. + +At $1.75/GB sticky residential for 6 GB/month, total cost is approximately $10.50/month — well within budget, with no monthly commitment. Caveat: verify with IPRoyal support whether the $1.75/GB sticky tier draws from the same 34M rotating pool or a separate static IP inventory, as IP quality may differ. 
+ +--- + +### 1.5 Rayobyte + +| Field | Value | +|-------|-------| +| URL | https://rayobyte.com | +| Types | Rotating residential, ISP (static), datacenter (dedicated + semi-dedicated), mobile | +| Pool Size | 40M+ residential IPs across ~180 countries | +| Countries | ~180 countries; country + state + city + ASN targeting | +| Pricing (residential rotating) | ~$9.00/GB for small plans; ~$3.20/GB at 1,000 GB/month | +| Pricing (datacenter dedicated) | From $2.50/IP/month (US); from $1.00/IP/month semi-dedicated | +| Sticky Sessions | Soft sticky (IP held while peer is online) or hard sticky (1–60 min fixed) | +| Auth Method | Username:password; dashboard configuration | +| Protocols | HTTP, HTTPS | +| Min Purchase | No stated minimum; PAYG available | +| Trial | Trial proxies available via portal | +| ToS scraping | Commercial scraping supported | + +Rayobyte (formerly Blazing SEO) built its reputation on datacenter proxies; their residential offering is solid but not price-competitive for small volumes. At ~$9.00/GB it is more expensive than IPRoyal ($7.00/GB), DataImpulse ($1.00/GB), or Evomi ($0.99/GB PAYG) for equivalent low-volume usage. + +The soft/hard sticky session distinction is technically useful — hard sticky guarantees the same IP for up to 60 minutes regardless of peer status; soft sticky holds only while the peer device is online. For venue-by-venue crawling, hard sticky at 5–10 minutes is sufficient. The lack of SOCKS5 for residential is a minor inconvenience. Rayobyte becomes more compelling at 200+ GB/month where the pricing curve ($3.20/GB) is competitive. Not recommended as primary for 5–10 GB/month. 
+ +--- + +### 1.6 NetNut + +| Field | Value | +|-------|-------| +| URL | https://netnut.io | +| Types | Rotating residential, static residential (ISP), datacenter, mobile | +| Pool Size | 85M+ rotating residential; 1M+ static residential | +| Countries | 195+ countries; city targeting | +| Pricing (rotating residential) | Starter: $99/month (28 GB = $3.54/GB); enterprise: $1.59/GB | +| Pricing (static residential) | $17.50+/GB (primarily for long-term sessions) | +| Sticky Sessions | Indefinite (static residential); ~30 minutes (rotating) | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS (no SOCKS5) | +| Min Purchase | $99/month (28 GB) | +| Trial | Free trial for enterprise accounts | +| ToS scraping | Commercial use permitted | + +NetNut's differentiated product is direct ISP connectivity for static residential proxies — lower latency and more predictable performance than peer-sourced networks. The rotating residential pool of 85M+ is competitive. However, the $99/month minimum (28 GB) is oversized for 5–10 GB/month consumption, and the static residential tier at $17.50/GB is very expensive for bandwidth-intensive work. + +NetNut updated pricing in March 2025 to align more closely with Oxylabs and Bright Data, making it less of a budget option than it historically was. The lack of SOCKS5 is a limitation. Best suited to operations running 28+ GB/month where the Starter plan breaks even. 
+ +--- + +### 1.7 SOAX + +| Field | Value | +|-------|-------| +| URL | https://soax.com | +| Types | Rotating residential, mobile, ISP, datacenter | +| Pool Size | 191M+ IPs (residential + mobile combined) | +| Countries | 195+ locations; country + city targeting | +| Pricing (residential Starter sub) | $90/month (25 GB = $3.60/GB) | +| Pricing (residential PAYG) | $4.00/GB | +| Pricing (3-day trial) | $1.99 | +| Pricing (Enterprise) | As low as $0.32/GB at scale | +| Sticky Sessions | Up to 60 minutes | +| Auth Method | Username:password; dashboard-based session management | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | $90/month (Starter) or PAYG at $4.00/GB | +| Trial | $1.99 for 3-day trial | +| ToS scraping | Fully supported; web scraping is a primary advertised use case | +| Performance | 99.95% success rate, 0.55s avg response time (Proxyway 2025 — near best in class) | + +SOAX achieves near-best-in-market performance metrics: 99.95% success rate and 0.55-second response time in Proxyway's 2025 benchmarks. The 191M+ combined pool, 60-minute sticky sessions, and SOCKS5 support make it technically strong. The $1.99 three-day trial is the cheapest paid entry point for validation. + +At PAYG: $4.00/GB × 6 GB/month = $24/month — within the lower budget range. The Starter subscription at $90/month is only justified at 20+ GB/month usage. SOAX is a good option if performance reliability is paramount and you do not want to accept the uncertainty of smaller providers like Evomi or DataImpulse. 
+ +--- + +### 1.8 Webshare + +| Field | Value | +|-------|-------| +| URL | https://www.webshare.io | +| Types | Rotating residential, static residential (ISP), datacenter, shared proxies | +| Pool Size | Not publicly stated for residential | +| Countries | 195+ countries; city targeting supported | +| Pricing (residential rotating) | 10 GB/mo: $3.00/GB; 60 GB: $2.33/GB; 100 GB: $2.00/GB | +| Pricing (static residential / ISP) | $0.30/IP/month (unlimited bandwidth) | +| Pricing (free tier) | 10 datacenter proxies + 1 GB/month — permanent | +| Sticky Sessions | Limited; primarily a rotating service | +| Auth Method | Username:password or whitelist IP | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | Very low; plans start ~$7/month for residential | +| Trial | Free tier permanent (10 proxies + 1 GB/month datacenter) | +| ToS scraping | Permitted for commercial use | + +Webshare offers two compelling products for this use case. First, the permanent free tier enables no-cost integration testing. Second, the static residential (ISP) proxies at $0.30/IP/month represent the cheapest stable-IP option on the market: 30 ISP IPs at $9/month provides unlimited bandwidth across a persistent pool of 30 residential-quality IPs. + +If Playtomic's soft ban is per-IP-per-day (resetting at midnight), a pool of 30 ISP IPs at 14,000 requests/day means ~467 requests per IP per day — safely below the ~1,000/IP observed limit. This setup at $9/month is potentially the cheapest viable production configuration, contingent on ISP IPs not being pre-blocked. + +The rotating residential plan at $3.00/GB (10 GB tier) is mid-market and straightforward. Webshare has a developer-friendly API and clean dashboard. 
+ +--- + +### 1.9 Geonode + +| Field | Value | +|-------|-------| +| URL | https://geonode.com | +| Types | Rotating residential, static residential (ISP), datacenter | +| Pool Size | 10M+ residential IPs | +| Countries | 190+ countries; country + city targeting | +| Pricing (PAYG) | $3.00/GB | +| Pricing (Starter plan) | From $1.00/GB (subscription) | +| Pricing (Business plan) | From $0.50/GB | +| Sticky Sessions | Configurable, up to 24 hours | +| Auth Method | Username:password; whitelist IP | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | No minimum for PAYG | +| Trial | No formal trial; $2,250 proxy credit on first purchase (promotional) | +| ToS scraping | Explicitly permitted for commercial use | +| Performance | Trustpilot 4.7/5; latency anomalies reported on Reddit | + +Geonode advertises $0.50/GB on the Business plan and up to 24-hour sticky sessions — both market-leading for a mid-tier provider. The PAYG rate of $3.00/GB with no minimum results in approximately $18/month for 6 GB. + +The 10M IP pool is the smallest among recommended providers, meaning IP reuse frequency is higher. For 14,000 requests/day this is still fine — at 10M IPs cycling uniformly, each IP would appear roughly once every 700 days at that volume. In practice pools are not uniform, so some IPs will be reused sooner. + +The reported Reddit latency anomalies (up to 800 seconds for some requests) are concerning for a latency-sensitive extraction loop. Test against a sample before committing; the $3.00/GB PAYG is not cheap enough to absorb high retry overhead. 
+ +--- + +### 1.10 PacketStream + +| Field | Value | +|-------|-------| +| URL | https://packetstream.io | +| Types | Rotating residential, static residential | +| Pool Size | 45–72M+ residential IPs (P2P network) | +| Countries | 102–195 countries; country + city targeting | +| Pricing | $1.00/GB flat | +| Sticky Sessions | Supported | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | $50 deposit (credit persists indefinitely) | +| Trial | No formal trial | +| ToS scraping | General commercial use permitted | + +PacketStream's $1.00/GB flat rate with no tiers or subscriptions is extremely simple and competitive. The $50 minimum deposit is the main friction point but credit persists indefinitely. SOCKS5 and city targeting are included at no extra charge. + +The P2P model (volunteer peers share their bandwidth) means IP quality and availability vary by geography. For Europe (Spain, Germany, France — primary Playtomic markets), the peer density should be adequate. The latency variance inherent in P2P networks adds noise to extraction timing. For a background extraction job running overnight, this is not a significant issue. At $1.00/GB for 6–10 GB/month, cost is $6–10/month — comparable to DataImpulse. + +--- + +### 1.11 Proxy-Cheap + +| Field | Value | +|-------|-------| +| URL | https://www.proxy-cheap.com | +| Types | Rotating residential, ISP, datacenter, mobile | +| Pool Size | 6M+ residential IPs | +| Countries | 180+ countries; city targeting | +| Pricing (residential) | $2.99/GB | +| Sticky Sessions | Supported | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | No stated minimum | +| Trial | No formal trial | +| ToS scraping | Permitted | + +At $2.99/GB with no minimum and SOCKS5 support: 6 GB/month = $18/month. The 6M IP pool is small. More expensive than DataImpulse ($1.00/GB), PacketStream ($1.00/GB), or Evomi ($0.99/GB PAYG) without a clear advantage. 
The service is better reviewed for ISP proxies than rotating residential. Viable if preferred on other criteria but not the first choice. + +--- + +### 1.12 ProxyEmpire + +| Field | Value | +|-------|-------| +| URL | https://proxyempire.io | +| Types | Rotating residential, static residential (ISP), mobile/4G | +| Pool Size | Not publicly stated | +| Countries | 195+ countries; country + city + ASN targeting | +| Pricing (rotating residential PAYG) | $3.50/GB (1 GB) → $1.50/GB (1,000 GB/month subscription) | +| Sticky Sessions | Supported; unused GB never expires (unlimited rollover) | +| Auth Method | Username:password; HTTP and SOCKS5 | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | $1.97 trial package | +| Trial | $1.97 introductory package | +| ToS scraping | Permitted; ethical use policy | +| Performance | 99%+ success rate (self-reported; limited independent benchmarks) | + +ProxyEmpire's $1.97 trial is the cheapest entry point among all providers — adequate for a live test against the Playtomic API without financial commitment. The unlimited rollover policy (unused GB never expires across months) is genuinely useful for bursty extraction patterns. At $3.50/GB PAYG for 6 GB/month: $21/month — comparable to Geonode at $3.00/GB. ASN-level targeting is a differentiator for precise geo-targeting. 
+ +--- + +### 1.13 Infatica + +| Field | Value | +|-------|-------| +| URL | https://infatica.io | +| Types | Rotating residential, mobile, datacenter | +| Pool Size | 20M+ residential IPs | +| Countries | 195+ countries; city targeting | +| Pricing (PAYG) | $4.00/GB | +| Pricing (Light plan) | $96/month (25 GB = $3.84/GB) | +| Pricing (trial) | $4 for 1 GB / 7 days | +| Sticky Sessions | Supported | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS (no SOCKS5) | +| Min Purchase | $4 (trial); $96/month for subscription | +| Trial | $4 trial (1 GB, 7 days) | +| ToS scraping | Permitted | +| Performance | 94.30% success rate (Proxyway 2025) | + +Infatica's 94.30% success rate in Proxyway testing is lower than IPRoyal (99.56%), DataImpulse (99.66%), SOAX (99.95%), and Decodo (99.86%). For a scraper where 6% of requests fail, that is approximately 840 failed venue lookups per day — requiring a robust retry system. At $4.00/GB PAYG it is also not cheap. The $4 trial allows quick validation. Not recommended as primary for this use case. + +--- + +## 2. Budget Residential Providers (Strong Value) + +### 2.1 DataImpulse + +| Field | Value | +|-------|-------| +| URL | https://dataimpulse.com | +| Types | Rotating residential, mobile | +| Pool Size | 90M+ residential IPs | +| Countries | 195+ countries; country + city targeting | +| Pricing | $1.00/GB (never expires; no monthly minimum) | +| Sticky Sessions | Up to 30 minutes | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | No minimum | +| Trial | No formal trial; small purchases from $1 | +| ToS scraping | Permitted | +| Performance | 99.66% success rate (Proxyway 2025) | + +DataImpulse at $1.00/GB with non-expiring credit and a 90M+ pool is the standout value pick for this use case. The 99.66% benchmarked success rate (Proxyway 2025) is excellent for a budget provider — better than Infatica, close to IPRoyal. No monthly subscription, no credit expiry. 
At 6–10 GB/month: $6–10/month total. + +Country + city targeting and 30-minute sticky sessions cover all requirements. SOCKS5 support is a bonus. The main unknown is IP quality against Playtomic specifically (residential IPs from smaller providers are sometimes flagged in geographic clusters). Test first with a $5–10 credit load. + +--- + +### 2.2 Evomi + +| Field | Value | +|-------|-------| +| URL | https://evomi.com | +| Types | Rotating residential, mobile, datacenter | +| Pool Size | 5–10M+ residential IPs | +| Countries | 150+ countries; city targeting | +| Pricing (PAYG) | $0.99/GB — no expiry, no commitment | +| Pricing (Core subscription) | $0.49/GB — $49.99/month for 100 GB | +| Sticky Sessions | Supported | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | No stated minimum for PAYG | +| Trial | Money-back guarantee: full refund if <1 GB or <10% of plan used; credit card required | +| ToS scraping | Permitted | + +Evomi's advertised $0.49/GB requires a $49.99/month Core subscription (100 GB). At 6–10 GB/month that is terrible value — you pay for 90+ GB you will not use. The effective price for this use case is the PAYG rate of **$0.99/GB**, which is essentially identical to DataImpulse ($1.00/GB). The "free trial" is a money-back guarantee rather than a true no-cost trial: you pay, test, and request a refund if you have used less than 1 GB or 10% of the plan. A credit card is required. + +At $0.99/GB PAYG for 6–10 GB/month: $6–10/month — the same cost as DataImpulse. The decision between them is therefore purely on reliability data, where DataImpulse has a published Proxyway independent benchmark (99.66% success rate) and Evomi does not. The 5–10M pool is also notably smaller. The money-back guarantee still enables low-risk validation before committing further spend. 
+ +--- + +### 2.3 922Proxy + +| Field | Value | +|-------|-------| +| URL | https://www.922proxy.com | +| Types | Rotating residential, static residential, mobile | +| Pool Size | 200M+ claimed | +| Countries | 190+ countries; country + city + ZIP targeting | +| Pricing (rotating residential) | $0.77/GB | +| Pricing (static) | $0.16/IP/day | +| Sticky Sessions | Supported | +| Auth Method | Username:password; client application | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | No stated minimum | +| Trial | No formal trial | +| ToS scraping | Permitted; operated from Asia-Pacific | + +922Proxy advertises 200M+ IPs at $0.77/GB — cheaper than DataImpulse ($1.00/GB) and Evomi PAYG ($0.99/GB). The large claimed pool is promising. However, 922Proxy is an Asia-Pacific operation with less visibility in Western proxy benchmarks. IP geographic distribution may skew toward Asia-Pacific rather than Europe, which matters for Playtomic's Spain/Germany focus. Worth testing if DataImpulse is unavailable or saturated, but verify European IP availability first. + +--- + +## 3. ISP / Static Residential Strategy + +If Playtomic's per-IP rate limit resets daily (most common configuration), a static pool of ISP proxies round-robined across workers is potentially the cheapest and most reliable solution — avoiding the latency variance of P2P residential networks. + +| Provider | ISP Pricing | Bandwidth | 30-IP Pool Cost | Notes | +|----------|-------------|-----------|-----------------|-------| +| Webshare | $0.30/IP/month | Unlimited | $9/month | Cheapest option; static residential-quality IPs | +| Oxylabs | $1.15–$1.60/IP/month | Unlimited | $35–48/month | Enterprise-grade; lower blocking risk | +| Bright Data | From $2.80/IP/month | Unlimited | $84/month | Best IP quality; high cost | + +At 14,000 requests/day distributed across 30 IPs: ~467 req/IP/day — below the ~1,000/IP observed soft limit. Replace blocked IPs as they accumulate. Webshare allows adding/replacing IPs ad hoc. 
+ +**If ISP IP ranges are pre-blocked by Playtomic** (common for some ISP providers), fall back to rotating residential. The ISP test is worth running first before committing to a rotating plan. + +--- + +## 4. Port-Based / Unlimited Bandwidth Providers (Not Recommended) + +### 4.1 Shifter (formerly Microleaves) + +| Field | Value | +|-------|-------| +| URL | https://shifter.io | +| Pool Size | 31M+ residential IPs | +| Pricing | $299/month for 25 ports (unlimited bandwidth) | +| Min Purchase | $299/month | + +Designed for 1 TB+/month operations. The $299/month minimum is 30–60x more than the actual cost of 6 GB/month at DataImpulse. Not appropriate for this use case. + +--- + +### 4.2 Storm Proxies + +| Field | Value | +|-------|-------| +| URL | https://stormproxies.com | +| Pool Size | 20M+ residential IPs | +| Countries | USA and EU only (no country-level selection) | +| Pricing | 1 port: $19/month; 5 ports: $50/month | +| Sticky Sessions | 5-minute rotation interval (fixed) | +| Protocols | HTTP, HTTPS | + +Storm Proxies at $19/month for unlimited bandwidth is tempting on paper. However, the geographic restriction to USA/EU with no country-level selection means IPs may not be Spanish/European residential as needed. The fixed 5-minute rotation interval cannot be adjusted. Not flexible enough for this use case. + +--- + +## 5. Mobile / 4G Proxies (Reserve Option) + +| Provider | Mobile Pricing | Performance | +|----------|---------------|-------------| +| SOAX | $7–$15/GB | 0.57s, 99.94% success (Proxyway 2025 — best mobile) | +| Decodo | $7–$12/GB | Excellent | +| Oxylabs | $8–$15/GB | Enterprise-grade | +| Bright Data | $8–$15/GB | Largest mobile pool | + +Mobile proxies use 4G/LTE carrier IPs — the hardest type to block without affecting real users. Costs are 5–10x residential. Not recommended until Playtomic demonstrates active blocking of residential IPs from established providers. + +--- + +## 6. 
Recommended Setup for Playtomic Extraction + +### Phase 1 — Validation + +Test three products simultaneously against a sample of 500 venues each: + +1. **Evomi** — use the money-back guarantee (credit card required; refund if <1 GB used); test $0.99/GB PAYG against ~500 venues +2. **DataImpulse** — add $10 credit; test $1.00/GB tier with 90M+ pool against the same venues +3. **Webshare static residential** — sign up for 10 ISP IPs ($3/month); test whether static IPs work + +Compare: success rate, 429 rate, response time, connection errors across all three. + +**Decision tree after Phase 1:** +- Evomi >95% success → Evomi as primary ($6–10/month at $0.99/GB PAYG) +- Evomi <95%, DataImpulse >98% → DataImpulse primary ($6–10/month) +- Webshare static passes (>98% success) → Webshare static at $9/month flat (simplest setup) +- All fail → escalate to IPRoyal sticky ($1.75/GB) or SOAX PAYG ($4.00/GB) + +### Phase 2 — Production Configuration + +**Primary recommended:** DataImpulse at $1.00/GB +- Expected monthly cost: $6–10/month for 6–10 GB +- Non-expiring credit (no waste on unused GB) +- 99.66% benchmarked success rate +- 30-minute sticky sessions for multi-step venue crawls +- 90M+ IP pool with country + city targeting + +**Fallback:** IPRoyal sticky residential at $1.75/GB +- Pre-load $20 of credit +- Activate when DataImpulse returns sustained errors or 429s +- 7-day sticky sessions for maximum session stability + +**Integration pattern (Python):** + +```python +import hashlib + +def get_proxy_url(venue_id: str, provider: str = "dataimpulse") -> str: + """Return a sticky proxy URL with a deterministic session ID per venue.""" + # Hash venue_id to a session bucket (0–999) + session_id = int(hashlib.md5(venue_id.encode()).hexdigest(), 16) % 1000 + + if provider == "dataimpulse": + host = "gate.dataimpulse.com" + port = 14433 + user = f"username-country-ES-session-{session_id}" + password = "your_password" + elif provider == "iproyal": + host = "geo.iproyal.com" + 
port = 12321 + user = f"username_country-ES_session-{session_id}" + password = "your_password" + + return f"http://{user}:{password}@{host}:{port}" +``` + +This distributes 14,000 venues across 1,000 session buckets, so each session handles ~14 venues/day. With 30-minute sticky sessions (DataImpulse) or 7-day sticky (IPRoyal), each bucket retains its IP for all requests to its assigned venues. Per-IP daily load: ~14 requests × (number of API calls per venue). Very safe. + +**Set `PROXY_URLS` in `.env.prod.sops`:** + +``` +PROXY_URLS=http://user:pass@gate.dataimpulse.com:14433,http://user:pass@gate.dataimpulse.com:14433 +EXTRACT_WORKERS=4 +``` + +Use the same endpoint multiple times to create N workers — the sticky session logic differentiates them via session IDs in the username parameter. Or use multiple distinct proxy URLs if the provider assigns different IPs per credential pair. + +### Budget Summary + +| Scenario | Provider | Est. Monthly Cost | Notes | +|----------|----------|-------------------|-------| +| Comparable to DataImpulse | Evomi | $6–10/month | $0.99/GB PAYG ($0.49/GB needs $49.99/mo 100 GB plan — not viable at this volume); money-back guarantee | +| **Best value** | **DataImpulse** | **$6–10/month** | **$1.00/GB; strong benchmarks; no expiry** | +| Static ISP pool | Webshare | $9/month flat | 30 IPs × $0.30; unlimited BW; test ISP blocking first | +| PAYG reliable | IPRoyal sticky | $10–18/month | $1.75/GB; 7-day sessions; well-established | +| PAYG mid-tier | PacketStream | $6–10/month | $1.00/GB; $50 deposit; P2P quality variance | +| PAYG mid-tier | Geonode PAYG | $18–30/month | $3.00/GB; 24-hr sticky; test latency first | +| High-reliability | SOAX PAYG | $24–40/month | $4.00/GB; 99.95% success; justified if others fail | +| Enterprise-grade | Decodo Micro | $80/month | 99.86% success; overkill at current scale | +| Enterprise-grade | Bright Data PAYG | $31+/month | ~$5.25/GB at promo; reverts to $10.50/GB | + +For the stated budget of 
€20–100/month, DataImpulse at $6–10/month leaves substantial headroom. The money saved versus Decodo or Bright Data can fund a secondary provider for redundancy, or be reserved to escalate to mobile proxies if Playtomic deploys more aggressive anti-bot measures. + +--- + +## 7. Additional Providers + +### 7.1 Nimbleway + +| Field | Value | +|-------|-------| +| URL | https://www.nimbleway.com | +| Types | Rotating residential, ISP/unlocker, datacenter, mobile | +| Pool Size | Millions of IPs (proprietary; smaller than Oxylabs/NetNut) | +| Countries | Nearly every country; country, state, city targeting | +| Pricing (PAYG) | $8.00/GB | +| Pricing ($300/mo plan) | $14.00/GB (21 GB included) | +| Pricing ($4,000/mo plan) | $7.00/GB | +| Sticky Sessions | 1–30 minutes (10 min inactivity timeout) | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS5 (recently added) | +| Min Purchase | $300/month; free trial requires contacting sales | +| Trial | Sales-gated; no self-serve trial | +| ToS scraping | Explicitly built for web data collection; GDPR/CCPA-compliant; KYC process | +| Performance | ~0.25s response time (claimed); 99.9% uptime; AI-assisted IP routing | + +Nimbleway is a premium-tier provider aimed at enterprise data engineering teams — not casual or low-volume scrapers. Its proprietary residential network is quality-over-quantity: the pool is smaller than Oxylabs or Bright Data, but Nimble compensates with intelligent IP optimisation, built-in anti-detection routing, and an AI-layered product stack (Nimble API, Nimble Browser, Nimble IP). The 0.25s response time claim is genuinely fast and geo-targeting granularity is excellent. + +The fatal problem for this use case is the pricing model. The $300/month minimum gives roughly 21 GB at $14/GB — 2–3x the bandwidth needed for ~6–10 GB/month. The PAYG $8/GB rate is more reasonable but the trial is sales-gated. 
Nimbleway targets teams spending $300–$4,000/month and makes no accommodation for low-volume users. + +Unless Playtomic becomes extremely difficult to scrape and requires elite anti-bot bypass, Nimbleway is overengineered and overpriced for 14,000 requests/day. The $300/month floor alone exceeds the entire stated budget ceiling. Keep on a shortlist if usage scales dramatically or if other providers start failing systematically. + +--- + +### 7.2 Froxy + +| Field | Value | +|-------|-------| +| URL | https://froxy.com | +| Types | Rotating residential, mobile (4G/LTE), SOCKS5 proxies | +| Pool Size | 10M+ IPs worldwide | +| Countries | 200+ countries; city-level targeting | +| Pricing (entry plan) | ~$12.00/GB (5 GB for $60/month) | +| Pricing (scale) | ~$1.95/GB at $2,000/month | +| Sticky Sessions | 10–60 minutes (IP reassigned on inactivity) | +| Auth Method | Username:password | +| Protocols | HTTP, HTTPS, SOCKS4, SOCKS5 | +| Min Purchase | $1.99 trial (3 days / 100 MB); entry plan ~$60/month (5 GB) | +| Trial | $1.99 / 100 MB / 3 days | +| ToS scraping | Permissive for data collection; no anti-scraping clauses in public docs | +| Performance | ~70% success rate (independent tests); 340–800+ ms latency; Froxy claims 99% | + +Froxy is a mid-tier European-facing provider with a clean dashboard, SOCKS4/5 support, and a genuine low-cost entry point via the $1.99 / 100 MB trial — the most honest validation option in this comparison. Port-based rotation with configurable intervals (90–3,600 seconds) gives fine-grained control over IP churn rate. The protocol breadth works with any Python HTTP library. + +The core problems are pricing and performance. At low volumes, Froxy is expensive per GB: 5 GB for $60 = $12/GB. Independent testing puts the real-world success rate at ~70% and latency at 340–800+ ms — measurably worse than premium providers and several budget alternatives. The 10M IP pool is on the smaller side with notable IP recycling reported. 
+ +Froxy is borderline-viable if you accept higher latency and occasional failures that the extractor's retry logic absorbs. Use the $1.99 trial first; if success rates on the actual Playtomic endpoint are acceptable, the $60/month entry plan fits within budget — but you are paying a premium per GB. Treat it as "test first, commit if it works" rather than a confident primary choice. + +--- + +### 7.3 LunaProxy — ⛔ DO NOT USE + +> **Disqualified January 2026.** LunaProxy was revealed to be a front brand operated by IPIDEA, a malicious residential proxy network disrupted by Google Threat Intelligence Group (GTIG) and Cloudflare. The "residential IPs" were sourced from malware-infected devices enrolled without user knowledge via rogue SDKs distributed to developers. In January 2026 Google obtained court orders and partnered with Cloudflare to take down IPIDEA's command-and-control infrastructure, cutting the network by ~40%. Over 550 tracked threat groups had used IPIDEA exit nodes in a single week. +> +> Using LunaProxy routes your traffic through compromised devices belonging to unknowing victims. Beyond the ethical problem, the network is actively degraded and unreliable. Do not use. 
+> +> Reference: [Google Cloud Blog — Disrupting the World's Largest Residential Proxy Network](https://cloud.google.com/blog/topics/threat-intelligence/disrupting-largest-residential-proxy-network) + +--- + +### 7.4 Proxy-seller + +| Field | Value | +|-------|-------| +| URL | https://proxy-seller.com | +| Types | ISP (static residential), rotating residential, datacenter, mobile | +| Pool Size | 20M+ rotating residential IPs; 400+ ISP networks / 800+ subnets | +| Countries | 220+ countries/regions; city and ISP targeting; ISP covers 22+ countries | +| Pricing (rotating residential bulk) | $0.70/GB; PAYG $7/GB | +| Pricing (ISP static) | $1–$2/IP/month (volume discounts up to 57%) | +| Sticky Sessions | Residential: up to 24 hours; ISP: static (permanent) | +| Auth Method | Username:password; IP whitelisting | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | ~$1 (1 GB residential); ISP minimum varies | +| Trial | No formal free trial; ~$1 entry | +| ToS scraping | Positioned for scraping and privacy use; Trustpilot 4.7/5 (500+ reviews) | +| Performance | ~91–92% success rate (SERP tests); 1 Gbps channel; 99% uptime; DNS leak inconsistency in one independent test | + +Proxy-seller has two genuinely distinct product lines. For rotating residential proxies, the $0.70/GB pricing at volume is competitive and approaches LunaProxy territory, though the PAYG $7/GB rate is high without upfront commitment. The 24-hour sticky session on residential is exceptional — the longest in this comparison for a rotating product. For the ISP line, $1–2/IP/month for dedicated static residential IPs is competitive and useful for account-based access or distributing requests across a fixed pool of IPs. + +The independent security testing revealed a DNS leak issue where anti-fraud databases classified connections inconsistently (residential vs. corporate vs. proxy). This is material concern for high-security targets but unlikely to matter for Playtomic's public API. 
The 92% SERP success rate is solid though it drops on well-defended targets. City-level targeting across 220+ GEOs is a genuine differentiator if appearing geographically local to Spain is important. + +Proxy-seller is a credible option particularly if you value long sticky sessions, ISP/city targeting granularity, or want a single vendor for both rotating residential and ISP static proxies. The DNS inconsistency is worth monitoring but unlikely to cause failures against a mobile API target. The ~$1 minimum makes it trivially easy to test. + +--- + +### 7.5 ProxyScrape + +| Field | Value | +|-------|-------| +| URL | https://proxyscrape.com | +| Types | Rotating residential, shared datacenter (premium), dedicated datacenter; also maintains a well-known free public proxy list | +| Pool Size | 48M+ residential IPs (some sources cite 55M+) | +| Countries | 195+ countries; country, state, city targeting | +| Pricing (5 GB plan) | $4.85/GB ($24.25/month) | +| Pricing (20 GB plan) | $4.50/GB ($90/month) | +| Pricing (1 GB test) | ~$2 | +| Sticky Sessions | Up to 120 minutes | +| Auth Method | Username:password; IP whitelisting | +| Protocols | HTTP, HTTPS, SOCKS5 | +| Min Purchase | $2 (1 GB test); 3-day / up to 1 GB refund policy | +| Trial | 3-day refund policy functions as a trial | +| ToS scraping | Explicitly positioned for legitimate web scraping; emphasises ethical IP sourcing | +| Performance | ~0.8s response time; 99%+ uptime claimed; some reviews report IPs labelled residential that behave as ISP/datacenter | + +ProxyScrape is best known as the source of large free public proxy lists, and that legacy creates both an opportunity and a perception problem. The paid residential product is a genuinely separate tier: 48M+ IPs, 195+ countries, city-level targeting, 120-minute sticky sessions (the joint-longest in this comparison), and SOCKS5 support. 
The 3-day refund policy on up to 1 GB effectively functions as a trial — pay $2, test against Playtomic, request a refund if it fails. Data does not expire monthly, which suits variable scraping workloads. 
+
+The concerns are meaningful. Independent reviews report that some IPs advertised as residential resolve to ISP or datacenter ranges when tested by anti-fraud tools, suggesting imprecise pool labelling. The 0.8s average response time is acceptable but slower than Nimbleway (0.25s); for 14,000 requests/day at roughly one request every six seconds, latency is not a bottleneck anyway. Trustpilot feedback is polarised with recurring mentions of support delays and filter reliability issues.
+
+For the Playtomic workload, ProxyScrape is functionally adequate — pool is large enough, city targeting works, sticky sessions are long, and the $2 test-run is the easiest entry point in this comparison. Monthly cost for 6–10 GB would be $27–$49 on the 5 GB plan, within budget but worse value per dollar than budget providers such as DataImpulse ($1.00/GB). The main risk is pool quality inconsistency — if Playtomic checks IP type and blocks datacenter ranges, mislabelled proxies would silently inflate failure rates. Treat it as a solid backup or test-first option. 
+ +--- + +## Sources + +- [Bright Data Residential Pricing](https://brightdata.com/pricing/proxy-network/residential-proxies) +- [Oxylabs Residential Pricing](https://oxylabs.io/pricing/residential-proxy-pool) +- [Oxylabs ISP Proxies Pricing](https://oxylabs.io/pricing/isp-proxies) +- [Decodo Residential Pricing](https://decodo.com/proxies/residential-proxies/pricing) +- [IPRoyal Pricing](https://iproyal.com/pricing/residential-proxies/) +- [Rayobyte Pricing](https://rayobyte.com/pricing/) +- [NetNut Residential Proxies](https://netnut.io/residential-proxies/) +- [SOAX Pricing](https://soax.com/pricing) +- [Webshare Pricing](https://www.webshare.io/pricing) +- [Webshare Static Residential](https://www.webshare.io/static-residential-proxy) +- [Geonode Pricing](https://geonode.com/pricing/residential-proxies) +- [PacketStream Pricing](https://packetstream.io/pricing/) +- [Proxy-Cheap Rotating Residential](https://www.proxy-cheap.com/services/rotating-residential-proxies) +- [ProxyEmpire Pricing](https://proxyempire.io/pricing-table/) +- [Infatica Pricing](https://infatica.io/pricing/) +- [DataImpulse Residential](https://dataimpulse.com/residential-proxies/) +- [Evomi Residential](https://evomi.com/product/residential-proxies) +- [922Proxy Residential](https://www.922proxy.com/residential-proxies) +- [Shifter Pricing](https://shifter.io/pricing) +- [Storm Proxies Residential](https://stormproxies.com/residential_proxy.html) +- [Proxyway Best Residential Proxies 2026](https://proxyway.com/best/residential-proxies) +- [Proxyway Market Research 2025](https://proxyway.com/research/proxy-market-research-2025) +- [AIM Multiple Proxy Pricing Comparison](https://research.aimultiple.com/proxy-pricing/) +- [Nimbleway Residential Proxies](https://www.nimbleway.com/nimble-ip/residential-proxies) +- [Nimbleway Review — Proxyway](https://proxyway.com/reviews/nimbleway-review) +- [Nimble Pricing](https://www.nimbleway.com/pricing) +- [Froxy Residential Proxies 
Pricing](https://froxy.com/en/residential-proxies/pricing) +- [Froxy Review — StupidProxy](https://www.stupidproxy.com/froxy/) +- [Froxy Review — BestProxyFinder](https://bestproxyfinder.com/providers/froxy-review/) +- [LunaProxy Residential Pricing](https://www.lunaproxy.com/pricing/residential-proxies/) +- [LunaProxy Review — Proxyway](https://proxyway.com/reviews/lunaproxy-proxies) +- [Proxy-seller Residential Proxies](https://proxy-seller.com/residential-proxies/) +- [Proxy-seller ISP Proxies](https://proxy-seller.com/isp/) +- [Proxy-seller Review — Proxyway](https://proxyway.com/reviews/proxy-seller-proxies) +- [ProxyScrape Residential Proxies](https://proxyscrape.com/products/residential-proxies) +- [ProxyScrape Pricing](https://proxyscrape.com/pricing) +- [ProxyScrape Review — DiCloak](https://dicloak.com/blog-detail/proxyscrape-2025-review-complete-analysis-of-features-performance-pricing-and-security) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py index 699ace2..b12932c 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py @@ -60,7 +60,7 @@ def extract( for page in range(MAX_PAGES): if cycler: - proxy = cycler["next_proxy"]() + proxy = cycler() if proxy: session.proxies = {"http": proxy, "https": proxy} From 6116445b56784dfde06868e46fd9224550e055dd Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 22:21:05 +0100 Subject: [PATCH 68/98] perf(extract): auto-detect workers from proxies, skip throttle on success, crash-safe partial JSONL MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - proxy.py: delete unused make_sticky_selector() - utils.py: add load_partial_results() + flush_partial_batch() for crash-resumable extraction - playtomic_availability.py: - drop MAX_WORKERS / 
EXTRACT_WORKERS — worker_count = len(proxy_urls) or 1 - skip time.sleep(THROTTLE_SECONDS) on success when proxy_url is set; keep sleeps for 429/5xx - replace cursor-based resumption with .partial.jsonl sidecar (flush every 50 records) - _fetch_venues_parallel accepts on_result callback for incremental partial-file flushing - mirror auto-detect worker count in extract_recheck() Co-Authored-By: Claude Sonnet 4.6 --- .../playtomic_availability.py | 97 ++++++++++++------- .../src/padelnomics_extract/proxy.py | 18 ---- .../src/padelnomics_extract/utils.py | 46 +++++++++ 3 files changed, 106 insertions(+), 55 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index 4c73b80..f01c3e7 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -5,8 +5,13 @@ unauthenticated /v1/availability endpoint for each venue's next-day slots. This is the highest-value source: daily snapshots enable occupancy rate estimation, pricing benchmarking, and demand signal detection. -Parallel mode: set EXTRACT_WORKERS=N and PROXY_URLS=... to fetch N venues -concurrently (one proxy per worker). Without proxies, runs single-threaded. +Parallel mode: worker count is derived from PROXY_URLS (one worker per proxy). +Without proxies, runs single-threaded with per-request throttling. + +Crash resumption: progress is flushed to a .partial.jsonl sidecar file every +PARTIAL_FLUSH_SIZE records. On restart the already-fetched venues are skipped +and prior results are merged into the final file. At most PARTIAL_FLUSH_SIZE +records (a few seconds of work with 10 workers) are lost on crash. Recheck mode: re-queries venues with slots starting within the next 90 minutes. Writes a separate recheck file for more accurate occupancy measurement. 
@@ -29,7 +34,7 @@ import niquests from ._shared import HTTP_TIMEOUT_SECONDS, USER_AGENT, run_extractor, setup_logging from .proxy import load_fallback_proxy_urls, load_proxy_urls, make_tiered_cycler -from .utils import get_last_cursor, landing_path, write_gzip_atomic +from .utils import flush_partial_batch, landing_path, load_partial_results, write_gzip_atomic logger = setup_logging("padelnomics.extract.playtomic_availability") @@ -40,7 +45,6 @@ AVAILABILITY_URL = "https://api.playtomic.io/v1/availability" THROTTLE_SECONDS = 1 MAX_VENUES_PER_RUN = 20_000 MAX_RETRIES_PER_VENUE = 2 -MAX_WORKERS = int(os.environ.get("EXTRACT_WORKERS", "1")) RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "90")) CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD", "10")) @@ -49,6 +53,9 @@ CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD", "10" # batch still complete. PARALLEL_BATCH_SIZE = 100 +# Flush partial results to disk every N records — lose at most this many on crash. +PARTIAL_FLUSH_SIZE = 50 + # Thread-local storage for per-worker sessions _thread_local = threading.local() @@ -84,22 +91,6 @@ def _load_tenant_ids(landing_dir: Path) -> list[str]: return ids -def _parse_resume_cursor(cursor: str | None, target_date: str) -> int: - """Parse cursor_value to find resume index. 
Returns 0 if no valid cursor.""" - if not cursor: - return 0 - parts = cursor.split(":", 1) - if len(parts) != 2: - return 0 - cursor_date, cursor_index = parts - if cursor_date != target_date: - return 0 - try: - return int(cursor_index) - except ValueError: - return 0 - - # --------------------------------------------------------------------------- # Per-venue fetch (used by both serial and parallel modes) # --------------------------------------------------------------------------- @@ -149,7 +140,8 @@ def _fetch_venue_availability( continue resp.raise_for_status() - time.sleep(THROTTLE_SECONDS) + if not proxy_url: + time.sleep(THROTTLE_SECONDS) return {"tenant_id": tenant_id, "slots": resp.json()} except niquests.exceptions.RequestException as e: @@ -177,6 +169,7 @@ def _fetch_venues_parallel( worker_count: int, cycler: dict, fallback_urls: list[str], + on_result=None, ) -> tuple[list[dict], int]: """Fetch availability for multiple venues in parallel. @@ -184,6 +177,9 @@ def _fetch_venues_parallel( completes, checks the circuit breaker: if it opened and there is no fallback configured, stops submitting further batches. + on_result: optional callable(result: dict) invoked inside the lock for + each successful result — used for incremental partial-file flushing. + Returns (venues_data, venues_errored). 
""" venues_data: list[dict] = [] @@ -215,6 +211,8 @@ def _fetch_venues_parallel( if result is not None: venues_data.append(result) cycler["record_success"]() + if on_result is not None: + on_result(result) else: venues_errored += 1 cycler["record_failure"]() @@ -262,41 +260,56 @@ def extract( logger.info("Already have %s — skipping", dest) return {"files_written": 0, "files_skipped": 1, "bytes_written": 0} - # Resume from cursor if crashed mid-run - last_cursor = get_last_cursor(conn, EXTRACTOR_NAME) - resume_index = _parse_resume_cursor(last_cursor, target_date) - if resume_index > 0: - logger.info("Resuming from index %d (cursor: %s)", resume_index, last_cursor) + # Crash resumption: load already-fetched venues from partial file + partial_path = dest.with_suffix(".partial.jsonl") + prior_results, already_done = load_partial_results(partial_path, id_key="tenant_id") + if already_done: + logger.info("Resuming: %d venues already fetched from partial file", len(already_done)) - venues_to_process = tenant_ids[:MAX_VENUES_PER_RUN] - if resume_index > 0: - venues_to_process = venues_to_process[resume_index:] + all_venues_to_process = tenant_ids[:MAX_VENUES_PER_RUN] + venues_to_process = [tid for tid in all_venues_to_process if tid not in already_done] # Set up tiered proxy cycler with circuit breaker proxy_urls = load_proxy_urls() fallback_urls = load_fallback_proxy_urls() - worker_count = min(MAX_WORKERS, len(proxy_urls)) if proxy_urls else 1 + worker_count = len(proxy_urls) if proxy_urls else 1 cycler = make_tiered_cycler(proxy_urls, fallback_urls, CIRCUIT_BREAKER_THRESHOLD) start_min_str = start_min.strftime("%Y-%m-%dT%H:%M:%S") start_max_str = start_max.strftime("%Y-%m-%dT%H:%M:%S") + # Partial file for incremental crash-safe progress + partial_file = open(partial_path, "a") # noqa: SIM115 + partial_lock = threading.Lock() + pending_batch: list[dict] = [] + + def _on_result(result: dict) -> None: + # Called inside _fetch_venues_parallel's lock — no additional 
locking needed. + # In serial mode, called single-threaded — also safe without extra locking. + pending_batch.append(result) + if len(pending_batch) >= PARTIAL_FLUSH_SIZE: + flush_partial_batch(partial_file, partial_lock, pending_batch) + pending_batch.clear() + + new_venues_data: list[dict] = [] + venues_errored = 0 + if worker_count > 1: logger.info("Parallel mode: %d workers, %d proxies", worker_count, len(proxy_urls)) - venues_data, venues_errored = _fetch_venues_parallel( + new_venues_data, venues_errored = _fetch_venues_parallel( venues_to_process, start_min_str, start_max_str, worker_count, cycler, fallback_urls, + on_result=_on_result, ) else: logger.info("Serial mode: 1 worker, %d venues", len(venues_to_process)) - venues_data = [] - venues_errored = 0 for i, tenant_id in enumerate(venues_to_process): result = _fetch_venue_availability( tenant_id, start_min_str, start_max_str, cycler["next_proxy"](), ) if result is not None: - venues_data.append(result) + new_venues_data.append(result) cycler["record_success"]() + _on_result(result) else: venues_errored += 1 circuit_opened = cycler["record_failure"]() @@ -310,7 +323,14 @@ def extract( i + 1, len(venues_to_process), venues_errored, ) - # Write consolidated file + # Final flush of any remaining partial batch + if pending_batch: + flush_partial_batch(partial_file, partial_lock, pending_batch) + pending_batch.clear() + partial_file.close() + + # Consolidate prior (resumed) + new results into final file + venues_data = prior_results + new_venues_data captured_at = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ") payload = json.dumps({ "date": target_date, @@ -321,6 +341,9 @@ def extract( }).encode() bytes_written = write_gzip_atomic(dest, payload) + if partial_path.exists(): + partial_path.unlink() + logger.info( "%d venues scraped (%d errors) -> %s (%s bytes)", len(venues_data), venues_errored, dest, f"{bytes_written:,}", @@ -330,7 +353,7 @@ def extract( "files_written": 1, "files_skipped": 0, "bytes_written": 
bytes_written, - "cursor_value": f"{target_date}:{len(tenant_ids[:MAX_VENUES_PER_RUN])}", + "cursor_value": f"{target_date}:{len(all_venues_to_process)}", } @@ -421,7 +444,7 @@ def extract_recheck( # Set up tiered proxy cycler with circuit breaker proxy_urls = load_proxy_urls() fallback_urls = load_fallback_proxy_urls() - worker_count = min(MAX_WORKERS, len(proxy_urls)) if proxy_urls else 1 + worker_count = len(proxy_urls) if proxy_urls else 1 cycler = make_tiered_cycler(proxy_urls, fallback_urls, CIRCUIT_BREAKER_THRESHOLD) if worker_count > 1 and len(venues_to_recheck) > 10: diff --git a/extract/padelnomics_extract/src/padelnomics_extract/proxy.py b/extract/padelnomics_extract/src/padelnomics_extract/proxy.py index 7d280ee..0e8c82e 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/proxy.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/proxy.py @@ -3,10 +3,6 @@ Proxies are configured via the PROXY_URLS environment variable (comma-separated). When unset, all functions return None/no-op — extractors fall back to direct requests. -Two routing modes: - round-robin — distribute requests evenly across proxies (default) - sticky — same key always maps to same proxy (for session-tracked sites) - Tiered proxy with circuit breaker: Primary tier (PROXY_URLS) is used by default — typically cheap datacenter proxies. Fallback tier (PROXY_URLS_FALLBACK) activates once consecutive failures >= threshold. @@ -141,17 +137,3 @@ def make_tiered_cycler( "is_fallback_active": is_fallback_active, } - -def make_sticky_selector(proxy_urls: list[str]): - """Consistent-hash proxy selector — same key always maps to same proxy. - - Use when the target site tracks sessions by IP (e.g. Cloudflare). 
- Returns a callable: select_proxy(key: str) -> str | None - """ - if not proxy_urls: - return lambda key: None - - def select_proxy(key: str) -> str: - return proxy_urls[hash(key) % len(proxy_urls)] - - return select_proxy diff --git a/extract/padelnomics_extract/src/padelnomics_extract/utils.py b/extract/padelnomics_extract/src/padelnomics_extract/utils.py index 3cb2562..15777f0 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/utils.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/utils.py @@ -7,7 +7,9 @@ if you add multiple data sources, extract them to a shared workspace package. import gzip import hashlib +import json import sqlite3 +import threading from pathlib import Path # --------------------------------------------------------------------------- @@ -117,6 +119,50 @@ def content_hash(data: bytes, prefix_bytes: int = 8) -> str: return hashlib.sha256(data).hexdigest()[:prefix_bytes] +def load_partial_results(partial_path: Path, id_key: str) -> tuple[list[dict], set[str]]: + """Load already-completed records from a partial JSONL file (crash recovery). + + Returns (records, seen_ids). If the file doesn't exist, returns ([], set()). + Gracefully handles a truncated last line from a mid-write crash. + """ + records: list[dict] = [] + seen_ids: set[str] = set() + if not partial_path.exists(): + return records, seen_ids + with open(partial_path) as f: + for line in f: + line = line.strip() + if not line: + continue + try: + record = json.loads(line) + records.append(record) + rid = record.get(id_key) + if rid: + seen_ids.add(rid) + except json.JSONDecodeError: + break # truncated last line from crash — skip it + return records, seen_ids + + +def flush_partial_batch( + partial_file, + lock: threading.Lock, + batch: list[dict], +) -> None: + """Thread-safe batch write of JSON records to the partial JSONL file. + + Writes all records in one lock acquisition with a single flush. 
+ Call with batches of ~50 records for good I/O throughput vs crash safety tradeoff. + On crash, at most one batch worth of records is lost. + """ + assert batch, "batch must not be empty" + with lock: + for record in batch: + partial_file.write(json.dumps(record, separators=(",", ":")) + "\n") + partial_file.flush() + + def write_gzip_atomic(path: Path, data: bytes) -> int: """Gzip compress data and write to path atomically via .tmp sibling. From 9f010d8c0cc929ff0d6d2eec525a260d832524d1 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 22:30:28 +0100 Subject: [PATCH 69/98] perf(extract): parallel page fetching in tenants, drop EXTRACT_WORKERS env var MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - playtomic_tenants.py: batch_size = len(proxy_urls) pages fired in parallel per batch; each page gets its own session + proxy; sorted(results) ensures deterministic done-detection; falls back to serial + THROTTLE_SECONDS when no proxies. Expected speedup: ~2.5 min → ~15 s with 10 proxies. 
- .env.dev.sops, .env.prod.sops: remove EXTRACT_WORKERS (now derived from PROXY_URLS length) Co-Authored-By: Claude Sonnet 4.6 --- .env.dev.sops | 101 ++++++++------- .env.prod.sops | 115 +++++++++-------- .../padelnomics_extract/playtomic_tenants.py | 116 +++++++++++++----- 3 files changed, 192 insertions(+), 140 deletions(-) diff --git a/.env.dev.sops b/.env.dev.sops index 3f9210e..aa5e8eb 100644 --- a/.env.dev.sops +++ b/.env.dev.sops @@ -1,77 +1,76 @@ -#ENC[AES256_GCM,data:OODjUg==,iv:E1PqA4jzCrltGb9T8tiB2wrkLmzefekOVJt3jXze6bI=,tag:t+nmaZ+gsR0Qr3XeJ/szMg==,type:comment] -APP_NAME=ENC[AES256_GCM,data:MIW1LUcXGSRAXQo=,iv:WlHAjnFaoo5HgsyqGnvbiuMfvZIYXeA+ssw43fsP3TM=,tag:BOGuyhHj5+T1Tg7TBbCPHQ==,type:str] -SECRET_KEY=ENC[AES256_GCM,data:F43Bn1FzhYyzdGINTQA90Uw2aZWJ7REhVnM5Tc6KFS8=,iv:ZAbWVSCrVDKqVzPkNfAvH6p8iRISZSTD4U68JVZwL7Q=,tag:B5tmroCp3EJ+QDnGXgbXOg==,type:str] -BASE_URL=ENC[AES256_GCM,data:wmh0BoYwd3GutNhk7uQZKO4MAXin,iv:uZ4FivDic5KL7fQvrH1rhN4bTmXWqnHfzMJq7/4o+5w=,tag:NFfkEGkS1vxjODo/zB5Mcg==,type:str] -DEBUG=ENC[AES256_GCM,data:aqfTbw==,iv:g7jWtA4OD+b7QW4Sor7dX/6pJxif7d2SiU/An21QSfY=,tag:sjSyQQNtk0rLaOhYzNbAKw==,type:str] -#ENC[AES256_GCM,data:dSj2wIiLuhNq9FvDEhTL3F6LBJ4ITNsaKYO1YRmLZLDS3+C0jk7Hx5kPjcor4T9kst5tOE1KMH7HKn+0CHeWf+pdidAWikd1xA==,iv:hbnREdnOSgivoTI9/GwUOpRg1yROIgjbTpiR1R9u3AI=,tag:am/IFpc8a8Du0Wx8X7u9nA==,type:comment] -ADMIN_EMAILS=ENC[AES256_GCM,data:yS0kOYrM5VRhAAkxqg==,iv:eCqHA4kcgwt9T03Umq8MYiLYeB+Zh3pL9R7adcSZkYw=,tag:k3O0EbevHeWgG1JQAh30hw==,type:str] -#ENC[AES256_GCM,data:FDydGDKV+yuB,iv:3Q2JHDLqOWS/R+91Kx+5l7IST9DRngBd7Nd2Oau0kFw=,tag:JmavycCv5EeW7nDCum9SsA==,type:comment] -DATABASE_PATH=ENC[AES256_GCM,data:2INhi9u4WHoC6eA=,iv:JvkmUtrd5oxw0ZGQJAhI4kj1MtPqxRi5keGIzgCHcPQ=,tag:T7Yb1QKkVR3U7Ww5puKinA==,type:str] -#ENC[AES256_GCM,data:c3ikDZY=,iv:ssaKWl8+ddTa48pst3cz2n5ywMqMdrUN69jlLGRbgJU=,tag:7Dk53YiaBzYCyrrddUHb1A==,type:comment] 
-MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:gCw=,iv:u+tNE3Jc0GD1JJB7gX4MFelHb0JhqOiFAjP5cvsS1eU=,tag:hQJPei0T/Hz8eUnUmGxnyQ==,type:str] -SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:NT0=,iv:wzqXr3Stux9WrJBF89iYtmZMuoog2jr5ScTd+W3T9E4=,tag:Ei/FtIMAmBChc1Q6GS01SA==,type:str] -#ENC[AES256_GCM,data:0gHTU0jfkx3K/aeVQ4ab,iv:MhpRgozzMhtlDw05jDdPvNB9linCCep8WWPXFCxESI0=,tag:yxQncJ6uBHPGM2PsgsqA2w==,type:comment] -#ENC[AES256_GCM,data:N1R78wzjduwR6vj2/2io04RHvDz4bdMZQah2eG6iuQfe76dk4yVA2is6vIAO0wGHQbKOFj2XCFnex9h6LK7yfVB2TNNxndTLDOemlg==,iv:ifFHqlb2oY4flnWX3sDYKEFP3cufi5pfuF1H0zjZGRQ=,tag:ZJC6zkPE5/29GCpRK6G7QA==,type:comment] +#ENC[AES256_GCM,data:9JNa0g==,iv:KnGl3/4KQWkVnFXn9iKU5z5Ys6KXWOnSEoE/Jjks2pw=,tag:ZD3nrOQmhUjPkZiwtV330g==,type:comment] +APP_NAME=ENC[AES256_GCM,data:Vic/MJYoxZo8JAI=,iv:n1SEGQaGeZtYMtLmDRFiljDBbNKFvCzZPNtaFBNauYY=,tag:Smsd20Ba56QZKVFpRmhRPQ==,type:str] +SECRET_KEY=ENC[AES256_GCM,data:a3Bhj3gSQaE3llRWBYzpjoFDhhhSsNee67jXJs7+qn4=,iv:yvrx78X5Ut4DBSlmBnIn09ESVc/tuDiwiV4njmjcvko=,tag:cbFUTAEpX+isQD9FCVllsw==,type:str] +BASE_URL=ENC[AES256_GCM,data:LcbPDZf9Pwcuv7RxN9xhNfa9Tufi,iv:cOdjW9nNe+BuDXh+dL4b5LFQL2mKBiKV0FaEsDGMAQc=,tag:3uAn3AIwsztIfGpkQLD5Fg==,type:str] +DEBUG=ENC[AES256_GCM,data:qrEGkA==,iv:bCyEDWiEzolHo4vabiyYTsqM0eUaBmNbXYYu4wCsaeE=,tag:80gnDNbdZHRWVEYtuA1M2Q==,type:str] +#ENC[AES256_GCM,data:YmlGAWpXxRCqam3oTWtGxHDXC+svEXI4HyUxrm/8OcKTuJsYPcL1WcnYqrP5Mf5lU5qPezEXUrrgZy8vjVW6qAbb0IA2PMM4Kg==,iv:dx6Dn99dJgjwyvUp8NAygXjRQ50yKYFeC73Oqt9WvmY=,tag:6JLF2ixSAv39VkKt6+cecQ==,type:comment] +ADMIN_EMAILS=ENC[AES256_GCM,data:hlG8b32WlD4ems3VKQ==,iv:wWO08dmX4oLhHulXg4HUG0PjRnFiX19RUTkTvjqIw5I=,tag:KMjXsBt7aE/KqlCfV+fdMg==,type:str] +#ENC[AES256_GCM,data:b2wQxnL8Q2Bp,iv:q8ep3yUPzCumpZpljoVL2jbcPdsI5c2piiZ0x5k10Mw=,tag:IbjkT0Mjgu9n+6FGiPVihg==,type:comment] +DATABASE_PATH=ENC[AES256_GCM,data:pEpMUrL7ZHAzMT4=,iv:eDGudDVsW5vF0sENri7gQrFlCEdoWYP6hT5ZeXXs3Zg=,tag:Gl91C6uRdCiJ7Jo1Z/MQsg==,type:str] 
+#ENC[AES256_GCM,data:xVzlko4=,iv:glHTshoRIkIaJNpn4onyAxPOtXTsNh/JohXJyyu4Ars=,tag:fQ/53HdxYXs2JTMx6O8rrA==,type:comment] +MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:Ua4=,iv:ou1kEn+fa42lZDsXaPvpodJcvAF+EZC9UIGNK/tBV/U=,tag:+ed8Bm/8pdIksH7O2X8WwQ==,type:str] +SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:/gk=,iv:kKWy+FaoLp8kAWpZzpoUHX8nVFRaA4yuTTVzN2TSYTs=,tag:QypZTVmTo4lXd7PKTWrBdA==,type:str] +#ENC[AES256_GCM,data:8HLEkeUESRt3bOxIQsma,iv:kzt8+SFNJw2r3LqwwQPzs9bCdacYSfHWPzIvTxARI4k=,tag:n+F13eILUiJCZ3NtQdo26g==,type:comment] +#ENC[AES256_GCM,data:cGkjKPIfdOPWoZFEXTAgw5lsu0LIcNhu1y3ab47kKbVEZMiCk+0KrEUNJcqbQ+ProBQ6F6N38PUhUl0lhKKjMqjepMZUUrUTqFp0Tw==,iv:WyFISLnRnSSOkra/p7bOs5BQWx+qFaaeeZM50EdrIgA=,tag:UM0EiRGy+RFXfdJqRuv3Jw==,type:comment] # -#ENC[AES256_GCM,data:tuw7/hEPqtY59Y+L3Nq/Y6op26+U05GzZjQpGpwskwXJk5fauks66lPr+P9vlgEJXPOzuF1ZmB+M9td780mXavW49PKVh5NnjP/GbQ8t0Gc=,iv:wj6MCF/ujeaIoYC+pcgeW+slt1CBYPK5Lj4+LyHfehg=,tag:9EJJ46Exxd2On2SdCba+vQ==,type:comment] -#ENC[AES256_GCM,data:KApVqBUmJYwUpHp6BPBkzzVR+BPe8h/u79lODjPLvQoCT9XIfJ3ulZngRUUIDCJr6+yM4y98qxQJf7bhs4z7CK6oB1L/,iv:FAuFeIOD/Rx81L97cAqYIPZ3bhyrqOReQ5xeOjyN/m8=,tag:Z69JIWO4e/Q8+BQ5ThslVg==,type:comment] -#ENC[AES256_GCM,data:80vv1OoLoWwS+J9cbZ1Nt+UeF6Fc4D8LUHLX5BiEmBQAXh8ti3EZZnwi2QZb0wN+Ma1zwsM=,iv:6xnMzedX62XPU3GGpoTie8RZDFlzvrpFMc4u+fCbKYw=,tag:V7HtIqw1+ltW4dbvqobjQA==,type:comment] -#ENC[AES256_GCM,data:irTPd03a7pDOi2WOsL43GB3c/luAhlm0u6b3Wj/oLKnlzmmUV++aoyDum32wGLI4qH9Sy5XJS44=,iv:HH3uPdh0ZBdvX2bhH5w22TCsn67HaJa+/f5bk8NJNvA=,tag:f4xcS4URJS1EAipjcpFn2A==,type:comment] -#ENC[AES256_GCM,data:uW8E3Yhcepw3FJ3En5N8CLpN70a1ovXZJyS/gSqaWsMtScbTEqlMZ39R0S8hiS3j1y7ST7GaNXburXx0lv8=,iv:Xk8GhHPk7zVnMi4PbazefyxumHSv/XwpO/lk2SCxYEc=,tag:r5IpNFQ0wxQ05+UfOaenig==,type:comment] -#ENC[AES256_GCM,data:Ep/iNZ9JUQTFhk63ukNzs4YltDYEZ1b4jvi3yBpT2yfnSuVvrz1xSc+oU5WbHI8SeGbpaTgn5eA=,iv:vry5r45x5xNcIr1oyCbVAkFxFq2vpvvOEzqlvWI/bQc=,tag:MjTSas4nqwHg7J2ZfxlkfQ==,type:comment] 
-#ENC[AES256_GCM,data:OwkE2btYFBq2akgxnlHA3lVUJ4eXI4YVjIxlbK4lbrn6cd9U6xz7DhrDuFbOxMo1VtbyxBQ6dCBM5TODPHMFRBCIyuuXQi17,iv:emVmqGfwrpkF5HoQ0OD0UhiN076RezDEgsmBm2e5FZg=,tag:Hj+KHO9yKGqv2W5XkkHCZg==,type:comment] +#ENC[AES256_GCM,data:icBc0Zv+oedobh8DOTwV2Fc+N0C9CqjZvLciC1dmEIygr/P5oOBLH9Bnhf7XW3X+fiLUtLPQPUIh9CjX+wVeX+MpUZj8ksS0meoz1O2kSBs=,iv:oWreqF6QxaFZn2r35uqY7yy/nItwy3k3VuXAcLyqMbI=,tag:ezQO+m6qkQSEwe17vYtYcw==,type:comment] +#ENC[AES256_GCM,data:BmlWl0f3aiOrEVglJisqHb507/ipmyRCUvkygs2jBfh2gw3BJgrCAAqoK+DekvIls2myRn7RynqWTMZKGXtJMHu5SEA8,iv:QjPoSzyNl6CBYmJAd2OfFEEoXO3jz0LL2VNegP0mY8Q=,tag:miTvj9PK1a00BKaAjUTICw==,type:comment] +#ENC[AES256_GCM,data:kzSYQCopgU9wXpw61WGfYpRtOjV3iEVvZ09RP4OqVl+Rqnd2wCKREKKrB7F15bp4BB3OzMo=,iv:TFFpROfYKaUlWQm3ISYkyYdZCarSJbqHItLMUplYiXc=,tag:Xbk3ii3DYE4yPe8cJOt+Tw==,type:comment] +#ENC[AES256_GCM,data:qbh0Mnu6wEbDaBideJQCZa74G/DqSWuoiy22zCGoKqKZ2YVQEf0QxRCO/DgOD8rdp1Neqp8u4oE=,iv:dKV76b8sT1ghlyEadeAqTjtNTXrBp9n5ZbGMGi2/GyU=,tag:1HsI5iDekTqxeYmcEpL3HA==,type:comment] +#ENC[AES256_GCM,data:kTLnLnwPVVDFKYncBbFjGmnbxmNfpPXSpKyZu5ZQx6PeVs5s6hpDa55zRXxAetyBAHsmV99ZV2q1NTDXF6Q=,iv:m/ulRFQcGl15vi2ohMwVeYBmcRtp274ROiKXPsyJkfQ=,tag:T0E5p4d/inHyuupbg7bZHQ==,type:comment] +#ENC[AES256_GCM,data:aziyEFGCGxbc+q2ma2QN4MvdhQ6bnYuZA7Cgqr6p3zGjPG3oybTWwILejJqD2lHmULXh0UN0qco=,iv:XZwwEUOAEXIUyXiiHFS/bdL91bWKIhZ5IzcXWXAR63Y=,tag:JLsDM4s2yh4aBFQtxWLhDA==,type:comment] +#ENC[AES256_GCM,data:JroBWIbSs5a7+lg/AtBNPxgbtxaztjmVzI1JXuhBmfWD45Qp+w8ePZg9PA1FYzPtEATPHso6m3tZdMTrPtv4Jj6ig/+KHOLn,iv:CbJp6MHOU4yk9OdynQTPwVgZj9Djw9IC9TE90go5RDk=,tag:NTVDg0mFeZsxfVeIFwFhPw==,type:comment] # -#ENC[AES256_GCM,data:1rm7G2bjcRWiQBcHI3v6/iVxnASJKchxrEot8YznFxoaGcEQNyQQXapZUFRR02pT9FT+vy6mCWxDyqRdyrtdlGjIL+N5R0ukNcI=,iv:dAFaoyjPikp3gOCIph6UWVrAR3ascxMKr+weOCAqO0Y=,tag:a8aDQifsAp/6+uemGMwryg==,type:comment] 
+#ENC[AES256_GCM,data:JDxXJ3IJVaBF+MOWe0WXBOPnee48RyjNDtflwVM2FFbLSp1h2uYf6+aRjC4w6pb/R1pl9+AzjjlQaTukjQVwXfbBVKH8SAbhOdU=,iv:K9kvgAbltLIcBo4vZ8NUiaL/Ik+x5Arl9Pj5sh3SIHo=,tag:IjneoyVD+dxd6N2PVV43ww==,type:comment] RESEND_API_KEY= -EMAIL_FROM=ENC[AES256_GCM,data:aKRFxdRPnDMs5ch7uYx2l2D5kwg=,iv:cxmNdrdXZnRjI0oDXUrz028mI7KnSUXK/pUz71NkUUA=,tag:UjKqpg6oq1t2WIdaXGHzaA==,type:str] -LEADS_EMAIL=ENC[AES256_GCM,data:BJ9r1b4E2Ck2b162f6K2P6z2rXs=,iv:NmrlbVJU7pfBf7pXc4No+69AyvqgF0j7vZEkSvYV6SE=,tag:fI3oyLbePaqi9TqkiJXZtA==,type:str] +EMAIL_FROM=ENC[AES256_GCM,data:QX6duq5wx3z98o39nRXTrPpNXwI=,iv:ikpykHOeHRay+k3B4MvHn2SOuHNGOIuvjetOt+cjTrQ=,tag:8ryM56ogWySp9RAv0/ABTg==,type:str] +LEADS_EMAIL=ENC[AES256_GCM,data:aVFgeh0Yx7W/88noeURvf8rirv8=,iv:5KjsCMAsu1Ywz4BI2JjB1KmQ6QM94U1zlNGJ3BKl7Uo=,tag:voi0kjdhz0SsOQHqtMID4Q==,type:str] RESEND_WEBHOOK_SECRET= -#ENC[AES256_GCM,data:6T9lVhn+1mcyCxBYy/dsPaGp0zy8+XQRhRCFKxlos4QOIiLBjrYuI11v2oSO80Nn6AfGNms40ewi4RpHtt7Afawb3A==,iv:f9gfD7cM3WJGdvtFQ3Svi8Cui2yadmMXfuF6IAIn/LI=,tag:3jt5lFeI0P827LqQWUGzOA==,type:comment] -#ENC[AES256_GCM,data:ja0Rgj3IVihRU618EIroO3bJg9sWFOd3Ua88HLP9yrzEZ7Ty8Havd+2vroD/TGeidqjiFmpMlXH2R3v1jLrviRbkWqBlbBO4se4G,iv:IfuAx8HcJByEcLaqegqZZZVNfO9H8LnuPzIcXHYRBDY=,tag:FumQcnMmcKHI81FjYrT8Mw==,type:comment] -#ENC[AES256_GCM,data:eHDEBuHGW7rKMPW1NM9b47rBS9BMmtmrwICbijYIVdogMvqJCMEk8zfT/lc/bnXFiamFYfJhhHDNEEOBg69ZdZ7M+mWPCRps6cXP6A==,iv:7SDIHytbnp/v6zXG0j4PbkIIzjeDVqp4BKthmTIqF1M=,tag:+fPDhe9NIe2Qmqp91KuRjw==,type:comment] -#ENC[AES256_GCM,data:VrBNRBK97VxVTcwPZItM14tr5IbWQ2jGTLh1Hca4TPHGWPo75fxUa8MFWSCUcwXIKjMm,iv:FRa+BpQFtyx5BuNt609duHng+QgMbQavvOzDpPb0Ta8=,tag:+OiUXm7Vh+xhdfQ33KEhSQ==,type:comment] +#ENC[AES256_GCM,data:1HqXvAspvNIUNpCxJwge3mEsyO0Y/EWvD3vbLxkgGqIex0hABcupX/Nzk15u8iOY5JWvvEuAO414MNt6mFvnWBDpEw==,iv:N7gCzTNJAR/ljx5gGsX+ieZctya8vQbCIb3hw49OhXg=,tag:PJKNyzhrit5VgIXl+cNlbQ==,type:comment] 
+#ENC[AES256_GCM,data:do6DZ/1Osc5y4xseG8Q8bDX84JBHLzvmVbHiqxP7ChlicmzYBkZ85g43BuM7V0KInFTFgvaC8xmFic+2d37Holuf1ywdAjbLkRhg,iv:qrNmhPbmFDr2ynIF5EdOLZl3FI5f68WDrxuHMkAzuuU=,tag:761gYOlEdNM+e1//1MbCHg==,type:comment] +#ENC[AES256_GCM,data:dseLIQiUEU20xJqoq2dkFho9SnKyoyQ8pStjvfxwnj8v18/ua0TH/PDx/qwIp9z5kEIvbsz5ycJesFfKPhLA5juGcdCbi5zBmZRWYg==,iv:7JUmRnohJt0H5yoJXVD3IauuJkpPHDPyY02OWHWb9Nw=,tag:KcM6JGT01Aa1kTx+U30UKQ==,type:comment] +#ENC[AES256_GCM,data:GgXo4zkhJsxXEk8F5a/+wdbvBUGN00MUAutZYLDEqqN4T1rZu92fioOLx7MEoC0b8i61,iv:f1hUBoZpmnzXNcikf/anVNdRSHNwVmmjdIcba3eiRI4=,tag:uWpF40uuiXyWqKrYGyLVng==,type:comment] PADDLE_API_KEY= PADDLE_CLIENT_TOKEN= PADDLE_WEBHOOK_SECRET= PADDLE_NOTIFICATION_SETTING_ID= -PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:Xm4mm3DBjg==,iv:6k4ZSP7GQF4LwVG0OAMBdInAfaf3hhJLjmIztNfdCWg=,tag:/LJeoFbovsvzP7GBEiLsTA==,type:str] -#ENC[AES256_GCM,data:1c0sgo2l+OssNz8Rgm8/DI5ormrHXNHKDTKx6bWmv/2CchefqxsRq1uS7uR5c86eTxfp023LGhy947i7,iv:PAEdaE/pgfy3jxuod1PJ6VcyIYOKmlKkPmudSs5xbxw=,tag:KYnQmbim+fWsnW6nlEnhGA==,type:comment] -UMAMI_API_URL=ENC[AES256_GCM,data:yj4OYkvZIDrgjgkdM3a8xyzvoGJOk75wUhAgNQ==,iv:JmQMdU9XZ5ABfvk0w3XP/WkdcR1KrgXTPVI39+drhO8=,tag:jW1MUL5Nv+z8KP9FjmYFcg==,type:str] +PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:KIGNxEaodA==,iv:SRebaYRpVJR0LpfalBZJLTE8qBGwWZB/Fx3IokQF99Q=,tag:lcC56e4FjVkCiyaq41vxcQ==,type:str] +#ENC[AES256_GCM,data:2Hs7ds2ppeRqKB7EiAAbWqlainKdZ+eTYZSvPloirT4Hlsuf+zTwtJTA6RzHNCuK4em//jhOx8R2k80I,iv:1N6CNPqYWp3z8lm5e2Vp6OlpgHdMOiD7dsEYp23nMtA=,tag:ulWP/BFFoLljLMVCrsgizw==,type:comment] +UMAMI_API_URL=ENC[AES256_GCM,data:oX/m95YB+S2ziUKoxDhsDzMhGZfxppw+w603tQ==,iv:GAj7ccF6seiCfLAh2XIjUi13RpgNA3GONMtINcG+KMw=,tag:mUfRlvaEWrw2QWFydtnbNA==,type:str] UMAMI_API_TOKEN= -#ENC[AES256_GCM,data:5nypMO9DFup4c3p0xXM=,iv:GdbOATtVOmFZKvDfv4gdNDDdq4WmB+/yk/D/xgC7Uos=,tag:TWfgxyuzlzvzxJRMF0L0cA==,type:comment] -RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:o/R0,iv:TGPynY3rpH5fL8zQvI9EAWg/LFv7earnaCbGhkkN2FE=,tag:lpnRZGaONv1TZdwJg17AIg==,type:str] 
-RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:2dU=,iv:6wSl6GAWAVxfVC/LB0uqhB+8gtHhkrFybASXdQseuVk=,tag:gVjomO7E837h/plD/8948Q==,type:str] -#ENC[AES256_GCM,data:7CoH3Rv284IHofVG9bruk5NYF9S4oeGqupZFQ38OpfCde1p9ks7WGicFmfrMQDNTLG1YoR3TD/u/XQ+19SoP+XTcSlhoYwo2mx19ETWnww4=,iv:BvgKjGm1YB97I8EQG74uaoDk1QraWxTVtZoiNGkdx6U=,tag:vbS882eevc0rzOA9pGtzng==,type:comment] +#ENC[AES256_GCM,data:HTG/nKNl9NMicZVt5nU=,iv:MfRqX6tzdl6SC61xjRxTrVRpTWGmmqslL/Vdy88Jtyo=,tag:NhOgm3+qJelmQaAAnITFKA==,type:comment] +RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:hy3B,iv:kouDI24Gac/S7aQMXRcl+emwE6/WU+F9egNhQ+MayrA=,tag:iZXV92kqnS0MppvW6Km5oQ==,type:str] +RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:vE8=,iv:lS6cQX3VzHeVrlYHQTXYGgib1rYI9G4XoW/f5YSjVWs=,tag:3Bn8PIktDxD7HvUTHw6mnw==,type:str] +#ENC[AES256_GCM,data:KRlMK35PPFBTe7FOkbanuskbA4oFj51Fg290lRtwyHKoJxi7fHg7cueojwCiRSJestRguwV8g9UP4MC9bKzWssdFqvfdr7XEUuA3a+WWD9I=,iv:RZhJJS6tNZHecxn/862nnl8dg8OwsVYB/R0yPxYMXgw=,tag:dqXgcU8OSyJzOPJp+7Z+cA==,type:comment] LITESTREAM_R2_BUCKET= LITESTREAM_R2_ACCESS_KEY_ID= LITESTREAM_R2_SECRET_ACCESS_KEY= LITESTREAM_R2_ENDPOINT= -#ENC[AES256_GCM,data:1BAAp6TJWo4w47vsPtE=,iv:mMZHNSUPmFYK+jLv7DJ4QqZ+wu4mm+QJJxNKxVSiXaM=,tag:rqg9uSb26e3YmL2KIGhsHg==,type:comment] -DUCKDB_PATH=ENC[AES256_GCM,data:QupAI4arYuggsPTVJ2E9BGt3Yhco,iv:nuXFZUleLIIVf5bXXKuQ174psZZjR8AgwNo677A1Kx0=,tag:+9bn+OTtBCmxZqPt0cYCzg==,type:str] -SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:okf9kDEFoPYreiBk1rEoB/6dAFUr,iv:iE+AvfUPX2jxKKBQG9iVMu4LkDcNTxLB9sHY+bmLrso=,tag:GeVRiwbScPrUSsZ1WtfN1g==,type:str] -LANDING_DIR=ENC[AES256_GCM,data:cMnL22EkJGT+jAgM,iv:nItexm3BrtgUVU7TBjHKxBIMuXnuEoAvZcEFFGTmhSI=,tag:E+uXdhTGjKRpjCtRXgcUCA==,type:str] -#ENC[AES256_GCM,data:Cwg5GJ0vOj3LfvtUHVoePytBmvCqJZbqJ5AWhdHcbVFpFVVDYo6GQARMlaLvlEgTe+4mxiR1BgDn6jTokWvynBxgVvQbNLFUINY=,iv:p0fjB0/TQRwdtSxTqFZPtAz9CFRvNjYwxqFLDw9wiko=,tag:EyUfEr0z4PtwvWSo9LbLkA==,type:comment] -REPO_DIR=ENC[AES256_GCM,data:/A==,iv:gTBC1pW2sn/8ZwU2UWErJSecIGmnl5voru0T/klGHuw=,tag:NlFer7SSK9KyAFhrzwv/DQ==,type:str] 
-WORKFLOWS_PATH=ENC[AES256_GCM,data:lu7JrtxM1TGqY4EErXD13hHMxr4XJg51h2uzXAAFxQ==,iv:j2azHovy7Cu6CREn+cw5hNZkknA5m++lukJskWi7OXU=,tag:d+FSm+hx06SfpD9o7pwr8Q==,type:str] +#ENC[AES256_GCM,data:4To0MRZIt3HxO7qjh4E=,iv:/caczOlTPECDF6mA1PKO8Xm4NeR1RZjgpt2Vuq+rfkQ=,tag:S/UGMqHZQX/Q20N+Ah30WQ==,type:comment] +DUCKDB_PATH=ENC[AES256_GCM,data:sql4dtOLeX1aY/kdaxAzCk47hm3t,iv:S63x40+5blcF8qYxMjqUhs2moukuy2yEQRPbUvXZSYo=,tag:lTLYjtyZNiv06o/hm6Grxg==,type:str] +SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:xE05ajjqmYggI9oz4w1GBucUn0bI,iv:/C3D+iWSNk1XJ/xclTzdJTqOHR12Gwmo1xIxH/4nyL0=,tag:eNcgrb+QvL/y1jE8mb0DHg==,type:str] +LANDING_DIR=ENC[AES256_GCM,data:PNPOE7/MV/iQ24mf,iv:lg486nzb/vlOyTHVQ0HEO4fK18IEJNnuSc/CrQwUsHk=,tag:zecZp+Xfw+dL5GtUeIOg/A==,type:str] +#ENC[AES256_GCM,data:bsiiPYvTz0LtdIgopkPEtcgmtDzZU0W6uton/sqm++5UymV33B0m47LIpdH9xQurQtmoZwMCBkAe0FCqqz62D1dAIH1Q6lzzLqg=,iv:rr7aShvtJtAnBzcbr/O0wOONpDBzwbR/Wbx/YPPsKpM=,tag:YH1wdokUuudFvagnPuT8aA==,type:comment] +REPO_DIR=ENC[AES256_GCM,data:vg==,iv:TNMZ6lrajWy6C9q89/AbRkBawBc2YaGsn2elbO8V2Wk=,tag:va77fkt8VDpPG8pZu490uw==,type:str] +WORKFLOWS_PATH=ENC[AES256_GCM,data:PehxEUMb1K3F1557BY3IqKD7sbJcoaIjnQvboBRJ1g==,iv:WfniguOksC3onCSyDlBpfKC8bE9DAt7evoeYX0K0lvE=,tag:sdRWDqkk9dtuESvfbRBfCQ==,type:str] ALERT_WEBHOOK_URL= NTFY_TOKEN= -#ENC[AES256_GCM,data:cz9SOffDAaXZRjw=,iv:7D7YZAyEk5CNlTiL1+KnbPbqfRYMxdtSk40LWErVOfg=,tag:JRHq+gaWSl2h71biklMMwg==,type:comment] +#ENC[AES256_GCM,data:BCyQYjRnTx8yW9A=,iv:4OPCP+xzRLUJrpoFewVnbZRKnZH4sAbV76SM//2k5wU=,tag:HxwEp7VFVZUN/VjPiL/+Vw==,type:comment] PROXY_URLS= -EXTRACT_WORKERS=ENC[AES256_GCM,data:sA==,iv:cZ3Ga4VPJfTlKrKeHIXadW7kZI9RqkkriKHAxGT5mqw=,tag:4kTYW7QlCwTkFWHxsl59zQ==,type:str] -RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:RV8=,iv:Jrom3R7f1NYbEL+lzypeUmifhGaayY3+uzjRuBxzoxs=,tag:pjYanguedj9RMnDGKiF7Yw==,type:str] +RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:YWM=,iv:iY5+uMazLAFdwyLT7Gr7MaF1QHBIgHuoi6nF2VbSsOA=,tag:dc6AmuJdTQ55gVe16uzs6A==,type:str] PROXY_URLS_FALLBACK= 
CIRCUIT_BREAKER_THRESHOLD= -#ENC[AES256_GCM,data:cPEkskKyxJENcKo0sFupC9R9qq5bQp7sYT9TdQy06FfHPoDQuWw+JuZ5Bs6pOli4NNQBrPX9fdvRwL1TsqmfajHxrSfGgr3l,iv:wF3I5Y8UzYrrJrIeokXn0P2E51eXnqVFdo8lXpKHZGk=,tag:oTPO6+UjI3ZBY5gOf9RLsQ==,type:comment] +#ENC[AES256_GCM,data:ZcX/OEbrMfKizIQYq3CYGnvzeTEX7KsmQaz2+Jj1rG5tbTy2aljQBIEkjtiwuo8NsNAD+FhIGRGVfBmKe1CAKME1MuiCbgSG,iv:4BSkeD3jZFawP09qECcqyuiWcDnCNSgbIjBATYhazq4=,tag:Ep1d2Uk700MOlWcLWaQ/ig==,type:comment] GSC_SERVICE_ACCOUNT_PATH= GSC_SITE_URL= BING_WEBMASTER_API_KEY= BING_SITE_URL= -#ENC[AES256_GCM,data:CHMw0ywPH0adeYkcwSndr3JdWyw8f9AdQXQYG62SRCg=,iv:V73eodJud6Z7U8FvuTM52s/2nqNkFz9eneYpPC8OzJM=,tag:obnBC2ERQ19xj8gI8jOosQ==,type:comment] -GEONAMES_USERNAME=ENC[AES256_GCM,data:mLFHQ76lAa2Ygtc=,iv:XWAkwQ075Ph+8qSNmD36PyvPomROENvb3SebVEuVoZg=,tag:cOyW28h6h8nhTbaBZqhmZQ==,type:str] +#ENC[AES256_GCM,data:ECsuDMQipS6YmFpSm1vqCsR2fUW2zN1Mg9VcUlw0roM=,iv:j+F6Akx2bklGMkFTux230YcZjMibA+Qp+qvgkGXl4Jw=,tag:7aO0wbmP/qB73wLgtiSJ2w==,type:comment] +GEONAMES_USERNAME=ENC[AES256_GCM,data:aSkVdLNrhiF6tlg=,iv:eemFGwDIv3EG/P3lVHGZj96MieIsr85e4xYmEIpZyfM=,tag:McpZMNOIO3FDkSebae2gOQ==,type:str] CENSUS_API_KEY= -sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB2L2dtZFVnU3BlODk3WXRQ\nOGxSaTBZSXpBc1lpcUkxSHI5dmlyYUo1K0NFClpycmREMTQxNnIyWnpHWVZOWmFx\nUTNRZDFzcFArMVAvckNBbXJMRVBudHcKLS0tIFRhWWQrNkdVTzlucG5nOVJXUVc2\nREFpWHdpV081TFZON1R2ZDlGNHVsWjQKSgmj4hrVEvrIizGmTpgj93ct1a3lUYXl\nBbuPUT8k+Hj5UmP+SoZqNS3kh2a1Nvr17K6e4PjfRAcEfM0UgNA7Jw==\n-----END AGE ENCRYPTED FILE-----\n +sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxNWNmUzVNUGdWRnE0ZFpF\nM0JQZWZ3UDdEVzlwTmIxakxOZXBkT2x2ZlNrClRtV2M3S2daSGxUZmFDSWQ2Nmh4\neU51QndFcUxlSE00RFovOVJTcDZmUUUKLS0tIDcvL3hRMDRoMWZZSXljNzA3WG5o\nMWFic21MV0krMzlIaldBTVU0ZDdlTE0K7euGQtA+9lHNws+x7TMCArZamm9att96\nL8cXoUDWe5fNI5+M1bXReqVfNwPTwZsV6j/+ZtYKybklIzWz02Ex4A==\n-----END AGE ENCRYPTED FILE-----\n 
sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a -sops_lastmodified=2026-02-24T15:38:25Z -sops_mac=ENC[AES256_GCM,data:eZOqrSiA4f6mYUaYPS6TD6vL2ON1DsLchIjzSE7bcGpZuaTspItlkBNUR6bsiPnhZ+RCv0xfFMvWallLJe4Y/8ftlQCeq2fGLJ30ZlktgrBocXw5ZYUcJz99NjAXf5gvXoq7Bn5DPnX81ju2a2D8YIhGCZ4YzzE+ae0b44MK9zg=,iv:BG/M2ugqhacqm7dEaHmH+v3dTbu7aKojRjsQBSy/8vE=,tag:1zSJgTCzBUKVsahvdriQrg==,type:str] +sops_lastmodified=2026-02-24T21:27:19Z +sops_mac=ENC[AES256_GCM,data:KuL4wOGAEnMeXEDUKH7MXPhRFln4jTMKJAikTmkyYYxlFsxbTy3o+i5wwpfEZ7oqq/76v7XE2rhg9KMMLfnbZ2rLH9I/6kJRDtlZUUBCdKI6FCRnFbsgmzhuoXMHuFrj4B054u/C8QN2YwL7Mke+Gs9fglxvBrmhN58JAIOaxew=,iv:qu7rdFffw8IBHRP9a1tpPlRexg0b2f6lcpLu9AVbl5k=,tag:h7NbJ4bl/B8/CGVM/iW1Uw==,type:str] sops_unencrypted_suffix=_unencrypted sops_version=3.12.1 diff --git a/.env.prod.sops b/.env.prod.sops index 0c1afbc..b4bf3c4 100644 --- a/.env.prod.sops +++ b/.env.prod.sops @@ -1,62 +1,61 @@ -#ENC[AES256_GCM,data:ucMhtQ==,iv:sGhlYo+lSjTp5nwwZTSgMqT64ut4T80hx5CVT+g82lY=,tag:RMnG4i6LpK1Y5Bw4gFPqjw==,type:comment] -APP_NAME=ENC[AES256_GCM,data:qruXhXS0DfkEbgM=,iv:tlyiDXsNaIj5vHBaO0dE7mVi2c/IBsLBpsEgibS8DyA=,tag:qnWKM1VcaO3JsIfHz7qo/w==,type:str] -SECRET_KEY=ENC[AES256_GCM,data:9bXIuM5FOXgp,iv:/df/NFPVVNpCtWSWdxfn5UkEeRmbtmJ1coMHvG6c414=,tag:wOtxv/ftApPR07ywoIvO4w==,type:str] -BASE_URL=ENC[AES256_GCM,data:C7l7voU3GttiIRbAZ3/dhHootWm7wQ==,iv:HyDI4yfZkBuRuvUUao//mu6nkfW/lyKDdXS472pJuK8=,tag:IXC5Xpe1YcEOCCTsnBuNDg==,type:str] -DEBUG=ENC[AES256_GCM,data:Aq7nwAo=,iv:px2NR94oiodO8FbCa+VMNptNR51sHavOmiQBQ667pVg=,tag:vHIwuM+sg2Lpvw+ZUMZVRw==,type:str] -#ENC[AES256_GCM,data:Bf/QsEOoDh1gfJIHwA==,iv:nSoT2Bnk9y2VxoL2opvUrSBImRstydELaSk2IO9NYPE=,tag:YAqhk8/qpW4UsMsjdS+RAw==,type:comment] -ADMIN_EMAILS=ENC[AES256_GCM,data:CDh9bd1OesYs,iv:C6Dwn2h6BYXc21VBFYplpfhwNj8TPevSRvkPCarL7eo=,tag:BibcNChGFh1b2jCF67Nn0A==,type:str] 
-#ENC[AES256_GCM,data:56tQlB/WNuue,iv:lh7+zHQuoAC9jDEgI6/g41H/9gj544nDOwAYcFWjpQs=,tag:XIVb5LJKxlSQz5qPidwJQw==,type:comment] -DATABASE_PATH=ENC[AES256_GCM,data:itpuoRhwDXFgEIg=,iv:2/UkQmyyzd3jaUgcxbMCmsflN9ubY+T/y3U9DIj0+3I=,tag:mLONYoV1YRldPS4fjmWo0A==,type:str] -#ENC[AES256_GCM,data:3AMoLpE=,iv:w3+deBCRpccgTepZ7/j36pkzUIWAmaM3KuJVpeN1qiA=,tag:V9cfSO0NOKy1WLVQ4OSQYA==,type:comment] -MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:QZ8=,iv:0KWT8VtITdDyBTM41wK4Xe29vbwbVXq5JI+Bk1C0zLk=,tag:y3DG33dd3YLhCC3RHfvJJw==,type:str] -SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:J0Y=,iv:CTSBzRzrn5EHO7eNQsZH55vPx1l00WfaQ2tQOQPbNxY=,tag:jvP5sAHd/MkGtzQ5tJc3jw==,type:str] -#ENC[AES256_GCM,data:4AzExSrkvf6tPpvTuvhD,iv:WZBGtFORarHjnBVZbyIzeeY4qFePnrEfUVFIGshiytI=,tag:grHkOyLwjcti7sHvzSdVSg==,type:comment] -RESEND_API_KEY=ENC[AES256_GCM,data:7s9gnzHzzYNy,iv:+2c0tLkLGwx15iLBzlCK+NO0XLLOoar90KRXqN/HwuQ=,tag:2gYY2jxVRYvt5MpBCWb3Jg==,type:str] -EMAIL_FROM=ENC[AES256_GCM,data:66oS65zkYQJ4BXh9clfF861fwsR09gVgRT0hwZegpnURwA==,iv:9fTh7YU+DzIP6Cj+unxuzJa0mnHsHcwNfvhI6zffjC0=,tag:ZLaPF45Ns0F3xNynXW42uw==,type:str] -LEADS_EMAIL=ENC[AES256_GCM,data:SkoYDLDQOCWLHGPFd6HQ1cCU/VHuU9jK9FSjNMHKxnCbXw==,iv:kt4tkw2BJF+Fz5oquje3OvMosqculK8Kxm6h30LWxj0=,tag:PSy1qSkCV+Z/z8dYGLV3BA==,type:str] -RESEND_WEBHOOK_SECRET=ENC[AES256_GCM,data:M0BNhZKQvfdT,iv:XIGForPlTQl3NO+hUp90D7sW+wZz4CmCWvPKHedL9MA=,tag:wbxHr0YBKKMOt8tGYG0dVQ==,type:str] -#ENC[AES256_GCM,data:XU7LmjtgBw==,iv:mTo7c7tQ4bCrUpRjfpkl/eTMv9qgVVwG2BwDJjDENng=,tag:pJlSVRi3sj1CgapPKDmcMw==,type:comment] -PADDLE_API_KEY=ENC[AES256_GCM,data:F/VGqmpu5Pja,iv:RIIaP3LsvnQ3pPQ4OpXlzz2N3vYnQEyrwFrukpHY3qs=,tag:MZJYLxddceccoAPgmW2tgA==,type:str] -PADDLE_CLIENT_TOKEN=ENC[AES256_GCM,data:sMUzWxIx835F,iv:mXc20kRmnofJf4Th2O4sjJoyTAlKHUbmbkd3tl19VkA=,tag:a3/N0KT3v1bo/o65sTuB+A==,type:str] -PADDLE_WEBHOOK_SECRET=ENC[AES256_GCM,data:AQ5VbLqVyoVr,iv:tqloKV/Vfo3P05QZDsc8p0bfnwcylsQgVjhF17AyE7U=,tag:7gpQEQVcxOu8YgN/518u/w==,type:str] 
-PADDLE_NOTIFICATION_SETTING_ID=ENC[AES256_GCM,data:ZBhOIvcGpGWY,iv:ByQcuU88DjAUEs4x++8+3E80vyiDWqbA6VR4bG5oZuU=,tag:TXnesiQ7K+baSOH6XlzBPA==,type:str] -PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:8Ayb93UcbMDSAg==,iv:DydRKcY3zHa30+L6g/2ooDZbtyMHy3yJ1ETRssqDkFs=,tag:1nqgd26CAeB6HQEGHQr16g==,type:str] -#ENC[AES256_GCM,data:c1C8AUiw,iv:vU1muGR79S+rr5dTQbzDEYZ5WZdpB2zaHEcEvPIgPYU=,tag:vr07Boz2lZtBscvlHGt10g==,type:comment] -UMAMI_API_URL=ENC[AES256_GCM,data:uZMJ+vSyXZkgJPRwY67HTzKCA5qA9vMp4lmCBw==,iv:/SuiWslYHayfB4eaJ4rOtqv/CFBt2GbtOe/83ZYvCxM=,tag:y0aJ/GVhZigxAHOklmLg9Q==,type:str] -UMAMI_API_TOKEN=ENC[AES256_GCM,data:aYqT3Xwytvrl,iv:MRei8ZxgohwsbDyP2xruEDdiZQaGA41IlSeb4oqr93A=,tag:0VnkdreND5HggCA0LS+sRg==,type:str] -#ENC[AES256_GCM,data:3QFdBeiJqY3UfkClvPs=,iv:2OUB0JZbwSwFOiNo4GO5fTF6WwSD++Gy9iy6EoH8VGM=,tag:mEszf5Zgvv4bUGE7Iqhf+g==,type:comment] -RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:gPCG,iv:AC3g0YDWdQRexRbod3m8UXNKzy/qn0C4LOy5kNCC3cc=,tag:pX9JwjdauMQkkcddYvk9Dg==,type:str] -RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:S2Y=,iv:PzZDhpC06Vh8qZ2/ImgIIp8ENUhEIHkyzTi3Ob+PWw0=,tag:5hpdFo0nJaSUoA63inq3jg==,type:str] -#ENC[AES256_GCM,data:DS4SYOlYWDPAzQe9TbFKC4hSfPgt,iv:HI0pGyQnnBIo6Ufb5QlT6539QVgLNd3Q1E0nVZT2YNw=,tag:VbcykF5tPDULflTGSBw9Bg==,type:comment] -LITESTREAM_R2_BUCKET=ENC[AES256_GCM,data:N/buXMd8Bmen,iv:AFvbKwo+oipuFOB4noaks//IQ92I7gvalUgLYJmp3h0=,tag:6nljZEU+01I2A6bYmHf3XA==,type:str] -LITESTREAM_R2_ACCESS_KEY_ID=ENC[AES256_GCM,data:AJujAQyPg+7i,iv:x3sC5WeoUtnJGN01J7p+8W5q8QCbI7g8kA6+njdgsHA=,tag:WUAjGE8rTizRppwl4zR3Xw==,type:str] -LITESTREAM_R2_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:0uAA0+aYhDPS,iv:h3+1Tb7Z8JU+N5MsNOYy9oKfMZirFL1j8q0sCwol3bE=,tag:kUcuX79/CkFx8vXRq9LzaA==,type:str] -LITESTREAM_R2_ENDPOINT=ENC[AES256_GCM,data:LevPHY6+xQzE,iv:YKSlo15j2JOpaDFE3fodV2xzrFPma23LoRtbmrcAwKk=,tag:lRKpLojIj91NP3rQj0lujg==,type:str] -#ENC[AES256_GCM,data:hCAkcPu59qPCG+gdRZc=,iv:fsXe8zekkwZsVyKhD15gJUy+nrmlkE3y5GTSxnrsSBI=,tag:QzuZ3hjg0RP5ydk9LLqZtQ==,type:comment] 
-DUCKDB_PATH=ENC[AES256_GCM,data:7ovwg981vYaSxbPLhliEGiE+f/8V,iv:8+sT3EGhN3qPAGXehVXFibxGqebShA4+fpV8PMH89RY=,tag:Ed6JR6X661riZ19K1Rka4g==,type:str] -SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:gxDrCU7+J4hvzJGXPjxsvjZdJx7R,iv:ig+32NjlalAebrl24/V2L9cvtAGxCFLci/b9nBVsmrk=,tag:oJVU7BX7qMp++tGGYNxzqg==,type:str] -LANDING_DIR=ENC[AES256_GCM,data:Pz7vkNm/xxdO+kn9,iv:QleME3pY9gwgDmW6Vly1LVRMWhdkD04BXyzO3gFX3YM=,tag:QOpMsiXccD7xPsNJ/uuHjg==,type:str] -#ENC[AES256_GCM,data:AQyf+O/gIVE40EU=,iv:uJwMEr44+W4hjRUMPoIhofJfWvXxJDQStRLyjGDdw2w=,tag:Ry07aeuhb6d4CXhpcFkv6A==,type:comment] -REPO_DIR=ENC[AES256_GCM,data:yKOuO4XPNcPM14ZnKgLxpg==,iv:JJIkIHqDw7xessJZtwdxhp56UT9f7KtAHT1Hyi7Zd3A=,tag:PYMuH418TtcMuLBwrxDGsA==,type:str] -WORKFLOWS_PATH=ENC[AES256_GCM,data:tunk/tbrbvhMHONwRHV1A2l2da50L4n4CeJly3fz9g==,iv:WvxDglP2QRpgOl+RelF2a+JuPvjwusvet4xZs2s+tp8=,tag:CJuzy6A0vUbXd2e5+BXNEQ==,type:str] -ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:oHSBbRbq9tCueTxoEJiTmKVJoBIERiV75rsHhUts2Jo=,iv:qm7O2GJ3Rlp3LE1PdQXZPzlO0lropGEe5Wr+28F05Cs=,tag:5Ha/2D4G3NjiqkCE1NjrEw==,type:str] -NTFY_TOKEN=ENC[AES256_GCM,data:jIWqTX+iEzUvjJbSKJIZ7ZzZn0YbM7+4RU1W16D6j2U=,iv:R6be5ijLBLUNZjxHO2YkbgjjQDZdjD8gGRJedcFRI60=,tag:K2GL+O5TjdnIgC5l7pG0tg==,type:str] -SUPERVISOR_GIT_PULL=ENC[AES256_GCM,data:HQ==,iv:ZBSitlE8BIhM63+f3niABpM5kUmd07cg66icuVlGzc4=,tag:vxMBFQiuXmaKfeQtkx8oAQ==,type:str] -#ENC[AES256_GCM,data:5IbMzndnVQPyc8g=,iv:jrHRyj1c+AUJ5Jb8Omb3aliG0j0q850wIjA9tIqKbzI=,tag:n1if0sch0E1mBOp3qr2+zg==,type:comment] -PROXY_URLS=ENC[AES256_GCM,data:g8/iWdCPfTVt,iv:ZGvbYkfZk64Y2CC4vuGCj7TpRJGsSOP/psjz2pgKzow=,tag:GYvlKB1xn328bmpBN24W4g==,type:str] -EXTRACT_WORKERS=ENC[AES256_GCM,data:GQ==,iv:mUT35A3XBGaBox2PImgeWZyQx1AMQcPTnS4NJi1QnlI=,tag:dG/ZF2xlGsEJBBvfyG0hZg==,type:str] -RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:vi4=,iv:2oPRzyrFgIwOGjEH13P/7VQACA3xqDOz8+O7cDUnPwI=,tag:IFh7z+ZF+zjoADH0gFrPXg==,type:str] 
-#ENC[AES256_GCM,data:cG6Bl04wlNIwNTjP5TSykDI=,iv:qOrJNlej+elCvc7paLRL50opAD7zSeHYmIwAeFuH11s=,tag:P1dTLqa/DSMjIFvDTZZqhQ==,type:comment] -GSC_SERVICE_ACCOUNT_PATH=ENC[AES256_GCM,data:wqtE1aoqcK6v,iv:QejEYIbxDcYDiFC9Wdes81cYx2NL3b9o0v0XVRh7G1w=,tag:d3u6Rf8ptOhM/4pwxtg+6A==,type:str] -GSC_SITE_URL=ENC[AES256_GCM,data:25zMmaOduZznbTQwO29huAo7xFq3Ow==,iv:ahq19rDzdx5PB/5YyHxZc7EPAeHya7loe3cdGm5ot+M=,tag:aVmC7IKkqYo8lAH1T1XDQA==,type:str] -BING_WEBMASTER_API_KEY=ENC[AES256_GCM,data:pKZGODRwoo4D,iv:tPsrHU1B10lUkVJza2hub0lGAQ15xFZVePZ8RPI1XWo=,tag:zdWhYljkhnKAKRObOfII2A==,type:str] -BING_SITE_URL=ENC[AES256_GCM,data:VyIcIcxveu+dWz3zY6h/JVLjPw7ysw==,iv:Wj1TU7r9Izfyexp4WdoByRP+l01ZWml1mNgp6ys28EQ=,tag:fbC8Q7yFGeZBdiiDBADLwQ==,type:str] -#ENC[AES256_GCM,data:CDAjB0UL7OjgRPMmu97Z5HHjE4o4idn4Pb3N8/y8KHc=,iv:5ELBfYuFLZblCNMjPpZ10UxQqp9CzAIZQt7iSQwdR54=,tag:NguW5ISLMS5xpXSWfpJaIA==,type:comment] -GEONAMES_USERNAME=ENC[AES256_GCM,data:eaQPCcEreqBdHcw=,iv:CKD9cnL2BOn/yJM5EQs0Y044bAN3d4I6bRyTqhIQkns=,tag:82w/yCiGfKsV8zhpINL29g==,type:str] +#ENC[AES256_GCM,data:8qKvOA==,iv:Xci2F8lcBpT7dmhzaDe6sfrtQi+yQD7e2CQsYLAdCnY=,tag:3duziYwr7PoGQILUuY8nBA==,type:comment] +APP_NAME=ENC[AES256_GCM,data:ldJf4P0iD9ziMVg=,iv:hiVl2whhd02yZCafzBfbxX5/EU/suvzO4kSiWho2oUo=,tag:qzrr57sTPX8HPyDVwVL4sw==,type:str] +SECRET_KEY=ENC[AES256_GCM,data:Pll2sBGZsUJ0,iv:Dz+rq47dV3TmJXIQu+P+TmKXKFYsxbkY7/5js1cPrWA=,tag:IVAValYSELDRUMisbMwbAQ==,type:str] +BASE_URL=ENC[AES256_GCM,data:50k/RqlZ1EHqGM4UkSmTaCsuJgyU4w==,iv:f8zKr2jkts4RsawA97hzICHwj9Quzgp+Dw8AhQ7GSWA=,tag:9KhNvwmoOtDyuIql7okeew==,type:str] +DEBUG=ENC[AES256_GCM,data:O0/uRF4=,iv:cZ+vyUuXjQOYYRf4l8lWS3JIWqL/w3pnlCTDPAZpB1E=,tag:OmJE9oJpzYzth0xwaMqADQ==,type:str] +#ENC[AES256_GCM,data:xmJc6WTb3yumHzvLeA==,iv:9jKuYaDgm4zR/DTswIMwsajV0s5UTe+AOX4Sue0GPCs=,tag:b/7H9js1HmFYjuQE4zJz8w==,type:comment] +ADMIN_EMAILS=ENC[AES256_GCM,data:dtEDXPbN5Y5q,iv:k1GSkJh+L4kOM8V0cGYnz0/CsmvwdVRNHk0qpBulSS0=,tag:rUpVgROj2qD8a5IufnBrJw==,type:str] 
+#ENC[AES256_GCM,data:S7Pdg9tcom3N,iv:OjmYk3pqbZHKPS1Y06w1y8BE7CU0y6Vx2wnio9tEhus=,tag:YAOGbrHQ+UOcdSQFWdiCDA==,type:comment] +DATABASE_PATH=ENC[AES256_GCM,data:qxQs7dG0RWMA1rs=,iv:5ZUyk02hCPQESr2vFz3mfnUhUF74LbO6YK5+HFBbxUQ=,tag:daQxiWAhzCB2cScjzjYwaA==,type:str] +#ENC[AES256_GCM,data:aWgKm9Y=,iv:8iT6GHSzWhM+fRX9PIY9wAs7lXj/ADS6eZK9BBSEdaQ=,tag:aSLsj52ybnod7Qfmx9BLQA==,type:comment] +MAGIC_LINK_EXPIRY_MINUTES=ENC[AES256_GCM,data:YSE=,iv:GYm1EWku7+OG+fCIbUHWsfYbnEQVNhlBmWBC1OCV1NA=,tag:L2kdm7tMWOO/cf+VDd+OdQ==,type:str] +SESSION_LIFETIME_DAYS=ENC[AES256_GCM,data:9Og=,iv:3nStZVZVB24aAtNrtLXZ0oIehTDyu2IzdXoMH59t+3o=,tag:+FQ4n1XeSS12zUGXt/1RKQ==,type:str] +#ENC[AES256_GCM,data:mtqp/c5zZxlcB4HrOrfi,iv:eJaN+ZnAIaNHF5iovcz0QynILq9GjqVcwoyN2ZhLmpI=,tag:WyXU7ho5T/CE609id9dOzA==,type:comment] +RESEND_API_KEY=ENC[AES256_GCM,data:U5aEnItbJ/Af,iv:7BTFimeMbPtK6ANXMr7VwO5TJ7IaRk+HAOZy+TEXMVI=,tag:sDhW5icVloSck1iafu3H0A==,type:str] +EMAIL_FROM=ENC[AES256_GCM,data:BTGeWUjG9qCBvRQr9kK5sfdzQ1CfuNgpkU/AL3Qu6GJ2ng==,iv:0XjqD8hCqleSJR2FrDajlnUul8o4GkK0f1MOP96MRkw=,tag:0PwZwxuBbUFYdiRYTlDffg==,type:str] +LEADS_EMAIL=ENC[AES256_GCM,data:jkpWqodUgR2QoB96zvE5aH/tA9Sh0nPcl75P3i43ecFILw==,iv:vNtB/9gdrTDm6vNIjnH6JShYyqmG7h9jd2uzwFwjhO8=,tag:cG5T3CwQfZO/jTYFnwJSgA==,type:str] +RESEND_WEBHOOK_SECRET=ENC[AES256_GCM,data:EQpvkWFyt8H7,iv:6QiZIDo5Ps39vf9MKkiqSJir7BH9zhoLREJ425y3FIs=,tag:kjO4dczb2E5FKfO6OVaQvw==,type:str] +#ENC[AES256_GCM,data:HW8JOkd7Hw==,iv:Qfwm2ZHT8TKANrLrRQqHnceQVUTiuzT2hSjLN8hSq5Q=,tag:hvVLmGGUBRlsm2qy9jxIvA==,type:comment] +PADDLE_API_KEY=ENC[AES256_GCM,data:d3rKjWFrFepp,iv:TGjG9VTC4pZFgnp5daE+jBrRCUJddqgRaV7rQ61llhU=,tag:KKaYPfUgLC58zhC8s3B4cQ==,type:str] +PADDLE_CLIENT_TOKEN=ENC[AES256_GCM,data:JPmeLZx16WuV,iv:52EczBQM+fvEQuzoY8Aon0RBZiLzf1vrbT9Kx+b/WUE=,tag:+abUTzCgxulamobp13PbWQ==,type:str] +PADDLE_WEBHOOK_SECRET=ENC[AES256_GCM,data:fk2PbtpwoGRB,iv:QOhOd4rKmVjMA1EUQUjSj/y/OM7I435K/s4aqShjQNw=,tag:RIfbUCXAQGmCiE9FODHgpA==,type:str] 
+PADDLE_NOTIFICATION_SETTING_ID=ENC[AES256_GCM,data:igRsm8JOO1SP,iv:vQgOZcMHt6YoE+U2d6tT8sILOwsTx3glHVBBatR6Sk8=,tag:1tApDyZmZNiwd3bVm0uZGw==,type:str] +PADDLE_ENVIRONMENT=ENC[AES256_GCM,data:A1qXlv+9hjdIug==,iv:nu9kRQZgGLFXXT2I5GaRzp13YgQxU2ucr9azEA4XTUQ=,tag:RBxwE2j9v/RCiEMIa+6ICw==,type:str] +#ENC[AES256_GCM,data:F3dSfSGV,iv:Zjzmp9Vb+LBkqV6xBIMF2cK8ON9crH3fHcOog4+LOpo=,tag:7V8E9ChwYY9ceTaYdg3Lbw==,type:comment] +UMAMI_API_URL=ENC[AES256_GCM,data:4nJZc/opX4rsqAxO6XxD1Es5ySMh7nUtcGt6Kg==,iv:DcmhRe1IJKS0tOFgdJQQv2A1kO5K8VVT8aW0Vq5hVlY=,tag:Sglu4nnAiLIzr+ovJ/hEKQ==,type:str] +UMAMI_API_TOKEN=ENC[AES256_GCM,data:Xv1eTWtiJ6PL,iv:9sYsI2dJaQt6gpC/ev0b2dSk48PzuojTg18xXnBSWvk=,tag:DAMDHk0b9IG7T9MpkpzAkQ==,type:str] +#ENC[AES256_GCM,data:wAePRqqMZL2oCJB812A=,iv:jaLmjd0GW2dnEQ3KgWcvAs7Q7aDwlCexM9W7pH27kss=,tag:h7/yIdc13+3pmqyCc0OPkg==,type:comment] +RATE_LIMIT_REQUESTS=ENC[AES256_GCM,data:W3Nt,iv:ycMAxrPq44S6qezQIa50rc7GDplo1YvAO6VUERGQUxA=,tag:uzendLuSVbmSPcVPEgLiqQ==,type:str] +RATE_LIMIT_WINDOW=ENC[AES256_GCM,data:r8o=,iv:m5uKo3N8mb7FWI70SgaaHSyC3CNeD8XxjEx8ENit9uI=,tag:gKXEXsIwtBr3sm7xqLRHIw==,type:str] +#ENC[AES256_GCM,data:E6JgKjxuqFdPtVEv6Xiz1kqcT4ar,iv:hL7P7/X7nEqFwnlf72QEeHhViQ17HZbsCP/M4gcTJiA=,tag:FjCPSvrBboCWjfIS/fab0A==,type:comment] +LITESTREAM_R2_BUCKET=ENC[AES256_GCM,data:opg8kQY3PKnZ,iv:lPHUBDwHgBulOyt9WWgZhBQae8t2WKYvLHSFQrG3N/w=,tag:qtyIz4fbh40aLp7ZawBJiA==,type:str] +LITESTREAM_R2_ACCESS_KEY_ID=ENC[AES256_GCM,data:6jaEysPtRal7,iv:s5aLx7LdZ3ZLA9oL5vXXDfDDGI7gd5/CukNrMpPLJNk=,tag:Igp3bqW52raBfEeUaUvZ7A==,type:str] +LITESTREAM_R2_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:QfXhwh9L2rhr,iv:OaYlzTiu4onCNu5HfytYTCJa5p2QLShhO5j5Y038IOs=,tag:i13aQ2ICePyCU/Ob+EA7Nw==,type:str] +LITESTREAM_R2_ENDPOINT=ENC[AES256_GCM,data:hLneNsFmgQ6+,iv:RNefJ3QbviHPURxcK2xYJU7qWpMfWInCxYQ/4xDIwfw=,tag:FhMiHGrNcsXaSmdG4NXgfQ==,type:str] +#ENC[AES256_GCM,data:YGV2exKdGOUkblNZZos=,iv:NuabFM/gNHIzYmDMRZ2tglFYdMPVFuHFGd+AAWvvu6Q=,tag:gZRoNNEmjL9v3nC8j9YkHw==,type:comment] 
+DUCKDB_PATH=ENC[AES256_GCM,data:GgOEQ5B1KeQrVavhoMU/JGXcVu3H,iv:XY8JiaosxaUDv5PwizrZFWuNKMSOeuE3cfVyp51r++8=,tag:RnoDE5+7WQolFLejfRZ//w==,type:str] +SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:U2X9KmlgnWXM9uCfhHCJ03HMGCLm,iv:KHHdBTq+ct4AG7Jt4zLog/5jbDC7LvHA6KzWNTDS/Yw=,tag:m5uIG/bS4vaBooSYoYa6SA==,type:str] +LANDING_DIR=ENC[AES256_GCM,data:NkEmV8LOwEiN9Sal,iv:mQHBVT6lNoEEEVbl7a5bNN5qoF/LvTyWXQvvkv/z/B0=,tag:IgA5A1nfF91fOBdYxEN71g==,type:str] +#ENC[AES256_GCM,data:jvZYm7ceM4jtNRg=,iv:nuv65SDTZiaVukVZ40seBZevpqP8uiKCgJyQcIrY524=,tag:cq6gB3vmJzJWIXCLHaIc9g==,type:comment] +REPO_DIR=ENC[AES256_GCM,data:ae8i6PpGFaiYFA/gGIhczg==,iv:nmsIRMPJYocIO6Z2Gz4OIzAOvSpdgDYmUaIr2hInFo0=,tag:EmAYG5NujnHg8lPaO/uAnQ==,type:str] +WORKFLOWS_PATH=ENC[AES256_GCM,data:sGU4l68Pbb1thsPyG104mWXWD+zJGTIcR/TqVbPmew==,iv:+xhGkX+ep4kFEAU65ELdDrfjrl/WyuaOi35JI3OB/zM=,tag:brauZhFq8twPXmvhZKjhDQ==,type:str] +ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:4sXQk8zklruC525J279TUUatdDJQ43qweuoPhtpI82Y=,iv:1NT5IsslsZjo/0xU9OGFf717G56FnSkKSZ2L1+U3peU=,tag:bhZ67zlDiq7VaY47LFWOVw==,type:str] +NTFY_TOKEN=ENC[AES256_GCM,data:YlOxhsRJ8P1y4kk6ugWm41iyRCsM6oAWjvbU9lGcD0A=,iv:JZXOvi3wTOPV9A46c7fMiqbszNCvXkOgh9i/H1hob24=,tag:8xnPimgy7sesOAnxhaXmpg==,type:str] +SUPERVISOR_GIT_PULL=ENC[AES256_GCM,data:mg==,iv:KgqMVYj12FjOzWxtA1T0r0pqCDJ6MtHzMjE+4W/W+s4=,tag:czFaOqhHG8nqrQ8AZ8QiGw==,type:str] +#ENC[AES256_GCM,data:hzAZvCWc4RTk290=,iv:RsSI4OpAOQGcFVpfXDZ6t705yWmlO0JEWwWF5uQu9As=,tag:UPqFtA2tXiSa0vzJAv8qXg==,type:comment] +PROXY_URLS=ENC[AES256_GCM,data:L2Oobpi6Pq8m,iv:14mXi+8mLv2e20IKVL0VlxZiHW/1BmeQP4a6ns5930g=,tag:pVJasNjv6N/UApVm+KD+XA==,type:str] +RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:L2s=,iv:fV3mCKmK5fxUmIWRePELBDAPTb8JZqasVIhnAl55kYw=,tag:XL+PO6sblz/7WqHC3dtk1w==,type:str] +#ENC[AES256_GCM,data:RC+t2vqLwLjapdAUql8rQls=,iv:Kkiz3ND0g0MRAgcPJysIYMzSQS96Rq+3YP5yO7yWfIY=,tag:Y6TbZd81ihIwn+U515qd1g==,type:comment] 
+GSC_SERVICE_ACCOUNT_PATH=ENC[AES256_GCM,data:Vki6yHk+gd4n,iv:rxzKvwrGnAkLcpS41EZ097E87NrIpNZGFfl4iXFvr40=,tag:EZkBJpCq5rSpKYVC4H3JHQ==,type:str] +GSC_SITE_URL=ENC[AES256_GCM,data:K0i1xRym+laMP6kgOMEfUyoAn2eNgQ==,iv:kyb+grzFq1e5CG/0NJRO3LkSXexOuCK07uJYApAdWsA=,tag:faljHqYjGTgrR/Zbh27/Yw==,type:str] +BING_WEBMASTER_API_KEY=ENC[AES256_GCM,data:kSQxJOpsYCuJ,iv:Kc4jJpOd64PATeBjidNHTwBr/bNnCeqsTrUqAAYM5Vs=,tag:4jBxqgpyomzMLwiC9XpfVQ==,type:str] +BING_SITE_URL=ENC[AES256_GCM,data:M33VI97DyxH8gRR3ZUXoXg4QrEv5og==,iv:GxZtwfbBVihUbp6XNQKzAalhO1GfQF1l1j1MeEIBCFQ=,tag:9njlBp4v684PeFl3HebyIg==,type:str] +#ENC[AES256_GCM,data:OTUMKNkRW0zrupNppXthwE1oieILhNjM+cjx5hFn69g=,iv:48ID2qtSe9ggD2X+G/iUqp3v2uwEc7fZw8lxHIvVXmk=,tag:okBn0Npk1K9dDOFWA/AB1A==,type:comment] +GEONAMES_USERNAME=ENC[AES256_GCM,data:UXd/S2TzXPiGmLY=,iv:OMURM5E6SFEsaqroUlH76DEnr7C/ujNk9UQnbWT0hK4=,tag:VsjjS12QDbudiEhdAQ/OCQ==,type:str] CENSUS_API_KEY= -sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBWdVBTa1owM0JrbFJXcmg2\nZVNzRHJuK0MzY3gwbVdGZW1oWFh4VU84a3hjCmZWQnAybjVFSituRVE2eWt3QkpI\ndzAzMEpXeld6UEFraEZLUjJGSEordGMKLS0tIGtEbGd3ak82UnJiRjFDQXJvYkVO\nL0xYVW5Ya0U0QUYzckI2MWZyLzU0OUkKK7Q+mN6ew8pdpN7Z3zMQhWm/Lgkzu8Hi\n8i74oE6TfyKFQkhaCu4jOcBfYWTytMe38ZYLI0ApS5AeIsr/ZVtWGA==\n-----END AGE ENCRYPTED FILE-----\n +sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqck9GdHVkUmIzNnlvMW5k\nVkNtazZ0ZytzZ25vMU5SckdFLzcrTFNYOVZZCmNjbU9yV0lTRlB5cEpMVC81QTdu\nS2ZDc0ZkNnRBNFhFMEN1bjY3YVhwZEEKLS0tIGE5TEdYenVOV1IwcE0wYnlKNElF\ncXV1K0xuczZzZ3JnL1lrSC9QWHIwNGsKfW4ARke6Cj83BpQc8weayL3v8SVgQ+Fp\n99aVWp103O1fumksR1w4u0X7fSNRrgAmpY/yyZuEvsoIY8ELFVcqgQ==\n-----END AGE ENCRYPTED FILE-----\n sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a -sops_lastmodified=2026-02-24T15:04:59Z 
-sops_mac=ENC[AES256_GCM,data:BsSyxkjwN8SNSC8fE3iNZPYIAPgrnbMYLGN/waGNkzH11VfcGAw5vQaPR6Il1PBMrx8gJ9daxRbvTW/DkY+G8VTpF3HWB3IoJPRewTLNUdkDSCxwhGuzfTzT1f7FKeNNVxsCEhJZGMYbDiYzcnbU1owgcHivBfCYl0DF0VM8cS0=,iv:aV5Af5nrhaI9NE2ouGnr20s6mRD9VPHLNcdfola9Ybw=,tag:YD08kkuFLksllK4Q9cfYfQ==,type:str] +sops_lastmodified=2026-02-24T21:29:26Z +sops_mac=ENC[AES256_GCM,data:zYvusl8/pvL6FwXAtsKi4BhuiDt8KaZPNHXkw0ywIOgNFG5mvcQozcDj42+TIo+Yuum1o7WHqshKc70w0Mq4fskq3TsjVnjWgw7xYRr5s3ylN5ZknbbCoMP4cp6YrkNCe/8hR64miguYqqEQlf9NdgL52uamF5lV5irI/EtLouw=,iv:RcL2b8ccnMxKhXxAocTG9G6gv2BkTb++MUpkFK8MfbM=,tag:+0avRrQjNOHDUeAV1dLW3g==,type:str] sops_unencrypted_suffix=_unencrypted sops_version=3.12.1 diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py index a80636a..8feb5c4 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py @@ -10,7 +10,13 @@ API notes (discovered 2026-02): - `size=100` is the maximum effective page size - ~14K venues globally as of Feb 2026 -Rate: 1 req / 2 s (see docs/data-sources-inventory.md §1.2). +Parallel mode: when PROXY_URLS is set, fires batch_size = len(proxy_urls) +pages concurrently. Each page gets its own fresh session + proxy. Pages beyond +the last one return empty lists (safe — just triggers the done condition). +Without proxies, falls back to single-threaded with THROTTLE_SECONDS between +pages. + +Rate: 1 req / 2 s per IP (see docs/data-sources-inventory.md §1.2). 
Landing: {LANDING_DIR}/playtomic/{year}/{month}/tenants.json.gz """ @@ -18,11 +24,13 @@ Landing: {LANDING_DIR}/playtomic/{year}/{month}/tenants.json.gz import json import sqlite3 import time +from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path import niquests -from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging +from ._shared import HTTP_TIMEOUT_SECONDS, USER_AGENT, run_extractor, setup_logging +from .proxy import load_proxy_urls, make_round_robin_cycler from .utils import landing_path, write_gzip_atomic logger = setup_logging("padelnomics.extract.playtomic_tenants") @@ -35,6 +43,30 @@ PAGE_SIZE = 100 MAX_PAGES = 500 # safety bound — ~50K venues max, well above current ~14K +def _fetch_one_page(proxy_url: str | None, page: int) -> tuple[int, list[dict]]: + """Fetch a single page using a fresh session with the given proxy. + + Returns (page, tenants_list). Raises on HTTP error. + """ + s = niquests.Session() + s.headers["User-Agent"] = USER_AGENT + if proxy_url: + s.proxies = {"http": proxy_url, "https": proxy_url} + params = {"sport_ids": "PADEL", "size": PAGE_SIZE, "page": page} + resp = s.get(PLAYTOMIC_TENANTS_URL, params=params, timeout=HTTP_TIMEOUT_SECONDS) + resp.raise_for_status() + tenants = resp.json() + assert isinstance(tenants, list), f"Expected list from Playtomic API, got {type(tenants)}" + return (page, tenants) + + +def _fetch_pages_parallel(pages: list[int], next_proxy) -> list[tuple[int, list[dict]]]: + """Fetch multiple pages concurrently. 
Returns [(page_num, tenants_list), ...].""" + with ThreadPoolExecutor(max_workers=len(pages)) as pool: + futures = [pool.submit(_fetch_one_page, next_proxy(), p) for p in pages] + return [f.result() for f in as_completed(futures)] + + def extract( landing_dir: Path, year_month: str, @@ -46,43 +78,65 @@ def extract( dest_dir = landing_path(landing_dir, "playtomic", year, month) dest = dest_dir / "tenants.json.gz" + proxy_urls = load_proxy_urls() + next_proxy = make_round_robin_cycler(proxy_urls) if proxy_urls else None + batch_size = len(proxy_urls) if proxy_urls else 1 + + if next_proxy: + logger.info("Parallel mode: %d pages per batch (%d proxies)", batch_size, len(proxy_urls)) + else: + logger.info("Serial mode: 1 page at a time (no proxies)") + all_tenants: list[dict] = [] seen_ids: set[str] = set() + page = 0 + done = False - for page in range(MAX_PAGES): - params = { - "sport_ids": "PADEL", - "size": PAGE_SIZE, - "page": page, - } + while not done and page < MAX_PAGES: + batch_end = min(page + batch_size, MAX_PAGES) + pages_to_fetch = list(range(page, batch_end)) - logger.info("GET page=%d (total so far: %d)", page, len(all_tenants)) + if next_proxy and len(pages_to_fetch) > 1: + logger.info( + "Fetching pages %d-%d in parallel (%d workers, total so far: %d)", + page, batch_end - 1, len(pages_to_fetch), len(all_tenants), + ) + results = _fetch_pages_parallel(pages_to_fetch, next_proxy) + else: + # Serial: reuse the shared session, throttle between pages + page_num = pages_to_fetch[0] + logger.info("GET page=%d (total so far: %d)", page_num, len(all_tenants)) + params = {"sport_ids": "PADEL", "size": PAGE_SIZE, "page": page_num} + resp = session.get(PLAYTOMIC_TENANTS_URL, params=params, timeout=HTTP_TIMEOUT_SECONDS) + resp.raise_for_status() + tenants = resp.json() + assert isinstance(tenants, list), ( + f"Expected list from Playtomic API, got {type(tenants)}" + ) + results = [(page_num, tenants)] - resp = session.get(PLAYTOMIC_TENANTS_URL, params=params, 
timeout=HTTP_TIMEOUT_SECONDS) - resp.raise_for_status() + # Process pages in order so the done-detection on < PAGE_SIZE is deterministic + for p, tenants in sorted(results): + new_count = 0 + for tenant in tenants: + tid = tenant.get("tenant_id") or tenant.get("id") + if tid and tid not in seen_ids: + seen_ids.add(tid) + all_tenants.append(tenant) + new_count += 1 - tenants = resp.json() - assert isinstance(tenants, list), ( - f"Expected list from Playtomic API, got {type(tenants)}" - ) + logger.info( + "page=%d got=%d new=%d total=%d", p, len(tenants), new_count, len(all_tenants), + ) - new_count = 0 - for tenant in tenants: - tid = tenant.get("tenant_id") or tenant.get("id") - if tid and tid not in seen_ids: - seen_ids.add(tid) - all_tenants.append(tenant) - new_count += 1 + # Last page — fewer than PAGE_SIZE results means we've exhausted the list + if len(tenants) < PAGE_SIZE: + done = True + break - logger.info( - "page=%d got=%d new=%d total=%d", page, len(tenants), new_count, len(all_tenants) - ) - - # Last page — fewer than PAGE_SIZE results means we've exhausted the list - if len(tenants) < PAGE_SIZE: - break - - time.sleep(THROTTLE_SECONDS) + page = batch_end + if not next_proxy: + time.sleep(THROTTLE_SECONDS) payload = json.dumps({"tenants": all_tenants, "count": len(all_tenants)}).encode() bytes_written = write_gzip_atomic(dest, payload) From 1ef22770aa5637ee37c1524d86301183f4e54da8 Mon Sep 17 00:00:00 2001 From: Deeman Date: Tue, 24 Feb 2026 22:31:19 +0100 Subject: [PATCH 70/98] docs: update CHANGELOG for extraction performance improvements Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8184e00..3006338 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). queries, geometry columns). 
- **SOPS secrets** — `GEONAMES_USERNAME=padelnomics` and `CENSUS_API_KEY` added to both `.env.dev.sops` and `.env.prod.sops`. +- **Crash-safe partial JSONL** — `utils.load_partial_results()` and `flush_partial_batch()` + provide a generic opt-in mechanism for incremental progress flushing during long extractions. + Any extractor processing items one-by-one can flush every N records and resume from a + `.partial.jsonl` sidecar file after a crash. - **Methodology page updated** — `/en/market-score` now documents both scores with: Two Scores intro section, component cards for each score (4 Marktreife + 5 Marktpotenzial), score band interpretations, expanded FAQ (7 entries). Section headings use the padelnomics @@ -42,6 +46,19 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). First "padelnomics Market Score" mention in each article template now links to the methodology page (hub-and-spoke internal linking). +### Changed +- **`EXTRACT_WORKERS` env var removed** — worker count is now derived from `PROXY_URLS` length + (one worker per proxy). No proxies → single-threaded. No manual tuning needed. +- **Playtomic tenants extractor** — parallel batch page fetching when proxies are configured. + Each page in a batch fires concurrently using its own session + proxy. Expected speedup: + ~2.5 min → ~15 s with 10 Webshare datacenter proxies. +- **Playtomic availability extractor** — three performance changes: + 1. No per-request `time.sleep()` on success when a proxy is active (throttle only when + running direct). Retry/backoff sleeps for 429 and 5xx responses are unchanged. + 2. Worker count auto-detected from proxy count (drops `EXTRACT_WORKERS`). + 3. True crash resumption via `.partial.jsonl` sidecar: progress flushed every 50 venues, + resume skips already-fetched venues and merges prior results into the final file. 
+ ### Fixed - **`datetime.utcnow()` deprecation warnings** — replaced all 94 occurrences across 22 files (source + tests) with `utcnow()` / `utcnow_iso()` helpers From c84a5ffdd16ac7545df595db3424e6134f2e198d Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:31:14 +0100 Subject: [PATCH 71/98] =?UTF-8?q?feat(db):=20migration=200022=20=E2=80=94?= =?UTF-8?q?=20add=20response=20tracking=20to=20lead=5Fforwards?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds status_updated_at, supplier_note, and cta_token columns to the lead_forwards table. cta_token gets a unique partial index for fast one-click email CTA lookups. Co-Authored-By: Claude Sonnet 4.6 --- .../versions/0022_lead_forward_tracking.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 web/src/padelnomics/migrations/versions/0022_lead_forward_tracking.py diff --git a/web/src/padelnomics/migrations/versions/0022_lead_forward_tracking.py b/web/src/padelnomics/migrations/versions/0022_lead_forward_tracking.py new file mode 100644 index 0000000..1ac7abe --- /dev/null +++ b/web/src/padelnomics/migrations/versions/0022_lead_forward_tracking.py @@ -0,0 +1,12 @@ +"""Migration 0022: Add response tracking columns to lead_forwards.""" + +FORWARD_STATUSES = ["sent", "viewed", "contacted", "quoted", "won", "lost", "no_response"] + + +def up(conn) -> None: + conn.execute("ALTER TABLE lead_forwards ADD COLUMN status_updated_at TEXT") + conn.execute("ALTER TABLE lead_forwards ADD COLUMN supplier_note TEXT") + conn.execute("ALTER TABLE lead_forwards ADD COLUMN cta_token TEXT") + conn.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_lead_forwards_cta_token ON lead_forwards(cta_token) WHERE cta_token IS NOT NULL" + ) From 7af612504b97dcd79e5e9f85cca29e82beeb9e1c Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:31:23 +0100 Subject: [PATCH 72/98] feat(marketplace): lead matching notifications + weekly digest + CTA tracking - 
notify_matching_suppliers task: on lead verification, finds growth/pro suppliers whose service_area matches the lead country and sends an instant alert email (LIMIT 20 suppliers per lead) - send_weekly_lead_digest task: every Monday 08:00 UTC, sends paid suppliers a table of new matching leads from the past 7 days they haven't seen yet (LIMIT 5 per supplier) - One-click CTA token: forward emails now include a "Mark as contacted" footer link; clicking sets forward status to 'contacted' immediately - cta_token stored on lead_forwards after email send - Supplier lead_respond endpoint: HTMX status update for forwarded leads (sent / viewed / contacted / quoted / won / lost / no_response) - Supplier lead_cta_contacted endpoint: handles one-click email CTA, redirects to dashboard leads tab - leads/routes.py: enqueue notify_matching_suppliers on quote verification Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/leads/routes.py | 1 + web/src/padelnomics/suppliers/routes.py | 51 +++++++ web/src/padelnomics/worker.py | 176 +++++++++++++++++++++++- 3 files changed, 225 insertions(+), 3 deletions(-) diff --git a/web/src/padelnomics/leads/routes.py b/web/src/padelnomics/leads/routes.py index 265fc74..fb8cb04 100644 --- a/web/src/padelnomics/leads/routes.py +++ b/web/src/padelnomics/leads/routes.py @@ -556,6 +556,7 @@ async def verify_quote(): from ..worker import enqueue await enqueue("send_welcome", {"email": contact_email, "lang": g.get("lang", "en")}) + await enqueue("notify_matching_suppliers", {"lead_id": lead["id"], "lang": g.get("lang", "en")}) return await render_template( "quote_submitted.html", diff --git a/web/src/padelnomics/suppliers/routes.py b/web/src/padelnomics/suppliers/routes.py index 7846887..0ae31b2 100644 --- a/web/src/padelnomics/suppliers/routes.py +++ b/web/src/padelnomics/suppliers/routes.py @@ -646,6 +646,57 @@ async def unlock_lead(token: str): ) +FORWARD_STATUSES = ["sent", "viewed", "contacted", "quoted", "won", "lost", "no_response"] + + 
+@bp.route("/leads//respond", methods=["POST"]) +@_lead_tier_required +@csrf_protect +async def lead_respond(token: str): + """Update response status on a forwarded lead. HTMX or full-page.""" + supplier = g.supplier + form = await request.form + new_status = form.get("status", "") + note = form.get("note", "").strip() + + if new_status not in FORWARD_STATUSES: + return "Invalid status", 422 + + lead_row = await fetch_one( + "SELECT id FROM lead_requests WHERE token = ?", (token,) + ) + if not lead_row: + return "Lead not found", 404 + + from ..core import utcnow_iso + await execute( + """UPDATE lead_forwards + SET status = ?, supplier_note = ?, status_updated_at = ? + WHERE lead_id = ? AND supplier_id = ?""", + (new_status, note or None, utcnow_iso(), lead_row["id"], supplier["id"]), + ) + return "", 204 + + +@bp.route("/leads/cta/") +async def lead_cta_contacted(cta_token: str): + """One-click CTA from forward email: mark as contacted, redirect to dashboard.""" + row = await fetch_one( + "SELECT id, lead_id, supplier_id, status FROM lead_forwards WHERE cta_token = ?", + (cta_token,), + ) + if not row: + return redirect(url_for("suppliers.dashboard")) + + if row["status"] == "sent": + from ..core import utcnow_iso + await execute( + "UPDATE lead_forwards SET status = 'contacted', status_updated_at = ? WHERE id = ?", + (utcnow_iso(), row["id"]), + ) + return redirect(url_for("suppliers.dashboard") + "?tab=leads") + + # ============================================================================= # Supplier Dashboard # ============================================================================= diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index f30fd17..b543d10 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -5,6 +5,7 @@ Background task worker - SQLite-based queue (no Redis needed). 
import asyncio import json import logging +import secrets import traceback from datetime import datetime, timedelta @@ -498,6 +499,15 @@ async def handle_send_lead_forward_email(payload: dict) -> None: logger.warning("No email for supplier %s, skipping lead forward", supplier_id) return + # Generate one-click "I've contacted this lead" CTA token + cta_token = secrets.token_urlsafe(24) + cta_url = f"{config.BASE_URL}/suppliers/leads/cta/{cta_token}" + body += ( + f'

    ' + f'' + f'✓ Mark as contacted

    ' + ) + await send_email( to=to_email, subject=subject, @@ -506,11 +516,11 @@ async def handle_send_lead_forward_email(payload: dict) -> None: email_type="lead_forward", ) - # Update email_sent_at on lead_forward + # Update email_sent_at and store cta_token on lead_forward now = utcnow_iso() await execute( - "UPDATE lead_forwards SET email_sent_at = ? WHERE lead_id = ? AND supplier_id = ?", - (now, lead_id, supplier_id), + "UPDATE lead_forwards SET email_sent_at = ?, cta_token = ? WHERE lead_id = ? AND supplier_id = ?", + (now, cta_token, lead_id, supplier_id), ) @@ -550,6 +560,159 @@ async def handle_send_lead_matched_notification(payload: dict) -> None: ) +@task("notify_matching_suppliers") +async def handle_notify_matching_suppliers(payload: dict) -> None: + """Notify growth/pro suppliers whose service_area matches a newly verified lead.""" + lead_id = payload["lead_id"] + lang = payload.get("lang", "en") + + lead = await fetch_one( + "SELECT * FROM lead_requests WHERE id = ? AND status = 'new' AND verified_at IS NOT NULL", + (lead_id,), + ) + if not lead or not lead.get("country"): + return + + country = lead["country"] + heat = (lead["heat_score"] or "cool").upper() + + # Find matching suppliers: paid tier, have credits, service_area includes lead country + # service_area is comma-separated country codes (e.g. "DE,AT,CH") + matching = await fetch_all( + """SELECT id, name, contact_email, contact, tier + FROM suppliers + WHERE tier IN ('growth', 'pro') + AND credit_balance > 0 + AND (service_area = ? OR service_area LIKE ? OR service_area LIKE ? OR service_area LIKE ?) + LIMIT 20""", + (country, f"{country},%", f"%,{country}", f"%,{country},%"), + ) + if not matching: + return + + courts = lead["court_count"] or "?" + timeline = lead["timeline"] or "" + facility_type = lead["facility_type"] or "padel" + + for supplier in matching: + to_email = supplier.get("contact_email") or supplier.get("contact") or "" + if not to_email: + continue + + body = ( + f'

    ' + f'New [{heat}] lead in {country}

    ' + f'
    ' + f'

    A new project brief has been submitted that matches your service area.

    ' + f'' + f'' + f'' + f'' + f'' + f'' + f'' + f'' + f'' + f'
    Facility{facility_type}
    Courts{courts}
    Country{country}
    Timeline{timeline or "-"}
    ' + f'

    ' + f'Contact details are available after unlocking. Credits required: {lead.get("credit_cost", "?")}.

    ' + f'{_email_button(f"{config.BASE_URL}/suppliers/leads", "View lead feed")}' + ) + + await send_email( + to=to_email, + subject=f"[{heat}] New {facility_type} project in {country} — {courts} courts", + html=_email_wrap(body, lang, preheader=f"New matching lead in {country}"), + from_addr=EMAIL_ADDRESSES["leads"], + email_type="lead_match_notify", + ) + + +@task("send_weekly_lead_digest") +async def handle_send_weekly_lead_digest(payload: dict) -> None: + """Weekly digest to active suppliers: new matching leads in their area.""" + # Find paid suppliers with credits + active_suppliers = await fetch_all( + "SELECT id, name, service_area, contact_email, contact FROM suppliers WHERE tier IN ('growth','pro') AND credit_balance > 0" + ) + for supplier in active_suppliers: + to_email = supplier.get("contact_email") or supplier.get("contact") or "" + if not to_email: + continue + + service_area_raw = (supplier.get("service_area") or "").strip() + if not service_area_raw: + continue + countries = [c.strip() for c in service_area_raw.split(",") if c.strip()] + if not countries: + continue + + placeholders = ",".join("?" * len(countries)) + new_leads = await fetch_all( + f"""SELECT id, heat_score, country, court_count, facility_type, timeline, credit_cost, created_at + FROM lead_requests + WHERE lead_type = 'quote' AND status = 'new' AND verified_at IS NOT NULL + AND country IN ({placeholders}) + AND created_at >= datetime('now', '-7 days') + AND NOT EXISTS ( + SELECT 1 FROM lead_forwards WHERE lead_id = lead_requests.id AND supplier_id = ? 
+ ) + ORDER BY + CASE heat_score WHEN 'hot' THEN 0 WHEN 'warm' THEN 1 ELSE 2 END, + created_at DESC + LIMIT 5""", + tuple(countries) + (supplier["id"],), + ) + if not new_leads: + continue + + lead_rows_html = "" + for ld in new_leads: + heat = (ld["heat_score"] or "cool").upper() + heat_colors = {"HOT": "#DC2626", "WARM": "#EA580C", "COOL": "#2563EB"} + hc = heat_colors.get(heat, "#2563EB") + badge = ( + f'{heat}' + ) + lead_rows_html += ( + f'' + f'' + f'{badge} {ld["facility_type"] or "Padel"}, {ld["court_count"] or "?"} courts' + f'{ld["country"] or "-"}' + f'{ld["timeline"] or "-"}' + f'' + ) + + body = ( + f'

    ' + f'Your weekly lead digest — {len(new_leads)} new {"lead" if len(new_leads) == 1 else "leads"}

    ' + f'
    ' + f'

    New matching leads in your service area this week:

    ' + f'' + f'' + f'' + f'' + f'' + f'' + f'{lead_rows_html}' + f'
    ProjectCountryTimeline
    ' + f'{_email_button(f"{config.BASE_URL}/suppliers/leads", "Unlock leads →")}' + ) + + area_summary = ", ".join(countries[:3]) + if len(countries) > 3: + area_summary += f" +{len(countries) - 3}" + + await send_email( + to=to_email, + subject=f"{len(new_leads)} new padel {'lead' if len(new_leads) == 1 else 'leads'} in {area_summary}", + html=_email_wrap(body, "en", preheader=f"{len(new_leads)} new leads matching your service area"), + from_addr=EMAIL_ADDRESSES["leads"], + email_type="weekly_digest", + ) + + @task("send_supplier_enquiry_email") async def handle_send_supplier_enquiry_email(payload: dict) -> None: """Relay a directory enquiry form submission to the supplier's contact email.""" @@ -823,6 +986,7 @@ async def run_scheduler() -> None: last_credit_refill = None last_seo_sync_date = None + last_weekly_digest = None while True: try: @@ -850,6 +1014,12 @@ async def run_scheduler() -> None: last_seo_sync_date = today_date scheduler_logger.info("Queued SEO metric syncs for %s", today_date) + # Weekly lead digest — every Monday after 8am UTC + if today.weekday() == 0 and today.hour >= 8 and last_weekly_digest != today_date: + await enqueue("send_weekly_lead_digest", {}) + last_weekly_digest = today_date + scheduler_logger.info("Queued weekly lead digest for %s", today_date) + await asyncio.sleep(3600) # 1 hour except Exception as e: From 5867c611f8fffbdee738baac1b1254ee0d5e7ae7 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:31:44 +0100 Subject: [PATCH 73/98] feat(admin): marketplace dashboard + HTMX lead management improvements Admin marketplace (/admin/marketplace): - Lead funnel cards: total / verified-new / unlocked / won / conversion rate - Credit economy: issued / consumed / outstanding / 30-day burn - Supplier engagement: active count / avg unlocks / response rate - Feature flag toggles (lead_unlock, supplier_signup) with next= redirect - Live activity stream (HTMX partial): last 50 lead / unlock / credit events Admin leads list 
(/admin/leads): - Summary cards: total / new+unverified / hot pipeline credits / forward rate - Search filter (name, email, company) with HTMX live update - Period pills: Today / 7d / 30d / All - get_leads() now returns (rows, total_count); get_lead_stats() includes _total, _new_unverified, _hot_pipeline, _forward_rate Admin lead detail (/admin/leads/): - Inline HTMX status change returning updated status badge partial - Inline HTMX forward form returning updated forward history partial (replaces full-page reload on every status/forward action) - Forward history table shows supplier, status, credit_cost, sent_at Quote form extended with optional fields: - build_context, glass_type, lighting_type, location_status, financing_status, services_needed, additional_info (captured in lead detail view but not required for heat scoring) Sidebar nav: "Marketplace" tab added between Leads and Suppliers Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 276 ++++++++++++++++-- .../admin/templates/admin/base_admin.html | 6 +- .../admin/templates/admin/lead_detail.html | 168 +++++------ .../admin/templates/admin/lead_form.html | 116 ++++++-- .../admin/templates/admin/leads.html | 88 +++++- .../admin/templates/admin/marketplace.html | 142 +++++++++ .../admin/partials/lead_forward_history.html | 40 +++ .../admin/partials/lead_results.html | 72 ++++- .../admin/partials/lead_status_badge.html | 21 ++ .../admin/partials/marketplace_activity.html | 32 ++ 10 files changed, 806 insertions(+), 155 deletions(-) create mode 100644 web/src/padelnomics/admin/templates/admin/marketplace.html create mode 100644 web/src/padelnomics/admin/templates/admin/partials/lead_forward_history.html create mode 100644 web/src/padelnomics/admin/templates/admin/partials/lead_status_badge.html create mode 100644 web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 
464d983..64e103c 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -380,9 +380,10 @@ HEAT_OPTIONS = ["hot", "warm", "cool"] async def get_leads( status: str = None, heat: str = None, country: str = None, + search: str = None, days: int = None, page: int = 1, per_page: int = 50, -) -> list[dict]: - """Get leads with optional filters.""" +) -> tuple[list[dict], int]: + """Get leads with optional filters. Returns (leads, total_count).""" wheres = ["lead_type = 'quote'"] params: list = [] @@ -395,16 +396,27 @@ async def get_leads( if country: wheres.append("country = ?") params.append(country) + if search: + term = f"%{search}%" + wheres.append("(contact_name LIKE ? OR contact_email LIKE ? OR contact_company LIKE ?)") + params.extend([term, term, term]) + if days: + wheres.append("created_at >= datetime('now', ?)") + params.append(f"-{days} days") where = " AND ".join(wheres) - offset = (page - 1) * per_page - params.extend([per_page, offset]) + count_row = await fetch_one( + f"SELECT COUNT(*) as cnt FROM lead_requests WHERE {where}", tuple(params) + ) + total = count_row["cnt"] if count_row else 0 - return await fetch_all( + offset = (page - 1) * per_page + rows = await fetch_all( f"""SELECT * FROM lead_requests WHERE {where} ORDER BY created_at DESC LIMIT ? 
OFFSET ?""", - tuple(params), + tuple(params) + (per_page, offset), ) + return rows, total async def get_lead_detail(lead_id: int) -> dict | None: @@ -426,11 +438,32 @@ async def get_lead_detail(lead_id: int) -> dict | None: async def get_lead_stats() -> dict: - """Get lead conversion funnel counts.""" + """Get lead conversion funnel counts + summary card metrics.""" rows = await fetch_all( "SELECT status, COUNT(*) as cnt FROM lead_requests WHERE lead_type = 'quote' GROUP BY status" ) - return {r["status"]: r["cnt"] for r in rows} + by_status = {r["status"]: r["cnt"] for r in rows} + + # Summary card aggregates + agg = await fetch_one( + """SELECT + COUNT(*) as total, + SUM(CASE WHEN status IN ('new', 'pending_verification') THEN 1 ELSE 0 END) as new_unverified, + SUM(CASE WHEN heat_score = 'hot' AND status = 'new' THEN credit_cost ELSE 0 END) as hot_pipeline, + SUM(CASE WHEN status = 'forwarded' THEN 1 ELSE 0 END) as forwarded + FROM lead_requests WHERE lead_type = 'quote'""" + ) + total = agg["total"] or 0 + forwarded = agg["forwarded"] or 0 + forward_rate = round((forwarded / total) * 100) if total > 0 else 0 + + return { + **by_status, + "_total": total, + "_new_unverified": agg["new_unverified"] or 0, + "_hot_pipeline": agg["hot_pipeline"] or 0, + "_forward_rate": forward_rate, + } @bp.route("/leads") @@ -440,10 +473,15 @@ async def leads(): status = request.args.get("status", "") heat = request.args.get("heat", "") country = request.args.get("country", "") + search = request.args.get("search", "") + days_str = request.args.get("days", "") + days = int(days_str) if days_str.isdigit() else None page = max(1, int(request.args.get("page", "1") or "1")) + per_page = 50 - lead_list = await get_leads( - status=status or None, heat=heat or None, country=country or None, page=page, + lead_list, total = await get_leads( + status=status or None, heat=heat or None, country=country or None, + search=search or None, days=days, page=page, per_page=per_page, ) lead_stats = 
await get_lead_stats() @@ -461,7 +499,11 @@ async def leads(): current_status=status, current_heat=heat, current_country=country, + current_search=search, + current_days=days_str, page=page, + per_page=per_page, + total=total, ) @@ -472,12 +514,28 @@ async def lead_results(): status = request.args.get("status", "") heat = request.args.get("heat", "") country = request.args.get("country", "") + search = request.args.get("search", "") + days_str = request.args.get("days", "") + days = int(days_str) if days_str.isdigit() else None page = max(1, int(request.args.get("page", "1") or "1")) + per_page = 50 - lead_list = await get_leads( - status=status or None, heat=heat or None, country=country or None, page=page, + lead_list, total = await get_leads( + status=status or None, heat=heat or None, country=country or None, + search=search or None, days=days, page=page, per_page=per_page, + ) + return await render_template( + "admin/partials/lead_results.html", + leads=lead_list, + page=page, + per_page=per_page, + total=total, + current_status=status, + current_heat=heat, + current_country=country, + current_search=search, + current_days=days_str, ) - return await render_template("admin/partials/lead_results.html", leads=lead_list) @bp.route("/leads/") @@ -528,11 +586,18 @@ async def lead_new(): contact_name = form.get("contact_name", "").strip() contact_email = form.get("contact_email", "").strip() facility_type = form.get("facility_type", "indoor") + build_context = form.get("build_context", "") + glass_type = form.get("glass_type", "") + lighting_type = form.get("lighting_type", "") court_count = int(form.get("court_count", 6) or 6) country = form.get("country", "") city = form.get("city", "").strip() + location_status = form.get("location_status", "") timeline = form.get("timeline", "") budget_estimate = int(form.get("budget_estimate", 0) or 0) + financing_status = form.get("financing_status", "") + services_needed = form.get("services_needed", "").strip() + 
additional_info = form.get("additional_info", "").strip() stakeholder_type = form.get("stakeholder_type", "") heat_score = form.get("heat_score", "warm") status = form.get("status", "new") @@ -550,14 +615,18 @@ async def lead_new(): lead_id = await execute( """INSERT INTO lead_requests - (lead_type, facility_type, court_count, country, location, timeline, - budget_estimate, stakeholder_type, heat_score, status, + (lead_type, facility_type, build_context, glass_type, lighting_type, + court_count, country, location, location_status, timeline, + budget_estimate, financing_status, services_needed, additional_info, + stakeholder_type, heat_score, status, contact_name, contact_email, contact_phone, contact_company, credit_cost, verified_at, created_at) - VALUES ('quote', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", + VALUES ('quote', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", ( - facility_type, court_count, country, city, timeline, - budget_estimate, stakeholder_type, heat_score, status, + facility_type, build_context, glass_type, lighting_type, + court_count, country, city, location_status, timeline, + budget_estimate, financing_status, services_needed, additional_info, + stakeholder_type, heat_score, status, contact_name, contact_email, form.get("contact_phone", ""), form.get("contact_company", ""), credit_cost, verified_at, now, @@ -612,6 +681,174 @@ async def lead_forward(lead_id: int): return redirect(url_for("admin.lead_detail", lead_id=lead_id)) +@bp.route("/leads//status-htmx", methods=["POST"]) +@role_required("admin") +@csrf_protect +async def lead_status_htmx(lead_id: int): + """HTMX: Update lead status, return updated status badge partial.""" + form = await request.form + new_status = form.get("status", "") + if new_status not in LEAD_STATUSES: + return Response("Invalid status", status=422) + + await execute( + "UPDATE lead_requests SET status = ? 
WHERE id = ?", (new_status, lead_id) + ) + return await render_template( + "admin/partials/lead_status_badge.html", status=new_status, lead_id=lead_id, + ) + + +@bp.route("/leads//forward-htmx", methods=["POST"]) +@role_required("admin") +@csrf_protect +async def lead_forward_htmx(lead_id: int): + """HTMX: Forward lead to supplier, return updated forward history partial.""" + form = await request.form + supplier_id_str = form.get("supplier_id", "") + if not supplier_id_str.isdigit(): + return Response("Select a supplier.", status=422) + supplier_id = int(supplier_id_str) + + existing = await fetch_one( + "SELECT 1 FROM lead_forwards WHERE lead_id = ? AND supplier_id = ?", + (lead_id, supplier_id), + ) + if existing: + return Response("Already forwarded to this supplier.", status=422) + + now = utcnow_iso() + await execute( + """INSERT INTO lead_forwards (lead_id, supplier_id, credit_cost, status, created_at) + VALUES (?, ?, 0, 'sent', ?)""", + (lead_id, supplier_id, now), + ) + await execute( + "UPDATE lead_requests SET unlock_count = unlock_count + 1, status = 'forwarded' WHERE id = ?", + (lead_id,), + ) + from ..worker import enqueue + await enqueue("send_lead_forward_email", {"lead_id": lead_id, "supplier_id": supplier_id}) + + lead = await get_lead_detail(lead_id) + return await render_template( + "admin/partials/lead_forward_history.html", + forwards=lead["forwards"] if lead else [], + ) + + +@bp.route("/marketplace") +@role_required("admin") +async def marketplace_dashboard(): + """Marketplace health dashboard.""" + # Lead funnel + funnel = await fetch_one( + """SELECT + COUNT(*) as total, + SUM(CASE WHEN status = 'new' AND verified_at IS NOT NULL THEN 1 ELSE 0 END) as verified_new, + SUM(CASE WHEN status = 'forwarded' THEN 1 ELSE 0 END) as forwarded_count, + SUM(CASE WHEN status = 'closed_won' THEN 1 ELSE 0 END) as won_count + FROM lead_requests WHERE lead_type = 'quote'""" + ) + total = funnel["total"] or 0 + won = funnel["won_count"] or 0 + conversion_rate 
= round((won / total) * 100, 1) if total > 0 else 0 + unlocked_count = (await fetch_one( + "SELECT COUNT(DISTINCT lead_id) as cnt FROM lead_forwards" + ) or {}).get("cnt", 0) + + # Credit economy + credit_agg = await fetch_one( + """SELECT + SUM(CASE WHEN delta > 0 THEN delta ELSE 0 END) as total_issued, + SUM(CASE WHEN event_type = 'lead_unlock' THEN ABS(delta) ELSE 0 END) as total_consumed, + SUM(CASE WHEN event_type = 'lead_unlock' + AND created_at >= datetime('now', '-30 days') + THEN ABS(delta) ELSE 0 END) as monthly_burn + FROM credit_ledger""" + ) + outstanding = (await fetch_one( + "SELECT SUM(credit_balance) as bal FROM suppliers WHERE tier != 'free'" + ) or {}).get("bal", 0) or 0 + + # Supplier engagement + supplier_agg = await fetch_one( + """SELECT + COUNT(*) as active_count, + ROUND(AVG(unlock_count), 1) as avg_unlocks + FROM ( + SELECT s.id, COUNT(lf.id) as unlock_count + FROM suppliers s + LEFT JOIN lead_forwards lf ON lf.supplier_id = s.id + WHERE s.tier != 'free' AND s.credit_balance > 0 + GROUP BY s.id + )""" + ) + response_agg = await fetch_one( + """SELECT + COUNT(*) as total, + SUM(CASE WHEN status != 'sent' THEN 1 ELSE 0 END) as responded + FROM lead_forwards""" + ) + resp_total = (response_agg or {}).get("total", 0) or 0 + resp_responded = (response_agg or {}).get("responded", 0) or 0 + response_rate = round((resp_responded / resp_total) * 100) if resp_total > 0 else 0 + + # Feature flags + flags = await fetch_all( + "SELECT name, enabled FROM feature_flags WHERE name IN ('lead_unlock', 'supplier_signup')" + ) + flag_map = {f["name"]: bool(f["enabled"]) for f in flags} + + return await render_template( + "admin/marketplace.html", + funnel={ + "total": total, + "verified_new": funnel["verified_new"] or 0, + "unlocked": unlocked_count, + "won": won, + "conversion_rate": conversion_rate, + }, + credits={ + "issued": (credit_agg or {}).get("total_issued", 0) or 0, + "consumed": (credit_agg or {}).get("total_consumed", 0) or 0, + "outstanding": 
outstanding, + "monthly_burn": (credit_agg or {}).get("monthly_burn", 0) or 0, + }, + suppliers={ + "active": (supplier_agg or {}).get("active_count", 0) or 0, + "avg_unlocks": (supplier_agg or {}).get("avg_unlocks", 0) or 0, + "response_rate": response_rate, + }, + flags=flag_map, + ) + + +@bp.route("/marketplace/activity") +@role_required("admin") +async def marketplace_activity(): + """HTMX: Recent marketplace activity stream.""" + rows = await fetch_all( + """SELECT 'lead' as event_type, id as ref_id, + contact_name as actor, status as detail, + country as extra, created_at + FROM lead_requests WHERE lead_type = 'quote' + UNION ALL + SELECT 'unlock' as event_type, lf.id as ref_id, + s.name as actor, lf.status as detail, + CAST(lf.credit_cost AS TEXT) as extra, lf.created_at + FROM lead_forwards lf + JOIN suppliers s ON s.id = lf.supplier_id + UNION ALL + SELECT 'credit' as event_type, id as ref_id, + CAST(supplier_id AS TEXT) as actor, event_type as detail, + CAST(delta AS TEXT) as extra, created_at + FROM credit_ledger + ORDER BY created_at DESC LIMIT 50""" + ) + return await render_template("admin/partials/marketplace_activity.html", events=rows) + + # ============================================================================= # Supplier Management # ============================================================================= @@ -895,7 +1132,8 @@ async def flag_toggle(): ) state = "enabled" if new_enabled else "disabled" await flash(f"Flag '{flag_name}' {state}.", "success") - return redirect(url_for("admin.flags")) + next_url = form.get("next", "") or url_for("admin.flags") + return redirect(next_url) # ============================================================================= diff --git a/web/src/padelnomics/admin/templates/admin/base_admin.html b/web/src/padelnomics/admin/templates/admin/base_admin.html index 687335e..06883f3 100644 --- a/web/src/padelnomics/admin/templates/admin/base_admin.html +++ 
b/web/src/padelnomics/admin/templates/admin/base_admin.html @@ -63,7 +63,11 @@ Dashboard -
    Leads
    +
    Marketplace
    + + + Dashboard + Leads diff --git a/web/src/padelnomics/admin/templates/admin/lead_detail.html b/web/src/padelnomics/admin/templates/admin/lead_detail.html index b376dff..423a1fe 100644 --- a/web/src/padelnomics/admin/templates/admin/lead_detail.html +++ b/web/src/padelnomics/admin/templates/admin/lead_detail.html @@ -2,122 +2,126 @@ {% set admin_page = "leads" %} {% block title %}Lead #{{ lead.id }} - Admin - {{ config.APP_NAME }}{% endblock %} +{% block admin_head %} + +{% endblock %} + {% block admin_content %}
    ← All Leads -

    Lead #{{ lead.id }} - {% if lead.heat_score == 'hot' %}HOT - {% elif lead.heat_score == 'warm' %}WARM - {% else %}COOL{% endif %} -

    +
    +

    Lead #{{ lead.id }}

    + {{ (lead.heat_score or 'cool') | upper }} +
    +
    + +
    +
    + + {{ lead.status | replace('_', ' ') }} + +
    +
    + + + +
    - -
    - - - -

    Project Brief

    -
    -
    Facility
    -
    {{ lead.facility_type or '-' }}
    -
    Courts
    -
    {{ lead.court_count or '-' }}
    -
    Glass
    -
    {{ lead.glass_type or '-' }}
    -
    Lighting
    -
    {{ lead.lighting_type or '-' }}
    -
    Build Context
    -
    {{ lead.build_context or '-' }}
    -
    Location
    -
    {{ lead.location or '-' }}, {{ lead.country or '-' }}
    -
    Timeline
    -
    {{ lead.timeline or '-' }}
    -
    Phase
    -
    {{ lead.location_status or '-' }}
    -
    Budget
    -
    {{ lead.budget_estimate or '-' }}
    -
    Financing
    -
    {{ lead.financing_status or '-' }}
    -
    Services
    -
    {{ lead.services_needed or '-' }}
    -
    Additional Info
    -
    {{ lead.additional_info or '-' }}
    -
    Credit Cost
    -
    {{ lead.credit_cost or '-' }} credits
    +
    +
    Facility
    {{ lead.facility_type or '-' }}
    +
    Build Context
    {{ lead.build_context or '-' }}
    +
    Courts
    {{ lead.court_count or '-' }}
    +
    Glass
    {{ lead.glass_type or '-' }}
    +
    Lighting
    {{ lead.lighting_type or '-' }}
    +
    Location
    {{ lead.location or '-' }}, {{ lead.country or '-' }}
    +
    Phase
    {{ lead.location_status or '-' }}
    +
    Timeline
    {{ lead.timeline or '-' }}
    +
    Budget
    {% if lead.budget_estimate %}€{{ "{:,}".format(lead.budget_estimate | int) }}{% else %}-{% endif %}
    +
    Financing
    {{ lead.financing_status or '-' }}
    +
    Services
    {{ lead.services_needed or '-' }}
    +
    Additional Info
    {{ lead.additional_info or '-' }}
    +
    Credit Cost
    {{ lead.credit_cost or '-' }} credits
    - +

    Contact

    -
    Name
    -
    {{ lead.contact_name or '-' }}
    -
    Email
    -
    {{ lead.contact_email or '-' }}
    -
    Phone
    -
    {{ lead.contact_phone or '-' }}
    -
    Company
    -
    {{ lead.contact_company or '-' }}
    -
    Role
    -
    {{ lead.stakeholder_type or '-' }}
    -
    Created
    -
    {{ lead.created_at or '-' }}
    -
    Verified
    -
    {{ lead.verified_at or 'Not verified' }}
    +
    Name
    {{ lead.contact_name or '-' }}
    +
    Email
    {{ lead.contact_email or '-' }}
    +
    Phone
    {{ lead.contact_phone or '-' }}
    +
    Company
    {{ lead.contact_company or '-' }}
    +
    Role
    {{ lead.stakeholder_type or '-' }}
    +
    Created
    {{ lead.created_at or '-' }}
    +
    Verified
    {{ lead.verified_at or 'Not verified' }}
    - +

    Forward to Supplier

    -
    + - +
    - {% if lead.forwards %}
    -

    Forward History

    -
    - - - - - - {% for f in lead.forwards %} - - - - - - - {% endfor %} - -
    SupplierCreditsStatusSent
    {{ f.supplier_name }}{{ f.credit_cost }}{{ f.status }}{{ f.created_at[:16] if f.created_at else '-' }}
    +

    Forward History + ({{ lead.forwards | length }} total) +

    +
    +
    + {% include "admin/partials/lead_forward_history.html" with context %} +
    - {% endif %} {% endblock %} diff --git a/web/src/padelnomics/admin/templates/admin/lead_form.html b/web/src/padelnomics/admin/templates/admin/lead_form.html index 857b965..cc6a2e0 100644 --- a/web/src/padelnomics/admin/templates/admin/lead_form.html +++ b/web/src/padelnomics/admin/templates/admin/lead_form.html @@ -3,7 +3,7 @@ {% block title %}New Lead - Admin - {{ config.APP_NAME }}{% endblock %} {% block admin_content %} -
    +
    ← All Leads

    Create Lead

    @@ -21,8 +21,7 @@
    - -
    +
    @@ -31,6 +30,14 @@
    +
    + + +

    @@ -40,18 +47,44 @@
    +
    +
    + +
    +
    + +
    - - + + +
    +
    + +
    @@ -59,8 +92,8 @@
    @@ -72,41 +105,68 @@
    - - + + {% for v in ['secured','searching','evaluating'] %} + + {% endfor %}
    - - + {% for v, label in [('asap','ASAP'),('3-6mo','3–6 Months'),('6-12mo','6–12 Months'),('12+mo','12+ Months')] %} + + {% endfor %}
    +
    + + +
    +
    + + +
    +
    + +
    + + +
    + +
    + + +
    + +
    + +

    Classification

    +
    diff --git a/web/src/padelnomics/admin/templates/admin/leads.html b/web/src/padelnomics/admin/templates/admin/leads.html index 3a3b2ad..988b3a7 100644 --- a/web/src/padelnomics/admin/templates/admin/leads.html +++ b/web/src/padelnomics/admin/templates/admin/leads.html @@ -1,25 +1,68 @@ {% extends "admin/base_admin.html" %} {% set admin_page = "leads" %} -{% block title %}Lead Management - Admin - {{ config.APP_NAME }}{% endblock %} +{% block title %}Leads - Admin - {{ config.APP_NAME }}{% endblock %} + +{% block admin_head %} + +{% endblock %} {% block admin_content %} -
    +
    -

    Lead Management

    -

    - {{ leads | length }} leads shown - {% if lead_stats %} - · {{ lead_stats.get('new', 0) }} new - · {{ lead_stats.get('forwarded', 0) }} forwarded - {% endif %} -

    -
    -
    - + New Lead - Back to Dashboard +

    Leads

    +

    {{ total }} leads found

    + + New Lead
    + +
    +
    +

    Total Leads

    +

    {{ lead_stats._total }}

    +
    +
    +

    New / Unverified

    +

    {{ lead_stats._new_unverified }}

    +

    awaiting action

    +
    +
    +

    Hot Pipeline

    +

    {{ lead_stats._hot_pipeline }}

    +

    credits (hot leads)

    +
    +
    +

    Forward Rate

    +

    {{ lead_stats._forward_rate }}%

    +

    {{ lead_stats.get('forwarded', 0) }} forwarded

    +
    +
    +
    +
    + + +
    +
    +
    + +
    + {% for label, val in [('Today', '1'), ('7d', '7'), ('30d', '30'), ('All', '')] %} + + + {% endfor %} +
    +
    +
    +

    Marketplace

    +

    Lead funnel, credit economy, and supplier engagement

    +
    + All Leads +
    + + +

    Lead Funnel

    +
    +
    +

    Total Leads

    +

    {{ funnel.total }}

    +
    +
    +

    Verified New

    +

    {{ funnel.verified_new }}

    +

    ready to unlock

    +
    +
    +

    Unlocked

    +

    {{ funnel.unlocked }}

    +

    by suppliers

    +
    +
    +

    Conversion

    +

    {{ funnel.conversion_rate }}%

    +

    {{ funnel.won }} won

    +
    +
    + + +
    +
    +

    Credit Economy

    +
    +
    +

    Issued (all time)

    +

    {{ "{:,}".format(credits.issued | int) }}

    +
    +
    +

    Consumed (all time)

    +

    {{ "{:,}".format(credits.consumed | int) }}

    +
    +
    +

    Outstanding balance

    +

    {{ "{:,}".format(credits.outstanding | int) }}

    +
    +
    +

    Monthly burn (30d)

    +

    {{ "{:,}".format(credits.monthly_burn | int) }}

    +
    +
    +
    + +
    +

    Supplier Engagement

    +
    +
    +

    Active suppliers

    +

    {{ suppliers.active }}

    +

    growth/pro w/ credits

    +
    +
    +

    Avg unlocks / supplier

    +

    {{ suppliers.avg_unlocks }}

    +
    +
    +

    Response rate

    +

    {{ suppliers.response_rate }}%

    +

    replied or updated status

    +
    +
    +

    Leads pipeline

    +

    {{ funnel.verified_new }}

    +

    available to unlock

    +
    +
    +
    +
    + + +
    +

    Feature Flags

    +
    + {% for flag_name, flag_label in [('lead_unlock', 'Lead Unlock (self-service)'), ('supplier_signup', 'Supplier Signup')] %} + {% set is_on = flags.get(flag_name, false) %} + + + + + + + {% endfor %} + All flags → +
    +
    + + +
    +
    +

    Loading activity stream…

    +
    +
    +{% endblock %} diff --git a/web/src/padelnomics/admin/templates/admin/partials/lead_forward_history.html b/web/src/padelnomics/admin/templates/admin/partials/lead_forward_history.html new file mode 100644 index 0000000..e17be97 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/partials/lead_forward_history.html @@ -0,0 +1,40 @@ +{# HTMX swap target: forward history table after a successful forward #} +{% if forwards %} + + + + + + {% for f in forwards %} + + + + + + + + {% endfor %} + +
    SupplierCreditsStatusResponseSent
    {{ f.supplier_name }}{{ f.credit_cost }} + {% set fwd_status = f.status or 'sent' %} + {% if fwd_status == 'won' %} + Won + {% elif fwd_status == 'lost' %} + Lost + {% elif fwd_status in ('contacted', 'quoted') %} + {{ fwd_status | capitalize }} + {% else %} + {{ fwd_status | capitalize }} + {% endif %} + + {% if f.supplier_note %} + {{ f.supplier_note | truncate(40) }} + {% elif f.status_updated_at %} + {{ (f.status_updated_at or '')[:10] }} + {% else %} + + {% endif %} + {{ (f.created_at or '')[:16] }}
    +{% else %} +

    No forwards yet.

    +{% endif %} diff --git a/web/src/padelnomics/admin/templates/admin/partials/lead_results.html b/web/src/padelnomics/admin/templates/admin/partials/lead_results.html index cab3969..c3ccbd3 100644 --- a/web/src/padelnomics/admin/templates/admin/partials/lead_results.html +++ b/web/src/padelnomics/admin/templates/admin/partials/lead_results.html @@ -1,3 +1,28 @@ +{% set page = page | default(1) %} +{% set per_page = per_page | default(50) %} +{% set total = total | default(0) %} +{% set start = (page - 1) * per_page + 1 %} +{% set end = [page * per_page, total] | min %} +{% set has_prev = page > 1 %} +{% set has_next = (page * per_page) < total %} + +{% macro heat_badge(score) %} + {{ (score or 'cool') | upper }} +{% endmacro %} + +{% macro status_badge(status) %} + {{ status | replace('_', ' ') }} +{% endmacro %} + +{# Hidden inputs carry current filters for pagination hx-include #} + + {% if leads %}
    @@ -18,23 +43,15 @@ {% for lead in leads %} - + - - + + @@ -42,6 +59,39 @@
    #{{ lead.id }} - {% if lead.heat_score == 'hot' %} - HOT - {% elif lead.heat_score == 'warm' %} - WARM - {% else %} - COOL - {% endif %} - {{ heat_badge(lead.heat_score) }} {{ lead.contact_name or '-' }}
    {{ lead.contact_email or '-' }}
    {{ lead.country or '-' }} {{ lead.court_count or '-' }}{{ lead.budget_estimate or '-' }}{{ lead.status }}{% if lead.budget_estimate %}€{{ "{:,}".format(lead.budget_estimate | int) }}{% else %}-{% endif %}{{ status_badge(lead.status) }} {{ lead.unlock_count or 0 }} {{ lead.created_at[:10] if lead.created_at else '-' }}
    + + +{% if total > per_page %} +
    + Showing {{ start }}–{{ end }} of {{ total }} +
    + {% if has_prev %} + + {% endif %} + Page {{ page }} + {% if has_next %} + + {% endif %} +
    +
    +{% else %} +

    Showing all {{ total }} results

    +{% endif %} + {% else %}

    No leads match the current filters.

    diff --git a/web/src/padelnomics/admin/templates/admin/partials/lead_status_badge.html b/web/src/padelnomics/admin/templates/admin/partials/lead_status_badge.html new file mode 100644 index 0000000..58f8ca7 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/partials/lead_status_badge.html @@ -0,0 +1,21 @@ +{# HTMX swap target: returns updated status badge + inline form #} +
    +
    + + {{ status | replace('_', ' ') }} + + updated +
    +
    + + + +
    +
    diff --git a/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html b/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html new file mode 100644 index 0000000..09e7d06 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html @@ -0,0 +1,32 @@ +
    +

    Recent Activity

    + {% if events %} +
    + {% for ev in events %} +
    + +
    + {% if ev.event_type == 'lead' %} + New lead + {% if ev.actor %} from {{ ev.actor }}{% endif %} + {% if ev.extra %} — {{ ev.extra }}{% endif %} + {% if ev.detail %} ({{ ev.detail }}){% endif %} + {% elif ev.event_type == 'unlock' %} + {{ ev.actor }} unlocked a lead + {% if ev.extra %} — {{ ev.extra }} credits{% endif %} + {% if ev.detail and ev.detail != 'sent' %} ({{ ev.detail }}){% endif %} + {% elif ev.event_type == 'credit' %} + Credit event + {% if ev.extra and ev.extra | int > 0 %}+{{ ev.extra }} + {% elif ev.extra %}{{ ev.extra }}{% endif %} + {% if ev.detail %} ({{ ev.detail }}){% endif %} + {% endif %} +
    + {{ (ev.created_at or '')[:10] }} +
    + {% endfor %} +
    + {% else %} +

    No activity yet.

    + {% endif %} +
    From eca21dd147d9f3b59e768e0472087c6b00345e65 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:31:56 +0100 Subject: [PATCH 74/98] chore(secrets): update PROXY_URLS in dev sops (tiered proxy config) Co-Authored-By: Claude Sonnet 4.6 --- .env.dev.sops | 6 +++--- .env.prod.sops | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.env.dev.sops b/.env.dev.sops index aa5e8eb..8ceb15e 100644 --- a/.env.dev.sops +++ b/.env.dev.sops @@ -56,7 +56,7 @@ WORKFLOWS_PATH=ENC[AES256_GCM,data:PehxEUMb1K3F1557BY3IqKD7sbJcoaIjnQvboBRJ1g==, ALERT_WEBHOOK_URL= NTFY_TOKEN= #ENC[AES256_GCM,data:BCyQYjRnTx8yW9A=,iv:4OPCP+xzRLUJrpoFewVnbZRKnZH4sAbV76SM//2k5wU=,tag:HxwEp7VFVZUN/VjPiL/+Vw==,type:comment] -PROXY_URLS= +PROXY_URLS=ENC[AES256_GCM,data:CzRaK0piUQfvuYYsdz0i2MEQIphKi0BhNvHw9alo46aTH+kqEKvoS7dKEKzyU9VJ4TyNweInlVMxB962DsvRoBtnHwo/pUmYtVeEr2881clNgEiZVYRDFRdEbpULcLPDJa3ey1leqAAHlmiL0RQ6Qa57gPCOVBzVG6npGLKO+K8XVIb+BZMs9kEUOlw7iuqTJW5xPN/t4X/jHidEqfTSAl9b4vU4bsYVuY3yQrL+/V5QpTbyXlf+cMq3flpA3zE2Fxhalzg+c/wHMTrCksFwrCkrInW0kY9yPkA7usUWr1xwwaV3wIDoNQsLXpMd/3RztipNvKtOMRhRJOmjzP7BKhCJvvvKTV5p+mBCulFijbMQgArg3BqcFanfw3YZ4wPd4hp8q/vOhE/U9Wu0yrMmyWYFHYGQnFVARlBH7pwn/ez8W4KqRFveEAuev9CE7K7s5RqzPLelSkoa9UuiiULJ+t0LFgKlgxuLtQ8GdFdgsmBCxY/4U/xzvNdC82hD549z5nMWWlaUJm4onPWirT/RYm7j3v6z4mmNImI2W6rCNbvEvsXwWsciquVaBIgReA47p6/GTzZ9VZMyGr4PdzB87BJGAgX1W57WNdPAsRIF49XP2BU72RtRFxsUG8Ha2dc=,iv:a10Vpk7Zv8QqORuEcMlpcvtHO/zjBLaFphWPYBXwysc=,tag:8N66/R+CLqEZ45wj+tCt6w==,type:str] RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:YWM=,iv:iY5+uMazLAFdwyLT7Gr7MaF1QHBIgHuoi6nF2VbSsOA=,tag:dc6AmuJdTQ55gVe16uzs6A==,type:str] PROXY_URLS_FALLBACK= CIRCUIT_BREAKER_THRESHOLD= @@ -70,7 +70,7 @@ GEONAMES_USERNAME=ENC[AES256_GCM,data:aSkVdLNrhiF6tlg=,iv:eemFGwDIv3EG/P3lVHGZj9 CENSUS_API_KEY= sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED 
FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxNWNmUzVNUGdWRnE0ZFpF\nM0JQZWZ3UDdEVzlwTmIxakxOZXBkT2x2ZlNrClRtV2M3S2daSGxUZmFDSWQ2Nmh4\neU51QndFcUxlSE00RFovOVJTcDZmUUUKLS0tIDcvL3hRMDRoMWZZSXljNzA3WG5o\nMWFic21MV0krMzlIaldBTVU0ZDdlTE0K7euGQtA+9lHNws+x7TMCArZamm9att96\nL8cXoUDWe5fNI5+M1bXReqVfNwPTwZsV6j/+ZtYKybklIzWz02Ex4A==\n-----END AGE ENCRYPTED FILE-----\n sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a -sops_lastmodified=2026-02-24T21:27:19Z -sops_mac=ENC[AES256_GCM,data:KuL4wOGAEnMeXEDUKH7MXPhRFln4jTMKJAikTmkyYYxlFsxbTy3o+i5wwpfEZ7oqq/76v7XE2rhg9KMMLfnbZ2rLH9I/6kJRDtlZUUBCdKI6FCRnFbsgmzhuoXMHuFrj4B054u/C8QN2YwL7Mke+Gs9fglxvBrmhN58JAIOaxew=,iv:qu7rdFffw8IBHRP9a1tpPlRexg0b2f6lcpLu9AVbl5k=,tag:h7NbJ4bl/B8/CGVM/iW1Uw==,type:str] +sops_lastmodified=2026-02-24T21:48:18Z +sops_mac=ENC[AES256_GCM,data:RmSB5aS5Avl1jzeSmZPdDS6u+QPKDVD/1A55slXXdht96Knbh7IjaRsqggql9uixQO0/6WWkXsxhcKDWhsbYb0el2ATrLWXHaV6GQqfLq7RUynagcGTNHj8ipizQ93MqaDlXnI92ZOEHNcgvJzRuvRLJYhMErSyzwbUxtbaGMNM=,iv:o5wY+9uurzsTOMgmblGi0xcyYMsYGMfICmt4dSBlt2w=,tag:UKhqs3pedmvP/HjGJb0y4Q==,type:str] sops_unencrypted_suffix=_unencrypted sops_version=3.12.1 diff --git a/.env.prod.sops b/.env.prod.sops index b4bf3c4..1caa399 100644 --- a/.env.prod.sops +++ b/.env.prod.sops @@ -1,10 +1,10 @@ #ENC[AES256_GCM,data:8qKvOA==,iv:Xci2F8lcBpT7dmhzaDe6sfrtQi+yQD7e2CQsYLAdCnY=,tag:3duziYwr7PoGQILUuY8nBA==,type:comment] APP_NAME=ENC[AES256_GCM,data:ldJf4P0iD9ziMVg=,iv:hiVl2whhd02yZCafzBfbxX5/EU/suvzO4kSiWho2oUo=,tag:qzrr57sTPX8HPyDVwVL4sw==,type:str] -SECRET_KEY=ENC[AES256_GCM,data:Pll2sBGZsUJ0,iv:Dz+rq47dV3TmJXIQu+P+TmKXKFYsxbkY7/5js1cPrWA=,tag:IVAValYSELDRUMisbMwbAQ==,type:str] +SECRET_KEY=ENC[AES256_GCM,data:hmlXm7NKVVFmeea4DnlrH/oSnsoaMAkUz42oWwFXOXL1XwAh3iemIKHUQOV2G4SPlmjfmEVQD64xbxaJW0OcPQ/8KqhrRYDsy0F/u0h7nmNQdwJrcvzcmbvjgcwU5IITPIr23d/W5PeSJzxhB93uaJ0+zFN2CyHfeewrJKafPfw=,iv:e+ZSLUO+dlt+ET8r/0/pf74UtGIBMkaVoJMWlJn1W5U=,tag:LdDCCrHcJnKLkKL/cY/R/Q==,type:str] 
BASE_URL=ENC[AES256_GCM,data:50k/RqlZ1EHqGM4UkSmTaCsuJgyU4w==,iv:f8zKr2jkts4RsawA97hzICHwj9Quzgp+Dw8AhQ7GSWA=,tag:9KhNvwmoOtDyuIql7okeew==,type:str] DEBUG=ENC[AES256_GCM,data:O0/uRF4=,iv:cZ+vyUuXjQOYYRf4l8lWS3JIWqL/w3pnlCTDPAZpB1E=,tag:OmJE9oJpzYzth0xwaMqADQ==,type:str] #ENC[AES256_GCM,data:xmJc6WTb3yumHzvLeA==,iv:9jKuYaDgm4zR/DTswIMwsajV0s5UTe+AOX4Sue0GPCs=,tag:b/7H9js1HmFYjuQE4zJz8w==,type:comment] -ADMIN_EMAILS=ENC[AES256_GCM,data:dtEDXPbN5Y5q,iv:k1GSkJh+L4kOM8V0cGYnz0/CsmvwdVRNHk0qpBulSS0=,tag:rUpVgROj2qD8a5IufnBrJw==,type:str] +ADMIN_EMAILS=ENC[AES256_GCM,data:R/2YTk8KDEpNQ71RN8Fm6miLZvXNJQ==,iv:kzmiaBK7KvnSjR5gx6lp7zEMzs5xRul6LBhmLf48bCU=,tag:csVZ0W1TxBAoJacQurW9VQ==,type:str] #ENC[AES256_GCM,data:S7Pdg9tcom3N,iv:OjmYk3pqbZHKPS1Y06w1y8BE7CU0y6Vx2wnio9tEhus=,tag:YAOGbrHQ+UOcdSQFWdiCDA==,type:comment] DATABASE_PATH=ENC[AES256_GCM,data:qxQs7dG0RWMA1rs=,iv:5ZUyk02hCPQESr2vFz3mfnUhUF74LbO6YK5+HFBbxUQ=,tag:daQxiWAhzCB2cScjzjYwaA==,type:str] #ENC[AES256_GCM,data:aWgKm9Y=,iv:8iT6GHSzWhM+fRX9PIY9wAs7lXj/ADS6eZK9BBSEdaQ=,tag:aSLsj52ybnod7Qfmx9BLQA==,type:comment] @@ -55,7 +55,7 @@ GEONAMES_USERNAME=ENC[AES256_GCM,data:UXd/S2TzXPiGmLY=,iv:OMURM5E6SFEsaqroUlH76D CENSUS_API_KEY= sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqck9GdHVkUmIzNnlvMW5k\nVkNtazZ0ZytzZ25vMU5SckdFLzcrTFNYOVZZCmNjbU9yV0lTRlB5cEpMVC81QTdu\nS2ZDc0ZkNnRBNFhFMEN1bjY3YVhwZEEKLS0tIGE5TEdYenVOV1IwcE0wYnlKNElF\ncXV1K0xuczZzZ3JnL1lrSC9QWHIwNGsKfW4ARke6Cj83BpQc8weayL3v8SVgQ+Fp\n99aVWp103O1fumksR1w4u0X7fSNRrgAmpY/yyZuEvsoIY8ELFVcqgQ==\n-----END AGE ENCRYPTED FILE-----\n sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a -sops_lastmodified=2026-02-24T21:29:26Z 
-sops_mac=ENC[AES256_GCM,data:zYvusl8/pvL6FwXAtsKi4BhuiDt8KaZPNHXkw0ywIOgNFG5mvcQozcDj42+TIo+Yuum1o7WHqshKc70w0Mq4fskq3TsjVnjWgw7xYRr5s3ylN5ZknbbCoMP4cp6YrkNCe/8hR64miguYqqEQlf9NdgL52uamF5lV5irI/EtLouw=,iv:RcL2b8ccnMxKhXxAocTG9G6gv2BkTb++MUpkFK8MfbM=,tag:+0avRrQjNOHDUeAV1dLW3g==,type:str] +sops_lastmodified=2026-02-24T21:37:45Z +sops_mac=ENC[AES256_GCM,data:FdIU0UvGEc/P7ETNOxYHqfsGMNCdBVqbxHVIrR1v4hAnTWYHelawJqifQOOArTyNGjfsIRGajct7CLADkGE/qVm6vSQO4m6w+veSGEO39Wvlfz6BrVSYMqWMjGuJsTj/TJGSZDBnyC//Jzf3pTTgXrcjM86aoLbqhT/Qbb0JIiE=,iv:fgP4Ro0Cd6u1n9G07UsMkQNDk3fCQPe5hixA3KXhcAk=,tag:2PEKkltbD5TICzZ3WgvXQA==,type:str] sops_unencrypted_suffix=_unencrypted sops_version=3.12.1 From d15787caebe3491395bf929916eb5ee9c8de52f9 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:33:20 +0100 Subject: [PATCH 75/98] =?UTF-8?q?fix(extract):=20recheck=20window=2090?= =?UTF-8?q?=E2=86=9260=20min=20=E2=80=94=20matches=20hourly=20schedule=20a?= =?UTF-8?q?nd=20min=20slot=20duration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit With hourly rechecks and 60-min minimum slots, a 90-min window causes each slot to be queried twice. 60-min window = each slot caught exactly once in the recheck immediately before it starts. 
Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/playtomic_availability.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index 180bcba..def6d3c 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -45,7 +45,7 @@ AVAILABILITY_URL = "https://api.playtomic.io/v1/availability" THROTTLE_SECONDS = 1 MAX_VENUES_PER_RUN = 20_000 MAX_RETRIES_PER_VENUE = 2 -RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "90")) +RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "60")) CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10") # Parallel mode submits futures in batches so the circuit breaker can stop From be8872beb27652b73f47080c512c4e1356f6f847 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:35:12 +0100 Subject: [PATCH 76/98] revert: restore recheck window to 90 min MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Data analysis of 5,115 venues with slots shows 24.8% have a 90-min minimum slot duration. A 60-min window would miss those venues entirely with hourly rechecks. 90 min is correct — covers 30/60/90-min minimum venues. 
Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/playtomic_availability.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index def6d3c..180bcba 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -45,7 +45,7 @@ AVAILABILITY_URL = "https://api.playtomic.io/v1/availability" THROTTLE_SECONDS = 1 MAX_VENUES_PER_RUN = 20_000 MAX_RETRIES_PER_VENUE = 2 -RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "60")) +RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "90")) CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10") # Parallel mode submits futures in batches so the circuit breaker can stop From b7c856826568e60b8b2f33c38db0f9d999437e85 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:37:17 +0100 Subject: [PATCH 77/98] =?UTF-8?q?fix(extract):=20recheck=20window=2090?= =?UTF-8?q?=E2=86=9260=20min=20(correct=20reasoning=20this=20time)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 60-min window + hourly rechecks = each slot caught exactly once, 0-60 min before it starts. 90-min window causes double-querying (T-90 and T-30). Slot duration is irrelevant — it doesn't affect when the slot appears in the window. 
Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/playtomic_availability.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index 180bcba..def6d3c 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -45,7 +45,7 @@ AVAILABILITY_URL = "https://api.playtomic.io/v1/availability" THROTTLE_SECONDS = 1 MAX_VENUES_PER_RUN = 20_000 MAX_RETRIES_PER_VENUE = 2 -RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "90")) +RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "60")) CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10") # Parallel mode submits futures in batches so the circuit breaker can stop From 5ba4cabcd8e0cdb23280c4a1add9564db0d919d5 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:37:26 +0100 Subject: [PATCH 78/98] docs: update CHANGELOG and PROJECT.md for marketplace + lead forward tracking Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 16 ++++++++++++++++ PROJECT.md | 10 ++++------ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f1ae3b..6189c7e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
## [Unreleased] ### Added +- **Marketplace admin dashboard** (`/admin/marketplace`) — single-screen health view for the two-sided market: + - **Lead funnel** — total / verified-new (ready to unlock) / unlocked / won / conversion rate + - **Credit economy** — total credits issued, consumed (lead unlocks), outstanding balance across all paid suppliers, 30-day burn rate + - **Supplier engagement** — active paid supplier count, avg lead unlocks per supplier, forward response rate + - **Feature flag toggles** — `lead_unlock` and `supplier_signup` flags togglable inline; sidebar nav entry added + - **Live activity stream** (HTMX partial) — last 50 events across leads, unlocks, and credit ledger in a single feed +- **Lead matching notifications** (`notify_matching_suppliers` worker task) — on quote verification, finds growth/pro suppliers whose `service_area` includes the lead's country and sends an instant alert email; bounded to 20 suppliers per lead +- **Weekly lead digest** (`send_weekly_lead_digest` worker task) — every Monday at 08:00 UTC, sends paid suppliers a summary table of new matching leads from the past 7 days they haven't unlocked yet (max 5 rows per email) +- **One-click CTA token** — lead-forward emails now include a "Mark as contacted" footer link backed by a unique `cta_token`; clicking it sets the forward status to `contacted` and redirects to the supplier dashboard; token stored on `lead_forwards` after send +- **Supplier `lead_respond` endpoint** — HTMX status update for forwarded leads: `sent / viewed / contacted / quoted / won / lost / no_response` +- **Supplier `lead_cta_contacted` endpoint** (`/suppliers/leads/cta/`) — one-click email handler; idempotent (only advances from `sent` → `contacted`) +- **Migration 0022** — adds `status_updated_at`, `supplier_note`, `cta_token` to `lead_forwards`; unique partial index on `cta_token` +- **Admin leads list improvements** — summary cards (total / new+unverified / hot pipeline credits / forward rate); 
text search across name, email, company; period filter pills (Today / 7d / 30d / All); `get_leads()` now returns `(rows, total_count)` and supports `search` + `days` params +- **Admin lead detail — HTMX inline actions** — status change returns an updated status badge partial; forward-to-supplier form returns an updated forward history table; no full-page reload +- **Quote form extended** — captures `build_context`, `glass_type`, `lighting_type`, `location_status`, `financing_status`, `services_needed`, `additional_info`; displayed in lead detail view + - **pSEO Engine admin tab** (`/admin/pseo`) — operational visibility for the programmatic SEO system: - **Content gap detection** — queries DuckDB serving tables vs SQLite articles to find rows with no matching article per language; per-template HTMX-loaded gap list - **Data freshness signals** — compares `_serving_meta.json` export timestamp vs `MAX(updated_at)` in articles; per-template status: 🟢 Fresh / 🟡 Stale / 🟣 No articles / ⚫ No data diff --git a/PROJECT.md b/PROJECT.md index c1bf877..6e94289 100644 --- a/PROJECT.md +++ b/PROJECT.md @@ -1,7 +1,7 @@ # Padelnomics — Project Tracker > Move tasks across columns as you work. Add new tasks at the top of the relevant column. -> Last updated: 2026-02-24. +> Last updated: 2026-02-25. 
--- @@ -108,6 +108,9 @@ - [x] Lead funnel stats on admin dashboard - [x] Email hub (`/admin/emails`) — sent log, inbox, compose, audiences, delivery event tracking via Resend webhooks - [x] **pSEO Engine tab** (`/admin/pseo`) — content gap detection, data freshness signals, article health checks (hreflang orphans, missing build files, broken scenario refs), generation job monitoring with live progress bars +- [x] **Marketplace admin dashboard** (`/admin/marketplace`) — lead funnel, credit economy, supplier engagement, live activity stream, inline feature flag toggles +- [x] **Lead matching notifications** — `notify_matching_suppliers` task on quote verification + `send_weekly_lead_digest` every Monday; one-click CTA token in forward emails +- [x] **Migration 0022** — `status_updated_at`, `supplier_note`, `cta_token` on `lead_forwards`; supplier respond endpoint; inline HTMX lead detail actions; extended quote form fields ### SEO & Legal - [x] Sitemap (both language variants, `` on all entries) @@ -134,11 +137,6 @@ --- -## In Progress 🔄 - -- [ ] **Dual market score system** — Marktreife-Score + Marktpotenzial-Score + expanded data pipeline (merging to master) -- [ ] **pSEO Engine** — implemented (worktree `pseo-engine`), pending merge to master - --- ## Next Up 📋 From d834bdc59a959a254328bab47ceed901b3e3e87b Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:39:30 +0100 Subject: [PATCH 79/98] feat(extract): recheck every 30 min with 30-min window for accurate occupancy Each slot is now rechecked once, at most 30 min before it starts. Worst-case miss: a booking made 29 min before start. 
Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/playtomic_availability.py | 2 +- infra/supervisor/workflows.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index def6d3c..e665ccd 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -45,7 +45,7 @@ AVAILABILITY_URL = "https://api.playtomic.io/v1/availability" THROTTLE_SECONDS = 1 MAX_VENUES_PER_RUN = 20_000 MAX_RETRIES_PER_VENUE = 2 -RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "60")) +RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "30")) CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10") # Parallel mode submits futures in batches so the circuit breaker can stop diff --git a/infra/supervisor/workflows.toml b/infra/supervisor/workflows.toml index fc2e9da..4f91a6b 100644 --- a/infra/supervisor/workflows.toml +++ b/infra/supervisor/workflows.toml @@ -29,5 +29,5 @@ depends_on = ["playtomic_tenants"] [playtomic_recheck] module = "padelnomics_extract.playtomic_availability" entry = "main_recheck" -schedule = "0 6-23 * * *" +schedule = "0,30 6-23 * * *" depends_on = ["playtomic_availability"] From db146846674495a1c613a3022c86cd8d4f7ac5e1 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:41:54 +0100 Subject: [PATCH 80/98] docs: update USER_FLOWS.md for marketplace + lead response flows - Flow 11: note CTA token in forward email + matching notification tasks - Flow 12 (new): supplier lead_respond endpoint + one-click CTA token flow - Flow 13 (was 12): add Marketplace admin dashboard row, update Leads row with search/filter/HTMX inline actions, note HTMX partials Co-Authored-By: Claude Sonnet 4.6 --- 
docs/USER_FLOWS.md | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/docs/USER_FLOWS.md b/docs/USER_FLOWS.md index 3f30b3b..645334e 100644 --- a/docs/USER_FLOWS.md +++ b/docs/USER_FLOWS.md @@ -169,23 +169,40 @@ Same as Flow 2 but arrives at `//leads/quote` directly (no planner state). |------|-----|-------| | 1 | View teased lead | `GET //suppliers/dashboard/leads` — lead shown with blurred contact info | | 2 | Unlock | `POST //suppliers/leads//unlock` — deducts 1 credit, reveals full lead | -| 3 | Receive email | `send_lead_forward_email` task enqueued — full project brief sent to supplier | +| 3 | Receive email | `send_lead_forward_email` task enqueued — full project brief sent to supplier with one-click CTA link | | 4 | Entrepreneur notified | `send_lead_matched_notification` task — notifies entrepreneur a supplier was matched | **Auth required:** Yes — `@_lead_tier_required` **Credit check:** Server-side check; if 0 credits → redirect to boosts tab +**Matching notification:** On quote verification, `notify_matching_suppliers` task auto-notifies growth/pro suppliers whose `service_area` matches the lead's country (max 20 per lead); `send_weekly_lead_digest` sends a Monday 08:00 UTC summary of new matching leads to all paid suppliers --- -## 12. Admin Flows +## 12. 
Supplier → Update Lead Response Status + +**Entry:** Supplier dashboard leads tab, or one-click CTA link in forward email + +| Step | URL | Notes | +|------|-----|-------| +| 1a | Click "Mark as contacted" in email | `GET /suppliers/leads/cta/` — one-click; advances status `sent` → `contacted`; redirects to `/suppliers/dashboard?tab=leads` | +| 1b | Update via dashboard | `POST //suppliers/leads//respond` — HTMX; sets `status` and optional `supplier_note`; returns 204 | + +**Auth required:** CTA link is unauthenticated (token is the credential); dashboard endpoint requires `@_lead_tier_required` +**Valid statuses:** `sent / viewed / contacted / quoted / won / lost / no_response` +**Idempotency:** CTA only advances `sent → contacted`; subsequent clicks are no-ops + +--- + +## 13. Admin Flows **Entry:** `/admin/` (requires `@role_required("admin")`) | Area | URL | What you can do | |------|-----|-----------------| | Dashboard | `GET /admin/` | Stats overview | +| Marketplace | `GET /admin/marketplace` | Lead funnel, credit economy, supplier engagement, live activity stream, inline feature flag toggles | | Users | `GET /admin/users`, `/admin/users/` | List, view, impersonate | -| Leads | `GET /admin/leads`, `/admin/leads/` | List, filter, view detail, change status, forward to supplier, create | +| Leads | `GET /admin/leads`, `/admin/leads/` | List (search + period filter + summary cards), view detail, HTMX inline status change + forward to supplier | | Suppliers | `GET /admin/suppliers`, `/admin/suppliers/` | List, view, adjust credits, change tier, create | | Feedback | `GET /admin/feedback` | View all submitted feedback | | Email Sent Log | `GET /admin/emails`, `/admin/emails/` | List all outgoing emails (filter by type/event/search), detail with API-enriched HTML preview | @@ -197,6 +214,7 @@ Same as Flow 2 but arrives at `//leads/quote` directly (no planner state). 
| Articles | `GET /admin/articles` | CRUD, publish/unpublish, rebuild HTML | | Task Queue | `GET /admin/tasks` | View worker tasks, retry/delete failed | +**HTMX partials:** `lead_status_badge.html` (status change), `lead_forward_history.html` (forward history), `marketplace_activity.html` (activity stream) **Dev shortcut:** `/auth/dev-login?email=` where email is in `config.ADMIN_EMAILS` --- From 607dc35a9d4a47dc511052049a18f49c197a7486 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:44:33 +0100 Subject: [PATCH 81/98] =?UTF-8?q?docs:=20add=20ADMIN.md=20=E2=80=94=20comp?= =?UTF-8?q?rehensive=20admin=20panel=20guide?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Covers all 10 admin sections: Dashboard, Marketplace (new), Leads, Suppliers, Flags, Feedback, Emails (sent log, inbox, compose, audiences), pSEO Engine, SEO Hub, CMS (Templates, Scenarios, Articles), Tasks, Users. Co-Authored-By: Claude Sonnet 4.6 --- docs/ADMIN.md | 208 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 docs/ADMIN.md diff --git a/docs/ADMIN.md b/docs/ADMIN.md new file mode 100644 index 0000000..f4f43e1 --- /dev/null +++ b/docs/ADMIN.md @@ -0,0 +1,208 @@ +# Admin Panel Guide + +The admin panel lives at `/admin/` and is restricted to users whose email is in `ADMIN_EMAILS`. +Dev shortcut: `GET /auth/dev-login?email=` (DEBUG mode only). + +Sidebar navigation (left to right in layout): +**Dashboard → Marketplace → Leads → Suppliers → Flags → Feedback → Emails → pSEO → SEO** + +CMS sections (Templates, Scenarios, Articles) are linked from the sidebar too. + +--- + +## Dashboard `/admin/` + +Quick-glance overview: user count, lead funnel summary, recent tasks, and credit economy totals. Entry point for everything else. + +--- + +## Marketplace `/admin/marketplace` + +Single-screen health view for the two-sided market. 
Useful for daily ops check and spotting stalled leads or low supplier engagement. + +**Lead Funnel** (top cards) +| Card | Meaning | +|------|---------| +| Total Leads | All quote-type leads ever submitted | +| Verified New | Verified leads with status `new` — ready to be unlocked by a supplier | +| Unlocked | Distinct leads that have at least one `lead_forward` record | +| Won | Leads with status `closed_won` | +| Conversion Rate | Won ÷ Total | + +**Credit Economy** +| Card | Meaning | +|------|---------| +| Issued | Sum of all positive credit ledger entries (purchases + refills) | +| Consumed | Credits spent on lead unlocks (absolute value of `lead_unlock` entries) | +| Outstanding | Current `credit_balance` sum across all paid-tier suppliers | +| 30-day Burn | Credits consumed in the last 30 days | + +**Supplier Engagement** +| Card | Meaning | +|------|---------| +| Active | Paid-tier suppliers with `credit_balance > 0` | +| Avg Unlocks | Average lead forwards per active supplier | +| Response Rate | Forwards where `status != 'sent'` ÷ total forwards | + +**Feature Flags** — `lead_unlock` and `supplier_signup` toggles are inline on this page. Clicking saves immediately and refreshes (no full-page reload). Also accessible at `/admin/flags`. + +**Activity Stream** — HTMX partial loaded on page open, showing the last 50 events across three tables: new leads, lead unlocks, credit ledger entries. Three dot colours: blue = lead created, green = unlock, amber = credit event. + +--- + +## Leads `/admin/leads` + +**List view** (`/admin/leads`) + +Summary cards at top: total leads / new+unverified / hot pipeline credits / forward rate. 
+ +Filters (HTMX live — updates table without page reload): +- **Search** — matches `contact_name`, `contact_email`, `contact_company` +- **Status** — new / pending_verification / contacted / forwarded / closed_won / closed_lost +- **Heat** — hot / warm / cool +- **Country** — ISO country code +- **Period** pills — Today / 7d / 30d / All + +**Detail view** (`/admin/leads/`) + +Full lead record including all extended quote fields (build context, glass type, lighting, location status, financing, services needed, notes). + +Inline HTMX actions (no full-page reload): +- **Status change** — dropdown + save button → swaps the status badge in place +- **Forward to supplier** — select supplier + send → appends row to forward history table + +Forward history table shows: supplier name, current forward status, credit cost, sent timestamp. + +**Create lead** (`/admin/leads/new`) — manual lead entry for phone/offline enquiries. + +**Heat scoring** — set automatically on submission: hot = 35 credits (≥6 courts + confirmed budget + short timeline), warm = 20, cool = 8. + +--- + +## Suppliers `/admin/suppliers` + +**List view** — search by name/email, filter by tier (free/basic/growth/pro) and country. Table shows tier badge, credit balance, and listing status. + +**Detail view** (`/admin/suppliers/`) +- Adjust credit balance (add or subtract, reason logged to `credit_ledger`) +- Change subscription tier +- View all lead forwards for this supplier with forward statuses +- Impersonate supplier (enter their session to debug dashboard issues) + +**Create supplier** (`/admin/suppliers/new`) — manual supplier onboarding. + +--- + +## Feature Flags `/admin/flags` + +Toggle on/off without redeploy. 
Current flags: + +| Flag | Controls | +|------|---------| +| `markets` | Market score pages visible to public | +| `payments` | Paddle checkout enabled | +| `planner_export` | PDF export tab visible in planner | +| `supplier_signup` | Supplier signup wizard accessible | +| `lead_unlock` | Suppliers can unlock leads (spend credits) | + +Flags can also be toggled inline on the Marketplace dashboard for the two most-used flags. + +--- + +## Feedback `/admin/feedback` + +All submissions from the on-page feedback widget (thumbs up/down + optional text). Filterable by page and rating. Rate-limited to 1 per IP per page per hour. + +--- + +## Emails + +### Sent Log `/admin/emails` + +Every outgoing email recorded in `email_sent`. Filter by type (magic_link, lead_forward, weekly_digest, etc.), delivery event (delivered/bounced/opened/clicked), or free-text search on recipient/subject. + +Clicking a row opens the **detail view** — shows metadata, Resend delivery event timeline, and a sandboxed HTML preview of the message body fetched live from Resend API. + +### Inbox `/admin/emails/inbox` + +Inbound emails received via Resend inbound routing. Unread count shown as a badge in the sidebar. Detail view renders the HTML body in a sandboxed iframe with an inline reply form. + +### Compose `/admin/emails/compose` + +Send one-off transactional or plain emails. Select from-address (leads@, hello@, etc.) and optionally wrap in the branded email shell. + +### Audiences `/admin/emails/audiences` + +Lists all Resend audiences (waitlist, planner nurture, etc.) with contact counts. Drill into an audience to view contacts and remove individuals. + +--- + +## pSEO Engine `/admin/pseo` + +Operational visibility for the programmatic SEO content pipeline. Four sub-tabs: + +**Content Gaps** — for each template, shows DuckDB serving rows that have no matching article in the requested language. Use this to prioritise what to generate next. 
+ +**Health Checks** — per-article sanity checks: +- hreflang orphans (EN article exists, DE missing) +- missing HTML build files on disk +- broken `[scenario:slug]` references in article markdown + +**Freshness** — compares `_serving_meta.json` export timestamp vs `MAX(updated_at)` across articles per template. Status: 🟢 Fresh / 🟡 Stale / 🟣 No articles / ⚫ No data. + +**Jobs** — live generation job monitor. Progress bars poll every 2s while jobs run. Error drilldown via `
    ` on failed jobs. + +Generation is triggered from the **Templates** section (see below) — pSEO Engine is read-only observability. + +--- + +## SEO Hub `/admin/seo` + +Aggregated SEO metrics from Google Search Console, Bing Webmaster Tools, and Umami. Three tabs: + +- **Search** — keyword performance (impressions, clicks, CTR, position) synced daily +- **Funnel** — Umami pageview → planner → quote conversion funnel +- **Scorecard** — per-article GSC impressions/clicks overlay on article metadata + +**Sync** button triggers an immediate background sync of all configured sources (otherwise syncs daily via scheduler). + +Requires `GSC_CLIENT_SECRETS_JSON`, `GSC_PROPERTY_URL`, `BING_API_KEY`, and `UMAMI_*` env vars. + +--- + +## CMS + +### Templates `/admin/templates` + +Each template = a Jinja2 Markdown template file + a DuckDB data source query. Templates produce articles at scale. + +Actions per template: +- **Edit** — modify template body, data query, or metadata +- **Preview** — render a single row through the template without saving +- **Generate** — bulk generate articles for all (or specific) data rows; runs as background task; progress visible in pSEO Engine → Jobs + +### Scenarios `/admin/scenarios` + +Public scenario cards shown on the landing page (e.g. "6-court indoor club in Munich"). Each has a name, description, financial state blob (pre-fills the planner), and a live PDF preview. + +### Articles `/admin/articles` + +Generated article records. Filter by template, language, country, published status. + +Per-article actions: **Edit** markdown inline, **Publish/Unpublish** (HTMX, no page reload), **Rebuild HTML** (re-runs Markdown → HTML without re-generating content), **Delete**. + +**Rebuild All** button at top re-processes every published article's Markdown into HTML — use after template or CSS changes. + +--- + +## Tasks `/admin/tasks` + +Worker queue state. Shows pending / running / failed tasks with payload and error log. 
Actions: **Retry** (re-enqueues), **Delete** (removes from queue). + +Failed tasks do not auto-retry — manual retry is intentional so you can inspect the error first. + +--- + +## Users `/admin/users` + +List all users (search by email/name). Detail view shows role, subscription state, scenarios, and recent activity. **Impersonate** button logs you in as that user — "Stop impersonating" in the top bar returns you to your admin session. From 3c0f57c0fd44d6596c5da36cfad2821acd032d57 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:53:19 +0100 Subject: [PATCH 82/98] feat(leads): 2-hour admin review window before leads appear in supplier feed New visible_from column on lead_requests set to NOW + 2h on both the direct insert (logged-in user) and the email verification update. Supplier feed, notify_matching_suppliers, and send_weekly_lead_digest all filter on visible_from <= datetime('now'), so no lead surfaces to suppliers before the window expires. Migration 0023 adds the column and backfills existing verified leads with created_at so they remain immediately visible. 
Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/leads/routes.py | 6 +++--- .../migrations/versions/0023_lead_visible_from.py | 9 +++++++++ web/src/padelnomics/suppliers/routes.py | 2 +- web/src/padelnomics/worker.py | 3 ++- 4 files changed, 15 insertions(+), 5 deletions(-) create mode 100644 web/src/padelnomics/migrations/versions/0023_lead_visible_from.py diff --git a/web/src/padelnomics/leads/routes.py b/web/src/padelnomics/leads/routes.py index fb8cb04..c65d568 100644 --- a/web/src/padelnomics/leads/routes.py +++ b/web/src/padelnomics/leads/routes.py @@ -346,8 +346,8 @@ async def quote_request(): previous_supplier_contact, services_needed, additional_info, contact_name, contact_email, contact_phone, contact_company, stakeholder_type, - heat_score, status, credit_cost, token, created_at) - VALUES (?, 'quote', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", + heat_score, status, credit_cost, token, created_at, visible_from) + VALUES (?, 'quote', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now', '+2 hours'))""", ( user_id, form.get("court_count", 0), @@ -522,7 +522,7 @@ async def verify_quote(): credit_cost = compute_credit_cost(dict(lead)) now = utcnow_iso() await execute( - "UPDATE lead_requests SET status = 'new', verified_at = ?, credit_cost = ? 
WHERE id = ?", + "UPDATE lead_requests SET status = 'new', verified_at = ?, credit_cost = ?, visible_from = datetime('now', '+2 hours') WHERE id = ?", (now, credit_cost, lead["id"]), ) diff --git a/web/src/padelnomics/migrations/versions/0023_lead_visible_from.py b/web/src/padelnomics/migrations/versions/0023_lead_visible_from.py new file mode 100644 index 0000000..96fe2b3 --- /dev/null +++ b/web/src/padelnomics/migrations/versions/0023_lead_visible_from.py @@ -0,0 +1,9 @@ +"""Migration 0023: Add visible_from to lead_requests for 2-hour admin review window.""" + + +def up(conn) -> None: + conn.execute("ALTER TABLE lead_requests ADD COLUMN visible_from TEXT") + # Backfill: existing verified leads are already past review — make them visible immediately + conn.execute( + "UPDATE lead_requests SET visible_from = created_at WHERE status = 'new' AND verified_at IS NOT NULL" + ) diff --git a/web/src/padelnomics/suppliers/routes.py b/web/src/padelnomics/suppliers/routes.py index 0ae31b2..f38c442 100644 --- a/web/src/padelnomics/suppliers/routes.py +++ b/web/src/padelnomics/suppliers/routes.py @@ -513,7 +513,7 @@ async def signup_success(): async def _get_lead_feed_data(supplier, country="", heat="", timeline="", q="", limit=50): """Shared query for lead feed — used by standalone and dashboard.""" - wheres = ["lr.lead_type = 'quote'", "lr.status = 'new'", "lr.verified_at IS NOT NULL"] + wheres = ["lr.lead_type = 'quote'", "lr.status = 'new'", "lr.verified_at IS NOT NULL", "lr.visible_from <= datetime('now')"] params: list = [] if country: diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index b543d10..387987a 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -567,7 +567,7 @@ async def handle_notify_matching_suppliers(payload: dict) -> None: lang = payload.get("lang", "en") lead = await fetch_one( - "SELECT * FROM lead_requests WHERE id = ? 
AND status = 'new' AND verified_at IS NOT NULL", + "SELECT * FROM lead_requests WHERE id = ? AND status = 'new' AND verified_at IS NOT NULL AND visible_from <= datetime('now')", (lead_id,), ) if not lead or not lead.get("country"): @@ -652,6 +652,7 @@ async def handle_send_weekly_lead_digest(payload: dict) -> None: f"""SELECT id, heat_score, country, court_count, facility_type, timeline, credit_cost, created_at FROM lead_requests WHERE lead_type = 'quote' AND status = 'new' AND verified_at IS NOT NULL + AND visible_from <= datetime('now') AND country IN ({placeholders}) AND created_at >= datetime('now', '-7 days') AND NOT EXISTS ( From 55f179ba54f79c3cbcb786334dd00b1b50e4e9bc Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 09:56:05 +0100 Subject: [PATCH 83/98] fix(transform): increase geonames object size limit and remove stale column ref MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - stg_population_geonames: add maximum_object_size=40MB to read_json() call; geonames cities_global.json.gz is ~30MB, exceeding DuckDB's 16MB default - dim_locations: remove stale 'population_year AS population_year' column ref; stg_population_geonames has ref_year, not population_year — caused BinderException Co-Authored-By: Claude Sonnet 4.6 --- .../sqlmesh_padelnomics/models/foundation/dim_locations.sql | 1 - .../models/staging/stg_population_geonames.sql | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/transform/sqlmesh_padelnomics/models/foundation/dim_locations.sql b/transform/sqlmesh_padelnomics/models/foundation/dim_locations.sql index c6177d3..13b4a0c 100644 --- a/transform/sqlmesh_padelnomics/models/foundation/dim_locations.sql +++ b/transform/sqlmesh_padelnomics/models/foundation/dim_locations.sql @@ -35,7 +35,6 @@ locations AS ( admin1_code, admin2_code, population, - population_year AS population_year, ref_year FROM staging.stg_population_geonames WHERE lat IS NOT NULL AND lon IS NOT NULL diff 
--git a/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql b/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql index 6f40d10..699c90e 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql @@ -28,7 +28,8 @@ WITH parsed AS ( SELECT UNNEST(rows) AS row FROM read_json( @LANDING_DIR || '/geonames/*/*/cities_global.json.gz', - auto_detect = true + auto_detect = true, + maximum_object_size = 40000000 ) ) WHERE (row ->> 'geoname_id') IS NOT NULL From e5960c08ff34352d1deb72e81f365dd695d4b942 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 10:15:25 +0100 Subject: [PATCH 84/98] feat(admin): cross-section links across leads, suppliers, marketplace, emails MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Lead detail: - contact_email → 📧 email log (pre-filtered), mailto, Send Email compose - country → leads list filtered by that country Supplier detail: - contact_email → 📧 email log (pre-filtered), mailto, Send Email compose - claimed_by → user detail page (was plain "User #N") Marketplace dashboard: - Funnel card numbers are now links: Total → /leads, Verified New → /leads?status=new, Unlocked → /leads?status=forwarded, Won → /leads?status=closed_won - Active suppliers number links to /suppliers Marketplace activity stream: - lead events → link to lead_detail - unlock events → supplier name links to supplier_detail, "lead #N" links to lead_detail - credit events → supplier name links to supplier_detail (query now joins suppliers table for name; ref2_id exposes supplier_id and lead_id per event) Email detail: - Reverse-lookup to_addr against lead_requests + suppliers; renders linked "Lead #N" / "Supplier Name" chips next to the To field Email compose: - Accepts ?to= query param to pre-fill recipient (enables Send Email links) Co-Authored-By: Claude Sonnet 4.6 --- 
web/src/padelnomics/admin/routes.py | 25 +++++++++++++------ .../admin/templates/admin/email_detail.html | 10 +++++++- .../admin/templates/admin/lead_detail.html | 15 +++++++++-- .../admin/templates/admin/marketplace.html | 10 ++++---- .../admin/partials/marketplace_activity.html | 7 +++--- .../templates/admin/supplier_detail.html | 11 ++++++-- 6 files changed, 58 insertions(+), 20 deletions(-) diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 64e103c..32bf36d 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -829,21 +829,22 @@ async def marketplace_dashboard(): async def marketplace_activity(): """HTMX: Recent marketplace activity stream.""" rows = await fetch_all( - """SELECT 'lead' as event_type, id as ref_id, + """SELECT 'lead' as event_type, id as ref_id, NULL as ref2_id, contact_name as actor, status as detail, country as extra, created_at FROM lead_requests WHERE lead_type = 'quote' UNION ALL - SELECT 'unlock' as event_type, lf.id as ref_id, + SELECT 'unlock' as event_type, lf.lead_id as ref_id, lf.supplier_id as ref2_id, s.name as actor, lf.status as detail, CAST(lf.credit_cost AS TEXT) as extra, lf.created_at FROM lead_forwards lf JOIN suppliers s ON s.id = lf.supplier_id UNION ALL - SELECT 'credit' as event_type, id as ref_id, - CAST(supplier_id AS TEXT) as actor, event_type as detail, - CAST(delta AS TEXT) as extra, created_at - FROM credit_ledger + SELECT 'credit' as event_type, id as ref_id, supplier_id as ref2_id, + s.name as actor, cl.event_type as detail, + CAST(cl.delta AS TEXT) as extra, cl.created_at + FROM credit_ledger cl + JOIN suppliers s ON s.id = cl.supplier_id ORDER BY created_at DESC LIMIT 50""" ) return await render_template("admin/partials/marketplace_activity.html", events=rows) @@ -1285,10 +1286,19 @@ async def email_detail(email_id: int): except Exception: logger.warning("Failed to fetch email body from Resend for %s", email["resend_id"], 
exc_info=True) + related_lead = await fetch_one( + "SELECT id FROM lead_requests WHERE contact_email = ? LIMIT 1", (email["to_addr"],) + ) + related_supplier = await fetch_one( + "SELECT id, name FROM suppliers WHERE contact_email = ? LIMIT 1", (email["to_addr"],) + ) + return await render_template( "admin/email_detail.html", email=email, enriched_html=enriched_html, + related_lead=related_lead, + related_supplier=related_supplier, ) @@ -1408,8 +1418,9 @@ async def email_compose(): email_addresses=EMAIL_ADDRESSES, ) + prefill_to = request.args.get("to", "") return await render_template( - "admin/email_compose.html", data={}, email_addresses=EMAIL_ADDRESSES, + "admin/email_compose.html", data={"to": prefill_to}, email_addresses=EMAIL_ADDRESSES, ) diff --git a/web/src/padelnomics/admin/templates/admin/email_detail.html b/web/src/padelnomics/admin/templates/admin/email_detail.html index 597851a..46ed4ce 100644 --- a/web/src/padelnomics/admin/templates/admin/email_detail.html +++ b/web/src/padelnomics/admin/templates/admin/email_detail.html @@ -14,7 +14,15 @@

    Details

    To
    -
    {{ email.to_addr }}
    +
    + {{ email.to_addr }} + {% if related_lead %} + Lead #{{ related_lead.id }} + {% endif %} + {% if related_supplier %} + {{ related_supplier.name }} + {% endif %} +
    From
    {{ email.from_addr }}
    Subject
    diff --git a/web/src/padelnomics/admin/templates/admin/lead_detail.html b/web/src/padelnomics/admin/templates/admin/lead_detail.html index 423a1fe..f6ef05e 100644 --- a/web/src/padelnomics/admin/templates/admin/lead_detail.html +++ b/web/src/padelnomics/admin/templates/admin/lead_detail.html @@ -62,7 +62,10 @@
    Courts
    {{ lead.court_count or '-' }}
    Glass
    {{ lead.glass_type or '-' }}
    Lighting
    {{ lead.lighting_type or '-' }}
    -
    Location
    {{ lead.location or '-' }}, {{ lead.country or '-' }}
    +
    Location
    +
    {{ lead.location or '-' }}{% if lead.country %}, + {{ lead.country }} + {% else %}-{% endif %}
    Phase
    {{ lead.location_status or '-' }}
    Timeline
    {{ lead.timeline or '-' }}
    Budget
    {% if lead.budget_estimate %}€{{ "{:,}".format(lead.budget_estimate | int) }}{% else %}-{% endif %}
    @@ -79,7 +82,15 @@

    Contact

    Name
    {{ lead.contact_name or '-' }}
    -
    Email
    {{ lead.contact_email or '-' }}
    +
    Email
    +
    + {{ lead.contact_email or '-' }} + {% if lead.contact_email %} + 📧 + + Send email + {% endif %} +
    Phone
    {{ lead.contact_phone or '-' }}
    Company
    {{ lead.contact_company or '-' }}
    Role
    {{ lead.stakeholder_type or '-' }}
    diff --git a/web/src/padelnomics/admin/templates/admin/marketplace.html b/web/src/padelnomics/admin/templates/admin/marketplace.html index 39e56b2..37a35aa 100644 --- a/web/src/padelnomics/admin/templates/admin/marketplace.html +++ b/web/src/padelnomics/admin/templates/admin/marketplace.html @@ -38,22 +38,22 @@

    Total Leads

    -

    {{ funnel.total }}

    + {{ funnel.total }}

    Verified New

    -

    {{ funnel.verified_new }}

    + {{ funnel.verified_new }}

    ready to unlock

    Unlocked

    -

    {{ funnel.unlocked }}

    + {{ funnel.unlocked }}

    by suppliers

    Conversion

    {{ funnel.conversion_rate }}%

    -

    {{ funnel.won }} won

    +

    {{ funnel.won }} won

    @@ -86,7 +86,7 @@

    Active suppliers

    -

    {{ suppliers.active }}

    + {{ suppliers.active }}

    growth/pro w/ credits

    diff --git a/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html b/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html index 09e7d06..569dd93 100644 --- a/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html +++ b/web/src/padelnomics/admin/templates/admin/partials/marketplace_activity.html @@ -7,16 +7,17 @@
    {% if ev.event_type == 'lead' %} - New lead + New lead {% if ev.actor %} from {{ ev.actor }}{% endif %} {% if ev.extra %} — {{ ev.extra }}{% endif %} {% if ev.detail %} ({{ ev.detail }}){% endif %} {% elif ev.event_type == 'unlock' %} - {{ ev.actor }} unlocked a lead + {{ ev.actor }} + unlocked lead #{{ ev.ref_id }} {% if ev.extra %} — {{ ev.extra }} credits{% endif %} {% if ev.detail and ev.detail != 'sent' %} ({{ ev.detail }}){% endif %} {% elif ev.event_type == 'credit' %} - Credit event + {{ ev.actor }} {% if ev.extra and ev.extra | int > 0 %}+{{ ev.extra }} {% elif ev.extra %}{{ ev.extra }}{% endif %} {% if ev.detail %} ({{ ev.detail }}){% endif %} diff --git a/web/src/padelnomics/admin/templates/admin/supplier_detail.html b/web/src/padelnomics/admin/templates/admin/supplier_detail.html index 67fcc3d..abdb49f 100644 --- a/web/src/padelnomics/admin/templates/admin/supplier_detail.html +++ b/web/src/padelnomics/admin/templates/admin/supplier_detail.html @@ -45,7 +45,14 @@
    {% if supplier.website %}{{ supplier.website }}{% else %}-{% endif %}
    Contact
    {{ supplier.contact_name or '-' }}
    - {{ supplier.contact_email or '-' }}
    + + {{ supplier.contact_email or '-' }} + {% if supplier.contact_email %} + 📧 + + Send email + {% endif %} +
    Tagline
    {{ supplier.tagline or '-' }}
    Description
    @@ -73,7 +80,7 @@
    Enquiries
    {{ enquiry_count }}
    Claimed By
    -
    {% if supplier.claimed_by %}User #{{ supplier.claimed_by }}{% else %}Unclaimed{% endif %}
    +
    {% if supplier.claimed_by %}User #{{ supplier.claimed_by }}{% else %}Unclaimed{% endif %}
    Created
    {{ supplier.created_at or '-' }}
    From 6bede60ef83f98b928b7fcdc7ac8aa5d58aa657e Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 11:50:17 +0100 Subject: [PATCH 85/98] feat(extract): add compress_jsonl_atomic() utility Streams a JSONL working file to .jsonl.gz in 1MB chunks (constant memory), atomic rename via .tmp sibling, deletes source on success. Companion to write_gzip_atomic() for extractors that stream records incrementally. Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/utils.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/utils.py b/extract/padelnomics_extract/src/padelnomics_extract/utils.py index 15777f0..451c365 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/utils.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/utils.py @@ -174,3 +174,23 @@ def write_gzip_atomic(path: Path, data: bytes) -> int: tmp.write_bytes(compressed) tmp.rename(path) return len(compressed) + + +def compress_jsonl_atomic(jsonl_path: Path, dest_path: Path) -> int: + """Compress a JSONL working file to .jsonl.gz atomically, then delete the source. + + Streams compression in 1MB chunks (constant memory regardless of file size). + Atomic via .tmp rename — readers never see a partial .jsonl.gz. + Deletes the uncompressed working file after successful compression. + Returns compressed bytes written. 
+ """ + assert jsonl_path.exists(), f"source must exist: {jsonl_path}" + assert jsonl_path.stat().st_size > 0, f"source must not be empty: {jsonl_path}" + tmp = dest_path.with_suffix(dest_path.suffix + ".tmp") + with open(jsonl_path, "rb") as f_in, gzip.open(tmp, "wb") as f_out: + while chunk := f_in.read(1_048_576): # 1 MB chunks + f_out.write(chunk) + bytes_written = tmp.stat().st_size + tmp.rename(dest_path) + jsonl_path.unlink() + return bytes_written From 9bef055e6d58dbb0e4bc792ed90edfd51b1dc752 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:07:53 +0100 Subject: [PATCH 86/98] feat(extract): convert playtomic_tenants to JSONL output MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - playtomic_tenants.py: write each tenant as a JSONL line after dedup, compress via compress_jsonl_atomic → tenants.jsonl.gz - playtomic_availability.py: update _load_tenant_ids() to prefer tenants.jsonl.gz, fall back to tenants.json.gz (transition) - stg_playtomic_venues.sql: UNION ALL jsonl+blob CTEs for transition; JSONL reads top-level columns directly, no UNNEST(tenants) needed - stg_playtomic_resources.sql: same UNION ALL pattern, single UNNEST for resources in JSONL path vs double UNNEST in blob path - stg_playtomic_opening_hours.sql: same UNION ALL pattern, opening_hours as top-level JSON column in JSONL path Co-Authored-By: Claude Sonnet 4.6 --- .../playtomic_availability.py | 38 +++++++--- .../padelnomics_extract/playtomic_tenants.py | 18 +++-- .../staging/stg_playtomic_opening_hours.sql | 29 +++++++- .../staging/stg_playtomic_resources.sql | 67 +++++++++++------ .../models/staging/stg_playtomic_venues.sql | 74 ++++++++++++++----- 5 files changed, 166 insertions(+), 60 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index e665ccd..e086855 100644 --- 
a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -65,28 +65,44 @@ _thread_local = threading.local() # --------------------------------------------------------------------------- def _load_tenant_ids(landing_dir: Path) -> list[str]: - """Read tenant IDs from the most recent tenants.json.gz file.""" + """Read tenant IDs from the most recent tenants file (JSONL or blob format).""" assert landing_dir.is_dir(), f"landing_dir must exist: {landing_dir}" playtomic_dir = landing_dir / "playtomic" if not playtomic_dir.exists(): return [] - tenant_files = sorted(playtomic_dir.glob("*/*/tenants.json.gz"), reverse=True) + # Prefer JSONL (new format), fall back to blob (old format) + tenant_files = sorted(playtomic_dir.glob("*/*/tenants.jsonl.gz"), reverse=True) + if not tenant_files: + tenant_files = sorted(playtomic_dir.glob("*/*/tenants.json.gz"), reverse=True) if not tenant_files: return [] latest = tenant_files[0] logger.info("Loading tenant IDs from %s", latest) - - with gzip.open(latest, "rb") as f: - data = json.loads(f.read()) - - tenants = data.get("tenants", []) ids = [] - for t in tenants: - tid = t.get("tenant_id") or t.get("id") - if tid: - ids.append(tid) + + with gzip.open(latest, "rt") as f: + if latest.name.endswith(".jsonl.gz"): + # JSONL: one tenant object per line + for line in f: + line = line.strip() + if not line: + continue + try: + record = json.loads(line) + tid = record.get("tenant_id") or record.get("id") + if tid: + ids.append(tid) + except json.JSONDecodeError: + break # truncated last line + else: + # Blob: {"tenants": [...]} + data = json.loads(f.read()) + for t in data.get("tenants", []): + tid = t.get("tenant_id") or t.get("id") + if tid: + ids.append(tid) logger.info("Loaded %d tenant IDs", len(ids)) return ids diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py 
b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py index 8feb5c4..ea95eca 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py @@ -1,8 +1,8 @@ """Playtomic tenants extractor — venue listings via unauthenticated API. Paginates through the global tenant list (sorted by UUID) using the `page` -parameter. Deduplicates on tenant_id and writes a single consolidated JSON -to the landing zone. +parameter. Deduplicates on tenant_id and writes a gzipped JSONL file to the +landing zone (one tenant object per line). API notes (discovered 2026-02): - bbox params (min_latitude etc.) are silently ignored by the API @@ -18,7 +18,7 @@ pages. Rate: 1 req / 2 s per IP (see docs/data-sources-inventory.md §1.2). -Landing: {LANDING_DIR}/playtomic/{year}/{month}/tenants.json.gz +Landing: {LANDING_DIR}/playtomic/{year}/{month}/tenants.jsonl.gz """ import json @@ -31,7 +31,7 @@ import niquests from ._shared import HTTP_TIMEOUT_SECONDS, USER_AGENT, run_extractor, setup_logging from .proxy import load_proxy_urls, make_round_robin_cycler -from .utils import landing_path, write_gzip_atomic +from .utils import compress_jsonl_atomic, landing_path logger = setup_logging("padelnomics.extract.playtomic_tenants") @@ -76,7 +76,7 @@ def extract( """Fetch all Playtomic venues via global pagination. 
Returns run metrics.""" year, month = year_month.split("/") dest_dir = landing_path(landing_dir, "playtomic", year, month) - dest = dest_dir / "tenants.json.gz" + dest = dest_dir / "tenants.jsonl.gz" proxy_urls = load_proxy_urls() next_proxy = make_round_robin_cycler(proxy_urls) if proxy_urls else None @@ -138,8 +138,12 @@ def extract( if not next_proxy: time.sleep(THROTTLE_SECONDS) - payload = json.dumps({"tenants": all_tenants, "count": len(all_tenants)}).encode() - bytes_written = write_gzip_atomic(dest, payload) + # Write each tenant as a JSONL line, then compress atomically + working_path = dest.with_suffix(".working.jsonl") + with open(working_path, "w") as f: + for tenant in all_tenants: + f.write(json.dumps(tenant, separators=(",", ":")) + "\n") + bytes_written = compress_jsonl_atomic(working_path, dest) logger.info("%d unique venues -> %s", len(all_tenants), dest) return { diff --git a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_opening_hours.sql b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_opening_hours.sql index 08aa810..42e7bf9 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_opening_hours.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_opening_hours.sql @@ -5,8 +5,11 @@ -- DuckDB auto-infers opening_hours as STRUCT, so we access each day by literal -- key (no dynamic access) and UNION ALL to unpivot. 
-- --- Source: data/landing/playtomic/{year}/{month}/tenants.json.gz --- Each tenant has opening_hours: {MONDAY: {opening_time, closing_time}, ...} +-- Supports two landing formats (UNION ALL during migration): +-- New: tenants.jsonl.gz — one tenant per line, opening_hours is a top-level JSON column +-- Old: tenants.json.gz — {"tenants": [...]} blob (UNNEST required) +-- +-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz MODEL ( name staging.stg_playtomic_opening_hours, @@ -15,7 +18,22 @@ MODEL ( grain (tenant_id, day_of_week) ); -WITH venues AS ( +WITH +-- New format: one tenant per JSONL line +jsonl_venues AS ( + SELECT + tenant_id, + opening_hours AS oh + FROM read_json( + @LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz', + format = 'newline_delimited', + columns = {tenant_id: 'VARCHAR', opening_hours: 'JSON'} + ) + WHERE tenant_id IS NOT NULL + AND opening_hours IS NOT NULL +), +-- Old format: blob +blob_venues AS ( SELECT tenant ->> 'tenant_id' AS tenant_id, tenant -> 'opening_hours' AS oh @@ -30,6 +48,11 @@ WITH venues AS ( WHERE (tenant ->> 'tenant_id') IS NOT NULL AND (tenant -> 'opening_hours') IS NOT NULL ), +venues AS ( + SELECT * FROM jsonl_venues + UNION ALL + SELECT * FROM blob_venues +), -- Unpivot by UNION ALL — 7 literal key accesses unpivoted AS ( SELECT tenant_id, 'MONDAY' AS day_of_week, 1 AS day_number, diff --git a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_resources.sql b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_resources.sql index 0907d6a..b6f6353 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_resources.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_resources.sql @@ -1,9 +1,12 @@ -- Individual court (resource) records from Playtomic venues. --- Reads resources array from the landing zone JSON directly (double UNNEST: --- tenants → resources) to extract court type, size, surface, and booking config. 
+-- Reads resources array from the landing zone to extract court type, size, +-- surface, and booking config. -- --- Source: data/landing/playtomic/{year}/{month}/tenants.json.gz --- Each tenant has a resources[] array of court objects. +-- Supports two landing formats (UNION ALL during migration): +-- New: tenants.jsonl.gz — one tenant per line, resources is a top-level JSON column +-- Old: tenants.json.gz — {"tenants": [...]} blob (double UNNEST: tenants → resources) +-- +-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz MODEL ( name staging.stg_playtomic_resources, @@ -12,36 +15,56 @@ MODEL ( grain (tenant_id, resource_id) ); -WITH raw AS ( - SELECT UNNEST(tenants) AS tenant - FROM read_json( - @LANDING_DIR || '/playtomic/*/*/tenants.json.gz', - format = 'auto', - maximum_object_size = 134217728 - ) -), -unnested AS ( +WITH +-- New format: one tenant per JSONL line — single UNNEST for resources +jsonl_unnested AS ( SELECT - tenant ->> 'tenant_id' AS tenant_id, - UPPER(tenant -> 'address' ->> 'country_code') AS country_code, - UNNEST(from_json(tenant -> 'resources', '["JSON"]')) AS resource_json - FROM raw + tenant_id, + UPPER(address ->> 'country_code') AS country_code, + UNNEST(from_json(resources, '["JSON"]')) AS resource_json + FROM read_json( + @LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz', + format = 'newline_delimited', + columns = {tenant_id: 'VARCHAR', address: 'JSON', resources: 'JSON'} + ) + WHERE tenant_id IS NOT NULL + AND resources IS NOT NULL +), +-- Old format: blob — double UNNEST (tenants → resources) +blob_unnested AS ( + SELECT + tenant ->> 'tenant_id' AS tenant_id, + UPPER(tenant -> 'address' ->> 'country_code') AS country_code, + UNNEST(from_json(tenant -> 'resources', '["JSON"]')) AS resource_json + FROM ( + SELECT UNNEST(tenants) AS tenant + FROM read_json( + @LANDING_DIR || '/playtomic/*/*/tenants.json.gz', + format = 'auto', + maximum_object_size = 134217728 + ) + ) WHERE (tenant ->> 'tenant_id') IS NOT NULL 
AND (tenant -> 'resources') IS NOT NULL +), +unnested AS ( + SELECT * FROM jsonl_unnested + UNION ALL + SELECT * FROM blob_unnested ) SELECT tenant_id, - resource_json ->> 'resource_id' AS resource_id, + resource_json ->> 'resource_id' AS resource_id, country_code, - NULLIF(TRIM(resource_json ->> 'name'), '') AS resource_name, - resource_json ->> 'sport_id' AS sport_id, + NULLIF(TRIM(resource_json ->> 'name'), '') AS resource_name, + resource_json ->> 'sport_id' AS sport_id, CASE WHEN LOWER(resource_json ->> 'is_active') IN ('true', '1') - THEN TRUE ELSE FALSE END AS is_active, + THEN TRUE ELSE FALSE END AS is_active, LOWER(resource_json -> 'properties' ->> 'resource_type') AS resource_type, LOWER(resource_json -> 'properties' ->> 'resource_size') AS resource_size, LOWER(resource_json -> 'properties' ->> 'resource_feature') AS resource_feature, CASE WHEN LOWER(resource_json -> 'booking_settings' ->> 'is_bookable_online') IN ('true', '1') - THEN TRUE ELSE FALSE END AS is_bookable_online + THEN TRUE ELSE FALSE END AS is_bookable_online FROM unnested WHERE (resource_json ->> 'resource_id') IS NOT NULL AND (resource_json ->> 'sport_id') = 'PADEL' diff --git a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_venues.sql b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_venues.sql index de579b5..6240462 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_venues.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_venues.sql @@ -1,10 +1,13 @@ -- Playtomic padel venue records — full metadata extraction. --- Reads landing zone JSON, unnests tenant array, extracts all venue metadata +-- Reads landing zone tenants files, extracts all venue metadata -- including address, opening hours, court resources, VAT rate, and facilities. -- Deduplicates on tenant_id (keeps most recent extraction). 
-- --- Source: data/landing/playtomic/{year}/{month}/tenants.json.gz --- Format: {"tenants": [{tenant_id, tenant_name, address, resources, opening_hours, ...}]} +-- Supports two landing formats (UNION ALL during migration): +-- New: tenants.jsonl.gz — one tenant JSON object per line (no UNNEST needed) +-- Old: tenants.json.gz — {"tenants": [{...}]} blob (UNNEST required) +-- +-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz MODEL ( name staging.stg_playtomic_venues, @@ -13,9 +16,52 @@ MODEL ( grain tenant_id ); -WITH parsed AS ( +WITH +-- New format: one tenant per JSONL line — no UNNEST, access columns directly +jsonl_parsed AS ( + SELECT + tenant_id, + tenant_name, + slug, + tenant_type, + tenant_status, + playtomic_status, + booking_type, + address ->> 'street' AS street, + address ->> 'city' AS city, + address ->> 'postal_code' AS postal_code, + UPPER(address ->> 'country_code') AS country_code, + address ->> 'timezone' AS timezone, + address ->> 'administrative_area' AS administrative_area, + TRY_CAST(address -> 'coordinate' ->> 'lat' AS DOUBLE) AS lat, + TRY_CAST(address -> 'coordinate' ->> 'lon' AS DOUBLE) AS lon, + TRY_CAST(vat_rate AS DOUBLE) AS vat_rate, + default_currency, + TRY_CAST(booking_settings ->> 'booking_ahead_limit' AS INTEGER) AS booking_ahead_limit_minutes, + opening_hours AS opening_hours_json, + resources AS resources_json, + created_at, + CAST(is_playtomic_partner AS VARCHAR) AS is_playtomic_partner_raw, + filename AS source_file, + CURRENT_DATE AS extracted_date + FROM read_json( + @LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz', + format = 'newline_delimited', + filename = true, + columns = { + tenant_id: 'VARCHAR', tenant_name: 'VARCHAR', slug: 'VARCHAR', + tenant_type: 'VARCHAR', tenant_status: 'VARCHAR', playtomic_status: 'VARCHAR', + booking_type: 'VARCHAR', address: 'JSON', vat_rate: 'DOUBLE', + default_currency: 'VARCHAR', booking_settings: 'JSON', + opening_hours: 'JSON', resources: 'JSON', + created_at: 
'VARCHAR', is_playtomic_partner: 'VARCHAR' + } + ) + WHERE tenant_id IS NOT NULL +), +-- Old format: {"tenants": [...]} blob — keep for transition until old files rotate out +blob_parsed AS ( SELECT - -- Identity tenant ->> 'tenant_id' AS tenant_id, tenant ->> 'tenant_name' AS tenant_name, tenant ->> 'slug' AS slug, @@ -23,8 +69,6 @@ WITH parsed AS ( tenant ->> 'tenant_status' AS tenant_status, tenant ->> 'playtomic_status' AS playtomic_status, tenant ->> 'booking_type' AS booking_type, - - -- Address tenant -> 'address' ->> 'street' AS street, tenant -> 'address' ->> 'city' AS city, tenant -> 'address' ->> 'postal_code' AS postal_code, @@ -33,22 +77,13 @@ WITH parsed AS ( tenant -> 'address' ->> 'administrative_area' AS administrative_area, TRY_CAST(tenant -> 'address' -> 'coordinate' ->> 'lat' AS DOUBLE) AS lat, TRY_CAST(tenant -> 'address' -> 'coordinate' ->> 'lon' AS DOUBLE) AS lon, - - -- Commercial - TRY_CAST(tenant ->> 'vat_rate' AS DOUBLE) AS vat_rate, + TRY_CAST(tenant ->> 'vat_rate' AS DOUBLE) AS vat_rate, tenant ->> 'default_currency' AS default_currency, - - -- Booking settings (venue-level) TRY_CAST(tenant -> 'booking_settings' ->> 'booking_ahead_limit' AS INTEGER) AS booking_ahead_limit_minutes, - - -- Opening hours and resources stored as JSON for downstream models tenant -> 'opening_hours' AS opening_hours_json, tenant -> 'resources' AS resources_json, - - -- Metadata tenant ->> 'created_at' AS created_at, tenant ->> 'is_playtomic_partner' AS is_playtomic_partner_raw, - filename AS source_file, CURRENT_DATE AS extracted_date FROM ( @@ -62,6 +97,11 @@ WITH parsed AS ( ) WHERE (tenant ->> 'tenant_id') IS NOT NULL ), +parsed AS ( + SELECT * FROM jsonl_parsed + UNION ALL + SELECT * FROM blob_parsed +), deduped AS ( SELECT *, ROW_NUMBER() OVER (PARTITION BY tenant_id ORDER BY source_file DESC) AS rn From daf1945d5b9bd5eda32bdfba842db2ce916206e5 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 11:58:29 +0100 Subject: [PATCH 87/98] 
=?UTF-8?q?feat(emails):=20subtask=201-2=20=E2=80=94?= =?UTF-8?q?=20email=5Ftemplates.py=20foundation=20+=203=20simple=20templat?= =?UTF-8?q?es?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add email_templates.py: standalone Jinja2 env, render_email_template(), EMAIL_TEMPLATE_REGISTRY with sample_data functions for all 11 email types - Add templates/emails/_base.html: direct transliteration of _email_wrap() - Add templates/emails/_macros.html: email_button, heat_badge, heat_badge_sm, section_heading, info_box macros - Add magic_link.html, welcome.html, supplier_enquiry.html templates - Refactor 3 handlers in worker.py to use render_email_template() Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/email_templates.py | 306 ++++++++++++++++++ .../padelnomics/templates/emails/_base.html | 54 ++++ .../padelnomics/templates/emails/_macros.html | 61 ++++ .../templates/emails/magic_link.html | 12 + .../templates/emails/supplier_enquiry.html | 19 ++ .../padelnomics/templates/emails/welcome.html | 25 ++ web/src/padelnomics/worker.py | 62 ++-- 7 files changed, 500 insertions(+), 39 deletions(-) create mode 100644 web/src/padelnomics/email_templates.py create mode 100644 web/src/padelnomics/templates/emails/_base.html create mode 100644 web/src/padelnomics/templates/emails/_macros.html create mode 100644 web/src/padelnomics/templates/emails/magic_link.html create mode 100644 web/src/padelnomics/templates/emails/supplier_enquiry.html create mode 100644 web/src/padelnomics/templates/emails/welcome.html diff --git a/web/src/padelnomics/email_templates.py b/web/src/padelnomics/email_templates.py new file mode 100644 index 0000000..220000c --- /dev/null +++ b/web/src/padelnomics/email_templates.py @@ -0,0 +1,306 @@ +""" +Standalone Jinja2 email template renderer. + +Used by both the worker (outside Quart request context) and admin gallery routes. 
+Creates a module-level Environment pointing at the same templates/ directory +used by the web app, so templates share the same file tree. + +Usage: + from .email_templates import render_email_template, EMAIL_TEMPLATE_REGISTRY + + html = render_email_template("emails/magic_link.html", lang="en", link=link, expiry_minutes=15) +""" + +from pathlib import Path + +import jinja2 + +from .core import config, utcnow +from .i18n import get_translations + +_TEMPLATES_DIR = Path(__file__).parent / "templates" + +# Standalone environment — not tied to Quart's request context. +# autoescape=True: user-supplied data (names, emails, messages) is auto-escaped. +# Trusted HTML sections use the `| safe` filter explicitly. +_env = jinja2.Environment( + loader=jinja2.FileSystemLoader(str(_TEMPLATES_DIR)), + autoescape=True, + undefined=jinja2.StrictUndefined, +) + + +def _tformat(s: str, **kwargs) -> str: + """Jinja filter: interpolate {placeholders} into a translation string. + + Mirrors the `tformat` filter registered in app.py so email templates + and web templates use the same syntax: + {{ t.some_key | tformat(name=supplier.name, count=n) }} + """ + if not kwargs: + return s + return s.format(**kwargs) + + +_env.filters["tformat"] = _tformat + + +def render_email_template(template_name: str, lang: str = "en", **kwargs) -> str: + """Render an email template with standard context injected. + + Args: + template_name: Path relative to templates/ (e.g. "emails/magic_link.html"). + lang: Language code ("en" or "de"). Used for translations + html lang attr. + **kwargs: Additional context variables passed to the template. + + Returns: + Rendered HTML string containing a full document. 
+ """ + assert lang in ("en", "de"), f"Unsupported lang: {lang!r}" + assert template_name.startswith("emails/"), f"Expected emails/ prefix: {template_name!r}" + + translations = get_translations(lang) + year = utcnow().year + + # Pre-interpolate footer strings so templates don't need to call tformat on them. + tagline = translations.get("email_footer_tagline", "") + copyright_text = translations.get("email_footer_copyright", "").format( + year=year, app_name=config.APP_NAME + ) + + context = { + "lang": lang, + "app_name": config.APP_NAME, + "base_url": config.BASE_URL, + "t": translations, + "tagline": tagline, + "copyright_text": copyright_text, + **kwargs, + } + + tmpl = _env.get_template(template_name) + rendered = tmpl.render(**context) + + assert "" in rendered, f"Template {template_name!r} must produce a DOCTYPE document" + assert "padelnomics" in rendered.lower(), f"Template {template_name!r} must include the wordmark" + return rendered + + +# ============================================================================= +# Template registry — used by admin gallery for sample preview rendering +# ============================================================================= + +def _magic_link_sample(lang: str) -> dict: + return { + "link": f"{config.BASE_URL}/auth/verify?token=sample_token_abc123", + "expiry_minutes": 15, + "preheader": get_translations(lang).get("email_magic_link_preheader", "").format(expiry_minutes=15), + } + + +def _quote_verification_sample(lang: str) -> dict: + t = get_translations(lang) + court_count = "4" + return { + "link": f"{config.BASE_URL}/{lang}/leads/verify?token=verify123&lead=lead456", + "first_name": "Alex", + "court_count": court_count, + "facility_type": "Indoor Padel Club", + "country": "Germany", + "preheader": t.get("email_quote_verify_preheader_courts", "").format(court_count=court_count), + } + + +def _welcome_sample(lang: str) -> dict: + t = get_translations(lang) + return { + "first_name": "Maria", + "preheader": 
t.get("email_welcome_preheader", ""), + } + + +def _waitlist_supplier_sample(lang: str) -> dict: + t = get_translations(lang) + return { + "plan_name": "Growth", + "preheader": t.get("email_waitlist_supplier_preheader", ""), + } + + +def _waitlist_general_sample(lang: str) -> dict: + t = get_translations(lang) + return { + "preheader": t.get("email_waitlist_general_preheader", ""), + } + + +def _lead_matched_sample(lang: str) -> dict: + t = get_translations(lang) + return { + "first_name": "Thomas", + "facility_type": "padel", + "court_count": "6", + "country": "Austria", + "preheader": t.get("email_lead_matched_preheader", ""), + } + + +def _lead_forward_sample(lang: str) -> dict: + return { + "heat": "HOT", + "country": "Spain", + "courts": "8", + "budget": "450000", + "facility_type": "Outdoor Padel Club", + "timeline": "Q3 2025", + "contact_email": "ceo@padelclub.es", + "contact_name": "Carlos Rivera", + "contact_phone": "+34 612 345 678", + "contact_company": "PadelClub Madrid SL", + "stakeholder_type": "Developer / Investor", + "build_context": "New build", + "glass_type": "Panoramic", + "lighting_type": "LED", + "location": "Madrid", + "location_status": "Site confirmed", + "financing_status": "Self-financed", + "services_needed": "Full turnkey construction", + "additional_info": "Seeking experienced international suppliers only.", + "cta_url": f"{config.BASE_URL}/suppliers/leads/cta/sample_cta_token", + "preheader": "Outdoor Padel Club project · Q3 2025 timeline — contact details inside", + "brief_rows": [ + ("Facility", "Outdoor Padel Club (New build)"), + ("Courts", "8 | Glass: Panoramic | Lighting: LED"), + ("Location", "Madrid, Spain"), + ("Timeline", "Q3 2025 | Budget: €450000"), + ("Phase", "Site confirmed | Financing: Self-financed"), + ("Services", "Full turnkey construction"), + ("Additional Info", "Seeking experienced international suppliers only."), + ], + } + + +def _lead_match_notify_sample(lang: str) -> dict: + return { + "heat": "WARM", + 
"country": "Netherlands", + "courts": "4", + "facility_type": "Indoor Padel", + "timeline": "Q1 2026", + "credit_cost": 2, + "preheader": "New matching lead in Netherlands", + } + + +def _weekly_digest_sample(lang: str) -> dict: + return { + "leads": [ + {"heat": "HOT", "facility_type": "Outdoor Padel", "court_count": "6", "country": "Germany", "timeline": "Q2 2025"}, + {"heat": "WARM", "facility_type": "Indoor Club", "court_count": "4", "country": "Austria", "timeline": "Q3 2025"}, + {"heat": "COOL", "facility_type": "Padel Centre", "court_count": "8", "country": "Switzerland", "timeline": "2026"}, + ], + "preheader": "3 new leads matching your service area", + } + + +def _business_plan_sample(lang: str) -> dict: + t = get_translations(lang) + return { + "download_url": f"{config.BASE_URL}/planner/export/sample_export_token", + "quote_url": f"{config.BASE_URL}/{lang}/leads/quote", + "preheader": t.get("email_business_plan_preheader", ""), + } + + +def _admin_compose_sample(lang: str) -> dict: + return { + "body_html": "

    Hello,

    This is a test message from the admin compose panel.

    Best regards,
    Padelnomics Team

    ", + "preheader": "Test message from admin", + } + + +# Registry entry shape: +# template: path relative to templates/ +# label: human-readable name shown in gallery +# description: one-line description +# email_type: email_type value stored in email_log (for cross-linking) +# sample_data: callable(lang) → dict of template context +EMAIL_TEMPLATE_REGISTRY: dict[str, dict] = { + "magic_link": { + "template": "emails/magic_link.html", + "label": "Magic Link", + "description": "Passwordless sign-in link sent to users requesting access.", + "email_type": "magic_link", + "sample_data": _magic_link_sample, + }, + "quote_verification": { + "template": "emails/quote_verification.html", + "label": "Quote Verification", + "description": "Email address verification for new project quote requests.", + "email_type": "quote_verification", + "sample_data": _quote_verification_sample, + }, + "welcome": { + "template": "emails/welcome.html", + "label": "Welcome", + "description": "Sent to new users after their first successful sign-in.", + "email_type": "welcome", + "sample_data": _welcome_sample, + }, + "waitlist_supplier": { + "template": "emails/waitlist_supplier.html", + "label": "Waitlist — Supplier", + "description": "Confirmation for suppliers who joined the Growth/Pro waitlist.", + "email_type": "waitlist", + "sample_data": _waitlist_supplier_sample, + }, + "waitlist_general": { + "template": "emails/waitlist_general.html", + "label": "Waitlist — General", + "description": "Confirmation for general sign-up waitlist submissions.", + "email_type": "waitlist", + "sample_data": _waitlist_general_sample, + }, + "lead_matched": { + "template": "emails/lead_matched.html", + "label": "Lead Matched", + "description": "Notifies the project owner that suppliers are now reviewing their brief.", + "email_type": "lead_matched", + "sample_data": _lead_matched_sample, + }, + "lead_forward": { + "template": "emails/lead_forward.html", + "label": "Lead Forward", + "description": "Full 
project brief sent to a supplier after they unlock a lead.", + "email_type": "lead_forward", + "sample_data": _lead_forward_sample, + }, + "lead_match_notify": { + "template": "emails/lead_match_notify.html", + "label": "Lead Match Notify", + "description": "Notifies matching suppliers that a new lead is available in their area.", + "email_type": "lead_match_notify", + "sample_data": _lead_match_notify_sample, + }, + "weekly_digest": { + "template": "emails/weekly_digest.html", + "label": "Weekly Digest", + "description": "Monday digest of new leads matching a supplier's service area.", + "email_type": "weekly_digest", + "sample_data": _weekly_digest_sample, + }, + "business_plan": { + "template": "emails/business_plan.html", + "label": "Business Plan Ready", + "description": "Notifies the user when their business plan PDF export is ready.", + "email_type": "business_plan", + "sample_data": _business_plan_sample, + }, + "admin_compose": { + "template": "emails/admin_compose.html", + "label": "Admin Compose", + "description": "Branded wrapper used for ad-hoc emails sent from the compose panel.", + "email_type": "admin_compose", + "sample_data": _admin_compose_sample, + }, +} diff --git a/web/src/padelnomics/templates/emails/_base.html b/web/src/padelnomics/templates/emails/_base.html new file mode 100644 index 0000000..f15d81d --- /dev/null +++ b/web/src/padelnomics/templates/emails/_base.html @@ -0,0 +1,54 @@ + + + + + + {{ app_name }} + + + + {%- if preheader %} + {# Hidden preheader trick: visible text + invisible padding to prevent + email clients from pulling body text into the preview. #} + {{ preheader }}{% for _ in range(30) %}͏ ‌ {% endfor %} + {%- endif %} + + + +
    + + + + + + + + + + + + + + + + + +
     
    + + padelnomics + +
    + {% block body %}{% endblock %} +
    +

    + padelnomics.io +  ·  + {{ tagline }} +

    +

    + {{ copyright_text }} +

    +
    +
    + + diff --git a/web/src/padelnomics/templates/emails/_macros.html b/web/src/padelnomics/templates/emails/_macros.html new file mode 100644 index 0000000..1d043d4 --- /dev/null +++ b/web/src/padelnomics/templates/emails/_macros.html @@ -0,0 +1,61 @@ +{# + Shared macros for transactional email templates. + + Import in child templates: + {% from "emails/_macros.html" import email_button, heat_badge, section_heading, info_box %} +#} + + +{# ─── CTA Button ───────────────────────────────────────────────────────────── + Table-based blue button — works in all major email clients. + Uses display:block for full-width tap target on mobile. +#} +{% macro email_button(url, label) %} + + +
    + {{ label }} +
    +{% endmacro %} + + +{# ─── Heat Badge ───────────────────────────────────────────────────────────── + Inline colored badge: HOT (red), WARM (orange), COOL (blue). + heat: uppercase string "HOT" | "WARM" | "COOL" +#} +{% macro heat_badge(heat) %} +{%- set colors = {"HOT": "#DC2626", "WARM": "#EA580C", "COOL": "#2563EB"} -%} +{%- set bg = colors.get(heat, "#2563EB") -%} +{{ heat }} +{%- endmacro %} + + +{# ─── Small heat badge (compact variant for table rows) ────────────────────── + 1px smaller padding, used in weekly_digest lead table rows. +#} +{% macro heat_badge_sm(heat) %} +{%- set colors = {"HOT": "#DC2626", "WARM": "#EA580C", "COOL": "#2563EB"} -%} +{%- set bg = colors.get(heat, "#2563EB") -%} +{{ heat }} +{%- endmacro %} + + +{# ─── Section Heading ───────────────────────────────────────────────────────── + Small uppercase label above a data table section. +#} +{% macro section_heading(text) %} +

    {{ text }}

    +{% endmacro %} + + +{# ─── Info Box ──────────────────────────────────────────────────────────────── + Left-bordered callout box. color: "blue" (default) or "yellow". +#} +{% macro info_box(text, color="blue") %} +{%- if color == "yellow" -%} + {%- set bg = "#FEF3C7" -%}{%- set border = "#F59E0B" -%} +{%- else -%} + {%- set bg = "#F0F9FF" -%}{%- set border = "#1D4ED8" -%} +{%- endif -%} +

    {{ text }}

    +{% endmacro %} diff --git a/web/src/padelnomics/templates/emails/magic_link.html b/web/src/padelnomics/templates/emails/magic_link.html new file mode 100644 index 0000000..e004747 --- /dev/null +++ b/web/src/padelnomics/templates/emails/magic_link.html @@ -0,0 +1,12 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button %} + +{% block body %} +

    {{ t.email_magic_link_heading | tformat(app_name=app_name) }}

    +
    +

    {{ t.email_magic_link_body | tformat(expiry_minutes=expiry_minutes) }}

    +{{ email_button(link, t.email_magic_link_btn) }} +

    {{ t.email_magic_link_fallback }}

    +

    {{ link }}

    +

    {{ t.email_magic_link_ignore }}

    +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/supplier_enquiry.html b/web/src/padelnomics/templates/emails/supplier_enquiry.html new file mode 100644 index 0000000..54dd23d --- /dev/null +++ b/web/src/padelnomics/templates/emails/supplier_enquiry.html @@ -0,0 +1,19 @@ +{% extends "emails/_base.html" %} + +{% block body %} +

    {{ t.email_enquiry_heading | tformat(contact_name=contact_name) }}

    +
    +

    {{ t.email_enquiry_body | tformat(supplier_name=supplier_name) }}

    + + + + + + + + + +
    {{ t.email_enquiry_lbl_from }}{{ contact_name }} <{{ contact_email }}>
    {{ t.email_enquiry_lbl_message }}{{ message }}
    +

    {{ t.email_enquiry_respond_fast }}

    +

    {{ t.email_enquiry_reply | tformat(contact_email=contact_email) }}

    +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/welcome.html b/web/src/padelnomics/templates/emails/welcome.html new file mode 100644 index 0000000..d3d3438 --- /dev/null +++ b/web/src/padelnomics/templates/emails/welcome.html @@ -0,0 +1,25 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button %} + +{% block body %} +

    {{ t.email_welcome_heading | tformat(app_name=app_name) }}

    +
    +

    {{ t.email_welcome_greeting | tformat(first_name=first_name) }}

    +

    {{ t.email_welcome_body }}

    +

    {{ t.email_welcome_quickstart_heading }}

    + + + + + + + + + + + + + +
    {{ t.email_welcome_link_planner }}
    {{ t.email_welcome_link_markets }}
    {{ t.email_welcome_link_quotes }}
    +{{ email_button(base_url ~ "/planner", t.email_welcome_btn) }} +{% endblock %} diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index 387987a..fe872c1 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -22,6 +22,7 @@ from .core import ( utcnow, utcnow_iso, ) +from .email_templates import render_email_template from .i18n import get_translations logger = logging.getLogger(__name__) @@ -228,20 +229,18 @@ async def handle_send_magic_link(payload: dict) -> None: logger.debug("MAGIC LINK for %s: %s", payload["email"], link) expiry_minutes = config.MAGIC_LINK_EXPIRY_MINUTES - body = ( - f'

    {_t("email_magic_link_heading", lang, app_name=config.APP_NAME)}

    ' - f'
    ' - f'

    {_t("email_magic_link_body", lang, expiry_minutes=expiry_minutes)}

    ' - f'{_email_button(link, _t("email_magic_link_btn", lang))}' - f'

    {_t("email_magic_link_fallback", lang)}

    ' - f'

    {link}

    ' - f'

    {_t("email_magic_link_ignore", lang)}

    ' + html = render_email_template( + "emails/magic_link.html", + lang=lang, + link=link, + expiry_minutes=expiry_minutes, + preheader=_t("email_magic_link_preheader", lang, expiry_minutes=expiry_minutes), ) await send_email( to=payload["email"], subject=_t("email_magic_link_subject", lang, app_name=config.APP_NAME), - html=_email_wrap(body, lang, preheader=_t("email_magic_link_preheader", lang, expiry_minutes=expiry_minutes)), + html=html, from_addr=EMAIL_ADDRESSES["transactional"], email_type="magic_link", ) @@ -315,27 +314,17 @@ async def handle_send_welcome(payload: dict) -> None: name_parts = (payload.get("name") or "").split() first_name = name_parts[0] if name_parts else "there" - body = ( - f'

    {_t("email_welcome_heading", lang, app_name=config.APP_NAME)}

    ' - f'
    ' - f'

    {_t("email_welcome_greeting", lang, first_name=first_name)}

    ' - f'

    {_t("email_welcome_body", lang)}

    ' - f'

    {_t("email_welcome_quickstart_heading", lang)}

    ' - f'' - f'' - f'' - f'' - f'' - f'' - f'' - f'
    {_t("email_welcome_link_planner", lang)}
    {_t("email_welcome_link_markets", lang)}
    {_t("email_welcome_link_quotes", lang)}
    ' - f'{_email_button(f"{config.BASE_URL}/planner", _t("email_welcome_btn", lang))}' + html = render_email_template( + "emails/welcome.html", + lang=lang, + first_name=first_name, + preheader=_t("email_welcome_preheader", lang), ) await send_email( to=payload["email"], subject=_t("email_welcome_subject", lang), - html=_email_wrap(body, lang, preheader=_t("email_welcome_preheader", lang)), + html=html, from_addr=EMAIL_ADDRESSES["transactional"], email_type="welcome", ) @@ -727,25 +716,20 @@ async def handle_send_supplier_enquiry_email(payload: dict) -> None: contact_email = payload.get("contact_email", "") message = payload.get("message", "") - body = ( - f'

    ' - f'{_t("email_enquiry_heading", lang, contact_name=contact_name)}

    ' - f'
    ' - f'

    {_t("email_enquiry_body", lang, supplier_name=supplier_name)}

    ' - f'' - f'' - f'' - f'' - f'' - f'
    {_t("email_enquiry_lbl_from", lang)}{contact_name} <{contact_email}>
    {_t("email_enquiry_lbl_message", lang)}{message}
    ' - f'

    {_t("email_enquiry_respond_fast", lang)}

    ' - f'

    {_t("email_enquiry_reply", lang, contact_email=contact_email)}

    ' + html = render_email_template( + "emails/supplier_enquiry.html", + lang=lang, + supplier_name=supplier_name, + contact_name=contact_name, + contact_email=contact_email, + message=message, + preheader=_t("email_enquiry_preheader", lang), ) await send_email( to=supplier_email, subject=_t("email_enquiry_subject", lang, contact_name=contact_name), - html=_email_wrap(body, lang, preheader=_t("email_enquiry_preheader", lang)), + html=html, from_addr=EMAIL_ADDRESSES["transactional"], email_type="supplier_enquiry", ) From 1c7cdc42f28c74fc3d63d75e59bfc863eee4e715 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:05:20 +0100 Subject: [PATCH 88/98] =?UTF-8?q?feat(emails):=20subtask=203=20=E2=80=94?= =?UTF-8?q?=204=20medium=20templates=20(quote=5Fverification,=20waitlist,?= =?UTF-8?q?=20lead=5Fmatched)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add quote_verification.html (with optional project recap card) - Add waitlist_supplier.html, waitlist_general.html - Add lead_matched.html (with next-steps section + tip box) - Refactor 3 handlers in worker.py: send_quote_verification, send_waitlist_confirmation, send_lead_matched_notification Co-Authored-By: Claude Sonnet 4.6 --- .../templates/emails/lead_matched.html | 17 ++++ .../templates/emails/quote_verification.html | 24 +++++ .../templates/emails/waitlist_general.html | 14 +++ .../templates/emails/waitlist_supplier.html | 18 ++++ web/src/padelnomics/worker.py | 93 ++++++------------- 5 files changed, 100 insertions(+), 66 deletions(-) create mode 100644 web/src/padelnomics/templates/emails/lead_matched.html create mode 100644 web/src/padelnomics/templates/emails/quote_verification.html create mode 100644 web/src/padelnomics/templates/emails/waitlist_general.html create mode 100644 web/src/padelnomics/templates/emails/waitlist_supplier.html diff --git a/web/src/padelnomics/templates/emails/lead_matched.html 
b/web/src/padelnomics/templates/emails/lead_matched.html new file mode 100644 index 0000000..ec934b5 --- /dev/null +++ b/web/src/padelnomics/templates/emails/lead_matched.html @@ -0,0 +1,17 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button %} + +{% block body %} +

    {{ t.email_lead_matched_heading }}

    +
    +

    {{ t.email_lead_matched_greeting | tformat(first_name=first_name) }}

    +

    {{ t.email_lead_matched_body }}

    +

    {{ t.email_lead_matched_context | tformat(facility_type=facility_type, court_count=court_count, country=country) }}

    + +

    {{ t.email_lead_matched_next_heading }}

    +

    {{ t.email_lead_matched_next_body }}

    +

    {{ t.email_lead_matched_tip }}

    + +{{ email_button(base_url ~ "/dashboard", t.email_lead_matched_btn) }} +

    {{ t.email_lead_matched_note }}

    +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/quote_verification.html b/web/src/padelnomics/templates/emails/quote_verification.html new file mode 100644 index 0000000..c48e7f0 --- /dev/null +++ b/web/src/padelnomics/templates/emails/quote_verification.html @@ -0,0 +1,24 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button %} + +{% block body %} +

    {{ t.email_quote_verify_heading }}

    +
    +

    {{ t.email_quote_verify_greeting | tformat(first_name=first_name) }}

    +

    {{ t.email_quote_verify_body | tformat(app_name=app_name) }}

    + +{%- if recap_parts %} + + +
    + {{ t.email_quote_verify_project_label }} {{ recap_parts | join(" · ") | safe }} +
    +{%- endif %} + +

    {{ t.email_quote_verify_urgency }}

    +{{ email_button(link, t.email_quote_verify_btn) }} +

    {{ t.email_quote_verify_expires }}

    +

    {{ t.email_quote_verify_fallback }}

    +

    {{ link }}

    +

    {{ t.email_quote_verify_ignore }}

    +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/waitlist_general.html b/web/src/padelnomics/templates/emails/waitlist_general.html new file mode 100644 index 0000000..6546027 --- /dev/null +++ b/web/src/padelnomics/templates/emails/waitlist_general.html @@ -0,0 +1,14 @@ +{% extends "emails/_base.html" %} + +{% block body %} +

    {{ t.email_waitlist_general_heading }}

    +
    +

    {{ t.email_waitlist_general_body }}

    +

    {{ t.email_waitlist_general_perks_intro }}

    +
      +
    • {{ t.email_waitlist_general_perk_1 }}
    • +
    • {{ t.email_waitlist_general_perk_2 }}
    • +
    • {{ t.email_waitlist_general_perk_3 }}
    • +
    +

    {{ t.email_waitlist_general_outro }}

    +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/waitlist_supplier.html b/web/src/padelnomics/templates/emails/waitlist_supplier.html new file mode 100644 index 0000000..689868e --- /dev/null +++ b/web/src/padelnomics/templates/emails/waitlist_supplier.html @@ -0,0 +1,18 @@ +{% extends "emails/_base.html" %} + +{% block body %} +

    {{ t.email_waitlist_supplier_heading }}

    +
    +

    {{ t.email_waitlist_supplier_body | tformat(plan_name=plan_name) }}

    +

    {{ t.email_waitlist_supplier_perks_intro }}

    +
      +
    • {{ t.email_waitlist_supplier_perk_1 }}
    • +
    • {{ t.email_waitlist_supplier_perk_2 }}
    • +
    • {{ t.email_waitlist_supplier_perk_3 }}
    • +
    +

    {{ t.email_waitlist_supplier_meanwhile }}

    + +{% endblock %} diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index fe872c1..f9db18e 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -265,8 +265,6 @@ async def handle_send_quote_verification(payload: dict) -> None: facility_type = payload.get("facility_type", "") country = payload.get("country", "") - # Project recap card - project_card = "" recap_parts = [] if court_count: recap_parts.append(f"{court_count} courts") @@ -274,34 +272,22 @@ async def handle_send_quote_verification(payload: dict) -> None: recap_parts.append(facility_type) if country: recap_parts.append(country) - if recap_parts: - project_card = ( - f'' - f'
    ' - f'{_t("email_quote_verify_project_label", lang)} {" · ".join(recap_parts)}' - f'
    ' - ) preheader = _t("email_quote_verify_preheader_courts", lang, court_count=court_count) if court_count else _t("email_quote_verify_preheader", lang) - body = ( - f'

    {_t("email_quote_verify_heading", lang)}

    ' - f'
    ' - f'

    {_t("email_quote_verify_greeting", lang, first_name=first_name)}

    ' - f'

    {_t("email_quote_verify_body", lang, app_name=config.APP_NAME)}

    ' - f'{project_card}' - f'

    {_t("email_quote_verify_urgency", lang)}

    ' - f'{_email_button(link, _t("email_quote_verify_btn", lang))}' - f'

    {_t("email_quote_verify_expires", lang)}

    ' - f'

    {_t("email_quote_verify_fallback", lang)}

    ' - f'

    {link}

    ' - f'

    {_t("email_quote_verify_ignore", lang)}

    ' + html = render_email_template( + "emails/quote_verification.html", + lang=lang, + link=link, + first_name=first_name, + recap_parts=recap_parts, + preheader=preheader, ) await send_email( to=payload["email"], subject=_t("email_quote_verify_subject", lang), - html=_email_wrap(body, lang, preheader=preheader), + html=html, from_addr=EMAIL_ADDRESSES["transactional"], email_type="quote_verification", ) @@ -340,43 +326,24 @@ async def handle_send_waitlist_confirmation(payload: dict) -> None: if intent.startswith("supplier_"): plan_name = intent.replace("supplier_", "").title() subject = _t("email_waitlist_supplier_subject", lang, plan_name=plan_name) - preheader = _t("email_waitlist_supplier_preheader", lang) - body = ( - f'

    {_t("email_waitlist_supplier_heading", lang)}

    ' - f'
    ' - f'

    {_t("email_waitlist_supplier_body", lang, plan_name=plan_name)}

    ' - f'

    {_t("email_waitlist_supplier_perks_intro", lang)}

    ' - f'
      ' - f'
    • {_t("email_waitlist_supplier_perk_1", lang)}
    • ' - f'
    • {_t("email_waitlist_supplier_perk_2", lang)}
    • ' - f'
    • {_t("email_waitlist_supplier_perk_3", lang)}
    • ' - f'
    ' - f'

    {_t("email_waitlist_supplier_meanwhile", lang)}

    ' - f'' + html = render_email_template( + "emails/waitlist_supplier.html", + lang=lang, + plan_name=plan_name, + preheader=_t("email_waitlist_supplier_preheader", lang), ) else: subject = _t("email_waitlist_general_subject", lang) - preheader = _t("email_waitlist_general_preheader", lang) - body = ( - f'

    {_t("email_waitlist_general_heading", lang)}

    ' - f'
    ' - f'

    {_t("email_waitlist_general_body", lang)}

    ' - f'

    {_t("email_waitlist_general_perks_intro", lang)}

    ' - f'
      ' - f'
    • {_t("email_waitlist_general_perk_1", lang)}
    • ' - f'
    • {_t("email_waitlist_general_perk_2", lang)}
    • ' - f'
    • {_t("email_waitlist_general_perk_3", lang)}
    • ' - f'
    ' - f'

    {_t("email_waitlist_general_outro", lang)}

    ' + html = render_email_template( + "emails/waitlist_general.html", + lang=lang, + preheader=_t("email_waitlist_general_preheader", lang), ) await send_email( to=email, subject=subject, - html=_email_wrap(body, lang, preheader=preheader), + html=html, from_addr=EMAIL_ADDRESSES["transactional"], email_type="waitlist", ) @@ -524,26 +491,20 @@ async def handle_send_lead_matched_notification(payload: dict) -> None: first_name = (lead["contact_name"] or "").split()[0] if lead.get("contact_name") else "there" - body = ( - f'

    {_t("email_lead_matched_heading", lang)}

    ' - f'
    ' - f'

    {_t("email_lead_matched_greeting", lang, first_name=first_name)}

    ' - f'

    {_t("email_lead_matched_body", lang)}

    ' - f'

    {_t("email_lead_matched_context", lang, facility_type=lead["facility_type"] or "padel", court_count=lead["court_count"] or "?", country=lead["country"] or "your area")}

    ' - # What happens next - f'

    {_t("email_lead_matched_next_heading", lang)}

    ' - f'

    {_t("email_lead_matched_next_body", lang)}

    ' - f'

    ' - f'{_t("email_lead_matched_tip", lang)}

    ' - f'{_email_button(f"{config.BASE_URL}/dashboard", _t("email_lead_matched_btn", lang))}' - f'

    {_t("email_lead_matched_note", lang)}

    ' + html = render_email_template( + "emails/lead_matched.html", + lang=lang, + first_name=first_name, + facility_type=lead["facility_type"] or "padel", + court_count=lead["court_count"] or "?", + country=lead["country"] or "your area", + preheader=_t("email_lead_matched_preheader", lang), ) await send_email( to=lead["contact_email"], subject=_t("email_lead_matched_subject", lang, first_name=first_name), - html=_email_wrap(body, lang, preheader=_t("email_lead_matched_preheader", lang)), + html=html, from_addr=EMAIL_ADDRESSES["leads"], email_type="lead_matched", ) From c31d4a71a0be079618282267af29516cdb83a4e9 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:10:55 +0100 Subject: [PATCH 89/98] =?UTF-8?q?feat(emails):=20subtask=204=20=E2=80=94?= =?UTF-8?q?=204=20complex=20templates=20(lead=5Fforward,=20match=5Fnotify,?= =?UTF-8?q?=20digest,=20business=5Fplan)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add lead_forward.html (brief table + contact table + optional CTA token link) - Add lead_match_notify.html (new matching lead alert with heat badge) - Add weekly_digest.html (leads table with Jinja2 for loop) - Add business_plan.html (PDF ready notification with download CTA) - Refactor 4 handlers in worker.py: send_lead_forward_email, notify_matching_suppliers, send_weekly_lead_digest, generate_business_plan Co-Authored-By: Claude Sonnet 4.6 --- .../templates/emails/business_plan.html | 11 ++ .../templates/emails/lead_forward.html | 53 ++++++ .../templates/emails/lead_match_notify.html | 30 ++++ .../templates/emails/weekly_digest.html | 33 ++++ web/src/padelnomics/worker.py | 160 +++++------------- 5 files changed, 172 insertions(+), 115 deletions(-) create mode 100644 web/src/padelnomics/templates/emails/business_plan.html create mode 100644 web/src/padelnomics/templates/emails/lead_forward.html create mode 100644 web/src/padelnomics/templates/emails/lead_match_notify.html create mode 100644 
web/src/padelnomics/templates/emails/weekly_digest.html diff --git a/web/src/padelnomics/templates/emails/business_plan.html b/web/src/padelnomics/templates/emails/business_plan.html new file mode 100644 index 0000000..1443d87 --- /dev/null +++ b/web/src/padelnomics/templates/emails/business_plan.html @@ -0,0 +1,11 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button %} + +{% block body %} +

    {{ t.email_business_plan_heading }}

    +
    +

    {{ t.email_business_plan_body }}

    +

    {{ t.email_business_plan_includes }}

    +{{ email_button(download_url, t.email_business_plan_btn) }} +

    {{ t.email_business_plan_quote_cta | tformat(quote_url=quote_url) }}

    +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/lead_forward.html b/web/src/padelnomics/templates/emails/lead_forward.html new file mode 100644 index 0000000..fdac780 --- /dev/null +++ b/web/src/padelnomics/templates/emails/lead_forward.html @@ -0,0 +1,53 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button, heat_badge, section_heading %} + +{% block body %} +{# Yellow urgency banner #} +

    {{ t.email_lead_forward_urgency }}

    + +

    {{ t.email_lead_forward_heading }} {{ heat_badge(heat) }}

    +
    + +{{ section_heading(t.email_lead_forward_section_brief) }} + + {% for label, value in brief_rows %} + + + + + {% endfor %} +
    {{ label }}{{ value }}
    + +{{ section_heading(t.email_lead_forward_section_contact) }} + + + + + + + + + + + + + + + + + + + + + +
    {{ t.email_lead_forward_lbl_name }}{{ contact_name }}
    {{ t.email_lead_forward_lbl_email }}{{ contact_email }}
    {{ t.email_lead_forward_lbl_phone }}{{ contact_phone }}
    {{ t.email_lead_forward_lbl_company }}{{ contact_company }}
    {{ t.email_lead_forward_lbl_role }}{{ stakeholder_type }}
    + +{{ email_button(base_url ~ "/suppliers/leads", t.email_lead_forward_btn) }} +

    {{ t.email_lead_forward_reply_direct | tformat(contact_email=contact_email) }}

    + +{%- if cta_url %} +

    + ✓ Mark as contacted +

    +{%- endif %} +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/lead_match_notify.html b/web/src/padelnomics/templates/emails/lead_match_notify.html new file mode 100644 index 0000000..918625d --- /dev/null +++ b/web/src/padelnomics/templates/emails/lead_match_notify.html @@ -0,0 +1,30 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button, heat_badge %} + +{% block body %} +

    New [{{ heat }}] lead in {{ country }} {{ heat_badge(heat) }}

    +
    +

    A new project brief has been submitted that matches your service area.

    + + + + + + + + + + + + + + + + + + +
    Facility{{ facility_type }}
    Courts{{ courts }}
    Country{{ country }}
    Timeline{{ timeline or "-" }}
    + +

    Contact details are available after unlocking. Credits required: {{ credit_cost }}.

    +{{ email_button(base_url ~ "/suppliers/leads", "View lead feed") }} +{% endblock %} diff --git a/web/src/padelnomics/templates/emails/weekly_digest.html b/web/src/padelnomics/templates/emails/weekly_digest.html new file mode 100644 index 0000000..3f74930 --- /dev/null +++ b/web/src/padelnomics/templates/emails/weekly_digest.html @@ -0,0 +1,33 @@ +{% extends "emails/_base.html" %} +{% from "emails/_macros.html" import email_button, heat_badge_sm %} + +{% block body %} +

    + Your weekly lead digest — {{ leads | length }} new {{ "lead" if leads | length == 1 else "leads" }} +

    +
    +

    New matching leads in your service area this week:

    + + + + + + + + + + + {% for lead in leads %} + + + + + + {% endfor %} + +
    ProjectCountryTimeline
    + {{ heat_badge_sm(lead.heat | upper) }} {{ lead.facility_type or "Padel" }}, {{ lead.court_count or "?" }} courts + {{ lead.country or "-" }}{{ lead.timeline or "-" }}
    + +{{ email_button(base_url ~ "/suppliers/leads", "Unlock leads →") }} +{% endblock %} diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index f9db18e..76e48bd 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -384,15 +384,6 @@ async def handle_send_lead_forward_email(payload: dict) -> None: subject = f"[{heat}] New padel project in {country} \u2014 {courts} courts, \u20ac{budget}" - # Heat badge color - heat_colors = {"HOT": "#DC2626", "WARM": "#EA580C", "COOL": "#2563EB"} - heat_bg = heat_colors.get(heat, "#2563EB") - heat_badge = ( - f'{heat}' - ) - tl = lambda key: _t(key, lang) # noqa: E731 brief_rows = [ @@ -405,50 +396,11 @@ async def handle_send_lead_forward_email(payload: dict) -> None: (tl("email_lead_forward_lbl_additional"), lead["additional_info"] or "-"), ] - brief_html = "" - for label, value in brief_rows: - brief_html += ( - f'{label}' - f'{value}' - ) - - contact_name = lead["contact_name"] or "-" - contact_phone = lead["contact_phone"] or "-" - - # Contact section with prominent email - contact_html = ( - f'{tl("email_lead_forward_lbl_name")}' - f'{contact_name}' - f'{tl("email_lead_forward_lbl_email")}' - f'{contact_email}' - f'{tl("email_lead_forward_lbl_phone")}' - f'{contact_phone}' - f'{tl("email_lead_forward_lbl_company")}' - f'{lead["contact_company"] or "-"}' - f'{tl("email_lead_forward_lbl_role")}' - f'{lead["stakeholder_type"] or "-"}' - ) - preheader_parts = [f"{facility_type} project"] if timeline: preheader_parts.append(f"{timeline} timeline") preheader_parts.append(_t("email_lead_forward_preheader_suffix", lang)) - body = ( - f'

    ' - f'{_t("email_lead_forward_urgency", lang)}

    ' - f'

    {tl("email_lead_forward_heading")} {heat_badge}

    ' - f'
    ' - f'

    {tl("email_lead_forward_section_brief")}

    ' - f'{brief_html}
    ' - f'

    {tl("email_lead_forward_section_contact")}

    ' - f'{contact_html}
    ' - f'{_email_button(f"{config.BASE_URL}/suppliers/leads", tl("email_lead_forward_btn"))}' - f'

    ' - f'{_t("email_lead_forward_reply_direct", lang, contact_email=contact_email)}

    ' - ) - # Send to supplier contact email or general contact to_email = supplier.get("contact_email") or supplier.get("contact") or "" if not to_email: @@ -458,16 +410,25 @@ async def handle_send_lead_forward_email(payload: dict) -> None: # Generate one-click "I've contacted this lead" CTA token cta_token = secrets.token_urlsafe(24) cta_url = f"{config.BASE_URL}/suppliers/leads/cta/{cta_token}" - body += ( - f'

    ' - f'' - f'✓ Mark as contacted

    ' + + html = render_email_template( + "emails/lead_forward.html", + lang=lang, + heat=heat, + brief_rows=brief_rows, + contact_name=lead["contact_name"] or "-", + contact_email=contact_email, + contact_phone=lead["contact_phone"] or "-", + contact_company=lead["contact_company"] or "-", + stakeholder_type=lead["stakeholder_type"] or "-", + cta_url=cta_url, + preheader=", ".join(preheader_parts), ) await send_email( to=to_email, subject=subject, - html=_email_wrap(body, lang, preheader=", ".join(preheader_parts)), + html=html, from_addr=EMAIL_ADDRESSES["leads"], email_type="lead_forward", ) @@ -549,30 +510,22 @@ async def handle_notify_matching_suppliers(payload: dict) -> None: if not to_email: continue - body = ( - f'

    ' - f'New [{heat}] lead in {country}

    ' - f'
    ' - f'

    A new project brief has been submitted that matches your service area.

    ' - f'' - f'' - f'' - f'' - f'' - f'' - f'' - f'' - f'' - f'
    Facility{facility_type}
    Courts{courts}
    Country{country}
    Timeline{timeline or "-"}
    ' - f'

    ' - f'Contact details are available after unlocking. Credits required: {lead.get("credit_cost", "?")}.

    ' - f'{_email_button(f"{config.BASE_URL}/suppliers/leads", "View lead feed")}' + notify_html = render_email_template( + "emails/lead_match_notify.html", + lang=lang, + heat=heat, + country=country, + facility_type=facility_type, + courts=courts, + timeline=timeline, + credit_cost=lead.get("credit_cost", "?"), + preheader=f"New matching lead in {country}", ) await send_email( to=to_email, subject=f"[{heat}] New {facility_type} project in {country} — {courts} courts", - html=_email_wrap(body, lang, preheader=f"New matching lead in {country}"), + html=notify_html, from_addr=EMAIL_ADDRESSES["leads"], email_type="lead_match_notify", ) @@ -617,48 +570,27 @@ async def handle_send_weekly_lead_digest(payload: dict) -> None: if not new_leads: continue - lead_rows_html = "" - for ld in new_leads: - heat = (ld["heat_score"] or "cool").upper() - heat_colors = {"HOT": "#DC2626", "WARM": "#EA580C", "COOL": "#2563EB"} - hc = heat_colors.get(heat, "#2563EB") - badge = ( - f'{heat}' - ) - lead_rows_html += ( - f'' - f'' - f'{badge} {ld["facility_type"] or "Padel"}, {ld["court_count"] or "?"} courts' - f'{ld["country"] or "-"}' - f'{ld["timeline"] or "-"}' - f'' - ) - - body = ( - f'

    ' - f'Your weekly lead digest — {len(new_leads)} new {"lead" if len(new_leads) == 1 else "leads"}

    ' - f'
    ' - f'

    New matching leads in your service area this week:

    ' - f'' - f'' - f'' - f'' - f'' - f'' - f'{lead_rows_html}' - f'
    ProjectCountryTimeline
    ' - f'{_email_button(f"{config.BASE_URL}/suppliers/leads", "Unlock leads →")}' - ) + # Normalise lead dicts for template — heat_score → heat (uppercase) + digest_leads = [ + {**ld, "heat": (ld["heat_score"] or "cool").upper()} + for ld in new_leads + ] area_summary = ", ".join(countries[:3]) if len(countries) > 3: area_summary += f" +{len(countries) - 3}" + digest_html = render_email_template( + "emails/weekly_digest.html", + lang="en", + leads=digest_leads, + preheader=f"{len(new_leads)} new leads matching your service area", + ) + await send_email( to=to_email, subject=f"{len(new_leads)} new padel {'lead' if len(new_leads) == 1 else 'leads'} in {area_summary}", - html=_email_wrap(body, "en", preheader=f"{len(new_leads)} new leads matching your service area"), + html=digest_html, from_addr=EMAIL_ADDRESSES["leads"], email_type="weekly_digest", ) @@ -776,19 +708,17 @@ async def handle_generate_business_plan(payload: dict) -> None: export_token = export_row["token"] user = await fetch_one("SELECT email FROM users WHERE id = ?", (user_id,)) if user: - body = ( - f'

    {_t("email_business_plan_heading", language)}

    ' - f'
    ' - f'

    {_t("email_business_plan_body", language)}

    ' - f'

    {_t("email_business_plan_includes", language)}

    ' - f'{_email_button(f"{config.BASE_URL}/planner/export/{export_token}", _t("email_business_plan_btn", language))}' - f'

    ' - f'{_t("email_business_plan_quote_cta", language, quote_url=f"{config.BASE_URL}/{language}/leads/quote")}

    ' + bp_html = render_email_template( + "emails/business_plan.html", + lang=language, + download_url=f"{config.BASE_URL}/planner/export/{export_token}", + quote_url=f"{config.BASE_URL}/{language}/leads/quote", + preheader=_t("email_business_plan_preheader", language), ) await send_email( to=user["email"], subject=_t("email_business_plan_subject", language), - html=_email_wrap(body, language, preheader=_t("email_business_plan_preheader", language)), + html=bp_html, from_addr=EMAIL_ADDRESSES["transactional"], email_type="business_plan", ) From 536d5c8f402a45f396df6f522d9b97e16ae830b2 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:12:09 +0100 Subject: [PATCH 90/98] =?UTF-8?q?feat(emails):=20subtask=205=20=E2=80=94?= =?UTF-8?q?=20compose=20preview=20(admin=5Fcompose=20template=20+=20HTMX?= =?UTF-8?q?=20endpoint)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add emails/admin_compose.html: branded wrapper for ad-hoc compose body - Update email_compose.html: two-column layout with HTMX live preview pane (hx-post, hx-trigger=input delay:500ms, hx-target=#preview-pane) - Add partials/email_preview_frame.html: sandboxed iframe partial - Add POST /admin/emails/compose/preview route (no CSRF — read-only render) - Update email_compose POST handler to use render_email_template() instead of importing _email_wrap from worker Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 43 ++++++- .../admin/templates/admin/email_compose.html | 106 ++++++++++++------ .../admin/partials/email_preview_frame.html | 8 ++ .../templates/emails/admin_compose.html | 5 + 4 files changed, 126 insertions(+), 36 deletions(-) create mode 100644 web/src/padelnomics/admin/templates/admin/partials/email_preview_frame.html create mode 100644 web/src/padelnomics/templates/emails/admin_compose.html diff --git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index 32bf36d..b60d27f 100644 --- 
a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -33,6 +33,7 @@ from ..core import ( utcnow, utcnow_iso, ) +from ..email_templates import EMAIL_TEMPLATE_REGISTRY, render_email_template logger = logging.getLogger(__name__) @@ -1398,10 +1399,16 @@ async def email_compose(): email_addresses=EMAIL_ADDRESSES, ) - html = f"

    {body.replace(chr(10), '
    ')}

    " + body_html = f"

    {body.replace(chr(10), '
    ')}

    " if wrap: - from ..worker import _email_wrap - html = _email_wrap(html) + html = render_email_template( + "emails/admin_compose.html", + lang="en", + body_html=body_html, + preheader="", + ) + else: + html = body_html result = await send_email( to=to, subject=subject, html=html, @@ -1424,6 +1431,36 @@ async def email_compose(): ) +@bp.route("/emails/compose/preview", methods=["POST"]) +@role_required("admin") +async def compose_preview(): + """HTMX endpoint: render live preview for compose textarea (no CSRF — read-only).""" + form = await request.form + body = form.get("body", "").strip() + wrap = form.get("wrap", "") == "1" + + body_html = f"

    {body.replace(chr(10), '
    ')}

    " if body else "" + + if wrap and body_html: + try: + rendered_html = render_email_template( + "emails/admin_compose.html", + lang="en", + body_html=body_html, + preheader="", + ) + except Exception: + logger.exception("compose_preview: template render failed") + rendered_html = body_html + else: + rendered_html = body_html + + return await render_template( + "admin/partials/email_preview_frame.html", + rendered_html=rendered_html, + ) + + # --- Audiences --- @bp.route("/emails/audiences") diff --git a/web/src/padelnomics/admin/templates/admin/email_compose.html b/web/src/padelnomics/admin/templates/admin/email_compose.html index dfd5a7a..1ab8846 100644 --- a/web/src/padelnomics/admin/templates/admin/email_compose.html +++ b/web/src/padelnomics/admin/templates/admin/email_compose.html @@ -2,51 +2,91 @@ {% set admin_page = "compose" %} {% block title %}Compose Email - Admin - {{ config.APP_NAME }}{% endblock %} +{% block admin_head %} + +{% endblock %} + {% block admin_content %}
    ← Sent Log

    Compose Email

    -
    -
    - +
    + {# ── Left: form ────────────────────────────────────── #} +
    +
    + + -
    - - -
    +
    + + +
    -
    - - -
    +
    + + +
    -
    - - -
    +
    + + +
    -
    - - -
    +
    + + +
    -
    - -
    +
    + +
    -
    - - Cancel +
    + + Cancel +
    +
    - +
    + + {# ── Right: live preview panel ─────────────────────── #} +
    +
    Live preview
    +
    +

    Start typing to see a preview…

    +
    +
    {% endblock %} diff --git a/web/src/padelnomics/admin/templates/admin/partials/email_preview_frame.html b/web/src/padelnomics/admin/templates/admin/partials/email_preview_frame.html new file mode 100644 index 0000000..5943fe3 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/partials/email_preview_frame.html @@ -0,0 +1,8 @@ +{# HTMX partial: sandboxed iframe showing a rendered email preview. + Rendered by POST /admin/emails/compose/preview. #} + diff --git a/web/src/padelnomics/templates/emails/admin_compose.html b/web/src/padelnomics/templates/emails/admin_compose.html new file mode 100644 index 0000000..657ad07 --- /dev/null +++ b/web/src/padelnomics/templates/emails/admin_compose.html @@ -0,0 +1,5 @@ +{% extends "emails/_base.html" %} + +{% block body %} +{{ body_html | safe }} +{% endblock %} From 4fafd3e80e659723c552911720ea9cab11a6ecd8 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:13:35 +0100 Subject: [PATCH 91/98] =?UTF-8?q?feat(emails):=20subtask=206=20=E2=80=94?= =?UTF-8?q?=20admin=20gallery=20(routes,=20templates,=20sidebar=20link)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add GET /admin/emails/gallery — card grid of all 11 email types - Add GET /admin/emails/gallery/?lang=en|de — preview with lang toggle - Add email_gallery.html: 3-column responsive card grid - Add email_gallery_preview.html: full-width iframe + EN/DE toggle + log link - Add Gallery sidebar link to base_admin.html (admin_page == 'gallery') Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/admin/routes.py | 39 +++++++++ .../admin/templates/admin/base_admin.html | 4 + .../admin/templates/admin/email_gallery.html | 81 +++++++++++++++++++ .../admin/email_gallery_preview.html | 61 ++++++++++++++ 4 files changed, 185 insertions(+) create mode 100644 web/src/padelnomics/admin/templates/admin/email_gallery.html create mode 100644 web/src/padelnomics/admin/templates/admin/email_gallery_preview.html diff 
--git a/web/src/padelnomics/admin/routes.py b/web/src/padelnomics/admin/routes.py index b60d27f..c578d99 100644 --- a/web/src/padelnomics/admin/routes.py +++ b/web/src/padelnomics/admin/routes.py @@ -1249,6 +1249,45 @@ async def emails(): ) +@bp.route("/emails/gallery") +@role_required("admin") +async def email_gallery(): + """Gallery of all email template types with sample previews.""" + return await render_template( + "admin/email_gallery.html", + registry=EMAIL_TEMPLATE_REGISTRY, + ) + + +@bp.route("/emails/gallery/") +@role_required("admin") +async def email_gallery_preview(slug: str): + """Rendered preview of a single email template with sample data.""" + entry = EMAIL_TEMPLATE_REGISTRY.get(slug) + if not entry: + await flash(f"Unknown email template: {slug!r}", "error") + return redirect(url_for("admin.email_gallery")) + + lang = request.args.get("lang", "en") + if lang not in ("en", "de"): + lang = "en" + + try: + sample = entry["sample_data"](lang) + rendered_html = render_email_template(entry["template"], lang=lang, **sample) + except Exception: + logger.exception("email_gallery_preview: render failed for %s (lang=%s)", slug, lang) + rendered_html = "

    Render error — see logs.

    " + + return await render_template( + "admin/email_gallery_preview.html", + slug=slug, + entry=entry, + lang=lang, + rendered_html=rendered_html, + ) + + @bp.route("/emails/results") @role_required("admin") async def email_results(): diff --git a/web/src/padelnomics/admin/templates/admin/base_admin.html b/web/src/padelnomics/admin/templates/admin/base_admin.html index 06883f3..d5ffbf0 100644 --- a/web/src/padelnomics/admin/templates/admin/base_admin.html +++ b/web/src/padelnomics/admin/templates/admin/base_admin.html @@ -118,6 +118,10 @@ Compose + + + Gallery + Audiences diff --git a/web/src/padelnomics/admin/templates/admin/email_gallery.html b/web/src/padelnomics/admin/templates/admin/email_gallery.html new file mode 100644 index 0000000..6e03f13 --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/email_gallery.html @@ -0,0 +1,81 @@ +{% extends "admin/base_admin.html" %} +{% set admin_page = "gallery" %} +{% block title %}Email Gallery - Admin - {{ config.APP_NAME }}{% endblock %} + +{% block admin_head %} + +{% endblock %} + +{% block admin_content %} +
    +
    +

    Email Gallery

    + {{ registry | length }} template{{ 's' if registry | length != 1 else '' }} +
    +

    Rendered previews of all transactional email templates with sample data.

    +
    + +
    +{% endblock %} diff --git a/web/src/padelnomics/admin/templates/admin/email_gallery_preview.html b/web/src/padelnomics/admin/templates/admin/email_gallery_preview.html new file mode 100644 index 0000000..3fc6bfa --- /dev/null +++ b/web/src/padelnomics/admin/templates/admin/email_gallery_preview.html @@ -0,0 +1,61 @@ +{% extends "admin/base_admin.html" %} +{% set admin_page = "gallery" %} +{% block title %}{{ entry.label }} Preview - Email Gallery - Admin{% endblock %} + +{% block admin_head %} + +{% endblock %} + +{% block admin_content %} +
    + ← Email Gallery +

    {{ entry.label }}

    +

    {{ entry.description }}

    +
    + +
    +
    + {% if entry.email_type %} + + View in sent log → + + {% endif %} +
    + + {# Language toggle #} +
    + EN + DE +
    +
    + + +{% endblock %} From 7b03fd71f99560388133965ed8a7baa208e71a09 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:14:38 +0100 Subject: [PATCH 92/98] feat(extract): convert playtomic_availability to JSONL output - availability_{date}.jsonl.gz replaces .json.gz for morning snapshots - Each JSONL line = one venue object with date + captured_at_utc injected - Eliminates in-memory consolidation: working.jsonl IS the final file (compress_jsonl_atomic at end instead of write_gzip_atomic blob) - Crash recovery unchanged: working.jsonl accumulates via flush_partial_batch - _load_morning_availability tries .jsonl.gz first, falls back to .json.gz - Skip check covers both formats during transition - Recheck files stay blob format (small, infrequent) stg_playtomic_availability: UNION ALL transition (morning_jsonl + morning_blob + recheck_blob) - morning_jsonl: read_json JSONL, tenant_id direct column, no outer UNNEST - morning_blob / recheck_blob: subquery + LATERAL UNNEST (unchanged semantics) - All three produce (snapshot_date, captured_at_utc, snapshot_type, recheck_hour, tenant_id, slots_json) - Downstream raw_resources / raw_slots CTEs unchanged Co-Authored-By: Claude Sonnet 4.6 --- .../playtomic_availability.py | 77 ++++++---- .../staging/stg_playtomic_availability.sql | 145 +++++++++++------- 2 files changed, 139 insertions(+), 83 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py index e086855..1fab6e2 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py @@ -16,7 +16,7 @@ records (a few seconds of work with 10 workers) are lost on crash. Recheck mode: re-queries venues with slots starting within the next 90 minutes. Writes a separate recheck file for more accurate occupancy measurement. 
-Landing: {LANDING_DIR}/playtomic/{year}/{month}/availability_{date}.json.gz +Landing: {LANDING_DIR}/playtomic/{year}/{month}/availability_{date}.jsonl.gz Recheck: {LANDING_DIR}/playtomic/{year}/{month}/availability_{date}_recheck_{HH}.json.gz """ @@ -34,7 +34,13 @@ import niquests from ._shared import HTTP_TIMEOUT_SECONDS, USER_AGENT, run_extractor, setup_logging from .proxy import load_fallback_proxy_urls, load_proxy_urls, make_tiered_cycler -from .utils import flush_partial_batch, landing_path, load_partial_results, write_gzip_atomic +from .utils import ( + compress_jsonl_atomic, + flush_partial_batch, + landing_path, + load_partial_results, + write_gzip_atomic, +) logger = setup_logging("padelnomics.extract.playtomic_availability") @@ -273,14 +279,14 @@ def extract( year, month = year_month.split("/") dest_dir = landing_path(landing_dir, "playtomic", year, month) - dest = dest_dir / f"availability_{target_date}.json.gz" - - if dest.exists(): - logger.info("Already have %s — skipping", dest) + dest = dest_dir / f"availability_{target_date}.jsonl.gz" + old_blob = dest_dir / f"availability_{target_date}.json.gz" + if dest.exists() or old_blob.exists(): + logger.info("Already have availability for %s — skipping", target_date) return {"files_written": 0, "files_skipped": 1, "bytes_written": 0} - # Crash resumption: load already-fetched venues from partial file - partial_path = dest.with_suffix(".partial.jsonl") + # Crash resumption: load already-fetched venues from working file + partial_path = dest_dir / f"availability_{target_date}.working.jsonl" prior_results, already_done = load_partial_results(partial_path, id_key="tenant_id") if already_done: logger.info("Resuming: %d venues already fetched from partial file", len(already_done)) @@ -297,7 +303,10 @@ def extract( start_min_str = start_min.strftime("%Y-%m-%dT%H:%M:%S") start_max_str = start_max.strftime("%Y-%m-%dT%H:%M:%S") - # Partial file for incremental crash-safe progress + # Timestamp stamped into every 
JSONL line — computed once before the fetch loop. + captured_at = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ") + + # Working file for incremental crash-safe progress (IS the final file). partial_file = open(partial_path, "a") # noqa: SIM115 partial_lock = threading.Lock() pending_batch: list[dict] = [] @@ -305,6 +314,9 @@ def extract( def _on_result(result: dict) -> None: # Called inside _fetch_venues_parallel's lock — no additional locking needed. # In serial mode, called single-threaded — also safe without extra locking. + # Inject date + captured_at so every JSONL line is self-contained. + result["date"] = target_date + result["captured_at_utc"] = captured_at pending_batch.append(result) if len(pending_batch) >= PARTIAL_FLUSH_SIZE: flush_partial_batch(partial_file, partial_lock, pending_batch) @@ -348,24 +360,13 @@ def extract( pending_batch.clear() partial_file.close() - # Consolidate prior (resumed) + new results into final file - venues_data = prior_results + new_venues_data - captured_at = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ") - payload = json.dumps({ - "date": target_date, - "captured_at_utc": captured_at, - "venue_count": len(venues_data), - "venues_errored": venues_errored, - "venues": venues_data, - }).encode() - - bytes_written = write_gzip_atomic(dest, payload) - if partial_path.exists(): - partial_path.unlink() + # Working file IS the output — compress atomically (deletes source). 
+ total_venues = len(prior_results) + len(new_venues_data) + bytes_written = compress_jsonl_atomic(partial_path, dest) logger.info( "%d venues scraped (%d errors) -> %s (%s bytes)", - len(venues_data), venues_errored, dest, f"{bytes_written:,}", + total_venues, venues_errored, dest, f"{bytes_written:,}", ) return { @@ -380,14 +381,36 @@ def extract( # Recheck mode — re-query venues with upcoming slots for accurate occupancy # --------------------------------------------------------------------------- +def _read_availability_jsonl(path: Path) -> dict: + """Read a JSONL availability file into the blob dict format recheck expects.""" + venues = [] + date_val = captured_at = None + with gzip.open(path, "rt") as f: + for line in f: + line = line.strip() + if not line: + continue + try: + record = json.loads(line) + except json.JSONDecodeError: + break # truncated last line on crash + if date_val is None: + date_val = record.get("date") + captured_at = record.get("captured_at_utc") + venues.append(record) + return {"date": date_val, "captured_at_utc": captured_at, "venues": venues} + + def _load_morning_availability(landing_dir: Path, target_date: str) -> dict | None: - """Load today's morning availability file. Returns parsed JSON or None.""" + """Load today's morning availability file (JSONL or blob). 
Returns dict or None.""" playtomic_dir = landing_dir / "playtomic" - # Search across year/month dirs for the target date + # Try JSONL first (new format), fall back to blob (old format) + matches = list(playtomic_dir.glob(f"*/*/availability_{target_date}.jsonl.gz")) + if matches: + return _read_availability_jsonl(matches[0]) matches = list(playtomic_dir.glob(f"*/*/availability_{target_date}.json.gz")) if not matches: return None - with gzip.open(matches[0], "rb") as f: return json.loads(f.read()) diff --git a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_availability.sql b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_availability.sql index bf0b3f2..9092796 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_availability.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_playtomic_availability.sql @@ -3,12 +3,17 @@ -- "Available" = the slot was NOT booked at capture time. Missing slots = booked. -- -- Reads BOTH morning snapshots and recheck files: --- Morning: availability_{date}.json.gz → snapshot_type = 'morning' --- Recheck: availability_{date}_recheck_{HH}.json.gz → snapshot_type = 'recheck' +-- Morning (new): availability_{date}.jsonl.gz → snapshot_type = 'morning' +-- Morning (old): availability_{date}.json.gz → snapshot_type = 'morning' +-- Recheck: availability_{date}_recheck_{HH}.json.gz → snapshot_type = 'recheck' -- -- Only 60-min duration slots are kept (canonical hourly rate + occupancy unit). -- Price parsed from strings like "14.56 EUR" or "48 GBP". -- +-- Supports two morning landing formats (UNION ALL during migration): +-- New: availability_{date}.jsonl.gz — one venue per line, columns: tenant_id, slots, date, captured_at_utc +-- Old: availability_{date}.json.gz — {"date":..., "venues": [...]} blob (UNNEST required) +-- -- Requires: at least one availability file in the landing zone. 
-- A seed file (data/landing/playtomic/1970/01/availability_1970-01-01.json.gz) -- with empty venues[] ensures this model runs before real data arrives. @@ -20,77 +25,105 @@ MODEL ( grain (snapshot_date, tenant_id, resource_id, slot_start_time, snapshot_type, captured_at_utc) ); --- Morning snapshots (filename does NOT contain '_recheck_') -WITH morning_files AS ( +WITH +-- New format: one venue per JSONL line — no outer UNNEST needed +morning_jsonl AS ( SELECT - *, - 'morning' AS snapshot_type, - NULL::INTEGER AS recheck_hour + date AS snapshot_date, + captured_at_utc, + 'morning' AS snapshot_type, + NULL::INTEGER AS recheck_hour, + tenant_id, + slots AS slots_json FROM read_json( - @LANDING_DIR || '/playtomic/*/*/availability_*.json.gz', - format = 'auto', + @LANDING_DIR || '/playtomic/*/*/availability_*.jsonl.gz', + format = 'newline_delimited', columns = { date: 'VARCHAR', captured_at_utc: 'VARCHAR', - venues: 'JSON[]' + tenant_id: 'VARCHAR', + slots: 'JSON' }, - filename = true, - maximum_object_size = 134217728 -- 128 MB; daily files grow with venue count + filename = true ) WHERE filename NOT LIKE '%_recheck_%' - AND venues IS NOT NULL - AND json_array_length(venues) > 0 + AND tenant_id IS NOT NULL ), --- Recheck snapshots (filename contains '_recheck_') --- Use TRY_CAST on a regex-extracted hour to get the recheck_hour. --- If no recheck files exist yet, this CTE produces zero rows (safe). 
-recheck_files AS ( +-- Old format: {"date":..., "venues": [...]} blob — kept for transition +morning_blob AS ( SELECT - *, - 'recheck' AS snapshot_type, - TRY_CAST( - regexp_extract(filename, '_recheck_(\d+)', 1) AS INTEGER - ) AS recheck_hour - FROM read_json( - @LANDING_DIR || '/playtomic/*/*/availability_*_recheck_*.json.gz', - format = 'auto', - columns = { - date: 'VARCHAR', - captured_at_utc: 'VARCHAR', - venues: 'JSON[]' - }, - filename = true, - maximum_object_size = 134217728 -- 128 MB; matches morning snapshot limit - ) - WHERE venues IS NOT NULL - AND json_array_length(venues) > 0 -), -all_files AS ( - SELECT date, captured_at_utc, venues, snapshot_type, recheck_hour FROM morning_files - UNION ALL - SELECT date, captured_at_utc, venues, snapshot_type, recheck_hour FROM recheck_files -), -raw_venues AS ( - SELECT - af.date AS snapshot_date, + af.date AS snapshot_date, af.captured_at_utc, - af.snapshot_type, - af.recheck_hour, - venue_json - FROM all_files af, + 'morning' AS snapshot_type, + NULL::INTEGER AS recheck_hour, + venue_json ->> 'tenant_id' AS tenant_id, + venue_json -> 'slots' AS slots_json + FROM ( + SELECT date, captured_at_utc, venues + FROM read_json( + @LANDING_DIR || '/playtomic/*/*/availability_*.json.gz', + format = 'auto', + columns = { + date: 'VARCHAR', + captured_at_utc: 'VARCHAR', + venues: 'JSON[]' + }, + filename = true, + maximum_object_size = 134217728 -- 128 MB; daily files grow with venue count + ) + WHERE filename NOT LIKE '%_recheck_%' + AND venues IS NOT NULL + AND json_array_length(venues) > 0 + ) af, LATERAL UNNEST(af.venues) AS t(venue_json) ), +-- Recheck snapshots (blob format only — small files, no JSONL conversion needed) +recheck_blob AS ( + SELECT + rf.date AS snapshot_date, + rf.captured_at_utc, + 'recheck' AS snapshot_type, + TRY_CAST( + regexp_extract(rf.filename, '_recheck_(\d+)', 1) AS INTEGER + ) AS recheck_hour, + venue_json ->> 'tenant_id' AS tenant_id, + venue_json -> 'slots' AS slots_json + FROM ( + 
SELECT date, captured_at_utc, venues, filename + FROM read_json( + @LANDING_DIR || '/playtomic/*/*/availability_*_recheck_*.json.gz', + format = 'auto', + columns = { + date: 'VARCHAR', + captured_at_utc: 'VARCHAR', + venues: 'JSON[]' + }, + filename = true, + maximum_object_size = 134217728 -- 128 MB; matches morning snapshot limit + ) + WHERE venues IS NOT NULL + AND json_array_length(venues) > 0 + ) rf, + LATERAL UNNEST(rf.venues) AS t(venue_json) +), +all_venues AS ( + SELECT * FROM morning_jsonl + UNION ALL + SELECT * FROM morning_blob + UNION ALL + SELECT * FROM recheck_blob +), raw_resources AS ( SELECT - rv.snapshot_date, - rv.captured_at_utc, - rv.snapshot_type, - rv.recheck_hour, - rv.venue_json ->> 'tenant_id' AS tenant_id, + av.snapshot_date, + av.captured_at_utc, + av.snapshot_type, + av.recheck_hour, + av.tenant_id, resource_json - FROM raw_venues rv, + FROM all_venues av, LATERAL UNNEST( - from_json(rv.venue_json -> 'slots', '["JSON"]') + from_json(av.slots_json, '["JSON"]') ) AS t(resource_json) ), raw_slots AS ( From a4f246d69a4d81c30055d0be4b279ff30d749bd1 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:16:59 +0100 Subject: [PATCH 93/98] feat(extract): convert geonames to JSONL output - cities_global.jsonl.gz replaces .json.gz (one city object per line) - Empty placeholder writes a minimal .jsonl.gz (null row, filtered in staging) - Eliminates the {"rows": [...]} blob wrapper and maximum_object_size workaround stg_population_geonames: UNION ALL transition (jsonl_rows + blob_rows) - jsonl_rows: read_json JSONL, explicit columns, no UNNEST - blob_rows: existing UNNEST(rows) pattern with 40MB size limit retained Co-Authored-By: Claude Sonnet 4.6 --- .../src/padelnomics_extract/geonames.py | 30 ++++++++----- .../staging/stg_population_geonames.sql | 45 ++++++++++++++++--- 2 files changed, 58 insertions(+), 17 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/geonames.py 
b/extract/padelnomics_extract/src/padelnomics_extract/geonames.py index b6d6a8d..0e83498 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/geonames.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/geonames.py @@ -10,14 +10,14 @@ highest padel investment opportunity (white space markets). Requires: GEONAMES_USERNAME env var (free registration at geonames.org) -Landing: {LANDING_DIR}/geonames/{year}/{month}/cities_global.json.gz -Output: {"rows": [{"geoname_id": 2950159, "city_name": "Berlin", - "country_code": "DE", "population": 3644826, - "lat": 52.524, "lon": 13.411, - "admin1_code": "16", "admin2_code": "00", - "ref_year": 2024}], "count": N} +Landing: {LANDING_DIR}/geonames/{year}/{month}/cities_global.jsonl.gz +Output: one JSON object per line, e.g.: + {"geoname_id": 2950159, "city_name": "Berlin", "country_code": "DE", + "population": 3644826, "lat": 52.524, "lon": 13.411, + "admin1_code": "16", "admin2_code": "00", "ref_year": 2024} """ +import gzip import io import json import os @@ -28,7 +28,7 @@ from pathlib import Path import niquests from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging -from .utils import get_last_cursor, landing_path, write_gzip_atomic +from .utils import compress_jsonl_atomic, get_last_cursor, landing_path logger = setup_logging("padelnomics.extract.geonames") @@ -131,9 +131,12 @@ def extract( logger.warning("GEONAMES_USERNAME not set — writing empty placeholder so SQLMesh models can run") year, month = year_month.split("/") dest_dir = landing_path(landing_dir, "geonames", year, month) - dest = dest_dir / "cities_global.json.gz" + dest = dest_dir / "cities_global.jsonl.gz" if not dest.exists(): - write_gzip_atomic(dest, b'{"rows": [], "count": 0}') + tmp = dest.with_suffix(".gz.tmp") + with gzip.open(tmp, "wt") as f: + f.write('{"geoname_id":null}\n') # filtered by WHERE geoname_id IS NOT NULL + tmp.rename(dest) return {"files_written": 0, "files_skipped": 1, "bytes_written": 0} 
last_cursor = get_last_cursor(conn, EXTRACTOR_NAME) @@ -164,9 +167,12 @@ def extract( logger.info("parsed %d global locations (pop ≥1K)", len(rows)) dest_dir = landing_path(landing_dir, "geonames", year, month) - dest = dest_dir / "cities_global.json.gz" - payload = json.dumps({"rows": rows, "count": len(rows)}).encode() - bytes_written = write_gzip_atomic(dest, payload) + dest = dest_dir / "cities_global.jsonl.gz" + working_path = dest.with_suffix(".working.jsonl") + with open(working_path, "w") as f: + for row in rows: + f.write(json.dumps(row, separators=(",", ":")) + "\n") + bytes_written = compress_jsonl_atomic(working_path, dest) logger.info("written %s bytes compressed", f"{bytes_written:,}") return { diff --git a/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql b/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql index 699c90e..82f4826 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_population_geonames.sql @@ -3,7 +3,11 @@ -- Broad coverage (140K+ locations) enables Gemeinde-level market intelligence. -- One row per geoname_id (GeoNames stable numeric identifier). 
-- --- Source: data/landing/geonames/{year}/{month}/cities_global.json.gz +-- Supports two landing formats (UNION ALL during migration): +-- New: cities_global.jsonl.gz — one city per line, columns directly accessible +-- Old: cities_global.json.gz — {"rows": [...]} blob (UNNEST required) +-- +-- Source: data/landing/geonames/{year}/{month}/cities_global.{jsonl,json}.gz MODEL ( name staging.stg_population_geonames, @@ -12,7 +16,33 @@ MODEL ( grain geoname_id ); -WITH parsed AS ( +WITH +-- New format: one city per JSONL line +jsonl_rows AS ( + SELECT + TRY_CAST(geoname_id AS INTEGER) AS geoname_id, + city_name, + country_code, + TRY_CAST(lat AS DOUBLE) AS lat, + TRY_CAST(lon AS DOUBLE) AS lon, + admin1_code, + admin2_code, + TRY_CAST(population AS BIGINT) AS population, + TRY_CAST(ref_year AS INTEGER) AS ref_year, + CURRENT_DATE AS extracted_date + FROM read_json( + @LANDING_DIR || '/geonames/*/*/cities_global.jsonl.gz', + format = 'newline_delimited', + columns = { + geoname_id: 'INTEGER', city_name: 'VARCHAR', country_code: 'VARCHAR', + lat: 'DOUBLE', lon: 'DOUBLE', admin1_code: 'VARCHAR', admin2_code: 'VARCHAR', + population: 'BIGINT', ref_year: 'INTEGER' + } + ) + WHERE geoname_id IS NOT NULL +), +-- Old format: {"rows": [...]} blob — kept for transition +blob_rows AS ( SELECT TRY_CAST(row ->> 'geoname_id' AS INTEGER) AS geoname_id, row ->> 'city_name' AS city_name, @@ -33,11 +63,16 @@ WITH parsed AS ( ) ) WHERE (row ->> 'geoname_id') IS NOT NULL +), +all_rows AS ( + SELECT * FROM jsonl_rows + UNION ALL + SELECT * FROM blob_rows ) SELECT geoname_id, - TRIM(city_name) AS city_name, - UPPER(country_code) AS country_code, + TRIM(city_name) AS city_name, + UPPER(country_code) AS country_code, lat, lon, NULLIF(TRIM(admin1_code), '') AS admin1_code, @@ -45,7 +80,7 @@ SELECT population, ref_year, extracted_date -FROM parsed +FROM all_rows WHERE population IS NOT NULL AND population > 0 AND geoname_id IS NOT NULL From b5b8493543b42aecb09fe040cf5195f271a10ed5 Mon Sep 17 
00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:19:37 +0100 Subject: [PATCH 94/98] feat(extract): regional overpass_tennis splitting + JSONL output MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace single global Overpass query (150K+ elements, times out) with 10 regional bbox queries (~10-40K elements each, 150s server / 180s client). - REGIONS: 10 bboxes covering all continents - Crash recovery: working.jsonl accumulates per-region results; already_seen_ids deduplication skips re-written elements on restart - Overlapping bbox elements deduped by OSM id across regions - Retry per region: up to 2 retries with 30s cooldown - Polite 5s inter-region delay - Skip if courts.jsonl.gz or courts.json.gz already exists for the month stg_tennis_courts: UNION ALL transition (jsonl_elements + blob_elements) - jsonl_elements: JSONL, explicit columns, COALESCE lat/lon with center coords (supports both node direct lat/lon and way/relation Overpass out center) - blob_elements: existing UNNEST(elements) pattern, unchanged - Removed osm_type='node' filter — ways/relations now usable via center coords - Dedup on (osm_id, extracted_date DESC) unchanged Co-Authored-By: Claude Sonnet 4.6 --- .../padelnomics_extract/overpass_tennis.py | 163 ++++++++++++++---- .../models/staging/stg_tennis_courts.sql | 53 +++++- 2 files changed, 177 insertions(+), 39 deletions(-) diff --git a/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py b/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py index d0a6748..e7f1c0f 100644 --- a/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py +++ b/extract/padelnomics_extract/src/padelnomics_extract/overpass_tennis.py @@ -1,40 +1,77 @@ """Overpass API extractor — global tennis court locations from OpenStreetMap. -Queries the Overpass API for all nodes/ways/relations tagged sport=tennis. 
-Tennis court density near a location is a proxy for racket-sport culture — -areas with many tennis clubs are prime candidates for padel adoption. +Queries the Overpass API for all nodes/ways/relations tagged sport=tennis, +split across 10 geographic regions to avoid timeout on the ~150K+ global result. -The query returns ~150K+ results globally (vs ~5K for padel), so a higher -Overpass timeout is used. +Regional strategy: + - Each region is a bounding box covering a continent or sub-continent + - Each region is queried independently (POST with [bbox:...]) + - Overlapping bboxes are deduped on OSM element id + - One region per POST (~10-40K elements each, well within Overpass limits) + - Crash recovery: working JSONL accumulates completed regions; on restart + already-written IDs are skipped, completed regions produce 0 new elements -Landing: {LANDING_DIR}/overpass_tennis/{year}/{month}/courts.json.gz +Landing: {LANDING_DIR}/overpass_tennis/{year}/{month}/courts.jsonl.gz """ +import json import sqlite3 +import time from pathlib import Path import niquests -from ._shared import OVERPASS_TIMEOUT_SECONDS, run_extractor, setup_logging -from .utils import landing_path, write_gzip_atomic +from ._shared import run_extractor, setup_logging +from .utils import compress_jsonl_atomic, landing_path, load_partial_results logger = setup_logging("padelnomics.extract.overpass_tennis") EXTRACTOR_NAME = "overpass_tennis" OVERPASS_URL = "https://overpass-api.de/api/interpreter" -# Tennis returns ~150K+ elements globally vs ~5K for padel — use 3× timeout. 
-TENNIS_OVERPASS_TIMEOUT_SECONDS = OVERPASS_TIMEOUT_SECONDS * 3 +# Each region is [south, west, north, east] — Overpass bbox format +REGIONS = [ + {"name": "europe_west", "bbox": "35.0,-11.0,61.0,8.0"}, # FR ES GB PT IE BE NL + {"name": "europe_central", "bbox": "42.0,8.0,55.5,24.0"}, # DE IT AT CH CZ PL HU + {"name": "europe_east", "bbox": "35.0,24.0,72.0,60.0"}, # Nordics Baltics GR TR RO + {"name": "north_america", "bbox": "15.0,-170.0,72.0,-50.0"}, # US CA MX + {"name": "south_america", "bbox": "-56.0,-82.0,15.0,-34.0"}, # BR AR CL + {"name": "asia_east", "bbox": "18.0,73.0,54.0,150.0"}, # JP KR CN + {"name": "asia_west", "bbox": "-11.0,24.0,42.0,73.0"}, # Middle East India + {"name": "oceania", "bbox": "-50.0,110.0,5.0,180.0"}, # AU NZ + {"name": "africa", "bbox": "-35.0,-18.0,37.0,52.0"}, # ZA EG MA + {"name": "asia_north", "bbox": "42.0,60.0,82.0,180.0"}, # RU-east KZ +] -OVERPASS_QUERY = ( - "[out:json][timeout:300];\n" - "(\n" - ' node["sport"="tennis"];\n' - ' way["sport"="tennis"];\n' - ' relation["sport"="tennis"];\n' - ");\n" - "out center;" -) +MAX_RETRIES_PER_REGION = 2 +RETRY_DELAY_SECONDS = 30 # Overpass cooldown between retries +REGION_TIMEOUT_SECONDS = 180 # Client-side per-region timeout (server uses 150s) +INTER_REGION_DELAY_SECONDS = 5 # Polite delay between regions + + +def _region_query(bbox: str) -> str: + """Build an Overpass QL query for tennis courts within a bounding box.""" + return ( + f"[out:json][timeout:150][bbox:{bbox}];\n" + "(\n" + " node[\"sport\"=\"tennis\"];\n" + " way[\"sport\"=\"tennis\"];\n" + " rel[\"sport\"=\"tennis\"];\n" + ");\n" + "out center;" + ) + + +def _query_region(session: niquests.Session, region: dict) -> list[dict]: + """POST one regional Overpass query. 
Returns list of OSM elements.""" + query = _region_query(region["bbox"]) + resp = session.post( + OVERPASS_URL, + data={"data": query}, + timeout=REGION_TIMEOUT_SECONDS, + ) + resp.raise_for_status() + return resp.json().get("elements", []) def extract( @@ -43,24 +80,84 @@ def extract( conn: sqlite3.Connection, session: niquests.Session, ) -> dict: - """POST OverpassQL query for tennis courts and write raw OSM JSON. Returns run metrics.""" + """Query Overpass for global tennis courts using regional bbox splitting. + + Splits the global query into REGIONS to avoid Overpass timeout. + Writes one OSM element per line to courts.jsonl.gz. + Crash-safe: working.jsonl accumulates results; on restart already-written + element IDs are skipped so completed regions produce 0 new elements. + """ + assert landing_dir.is_dir(), f"landing_dir must exist: {landing_dir}" + assert "/" in year_month and len(year_month) == 7, f"year_month must be YYYY/MM: {year_month!r}" + year, month = year_month.split("/") dest_dir = landing_path(landing_dir, "overpass_tennis", year, month) - dest = dest_dir / "courts.json.gz" + dest = dest_dir / "courts.jsonl.gz" + old_blob = dest_dir / "courts.json.gz" - logger.info("POST %s (sport=tennis, ~150K+ results expected)", OVERPASS_URL) - resp = session.post( - OVERPASS_URL, - data={"data": OVERPASS_QUERY}, - timeout=TENNIS_OVERPASS_TIMEOUT_SECONDS, + if dest.exists() or old_blob.exists(): + logger.info("Already have courts for %s — skipping", year_month) + return {"files_written": 0, "files_skipped": 1, "bytes_written": 0} + + # Crash recovery: load already-written elements from the working file + working_path = dest_dir / "courts.working.jsonl" + prior_records, already_seen_ids = load_partial_results(working_path, id_key="id") + if already_seen_ids: + logger.info("Resuming: %d elements already in working file", len(already_seen_ids)) + + total_new = 0 + regions_succeeded: list[str] = [] + regions_failed: list[str] = [] + + working_file = 
open(working_path, "a") # noqa: SIM115 + try: + for i, region in enumerate(REGIONS): + for attempt in range(MAX_RETRIES_PER_REGION + 1): + try: + elements = _query_region(session, region) + new_elements = [e for e in elements if str(e.get("id", "")) not in already_seen_ids] + for elem in new_elements: + working_file.write(json.dumps(elem, separators=(",", ":")) + "\n") + already_seen_ids.add(str(elem["id"])) + working_file.flush() + total_new += len(new_elements) + regions_succeeded.append(region["name"]) + logger.info( + "Region %s: %d elements (%d new, %d total)", + region["name"], len(elements), len(new_elements), len(already_seen_ids), + ) + break + except niquests.exceptions.RequestException as exc: + if attempt < MAX_RETRIES_PER_REGION: + logger.warning( + "Region %s attempt %d failed: %s — retrying in %ds", + region["name"], attempt + 1, exc, RETRY_DELAY_SECONDS, + ) + time.sleep(RETRY_DELAY_SECONDS) + else: + regions_failed.append(region["name"]) + logger.error( + "Region %s failed after %d attempts: %s", + region["name"], MAX_RETRIES_PER_REGION + 1, exc, + ) + + if i < len(REGIONS) - 1: + time.sleep(INTER_REGION_DELAY_SECONDS) + finally: + working_file.close() + + total_elements = len(prior_records) + total_new + if total_elements == 0: + raise RuntimeError(f"All regions failed, no elements written: {regions_failed}") + + if regions_failed: + logger.warning("Completed with %d failed regions: %s", len(regions_failed), regions_failed) + + bytes_written = compress_jsonl_atomic(working_path, dest) + logger.info( + "%d total elements (%d regions, %d failed) -> %s (%s bytes)", + total_elements, len(regions_succeeded), len(regions_failed), dest, f"{bytes_written:,}", ) - resp.raise_for_status() - - size_bytes = len(resp.content) - logger.info("%s bytes received", f"{size_bytes:,}") - - bytes_written = write_gzip_atomic(dest, resp.content) - logger.info("wrote %s (%s bytes compressed)", dest, f"{bytes_written:,}") return { "files_written": 1, diff --git 
a/transform/sqlmesh_padelnomics/models/staging/stg_tennis_courts.sql b/transform/sqlmesh_padelnomics/models/staging/stg_tennis_courts.sql index 8821f45..c9c5577 100644 --- a/transform/sqlmesh_padelnomics/models/staging/stg_tennis_courts.sql +++ b/transform/sqlmesh_padelnomics/models/staging/stg_tennis_courts.sql @@ -2,7 +2,12 @@ -- Used as a "racket sport culture" signal in the opportunity score: -- areas with high tennis court density are prime padel adoption markets. -- --- Source: data/landing/overpass_tennis/{year}/{month}/courts.json.gz +-- Supports two landing formats (UNION ALL during migration): +-- New: courts.jsonl.gz — one OSM element per line; nodes have lat/lon directly, +-- ways/relations have center.lat/center.lon (Overpass out center) +-- Old: courts.json.gz — {"elements": [...]} blob (UNNEST required) +-- +-- Source: data/landing/overpass_tennis/{year}/{month}/courts.{jsonl,json}.gz MODEL ( name staging.stg_tennis_courts, @@ -11,7 +16,39 @@ MODEL ( grain osm_id ); -WITH parsed AS ( +WITH +-- New format: one OSM element per JSONL line +jsonl_elements AS ( + SELECT + type AS osm_type, + TRY_CAST(id AS BIGINT) AS osm_id, + -- Nodes: lat/lon direct. Ways/relations: center object (Overpass out center). 
+ COALESCE( + TRY_CAST(lat AS DOUBLE), + TRY_CAST(center ->> 'lat' AS DOUBLE) + ) AS lat, + COALESCE( + TRY_CAST(lon AS DOUBLE), + TRY_CAST(center ->> 'lon' AS DOUBLE) + ) AS lon, + tags ->> 'name' AS name, + tags ->> 'addr:country' AS country_code, + tags ->> 'addr:city' AS city_tag, + filename AS source_file, + CURRENT_DATE AS extracted_date + FROM read_json( + @LANDING_DIR || '/overpass_tennis/*/*/courts.jsonl.gz', + format = 'newline_delimited', + columns = { + type: 'VARCHAR', id: 'BIGINT', lat: 'DOUBLE', lon: 'DOUBLE', + center: 'JSON', tags: 'JSON' + }, + filename = true + ) + WHERE type IS NOT NULL +), +-- Old format: {"elements": [...]} blob — kept for transition +blob_elements AS ( SELECT elem ->> 'type' AS osm_type, (elem ->> 'id')::BIGINT AS osm_id, @@ -32,12 +69,16 @@ WITH parsed AS ( ) WHERE (elem ->> 'type') IS NOT NULL ), +parsed AS ( + SELECT * FROM jsonl_elements + UNION ALL + SELECT * FROM blob_elements +), deduped AS ( SELECT *, ROW_NUMBER() OVER (PARTITION BY osm_id ORDER BY extracted_date DESC) AS rn FROM parsed - WHERE osm_type = 'node' - AND lat IS NOT NULL AND lon IS NOT NULL + WHERE lat IS NOT NULL AND lon IS NOT NULL AND lat BETWEEN -90 AND 90 AND lon BETWEEN -180 AND 180 ), @@ -54,8 +95,8 @@ with_country AS ( WHEN lat BETWEEN 36.35 AND 47.09 AND lon BETWEEN 6.62 AND 18.51 THEN 'IT' WHEN lat BETWEEN 37.00 AND 42.15 AND lon BETWEEN -9.50 AND -6.19 THEN 'PT' ELSE NULL - END) AS country_code, - NULLIF(TRIM(name), '') AS name, + END) AS country_code, + NULLIF(TRIM(name), '') AS name, NULLIF(TRIM(city_tag), '') AS city, extracted_date FROM deduped From fb83f432db73421fa08c56e122afecb8a5ceea12 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:20:32 +0100 Subject: [PATCH 95/98] =?UTF-8?q?feat(emails):=20subtask=207=20=E2=80=94?= =?UTF-8?q?=20remove=20=5Femail=5Fwrap()=20and=20=5Femail=5Fbutton()=20fro?= =?UTF-8?q?m=20worker.py?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit All 10 email 
handlers now use render_email_template(). The two legacy inline-HTML helpers are no longer needed and have been removed. Co-Authored-By: Claude Sonnet 4.6 --- web/src/padelnomics/worker.py | 83 ----------------------------------- 1 file changed, 83 deletions(-) diff --git a/web/src/padelnomics/worker.py b/web/src/padelnomics/worker.py index 76e48bd..25b3e29 100644 --- a/web/src/padelnomics/worker.py +++ b/web/src/padelnomics/worker.py @@ -42,89 +42,6 @@ def _t(key: str, lang: str = "en", **kwargs) -> str: return raw.format(**kwargs) if kwargs else raw -def _email_wrap(body: str, lang: str = "en", preheader: str = "") -> str: - """Wrap email body in a branded layout with inline CSS. - - preheader: hidden preview text shown in email client list views. - """ - year = utcnow().year - tagline = _t("email_footer_tagline", lang) - copyright_text = _t("email_footer_copyright", lang, year=year, app_name=config.APP_NAME) - # Hidden preheader trick: visible text + invisible padding to prevent - # email clients from pulling body text into the preview. - preheader_html = "" - if preheader: - preheader_html = ( - f'' - f'{preheader}{"͏ ‌ " * 30}' - ) - return f"""\ - - - - - - {config.APP_NAME} - - - {preheader_html} - - -
    - - - - - - - - - - - - - - - - - -
     
    - - padelnomics - -
    - {body} -
    -

    - padelnomics.io -  ·  - {tagline} -

    -

    - {copyright_text} -

    -
    -
    - -""" - - -def _email_button(url: str, label: str) -> str: - """Render a branded CTA button for email. - - Uses display:block for full-width tap target on mobile. - """ - return ( - f'' - f'
    ' - f'' - f"{label}
    " - ) - - def task(name: str): """Decorator to register a task handler.""" From ec7f115f16020288405a351933548c25bb054716 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:24:48 +0100 Subject: [PATCH 96/98] feat: add init_landing_seeds.py for empty-landing bootstrap Creates minimal .jsonl.gz and .json.gz seed files so all SQLMesh staging models can compile and run before real extraction data arrives. Each seed has a single null record filtered by the staging model's WHERE clause (tenant_id IS NOT NULL, geoname_id IS NOT NULL, type IS NOT NULL, etc). Covers both formats (JSONL + blob) for the UNION ALL transition CTEs: playtomic/1970/01/: tenants.{jsonl,json}.gz, availability seeds (morning + recheck) geonames/1970/01/: cities_global.{jsonl,json}.gz overpass_tennis/1970/01/: courts.{jsonl,json}.gz overpass/1970/01/: courts.json.gz (padel, unchanged format) eurostat/1970/01/: urb_cpop1.json.gz, ilc_di03.json.gz eurostat_city_labels/1970/01/: cities_codelist.json.gz ons_uk/1970/01/: lad_population.json.gz census_usa/1970/01/: acs5_places.json.gz Co-Authored-By: Claude Sonnet 4.6 --- scripts/init_landing_seeds.py | 101 ++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 scripts/init_landing_seeds.py diff --git a/scripts/init_landing_seeds.py b/scripts/init_landing_seeds.py new file mode 100644 index 0000000..cc61bd5 --- /dev/null +++ b/scripts/init_landing_seeds.py @@ -0,0 +1,101 @@ +"""Create minimal landing zone seed files so SQLMesh models can run before real data arrives. + +Each seed contains one null/empty record that is filtered out by the staging model's +WHERE clause. Seeds live in the 1970/01 epoch so they're never confused with real data. + +Usage: + uv run python scripts/init_landing_seeds.py [--landing-dir data/landing] + +Idempotent: skips existing files. 
+""" + +import argparse +import gzip +import json +from pathlib import Path + + +def create_seed(dest: Path, content: bytes) -> None: + """Write content to a gzip file atomically. Skips if the file already exists.""" + if dest.exists(): + return + dest.parent.mkdir(parents=True, exist_ok=True) + tmp = dest.with_suffix(dest.suffix + ".tmp") + with gzip.open(tmp, "wb") as f: + f.write(content) + tmp.rename(dest) + print(f" created: {dest}") + + +def main() -> None: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("--landing-dir", default="data/landing", type=Path) + args = parser.parse_args() + base: Path = args.landing_dir + + seeds = { + # --- Playtomic tenants --- + # JSONL: one null tenant (filtered by WHERE tenant_id IS NOT NULL) + "playtomic/1970/01/tenants.jsonl.gz": + b'{"tenant_id":null}\n', + # Blob: empty tenants array + "playtomic/1970/01/tenants.json.gz": + json.dumps({"tenants": [], "count": 0}).encode(), + + # --- Playtomic availability (morning) --- + # JSONL: one null venue (filtered by WHERE tenant_id IS NOT NULL) + "playtomic/1970/01/availability_1970-01-01.jsonl.gz": + b'{"tenant_id":null,"date":"1970-01-01","captured_at_utc":"1970-01-01T00:00:00Z","slots":null}\n', + # Blob: empty venues array + "playtomic/1970/01/availability_1970-01-01.json.gz": + json.dumps({"date": "1970-01-01", "captured_at_utc": "1970-01-01T00:00:00Z", + "venue_count": 0, "venues": []}).encode(), + + # --- Playtomic recheck (blob only, small format) --- + "playtomic/1970/01/availability_1970-01-01_recheck_00.json.gz": + json.dumps({"date": "1970-01-01", "captured_at_utc": "1970-01-01T00:00:00Z", + "recheck_hour": 0, "venues": []}).encode(), + + # --- GeoNames --- + # JSONL: one null city (filtered by WHERE geoname_id IS NOT NULL) + "geonames/1970/01/cities_global.jsonl.gz": + b'{"geoname_id":null}\n', + # Blob: empty rows array + "geonames/1970/01/cities_global.json.gz": + json.dumps({"rows": [], "count": 0}).encode(), + + # --- Overpass tennis 
--- + # JSONL: one null element (filtered by WHERE type IS NOT NULL) + "overpass_tennis/1970/01/courts.jsonl.gz": + b'{"type":null,"id":null}\n', + # Blob: empty elements array + "overpass_tennis/1970/01/courts.json.gz": + json.dumps({"version": 0.6, "elements": []}).encode(), + + # --- Overpass padel (unchanged format) --- + "overpass/1970/01/courts.json.gz": + json.dumps({"version": 0.6, "elements": []}).encode(), + + # --- Eurostat --- + "eurostat/1970/01/urb_cpop1.json.gz": + json.dumps({"rows": [], "count": 0}).encode(), + "eurostat/1970/01/ilc_di03.json.gz": + json.dumps({"rows": [], "count": 0}).encode(), + "eurostat_city_labels/1970/01/cities_codelist.json.gz": + json.dumps({"rows": [], "count": 0}).encode(), + + # --- National statistics --- + "ons_uk/1970/01/lad_population.json.gz": + json.dumps({"rows": [], "count": 0}).encode(), + "census_usa/1970/01/acs5_places.json.gz": + json.dumps({"rows": [], "count": 0}).encode(), + } + + print(f"Initialising landing seeds in: {base}") + for rel_path, content in seeds.items(): + create_seed(base / rel_path, content) + print("Done.") + + +if __name__ == "__main__": + main() From 578a4098932c946424ac37e59fb4b1ca188b558b Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:24:52 +0100 Subject: [PATCH 97/98] feat(email-templates): tests, docs, and fix quote_verification sample data (subtask 8) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add 50 tests in test_email_templates.py: - TestRenderEmailTemplate: all 11 registry templates render in EN + DE without error; checks DOCTYPE, wordmark, font, CTA color, template- specific content (heat badges, brief rows, weekly digest loop, etc.) 
and registry structure - TestEmailGalleryRoutes: access control, gallery list (all labels present, preview links), preview pages (EN/DE/nonexistent/invalid-lang), compose preview endpoint (plain + wrapped + empty body) - Fix _quote_verification_sample: add missing recap_parts key — StrictUndefined raised on the {% if recap_parts %} check when the variable was absent - Update CHANGELOG.md: document email template system (renderer, base, macros, 11 templates, registry, gallery, compose preview, removed helpers) - Update PROJECT.md: add email template system + gallery to Done section Co-Authored-By: Claude Opus 4.6 --- CHANGELOG.md | 13 ++ PROJECT.md | 2 + web/src/padelnomics/email_templates.py | 1 + web/tests/test_email_templates.py | 248 +++++++++++++++++++++++++ 4 files changed, 264 insertions(+) create mode 100644 web/tests/test_email_templates.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 6189c7e..832ea1f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,19 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
## [Unreleased] ### Added +- **Email template system** — all 11 transactional emails migrated from inline f-string HTML in `worker.py` to Jinja2 templates: + - **Standalone renderer** (`email_templates.py`) — `render_email_template()` uses a module-level `jinja2.Environment` with `autoescape=True`, works outside Quart request context (worker process); `tformat` filter mirrors the one in `app.py` + - **`_base.html`** — branded shell (dark header, 3px blue accent, white card body, footer with tagline + copyright); replaces the old `_email_wrap()` helper + - **`_macros.html`** — reusable Jinja2 macros: `email_button`, `heat_badge`, `heat_badge_sm`, `section_heading`, `info_box` + - **11 email templates**: `magic_link`, `quote_verification`, `welcome`, `waitlist_supplier`, `waitlist_general`, `lead_matched`, `lead_forward`, `lead_match_notify`, `weekly_digest`, `business_plan`, `admin_compose` + - **`EMAIL_TEMPLATE_REGISTRY`** — dict mapping slug → `{template, label, description, email_type, sample_data}` with realistic sample data callables for each template + - **Admin email gallery** (`/admin/emails/gallery`) — card grid of all email types; preview page with EN/DE language toggle renders each template in a sandboxed iframe (`srcdoc`); "View in sent log →" cross-link; gallery link added to admin sidebar + - **Compose live preview** — two-column compose layout: form on the left, HTMX-powered preview iframe on the right; `hx-trigger="input delay:500ms"` on the textarea; `POST /admin/emails/compose/preview` endpoint supports plain body or branded wrapper via `wrap` checkbox + - 50 new tests covering all template renders (EN + DE), registry structure, gallery routes (access control, list, preview, lang fallback), and compose preview endpoint + +### Removed +- `_email_wrap()` and `_email_button()` helper functions removed from `worker.py` — replaced by templates + - **Marketplace admin dashboard** (`/admin/marketplace`) — single-screen health view for the two-sided 
market: - **Lead funnel** — total / verified-new (ready to unlock) / unlocked / won / conversion rate - **Credit economy** — total credits issued, consumed (lead unlocks), outstanding balance across all paid suppliers, 30-day burn rate diff --git a/PROJECT.md b/PROJECT.md index 6e94289..9feaa0c 100644 --- a/PROJECT.md +++ b/PROJECT.md @@ -107,6 +107,8 @@ - [x] Task queue management (list, retry, delete) - [x] Lead funnel stats on admin dashboard - [x] Email hub (`/admin/emails`) — sent log, inbox, compose, audiences, delivery event tracking via Resend webhooks +- [x] **Email template system** — 11 transactional emails as Jinja2 templates (`emails/*.html`); standalone `render_email_template()` renderer works in worker + admin; `_base.html` + `_macros.html` shared shell; `EMAIL_TEMPLATE_REGISTRY` with sample data for gallery previews; `_email_wrap()` / `_email_button()` helpers removed +- [x] **Admin email gallery** (`/admin/emails/gallery`) — card grid of all templates, EN/DE preview in sandboxed iframe, "View in sent log" cross-link; compose page now has HTMX live preview pane - [x] **pSEO Engine tab** (`/admin/pseo`) — content gap detection, data freshness signals, article health checks (hreflang orphans, missing build files, broken scenario refs), generation job monitoring with live progress bars - [x] **Marketplace admin dashboard** (`/admin/marketplace`) — lead funnel, credit economy, supplier engagement, live activity stream, inline feature flag toggles - [x] **Lead matching notifications** — `notify_matching_suppliers` task on quote verification + `send_weekly_lead_digest` every Monday; one-click CTA token in forward emails diff --git a/web/src/padelnomics/email_templates.py b/web/src/padelnomics/email_templates.py index 220000c..0ead4c7 100644 --- a/web/src/padelnomics/email_templates.py +++ b/web/src/padelnomics/email_templates.py @@ -107,6 +107,7 @@ def _quote_verification_sample(lang: str) -> dict: "court_count": court_count, "facility_type": "Indoor 
Padel Club", "country": "Germany", + "recap_parts": ["4 courts", "Indoor Padel Club", "Germany"], "preheader": t.get("email_quote_verify_preheader_courts", "").format(court_count=court_count), } diff --git a/web/tests/test_email_templates.py b/web/tests/test_email_templates.py new file mode 100644 index 0000000..b5dc04e --- /dev/null +++ b/web/tests/test_email_templates.py @@ -0,0 +1,248 @@ +""" +Tests for the standalone email template renderer and admin gallery routes. + +render_email_template() tests: each registry entry renders without error, +produces a valid DOCTYPE document, includes the wordmark, and supports both +EN and DE languages. + +Admin gallery tests: access control, list page, preview page, error handling. +""" + +import pytest +from padelnomics.core import utcnow_iso +from padelnomics.email_templates import EMAIL_TEMPLATE_REGISTRY, render_email_template + + +# ── render_email_template() ────────────────────────────────────────────────── + + +class TestRenderEmailTemplate: + """render_email_template() produces valid HTML for all registry entries.""" + + @pytest.mark.parametrize("slug", list(EMAIL_TEMPLATE_REGISTRY.keys())) + def test_all_templates_render_en(self, slug): + entry = EMAIL_TEMPLATE_REGISTRY[slug] + sample = entry["sample_data"]("en") + html = render_email_template(entry["template"], lang="en", **sample) + assert "" in html + assert "padelnomics" in html.lower() + assert 'lang="en"' in html + + @pytest.mark.parametrize("slug", list(EMAIL_TEMPLATE_REGISTRY.keys())) + def test_all_templates_render_de(self, slug): + entry = EMAIL_TEMPLATE_REGISTRY[slug] + sample = entry["sample_data"]("de") + html = render_email_template(entry["template"], lang="de", **sample) + assert "" in html + assert "padelnomics" in html.lower() + assert 'lang="de"' in html + + def test_magic_link_contains_verify_link(self): + entry = EMAIL_TEMPLATE_REGISTRY["magic_link"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + 
assert "/auth/verify?token=" in html + + def test_magic_link_has_preheader(self): + entry = EMAIL_TEMPLATE_REGISTRY["magic_link"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "display:none" in html # hidden preheader span + + def test_lead_forward_has_heat_badge(self): + entry = EMAIL_TEMPLATE_REGISTRY["lead_forward"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "HOT" in html + assert "#DC2626" in html # HOT badge color + + def test_lead_forward_has_brief_rows(self): + entry = EMAIL_TEMPLATE_REGISTRY["lead_forward"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + # Brief rows table is rendered (e.g. "Facility" label) + assert "Facility" in html + + def test_lead_forward_has_contact_info(self): + entry = EMAIL_TEMPLATE_REGISTRY["lead_forward"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "ceo@padelclub.es" in html + assert "Carlos Rivera" in html + + def test_weekly_digest_loops_over_leads(self): + entry = EMAIL_TEMPLATE_REGISTRY["weekly_digest"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + # Sample data has 3 leads — all 3 countries should appear + assert "Germany" in html + assert "Austria" in html + assert "Switzerland" in html + + def test_weekly_digest_has_heat_badges(self): + entry = EMAIL_TEMPLATE_REGISTRY["weekly_digest"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "HOT" in html + assert "WARM" in html + assert "COOL" in html + + def test_welcome_has_quickstart_links(self): + entry = EMAIL_TEMPLATE_REGISTRY["welcome"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "/planner" in html + assert "/markets" in html + + def test_admin_compose_renders_body_html(self): + entry = 
EMAIL_TEMPLATE_REGISTRY["admin_compose"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "test message" in html.lower() + + def test_business_plan_has_download_link(self): + entry = EMAIL_TEMPLATE_REGISTRY["business_plan"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "/planner/export/" in html + + def test_invalid_lang_raises(self): + entry = EMAIL_TEMPLATE_REGISTRY["magic_link"] + with pytest.raises(AssertionError, match="Unsupported lang"): + render_email_template(entry["template"], lang="fr", **entry["sample_data"]("en")) + + def test_non_emails_prefix_raises(self): + with pytest.raises(AssertionError, match="Expected emails/ prefix"): + render_email_template("base.html", lang="en") + + def test_common_design_elements_present(self): + """Branded shell must include font + blue accent across all templates.""" + entry = EMAIL_TEMPLATE_REGISTRY["magic_link"] + html = render_email_template(entry["template"], lang="en", **entry["sample_data"]("en")) + assert "Bricolage Grotesque" in html + assert "#1D4ED8" in html + assert "padelnomics.io" in html + + def test_registry_has_required_keys(self): + for slug, entry in EMAIL_TEMPLATE_REGISTRY.items(): + assert "template" in entry, f"{slug}: missing 'template'" + assert "label" in entry, f"{slug}: missing 'label'" + assert "description" in entry, f"{slug}: missing 'description'" + assert callable(entry.get("sample_data")), f"{slug}: sample_data must be callable" + assert entry["template"].startswith("emails/"), f"{slug}: template must start with emails/" + + +# ── Admin gallery routes ────────────────────────────────────────────────────── + + +@pytest.fixture +async def admin_client(app, db): + """Test client with a user that has the admin role.""" + now = utcnow_iso() + async with db.execute( + "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)", + ("gallery_admin@test.com", "Gallery Admin", now), + ) as 
cursor: + admin_id = cursor.lastrowid + await db.execute( + "INSERT INTO user_roles (user_id, role) VALUES (?, 'admin')", (admin_id,) + ) + await db.commit() + + async with app.test_client() as c: + async with c.session_transaction() as sess: + sess["user_id"] = admin_id + yield c + + +class TestEmailGalleryRoutes: + @pytest.mark.asyncio + async def test_gallery_requires_auth(self, client): + resp = await client.get("/admin/emails/gallery") + assert resp.status_code == 302 + + @pytest.mark.asyncio + async def test_gallery_list_returns_200(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery") + assert resp.status_code == 200 + + @pytest.mark.asyncio + async def test_gallery_list_shows_all_template_labels(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery") + html = (await resp.get_data(as_text=True)) + for entry in EMAIL_TEMPLATE_REGISTRY.values(): + assert entry["label"] in html, f"Expected label {entry['label']!r} on gallery page" + + @pytest.mark.asyncio + async def test_gallery_preview_magic_link_en(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery/magic_link") + assert resp.status_code == 200 + html = (await resp.get_data(as_text=True)) + assert "srcdoc" in html # sandboxed iframe is present + assert "Magic Link" in html + + @pytest.mark.asyncio + async def test_gallery_preview_magic_link_de(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery/magic_link?lang=de") + assert resp.status_code == 200 + html = (await resp.get_data(as_text=True)) + assert 'lang="de"' in html or "de" in html # lang toggle shows active state + + @pytest.mark.asyncio + async def test_gallery_preview_lead_forward(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery/lead_forward") + assert resp.status_code == 200 + html = (await resp.get_data(as_text=True)) + assert "Lead Forward" in html + assert "srcdoc" in html + + @pytest.mark.asyncio + async def 
test_gallery_preview_weekly_digest(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery/weekly_digest") + assert resp.status_code == 200 + + @pytest.mark.asyncio + async def test_gallery_preview_nonexistent_slug_redirects(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery/does-not-exist") + assert resp.status_code == 302 + + @pytest.mark.asyncio + async def test_gallery_preview_invalid_lang_falls_back(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery/magic_link?lang=fr") + assert resp.status_code == 200 # invalid lang → falls back to "en" + + @pytest.mark.asyncio + async def test_gallery_preview_requires_auth(self, client): + resp = await client.get("/admin/emails/gallery/magic_link") + assert resp.status_code == 302 + + @pytest.mark.asyncio + async def test_gallery_list_has_preview_links(self, admin_client): + resp = await admin_client.get("/admin/emails/gallery") + html = (await resp.get_data(as_text=True)) + # Each card links to the preview page + for slug in EMAIL_TEMPLATE_REGISTRY: + assert f"/admin/emails/gallery/{slug}" in html + + @pytest.mark.asyncio + async def test_compose_preview_plain_body(self, admin_client): + """POST to compose/preview with wrap=0 returns plain HTML body.""" + resp = await admin_client.post( + "/admin/emails/compose/preview", + form={"body": "Hello world", "wrap": "0"}, + ) + assert resp.status_code == 200 + html = (await resp.get_data(as_text=True)) + assert "Hello world" in html + + @pytest.mark.asyncio + async def test_compose_preview_wrapped_body(self, admin_client): + """POST to compose/preview with wrap=1 wraps body in branded layout.""" + resp = await admin_client.post( + "/admin/emails/compose/preview", + form={"body": "Test preview content", "wrap": "1"}, + ) + assert resp.status_code == 200 + html = (await resp.get_data(as_text=True)) + assert "Test preview content" in html + # Branded wrapper includes padelnomics wordmark + assert "padelnomics" in 
html.lower() + + @pytest.mark.asyncio + async def test_compose_preview_empty_body(self, admin_client): + """Empty body returns an empty but valid partial.""" + resp = await admin_client.post( + "/admin/emails/compose/preview", + form={"body": "", "wrap": "1"}, + ) + assert resp.status_code == 200 From 683ca3fc2482db3a65a0cbd70534d96ecb872b50 Mon Sep 17 00:00:00 2001 From: Deeman Date: Wed, 25 Feb 2026 12:28:43 +0100 Subject: [PATCH 98/98] docs: update CHANGELOG and PROJECT.md for JSONL landing format Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 18 ++++++++++++++++++ PROJECT.md | 3 +++ 2 files changed, 21 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6189c7e..63e16bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,24 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] ### Added +- **JSONL streaming landing format** — extractors now write one JSON object per line (`.jsonl.gz`) instead of a single large blob, eliminating in-memory accumulation and `maximum_object_size` workarounds: + - `playtomic_tenants.py` → `tenants.jsonl.gz` (one tenant per line; dedup still happens in memory before write) + - `playtomic_availability.py` → `availability_{date}.jsonl.gz` (one venue per line with `date`/`captured_at_utc` injected; working file IS the final file — eliminates the consolidation step) + - `geonames.py` → `cities_global.jsonl.gz` (one city per line; eliminates 30 MB blob and its `maximum_object_size` workaround) + - `compress_jsonl_atomic(jsonl_path, dest_path)` utility added to `utils.py` — streams compression in 1 MB chunks, atomic `.tmp` rename, deletes source +- **Regional Overpass splitting for tennis courts** — replaces single global query (150K+ elements, timed out) with 10 regional bbox queries (~10-40K elements each, 150s server / 180s client): + - Regions: europe\_west, europe\_central, europe\_east, north\_america, south\_america, asia\_east, asia\_west, oceania, africa, asia\_north 
+ - Per-region retry (2 attempts, 30s cooldown) + 5s inter-region polite delay + - Crash recovery via `working.jsonl` accumulation — already-written element IDs skipped on restart; completed regions produce 0 new elements on re-query + - Output: `courts.jsonl.gz` (one OSM element per line) +- **`scripts/init_landing_seeds.py`** — creates minimal `.jsonl.gz` and `.json.gz` seed files in `1970/01/` so SQLMesh staging models can run before real extraction data arrives; idempotent + +### Changed +- All modified staging SQL models use **UNION ALL transition CTEs** — both JSONL (new) and blob (old) formats are readable simultaneously; old `.json.gz` files in the landing zone continue working until they rotate out naturally: + - `stg_playtomic_venues`, `stg_playtomic_resources`, `stg_playtomic_opening_hours` — JSONL top-level columns (no `UNNEST(tenants)`) + - `stg_playtomic_availability` — JSONL morning files + blob morning files + blob recheck files + - `stg_population_geonames` — JSONL city rows (no `UNNEST(rows)`, no `maximum_object_size`) + - `stg_tennis_courts` — JSONL elements with `COALESCE(lat, center.lat)` for way/relation centre coords; blob UNNEST kept for old files - **Marketplace admin dashboard** (`/admin/marketplace`) — single-screen health view for the two-sided market: - **Lead funnel** — total / verified-new (ready to unlock) / unlocked / won / conversion rate - **Credit economy** — total credits issued, consumed (lead unlocks), outstanding balance across all paid suppliers, 30-day burn rate diff --git a/PROJECT.md b/PROJECT.md index 6e94289..9562f7f 100644 --- a/PROJECT.md +++ b/PROJECT.md @@ -93,6 +93,9 @@ - [x] `dim_venues` (OSM + Playtomic deduped), `dim_cities` (Eurostat population) - [x] `city_market_profile` (market score OBT), `planner_defaults` (per-city calculator pre-fill) - [x] DuckDB analytics reader in app lifecycle +- [x] **JSONL streaming landing format** — extractors write `.jsonl.gz` (one record per line); constant-memory compression 
via `compress_jsonl_atomic()`; eliminates `maximum_object_size` workarounds; all modified staging models use UNION ALL transition to support both formats +- [x] **Regional Overpass tennis splitting** — 10 regional bbox queries replace the single global 150K-element query that timed out; crash recovery via `working.jsonl` accumulation +- [x] **`init_landing_seeds.py`** — creates minimal seed files for both JSONL and blob formats so SQLMesh can run before real data arrives ### i18n - [x] Full i18n across entire app (EN + DE)