feat(logging): replace all print() with Python logging module
Zero print() calls remain in the app and worker (scripts use basicConfig for clean CLI output). A new setup_logging() in core.py reads the LOG_LEVEL env var and configures the root logger once.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>

# Conflicts:
#	web/src/padelnomics/core.py
#	web/src/padelnomics/worker.py
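A minimal sketch of the resulting pattern (module path and names taken from the diff below; the snippet itself is illustrative, not part of the commit):

    # Entry point (app, worker, or script) configures logging exactly once.
    from padelnomics.core import setup_logging

    setup_logging()  # honours LOG_LEVEL, e.g. LOG_LEVEL=WARNING silences INFO records

    # Every module gets its own named logger and uses lazy %-formatting,
    # so arguments are only interpolated when a record is actually emitted.
    import logging

    logger = logging.getLogger(__name__)
    logger.info("Applied %s migration(s): %s", 2, "data/app.db")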
@@ -7,7 +7,9 @@ from pathlib import Path
 from quart import Quart, Response, abort, g, redirect, request, session, url_for

 from .analytics import close_analytics_db, open_analytics_db
-from .core import close_db, config, get_csrf_token, init_db, is_flag_enabled, setup_request_id
+from .core import close_db, config, get_csrf_token, init_db, is_flag_enabled, setup_logging, setup_request_id
+
+setup_logging()
 from .i18n import LANG_BLUEPRINTS, SUPPORTED_LANGS, get_translations

 _ASSET_VERSION = str(int(time.time()))
@@ -4,6 +4,7 @@ Core infrastructure: database, config, email, and shared utilities.

 import hashlib
 import hmac
+import logging
 import os
 import random
 import re
@@ -89,6 +90,21 @@ class Config:
 config = Config()


+def setup_logging() -> None:
+    """Configure root logger. Call once from each entry point (app, worker, scripts)."""
+    level_name = os.environ.get("LOG_LEVEL", "DEBUG" if config.DEBUG else "INFO")
+    level = getattr(logging, level_name.upper(), logging.INFO)
+    logging.basicConfig(
+        level=level,
+        format="%(asctime)s %(levelname)-8s %(name)s: %(message)s",
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+    logging.getLogger("hypercorn").setLevel(logging.WARNING)
+    logging.getLogger("asyncio").setLevel(logging.WARNING)
+
+
+logger = logging.getLogger(__name__)
+
 # =============================================================================
 # Datetime helpers
 # =============================================================================
@@ -107,7 +123,6 @@ def utcnow_iso() -> str:
     """
     return datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S")
-


 # =============================================================================
 # Database
 # =============================================================================
@@ -384,7 +399,7 @@ async def send_email(
     resend_id = None

     if not config.RESEND_API_KEY:
-        print(f"[EMAIL] Would send to {to}: {subject}")
+        logger.info("Would send to %s: %s", to, subject)
         resend_id = "dev"
     else:
         resend.api_key = config.RESEND_API_KEY
@@ -400,7 +415,7 @@ async def send_email(
         )
         resend_id = result.get("id") if isinstance(result, dict) else getattr(result, "id", None)
     except Exception as e:
-        print(f"[EMAIL] Error sending to {to}: {e}")
+        logger.error("Error sending to %s: %s", to, e)
         return None

     # Log to email_log (best-effort, never fail the send)
@@ -411,7 +426,7 @@ async def send_email(
             (resend_id, sender, to, subject, email_type),
         )
     except Exception as e:
-        print(f"[EMAIL] Failed to log email: {e}")
+        logger.error("Failed to log email: %s", e)

     return resend_id
@@ -34,12 +34,15 @@ Design decisions
 """

 import importlib
+import logging
 import os
 import re
 import sqlite3
 import sys
 from pathlib import Path

+logger = logging.getLogger(__name__)
+
 sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))

 from dotenv import load_dotenv
@@ -89,7 +92,7 @@ def migrate(db_path=None):

     if pending:
         for name in pending:
-            print(f" Applying {name}...")
+            logger.info("Applying %s...", name)
            mod = importlib.import_module(
                f"padelnomics.migrations.versions.{name}"
            )
@@ -98,9 +101,9 @@ def migrate(db_path=None):
                 "INSERT INTO _migrations (name) VALUES (?)", (name,)
             )
         conn.commit()
-        print(f"✓ Applied {len(pending)} migration(s): {db_path}")
+        logger.info("Applied %s migration(s): %s", len(pending), db_path)
     else:
-        print(f"✓ All migrations already applied: {db_path}")
+        logger.info("All migrations already applied: %s", db_path)

     # Show tables (excluding internal sqlite/fts tables)
     cursor = conn.execute(
@@ -109,10 +112,11 @@ def migrate(db_path=None):
         " ORDER BY name"
     )
     tables = [row[0] for row in cursor.fetchall()]
-    print(f" Tables: {', '.join(tables)}")
+    logger.info("Tables: %s", ", ".join(tables))

     conn.close()


 if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
     migrate()
@@ -3,9 +3,12 @@ Planner domain: padel court financial planner + scenario management.
 """

 import json
+import logging
 import math
 from pathlib import Path

+logger = logging.getLogger(__name__)
+
 from quart import Blueprint, Response, g, jsonify, render_template, request

 from ..auth.routes import login_required
@@ -533,7 +536,7 @@ async def save_scenario():
                 }
             )
         except Exception as e:
-            print(f"[NURTURE] Failed to add {g.user['email']} to audience: {e}")
+            logger.warning("Failed to add %s to nurture audience: %s", g.user["email"], e)

     lang = g.get("lang", "en")
     t = get_translations(lang)
@@ -34,12 +34,15 @@ Fields mapped (DuckDB → data_json camelCase key):

 import argparse
 import json
+import logging
 import os
 import sqlite3
 from pathlib import Path

 from dotenv import load_dotenv

+logger = logging.getLogger(__name__)
+
 load_dotenv()

 DATABASE_PATH = os.getenv("DATABASE_PATH", "data/app.db")
@@ -67,13 +70,13 @@ def _load_analytics(city_slugs: list[str]) -> dict[str, dict]:
     """
     path = Path(DUCKDB_PATH)
     if not path.exists():
-        print(f" [analytics] DuckDB not found at {path} — skipping analytics refresh.")
+        logger.warning("DuckDB not found at %s — skipping analytics refresh.", path)
         return {}

     try:
         import duckdb
     except ImportError:
-        print(" [analytics] duckdb not installed — skipping analytics refresh.")
+        logger.warning("duckdb not installed — skipping analytics refresh.")
         return {}

     result: dict[str, dict] = {}
@@ -98,7 +101,7 @@ def _load_analytics(city_slugs: list[str]) -> dict[str, dict]:
             result[slug] = overrides

     except Exception as exc:
-        print(f" [analytics] DuckDB query failed: {exc}")
+        logger.error("DuckDB query failed: %s", exc)

     return result
@@ -124,13 +127,13 @@ def refresh(dry_run: bool = False) -> int:
         city_slug_to_ids.setdefault(slug, []).append(row["id"])

     if not city_slug_to_ids:
-        print("No template_data rows with city_slug found.")
+        logger.info("No template_data rows with city_slug found.")
         conn.close()
         return 0

     analytics = _load_analytics(list(city_slug_to_ids.keys()))
     if not analytics:
-        print("No analytics data found — nothing to update.")
+        logger.info("No analytics data found — nothing to update.")
         conn.close()
         return 0
@@ -154,13 +157,13 @@ def refresh(dry_run: bool = False) -> int:

         data.update(overrides)
         if dry_run:
-            print(f" [dry-run] id={row_id} city_slug={slug}: {changed}")
+            logger.info("[dry-run] id=%s city_slug=%s: %s", row_id, slug, changed)
         else:
             conn.execute(
                 "UPDATE template_data SET data_json = ?, updated_at = datetime('now') WHERE id = ?",
                 (json.dumps(data), row_id),
             )
-            print(f" Updated id={row_id} city_slug={slug}: {list(changed.keys())}")
+            logger.info("Updated id=%s city_slug=%s: %s", row_id, slug, list(changed.keys()))
             updated += 1

     if not dry_run:
@@ -184,7 +187,7 @@ def _trigger_generation() -> None:
         headers={"X-Admin-Key": admin_key},
     )
     with urllib.request.urlopen(req, timeout=120) as resp:
-        print(f" Generation triggered: HTTP {resp.status}")
+        logger.info("Generation triggered: HTTP %s", resp.status)


 def main() -> None:
@@ -195,14 +198,17 @@ def main() -> None:
                         help="Trigger article re-generation after updating")
     args = parser.parse_args()

-    print(f"{'[DRY RUN] ' if args.dry_run else ''}Refreshing template_data from DuckDB…")
+    prefix = "[DRY RUN] " if args.dry_run else ""
+    logger.info("%sRefreshing template_data from DuckDB...", prefix)
     count = refresh(dry_run=args.dry_run)
-    print(f"{'Would update' if args.dry_run else 'Updated'} {count} rows.")
+    action = "Would update" if args.dry_run else "Updated"
+    logger.info("%s %s rows.", action, count)

     if args.generate and count > 0 and not args.dry_run:
-        print("Triggering article generation…")
+        logger.info("Triggering article generation...")
         _trigger_generation()


 if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
     main()
@@ -15,6 +15,7 @@ Usage:

 import asyncio
 import json
+import logging
 import os
 import sqlite3
 import sys
@@ -23,6 +24,8 @@ from pathlib import Path

 from dotenv import load_dotenv

+logger = logging.getLogger(__name__)
+
 load_dotenv()

 DATABASE_PATH = os.getenv("DATABASE_PATH", "data/app.db")
@@ -1363,7 +1366,7 @@ def seed_templates(conn: sqlite3.Connection) -> dict[str, int]:
         ).fetchone()

         if existing:
-            print(f" Template '{tmpl['slug']}' already exists (id={existing[0]}), skipping.")
+            logger.info(" Template '%s' already exists (id=%s), skipping.", tmpl["slug"], existing[0])
             template_ids[tmpl["slug"]] = existing[0]
         else:
             cur = conn.execute(
@@ -1383,7 +1386,7 @@ def seed_templates(conn: sqlite3.Connection) -> dict[str, int]:
                 ),
             )
             template_ids[tmpl["slug"]] = cur.lastrowid
-            print(f" Created template '{tmpl['slug']}' (id={cur.lastrowid})")
+            logger.info(" Created template '%s' (id=%s)", tmpl["slug"], cur.lastrowid)

     return template_ids
@@ -1411,7 +1414,7 @@ def seed_data_rows(conn: sqlite3.Connection, template_ids: dict[str, int]) -> in
         ).fetchone()

         if existing:
-            print(f" Data row '{city_slug}' ({lang}) already exists, skipping.")
+            logger.info(" Data row '%s' (%s) already exists, skipping.", city_slug, lang)
         else:
             conn.execute(
                 """INSERT INTO template_data (template_id, data_json, created_at)
@@ -1419,7 +1422,7 @@ def seed_data_rows(conn: sqlite3.Connection, template_ids: dict[str, int]) -> in
                 (tmpl_id, data_json, now),
             )
             inserted += 1
-            print(f" Inserted data row '{city_slug}' ({lang})")
+            logger.info(" Inserted data row '%s' (%s)", city_slug, lang)

     return inserted
@@ -1432,7 +1435,7 @@ async def generate_articles(template_ids: dict[str, int]) -> None:
     from padelnomics.admin.routes import _generate_from_template  # noqa: PLC0415
     from padelnomics.core import close_db, fetch_one, init_db

-    print("\nInitialising database connection...")
+    logger.info("Initialising database connection...")
     await init_db(DATABASE_PATH)

     start_date = date.today() - timedelta(days=30)  # backdate so all are immediately live
@@ -1441,9 +1444,9 @@ async def generate_articles(template_ids: dict[str, int]) -> None:
         template = await fetch_one("SELECT * FROM article_templates WHERE id = ?", (tmpl_id,))
         assert template is not None, f"Template '{slug}' not found in DB"

-        print(f"\nGenerating articles for template '{slug}'...")
+        logger.info("Generating articles for template '%s'...", slug)
         count = await _generate_from_template(template, start_date, articles_per_day=3)
-        print(f" Generated {count} articles.")
+        logger.info(" Generated %s articles.", count)

     await close_db()
@@ -1463,28 +1466,29 @@ def main() -> None:
     conn.execute("PRAGMA foreign_keys=ON")
     conn.row_factory = sqlite3.Row

-    print("Seeding article templates...")
+    logger.info("Seeding article templates...")
     template_ids = seed_templates(conn)

-    print("\nSeeding city data rows...")
+    logger.info("Seeding city data rows...")
     inserted = seed_data_rows(conn, template_ids)
     conn.commit()
     conn.close()

-    print(f"\nDone. {inserted} data rows inserted.")
-    print("Templates and data rows are visible in admin → Templates.")
+    logger.info("Done. %s data rows inserted.", inserted)
+    logger.info("Templates and data rows are visible in admin -> Templates.")

     if "--generate" in sys.argv:
-        print("\nRunning article generation pipeline...")
+        logger.info("Running article generation pipeline...")
         asyncio.run(generate_articles(template_ids))
-        print("\nGeneration complete. Check admin → Articles.")
+        logger.info("Generation complete. Check admin -> Articles.")
     else:
-        print(
-            "\nTo generate articles, either:\n"
+        logger.info(
+            "To generate articles, either:\n"
            " 1. Run: uv run python -m padelnomics.scripts.seed_content --generate\n"
-            " 2. Or visit admin → Templates → (template) → Generate"
+            " 2. Or visit admin -> Templates -> (template) -> Generate"
         )


 if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
     main()
@@ -7,6 +7,7 @@ Usage:
     uv run python -m padelnomics.scripts.seed_dev_data
 """

+import logging
 import os
 import sqlite3
 import sys
@@ -15,6 +16,8 @@ from pathlib import Path

 from dotenv import load_dotenv

+logger = logging.getLogger(__name__)
+
 load_dotenv()

 DATABASE_PATH = os.getenv("DATABASE_PATH", "data/app.db")
@@ -284,7 +287,7 @@ LEADS = [
 def main():
     db_path = DATABASE_PATH
     if not Path(db_path).exists():
-        print(f"ERROR: Database not found at {db_path}. Run migrations first.")
+        logger.error("Database not found at %s. Run migrations first.", db_path)
         sys.exit(1)

     conn = sqlite3.connect(db_path)
@@ -295,34 +298,34 @@ def main():
     now = datetime.now(UTC)

     # 1. Create dev user
-    print("Creating dev user (dev@localhost)...")
+    logger.info("Creating dev user (dev@localhost)...")
     existing = conn.execute("SELECT id FROM users WHERE email = 'dev@localhost'").fetchone()
     if existing:
         dev_user_id = existing["id"]
-        print(f" Already exists (id={dev_user_id})")
+        logger.info(" Already exists (id=%s)", dev_user_id)
     else:
         cursor = conn.execute(
             "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)",
             ("dev@localhost", "Dev User", now.strftime("%Y-%m-%d %H:%M:%S")),
         )
         dev_user_id = cursor.lastrowid
-        print(f" Created (id={dev_user_id})")
+        logger.info(" Created (id=%s)", dev_user_id)

     # Grant admin role to dev user
     conn.execute(
         "INSERT OR IGNORE INTO user_roles (user_id, role) VALUES (?, 'admin')",
         (dev_user_id,),
     )
-    print(" Admin role granted")
+    logger.info(" Admin role granted")

     # 2. Seed suppliers
-    print(f"\nSeeding {len(SUPPLIERS)} suppliers...")
+    logger.info("Seeding %s suppliers...", len(SUPPLIERS))
     supplier_ids = {}
     for s in SUPPLIERS:
         existing = conn.execute("SELECT id FROM suppliers WHERE slug = ?", (s["slug"],)).fetchone()
         if existing:
             supplier_ids[s["slug"]] = existing["id"]
-            print(f" {s['name']} already exists (id={existing['id']})")
+            logger.info(" %s already exists (id=%s)", s["name"], existing["id"])
             continue

         cursor = conn.execute(
@@ -340,10 +343,10 @@ def main():
             ),
         )
         supplier_ids[s["slug"]] = cursor.lastrowid
-        print(f" {s['name']} -> id={cursor.lastrowid}")
+        logger.info(" %s -> id=%s", s["name"], cursor.lastrowid)

     # 3. Claim paid suppliers — each gets its own owner user + subscription
-    print("\nClaiming paid suppliers with owner accounts...")
+    logger.info("Claiming paid suppliers with owner accounts...")
     claimed_suppliers = [
         ("padeltech-gmbh", "supplier_pro", "hans@padeltech.example.com", "Hans Weber"),
         ("courtbuild-spain", "supplier_growth", "maria@courtbuild.example.com", "Maria Garcia"),
@@ -398,10 +401,10 @@ def main():
             (owner_id, plan, f"sub_dev_{slug}",
              period_end, now.strftime("%Y-%m-%d %H:%M:%S")),
         )
-        print(f" {slug} -> owner {email} ({plan})")
+        logger.info(" %s -> owner %s (%s)", slug, email, plan)

     # 4. Seed leads
-    print(f"\nSeeding {len(LEADS)} leads...")
+    logger.info("Seeding %s leads...", len(LEADS))
     lead_ids = []
     for i, lead in enumerate(LEADS):
         from padelnomics.credits import HEAT_CREDIT_COSTS
@@ -426,10 +429,10 @@ def main():
             ),
         )
         lead_ids.append(cursor.lastrowid)
-        print(f" Lead #{cursor.lastrowid}: {lead['contact_name']} ({lead['heat_score']}, {lead['country']})")
+        logger.info(" Lead #%s: %s (%s, %s)", cursor.lastrowid, lead["contact_name"], lead["heat_score"], lead["country"])

     # 5. Add credit ledger entries for claimed suppliers
-    print("\nAdding credit ledger entries...")
+    logger.info("Adding credit ledger entries...")
     for slug in ("padeltech-gmbh", "courtbuild-spain", "desert-padel-fze"):
         sid = supplier_ids.get(slug)
         if not sid:
@@ -448,10 +451,10 @@ def main():
                VALUES (?, ?, ?, 'admin_adjustment', 'Welcome bonus', ?)""",
             (sid, 10, monthly + 10, (now - timedelta(days=25)).isoformat()),
         )
-        print(f" {slug}: 2 ledger entries")
+        logger.info(" %s: 2 ledger entries", slug)

     # 6. Add lead forwards for testing
-    print("\nAdding lead forwards...")
+    logger.info("Adding lead forwards...")
     padeltech_id = supplier_ids.get("padeltech-gmbh")
     if padeltech_id and len(lead_ids) >= 2:
         for lead_id in lead_ids[:2]:
@@ -476,15 +479,16 @@ def main():
                 (padeltech_id, 80, lead_id, f"Unlocked lead #{lead_id}",
                  (now - timedelta(hours=6)).isoformat()),
             )
-            print(f" PadelTech unlocked lead #{lead_id}")
+            logger.info(" PadelTech unlocked lead #%s", lead_id)

     conn.commit()
     conn.close()

-    print(f"\nDone! Seed data written to {db_path}")
-    print(" Login: /auth/dev-login?email=dev@localhost")
-    print(" Admin: set ADMIN_EMAILS=dev@localhost in .env, then dev-login grants admin role")
+    logger.info("Done! Seed data written to %s", db_path)
+    logger.info(" Login: /auth/dev-login?email=dev@localhost")
+    logger.info(" Admin: set ADMIN_EMAILS=dev@localhost in .env, then dev-login grants admin role")


 if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
     main()
@@ -6,6 +6,7 @@ Commands:
     uv run python -m padelnomics.scripts.setup_paddle --sync   # re-populate DB from existing Paddle products
 """

+import logging
 import os
 import re
 import sqlite3
@@ -13,6 +14,8 @@ import sys
 from pathlib import Path

 from dotenv import load_dotenv
+
+logger = logging.getLogger(__name__)
 from paddle_billing import Client as PaddleClient
 from paddle_billing import Environment, Options
 from paddle_billing.Entities.Events.EventTypeName import EventTypeName
@@ -33,7 +36,8 @@ DATABASE_PATH = os.getenv("DATABASE_PATH", "data/app.db")
 BASE_URL = os.getenv("BASE_URL", "http://localhost:5000")

 if not PADDLE_API_KEY:
-    print("ERROR: Set PADDLE_API_KEY in .env first")
+    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
+    logger.error("Set PADDLE_API_KEY in .env first")
     sys.exit(1)
@@ -202,7 +206,7 @@ _PRODUCT_BY_NAME = {p["name"]: p for p in PRODUCTS}
 def _open_db():
     db_path = DATABASE_PATH
     if not Path(db_path).exists():
-        print(f"ERROR: Database not found at {db_path}. Run migrations first.")
+        logger.error("Database not found at %s. Run migrations first.", db_path)
         sys.exit(1)
     conn = sqlite3.connect(db_path)
     conn.execute("PRAGMA journal_mode=WAL")
@@ -221,7 +225,7 @@ def _write_product(conn, key, product_id, price_id, name, price_cents, billing_t

 def sync(paddle, conn):
     """Fetch existing products from Paddle and re-populate paddle_products table."""
-    print(f"Syncing products from Paddle ({PADDLE_ENVIRONMENT})...\n")
+    logger.info("Syncing products from Paddle (%s)...", PADDLE_ENVIRONMENT)

     products = paddle.products.list(ListProducts(includes=[Includes.Prices]))
@@ -231,7 +235,7 @@ def sync(paddle, conn):
         if not spec:
             continue
         if not product.prices or len(product.prices) == 0:
-            print(f" SKIP {spec['key']}: no prices on {product.id}")
+            logger.warning(" SKIP %s: no prices on %s", spec["key"], product.id)
             continue

         # Use the first active price
@@ -241,26 +245,26 @@ def sync(paddle, conn):
             spec["name"], spec["price"], spec["billing_type"],
         )
         matched += 1
-        print(f" {spec['key']}: {product.id} / {price.id}")
+        logger.info(" %s: %s / %s", spec["key"], product.id, price.id)

     conn.commit()

     if matched == 0:
-        print("\nNo matching products found in Paddle. Run without --sync first.")
+        logger.warning("No matching products found in Paddle. Run without --sync first.")
     else:
-        print(f"\n✓ {matched}/{len(PRODUCTS)} products synced to DB")
+        logger.info("%s/%s products synced to DB", matched, len(PRODUCTS))


 def create(paddle, conn):
     """Create new products and prices in Paddle, write to DB, set up webhook."""
-    print(f"Creating products in {PADDLE_ENVIRONMENT}...\n")
+    logger.info("Creating products in %s...", PADDLE_ENVIRONMENT)

     for spec in PRODUCTS:
         product = paddle.products.create(CreateProduct(
             name=spec["name"],
             tax_category=TaxCategory.Standard,
         ))
-        print(f" Product: {spec['name']} -> {product.id}")
+        logger.info(" Product: %s -> %s", spec["name"], product.id)

         price_kwargs = {
             "description": spec["name"],
@@ -276,7 +280,7 @@ def create(paddle, conn):
             price_kwargs["billing_cycle"] = Duration(interval=Interval.Month, frequency=1)

         price = paddle.prices.create(CreatePrice(**price_kwargs))
-        print(f" Price: {spec['key']} = {price.id}")
+        logger.info(" Price: %s = %s", spec["key"], price.id)

         _write_product(
             conn, spec["key"], product.id, price.id,
@@ -284,7 +288,7 @@ def create(paddle, conn):
         )

     conn.commit()
-    print("\n✓ All products written to DB")
+    logger.info("All products written to DB")

     # -- Notification destination (webhook) -----------------------------------
@@ -298,8 +302,8 @@ def create(paddle, conn):
         EventTypeName.TransactionCompleted,
     ]

-    print("\nCreating webhook notification destination...")
-    print(f" URL: {webhook_url}")
+    logger.info("Creating webhook notification destination...")
+    logger.info(" URL: %s", webhook_url)

     notification_setting = paddle.notification_settings.create(
         CreateNotificationSetting(
@@ -313,8 +317,8 @@ def create(paddle, conn):
     )

     webhook_secret = notification_setting.endpoint_secret_key
-    print(f" ID: {notification_setting.id}")
-    print(f" Secret: {webhook_secret}")
+    logger.info(" ID: %s", notification_setting.id)
+    logger.info(" Secret: %s", webhook_secret)

     env_path = Path(".env")
     env_vars = {
@@ -331,13 +335,13 @@ def create(paddle, conn):
         else:
             env_text = env_text.rstrip("\n") + f"\n{replacement}\n"
         env_path.write_text(env_text)
-        print("\n✓ PADDLE_WEBHOOK_SECRET and PADDLE_NOTIFICATION_SETTING_ID written to .env")
+        logger.info("PADDLE_WEBHOOK_SECRET and PADDLE_NOTIFICATION_SETTING_ID written to .env")
     else:
-        print("\n Add to .env:")
+        logger.info("Add to .env:")
         for key, value in env_vars.items():
-            print(f" {key}={value}")
+            logger.info(" %s=%s", key, value)

-    print("\nDone. dev_run.sh will start ngrok and update the webhook URL automatically.")
+    logger.info("Done. dev_run.sh will start ngrok and update the webhook URL automatically.")


 def main():
@@ -355,4 +359,5 @@ def main():


 if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
     main()
@@ -7,6 +7,8 @@ import json
 import traceback
 from datetime import datetime, timedelta

+import logging
+
 from .core import (
     EMAIL_ADDRESSES,
     config,
@@ -15,11 +17,15 @@ from .core import (
     fetch_one,
     init_db,
     send_email,
+    setup_logging,
     utcnow,
     utcnow_iso,
 )
 from .i18n import get_translations

+logger = logging.getLogger(__name__)
+scheduler_logger = logging.getLogger(f"{__name__}.scheduler")
+
 # Task handlers registry
 HANDLERS: dict[str, callable] = {}
@@ -218,10 +224,7 @@ async def handle_send_magic_link(payload: dict) -> None:
     link = f"{config.BASE_URL}/auth/verify?token={payload['token']}"

     if config.DEBUG:
-        print(f"\n{'=' * 60}")
-        print(f" MAGIC LINK for {payload['email']}")
-        print(f" {link}")
-        print(f"{'=' * 60}\n")
+        logger.debug("MAGIC LINK for %s: %s", payload["email"], link)

     expiry_minutes = config.MAGIC_LINK_EXPIRY_MINUTES
     body = (
@@ -253,10 +256,7 @@ async def handle_send_quote_verification(payload: dict) -> None:
     )

     if config.DEBUG:
-        print(f"\n{'=' * 60}")
-        print(f" QUOTE VERIFICATION for {payload['email']}")
-        print(f" {link}")
-        print(f"{'=' * 60}\n")
+        logger.debug("QUOTE VERIFICATION for %s: %s", payload["email"], link)

     first_name = (
         payload.get("contact_name", "").split()[0] if payload.get("contact_name") else "there"
@@ -495,7 +495,7 @@ async def handle_send_lead_forward_email(payload: dict) -> None:
     # Send to supplier contact email or general contact
     to_email = supplier.get("contact_email") or supplier.get("contact") or ""
     if not to_email:
-        print(f"[WORKER] No email for supplier {supplier_id}, skipping lead forward")
+        logger.warning("No email for supplier %s, skipping lead forward", supplier_id)
         return

     await send_email(
@@ -598,9 +598,9 @@ async def handle_refill_monthly_credits(payload: dict) -> None:
     for s in suppliers:
         try:
             await monthly_credit_refill(s["id"])
-            print(f"[WORKER] Refilled credits for supplier {s['id']}")
+            logger.info("Refilled credits for supplier %s", s["id"])
         except Exception as e:
-            print(f"[WORKER] Failed to refill credits for supplier {s['id']}: {e}")
+            logger.error("Failed to refill credits for supplier %s: %s", s["id"], e)


 @task("generate_business_plan")
@@ -661,7 +661,7 @@ async def handle_generate_business_plan(payload: dict) -> None:
             email_type="business_plan",
         )

-        print(f"[WORKER] Generated business plan PDF: export_id={export_id}")
+        logger.info("Generated business plan PDF: export_id=%s", export_id)

     except Exception:
         await execute(
@@ -690,7 +690,7 @@ async def handle_sync_gsc(payload: dict) -> None:
     from .seo import sync_gsc
     days_back = payload.get("days_back", 3)
     rows = await sync_gsc(days_back=days_back)
-    print(f"[WORKER] GSC sync complete: {rows} rows")
+    logger.info("GSC sync complete: %s rows", rows)


 @task("sync_bing")
@@ -699,7 +699,7 @@ async def handle_sync_bing(payload: dict) -> None:
     from .seo import sync_bing
     days_back = payload.get("days_back", 3)
     rows = await sync_bing(days_back=days_back)
-    print(f"[WORKER] Bing sync complete: {rows} rows")
+    logger.info("Bing sync complete: %s rows", rows)


 @task("sync_umami")
@@ -708,7 +708,7 @@ async def handle_sync_umami(payload: dict) -> None:
     from .seo import sync_umami
     days_back = payload.get("days_back", 3)
    rows = await sync_umami(days_back=days_back)
-    print(f"[WORKER] Umami sync complete: {rows} rows")
+    logger.info("Umami sync complete: %s rows", rows)


 @task("cleanup_seo_metrics")
@@ -716,7 +716,7 @@ async def handle_cleanup_seo_metrics(payload: dict) -> None:
     """Delete SEO metrics older than 12 months."""
     from .seo import cleanup_old_metrics
     deleted = await cleanup_old_metrics(retention_days=365)
-    print(f"[WORKER] Cleaned up {deleted} old SEO metric rows")
+    logger.info("Cleaned up %s old SEO metric rows", deleted)


 @task("generate_articles")
@@ -732,7 +732,7 @@ async def handle_generate_articles(payload: dict) -> None:
     limit = payload.get("limit", 500)

     count = await generate_articles(slug, start_date, articles_per_day, limit=limit)
-    print(f"[WORKER] Generated {count} articles for template '{slug}'")
+    logger.info("Generated %s articles for template '%s'", count, slug)


 # =============================================================================
@@ -755,20 +755,21 @@ async def process_task(task: dict) -> None:
         payload = json.loads(task["payload"]) if task["payload"] else {}
         await handler(payload)
         await mark_complete(task_id)
-        print(f"[WORKER] Completed: {task_name} (id={task_id})")
+        logger.info("Completed: %s (id=%s)", task_name, task_id)
     except Exception as e:
         error = f"{e}\n{traceback.format_exc()}"
         await mark_failed(task_id, error, retries)
-        print(f"[WORKER] Failed: {task_name} (id={task_id}): {e}")
+        logger.error("Failed: %s (id=%s): %s", task_name, task_id, e)


 async def run_worker(poll_interval: float = 1.0) -> None:
     """Main worker loop."""
-    print("[WORKER] Starting...")
+    setup_logging()
+    logger.info("Starting...")
     await init_db()
     from .analytics import open_analytics_db
     open_analytics_db()
-    print("[WORKER] Analytics DB opened.")
+    logger.info("Analytics DB opened.")

     while True:
         try:
@@ -781,13 +782,14 @@ async def run_worker(poll_interval: float = 1.0) -> None:
             await asyncio.sleep(poll_interval)

         except Exception as e:
-            print(f"[WORKER] Error: {e}")
+            logger.error("Error: %s", e)
             await asyncio.sleep(poll_interval * 5)


 async def run_scheduler() -> None:
     """Schedule periodic cleanup tasks."""
-    print("[SCHEDULER] Starting...")
+    setup_logging()
+    scheduler_logger.info("Starting...")
     await init_db()

     last_credit_refill = None
@@ -806,7 +808,7 @@ async def run_scheduler() -> None:
             if today.day == 1 and last_credit_refill != this_month:
                 await enqueue("refill_monthly_credits")
                 last_credit_refill = this_month
-                print(f"[SCHEDULER] Queued monthly credit refill for {this_month}")
+                scheduler_logger.info("Queued monthly credit refill for %s", this_month)

             # Daily SEO metrics sync — run once per day after 6am UTC
             # (GSC data has ~2 day delay, syncing at 6am ensures data is ready)
@@ -817,12 +819,12 @@ async def run_scheduler() -> None:
                 await enqueue("sync_umami")
                 await enqueue("cleanup_seo_metrics")
                 last_seo_sync_date = today_date
-                print(f"[SCHEDULER] Queued SEO metric syncs for {today_date}")
+                scheduler_logger.info("Queued SEO metric syncs for %s", today_date)

             await asyncio.sleep(3600)  # 1 hour

         except Exception as e:
-            print(f"[SCHEDULER] Error: {e}")
+            scheduler_logger.error("Error: %s", e)
             await asyncio.sleep(60)