fix: use SQLite-compatible space format in utcnow_iso(), fix credits ordering

utcnow_iso() now produces 'YYYY-MM-DD HH:MM:SS' (space separator) matching
SQLite's datetime('now') so lexicographic comparisons like
`published_at <= datetime('now')` work correctly.

Also add `id DESC` tiebreaker to get_ledger() ORDER BY to preserve
insertion order when multiple credits are added within the same second.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Deeman
2026-02-24 10:30:18 +01:00
parent a05c230ce3
commit e33b28025e
20 changed files with 48 additions and 60 deletions

View File

@@ -2,7 +2,7 @@
Admin domain: role-based admin panel for managing users, tasks, etc.
"""
import json
from datetime import date, datetime, timedelta
from datetime import date, timedelta
from pathlib import Path
import mistune
@@ -68,7 +68,7 @@ async def get_dashboard_stats() -> dict:
"""Get admin dashboard statistics."""
now = utcnow()
today = now.date().isoformat()
week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%S")
week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%d %H:%M:%S")
users_total = await fetch_one("SELECT COUNT(*) as count FROM users WHERE deleted_at IS NULL")
users_today = await fetch_one(
"SELECT COUNT(*) as count FROM users WHERE created_at >= ? AND deleted_at IS NULL",

View File

@@ -3,7 +3,7 @@ Auth domain: magic link authentication, user management, decorators.
"""
import secrets
from datetime import datetime, timedelta
from datetime import timedelta
from functools import wraps
from pathlib import Path
@@ -87,7 +87,7 @@ async def create_auth_token(user_id: int, token: str, minutes: int = None) -> in
expires = utcnow() + timedelta(minutes=minutes)
return await execute(
"INSERT INTO auth_tokens (user_id, token, expires_at) VALUES (?, ?, ?)",
(user_id, token, expires.strftime("%Y-%m-%dT%H:%M:%S")),
(user_id, token, expires.strftime("%Y-%m-%d %H:%M:%S")),
)

View File

@@ -5,7 +5,7 @@ Payment provider: paddle
import json
import secrets
from datetime import datetime, timedelta
from datetime import timedelta
from pathlib import Path
from paddle_billing import Client as PaddleClient
@@ -413,7 +413,7 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None:
# Sticky boost purchases
elif key == "boost_sticky_week" and supplier_id:
from ..core import transaction as db_transaction
expires = (utcnow() + timedelta(weeks=1)).strftime("%Y-%m-%dT%H:%M:%S")
expires = (utcnow() + timedelta(weeks=1)).strftime("%Y-%m-%d %H:%M:%S")
country = custom_data.get("sticky_country", "")
async with db_transaction() as db:
await db.execute(
@@ -429,7 +429,7 @@ async def _handle_transaction_completed(data: dict, custom_data: dict) -> None:
elif key == "boost_sticky_month" and supplier_id:
from ..core import transaction as db_transaction
expires = (utcnow() + timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%S")
expires = (utcnow() + timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
country = custom_data.get("sticky_country", "")
async with db_transaction() as db:
await db.execute(

View File

@@ -135,7 +135,7 @@ def _validate_table_name(data_table: str) -> None:
def _datetimeformat(value: str, fmt: str = "%Y-%m-%d") -> str:
"""Jinja2 filter: format a date string (or 'now') with strftime."""
from datetime import UTC, datetime
from datetime import datetime
if value == "now":
dt = datetime.now(UTC)

View File

@@ -102,10 +102,10 @@ def utcnow() -> datetime:
def utcnow_iso() -> str:
"""UTC now as naive ISO string for SQLite TEXT columns.
Produces YYYY-MM-DDTHH:MM:SS (no +00:00 suffix) to match the existing
format stored in the DB so lexicographic SQL comparisons keep working.
Produces YYYY-MM-DD HH:MM:SS (space separator, no +00:00 suffix) to match
SQLite's native datetime('now') format so lexicographic SQL comparisons work.
"""
return datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S")
return datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S")
# =============================================================================
@@ -554,12 +554,12 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t
# Clean old entries and count recent
await execute(
"DELETE FROM rate_limits WHERE key = ? AND timestamp < ?",
(key, window_start.strftime("%Y-%m-%dT%H:%M:%S")),
(key, window_start.strftime("%Y-%m-%d %H:%M:%S")),
)
result = await fetch_one(
"SELECT COUNT(*) as count FROM rate_limits WHERE key = ? AND timestamp > ?",
(key, window_start.strftime("%Y-%m-%dT%H:%M:%S")),
(key, window_start.strftime("%Y-%m-%d %H:%M:%S")),
)
count = result["count"] if result else 0
@@ -575,7 +575,7 @@ async def check_rate_limit(key: str, limit: int = None, window: int = None) -> t
# Record this request
await execute(
"INSERT INTO rate_limits (key, timestamp) VALUES (?, ?)",
(key, now.strftime("%Y-%m-%dT%H:%M:%S")),
(key, now.strftime("%Y-%m-%d %H:%M:%S")),
)
return True, info
@@ -671,7 +671,7 @@ async def hard_delete(table: str, id: int) -> bool:
async def purge_deleted(table: str, days: int = 30) -> int:
"""Purge records deleted more than X days ago."""
cutoff = (utcnow() - timedelta(days=days)).strftime("%Y-%m-%dT%H:%M:%S")
cutoff = (utcnow() - timedelta(days=days)).strftime("%Y-%m-%d %H:%M:%S")
return await execute(
f"DELETE FROM {table} WHERE deleted_at IS NOT NULL AND deleted_at < ?", (cutoff,)
)

View File

@@ -199,6 +199,6 @@ async def get_ledger(supplier_id: int, limit: int = 50) -> list[dict]:
FROM credit_ledger cl
LEFT JOIN lead_forwards lf ON cl.reference_id = lf.id AND cl.event_type = 'lead_unlock'
WHERE cl.supplier_id = ?
ORDER BY cl.created_at DESC LIMIT ?""",
ORDER BY cl.created_at DESC, cl.id DESC LIMIT ?""",
(supplier_id, limit),
)

View File

@@ -4,7 +4,6 @@ Leads domain: capture interest in court suppliers and financing.
import json
import secrets
from datetime import datetime
from pathlib import Path
from quart import Blueprint, flash, g, jsonify, redirect, render_template, request, session, url_for

View File

@@ -1390,7 +1390,7 @@ def seed_templates(conn: sqlite3.Connection) -> dict[str, int]:
def seed_data_rows(conn: sqlite3.Connection, template_ids: dict[str, int]) -> int:
"""Insert template_data rows for all cities × languages. Returns count inserted."""
now = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S")
now = datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S")
inserted = 0
en_id = template_ids.get("city-padel-cost-en")

View File

@@ -303,7 +303,7 @@ def main():
else:
cursor = conn.execute(
"INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)",
("dev@localhost", "Dev User", now.strftime("%Y-%m-%dT%H:%M:%S")),
("dev@localhost", "Dev User", now.strftime("%Y-%m-%d %H:%M:%S")),
)
dev_user_id = cursor.lastrowid
print(f" Created (id={dev_user_id})")
@@ -336,7 +336,7 @@ def main():
s["website"], s["description"], s["category"], s["tier"],
s["credit_balance"], s["monthly_credits"], s["contact_name"],
s["contact_email"], s["years_in_business"], s["project_count"],
s["service_area"], now.strftime("%Y-%m-%dT%H:%M:%S"),
s["service_area"], now.strftime("%Y-%m-%d %H:%M:%S"),
),
)
supplier_ids[s["slug"]] = cursor.lastrowid
@@ -349,7 +349,7 @@ def main():
("courtbuild-spain", "supplier_growth", "maria@courtbuild.example.com", "Maria Garcia"),
("desert-padel-fze", "supplier_pro", "ahmed@desertpadel.example.com", "Ahmed Al-Rashid"),
]
period_end = (now + timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%S")
period_end = (now + timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
for slug, plan, email, name in claimed_suppliers:
sid = supplier_ids.get(slug)
if not sid:
@@ -364,14 +364,14 @@ def main():
else:
cursor = conn.execute(
"INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)",
(email, name, now.strftime("%Y-%m-%dT%H:%M:%S")),
(email, name, now.strftime("%Y-%m-%d %H:%M:%S")),
)
owner_id = cursor.lastrowid
# Claim the supplier
conn.execute(
"UPDATE suppliers SET claimed_by = ?, claimed_at = ? WHERE id = ? AND claimed_by IS NULL",
(owner_id, now.strftime("%Y-%m-%dT%H:%M:%S"), sid),
(owner_id, now.strftime("%Y-%m-%d %H:%M:%S"), sid),
)
# Create billing customer record
@@ -382,7 +382,7 @@ def main():
conn.execute(
"""INSERT INTO billing_customers (user_id, provider_customer_id, created_at)
VALUES (?, ?, ?)""",
(owner_id, f"ctm_dev_{slug}", now.strftime("%Y-%m-%dT%H:%M:%S")),
(owner_id, f"ctm_dev_{slug}", now.strftime("%Y-%m-%d %H:%M:%S")),
)
# Create active subscription
@@ -396,7 +396,7 @@ def main():
current_period_end, created_at)
VALUES (?, ?, 'active', ?, ?, ?)""",
(owner_id, plan, f"sub_dev_{slug}",
period_end, now.strftime("%Y-%m-%dT%H:%M:%S")),
period_end, now.strftime("%Y-%m-%d %H:%M:%S")),
)
print(f" {slug} -> owner {email} ({plan})")

View File

@@ -127,7 +127,7 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS)
"""INSERT INTO seo_sync_log
(source, status, rows_synced, started_at, completed_at, duration_ms)
VALUES ('bing', 'success', ?, ?, ?, ?)""",
(rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms),
(rows_synced, started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms),
)
return rows_synced
@@ -137,6 +137,6 @@ async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS)
"""INSERT INTO seo_sync_log
(source, status, rows_synced, error, started_at, completed_at, duration_ms)
VALUES ('bing', 'failed', 0, ?, ?, ?, ?)""",
(str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms),
(str(exc), started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms),
)
raise

View File

@@ -5,7 +5,7 @@ is synchronous, so sync runs in asyncio.to_thread().
"""
import asyncio
from datetime import datetime, timedelta
from datetime import timedelta
from pathlib import Path
from urllib.parse import urlparse
@@ -127,7 +127,7 @@ async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int:
"""INSERT INTO seo_sync_log
(source, status, rows_synced, started_at, completed_at, duration_ms)
VALUES ('gsc', 'success', ?, ?, ?, ?)""",
(rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms),
(rows_synced, started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms),
)
return rows_synced
@@ -137,6 +137,6 @@ async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int:
"""INSERT INTO seo_sync_log
(source, status, rows_synced, error, started_at, completed_at, duration_ms)
VALUES ('gsc', 'failed', 0, ?, ?, ?, ?)""",
(str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms),
(str(exc), started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms),
)
raise

View File

@@ -4,7 +4,7 @@ Uses bearer token auth. Self-hosted instance, no rate limits.
Config already exists: UMAMI_API_URL, UMAMI_API_TOKEN, UMAMI_WEBSITE_ID.
"""
from datetime import datetime, timedelta
from datetime import timedelta
import httpx
@@ -101,7 +101,7 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS
"""INSERT INTO seo_sync_log
(source, status, rows_synced, started_at, completed_at, duration_ms)
VALUES ('umami', 'success', ?, ?, ?, ?)""",
(rows_synced, started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms),
(rows_synced, started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms),
)
return rows_synced
@@ -111,6 +111,6 @@ async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS
"""INSERT INTO seo_sync_log
(source, status, rows_synced, error, started_at, completed_at, duration_ms)
VALUES ('umami', 'failed', 0, ?, ?, ?, ?)""",
(str(exc), started_at.strftime("%Y-%m-%dT%H:%M:%S"), utcnow_iso(), duration_ms),
(str(exc), started_at.strftime("%Y-%m-%d %H:%M:%S"), utcnow_iso(), duration_ms),
)
raise

View File

@@ -13,9 +13,9 @@ from ..core import (
config,
csrf_protect,
execute,
feature_gate,
fetch_all,
fetch_one,
feature_gate,
get_paddle_price,
is_flag_enabled,
)

View File

@@ -142,7 +142,7 @@ async def enqueue(task_name: str, payload: dict = None, run_at: datetime = None)
(
task_name,
json.dumps(payload or {}),
(run_at or utcnow()).strftime("%Y-%m-%dT%H:%M:%S"),
(run_at or utcnow()).strftime("%Y-%m-%d %H:%M:%S"),
utcnow_iso(),
),
)
@@ -401,7 +401,7 @@ async def handle_cleanup_tokens(payload: dict) -> None:
@task("cleanup_rate_limits")
async def handle_cleanup_rate_limits(payload: dict) -> None:
"""Clean up old rate limit entries."""
cutoff = (utcnow() - timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%S")
cutoff = (utcnow() - timedelta(hours=1)).strftime("%Y-%m-%d %H:%M:%S")
await execute("DELETE FROM rate_limits WHERE timestamp < ?", (cutoff,))
@@ -674,7 +674,7 @@ async def handle_generate_business_plan(payload: dict) -> None:
@task("cleanup_old_tasks")
async def handle_cleanup_tasks(payload: dict) -> None:
"""Clean up completed/failed tasks older than 7 days."""
cutoff = (utcnow() - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%S")
cutoff = (utcnow() - timedelta(days=7)).strftime("%Y-%m-%d %H:%M:%S")
await execute(
"DELETE FROM tasks WHERE status IN ('complete', 'failed') AND created_at < ?", (cutoff,)
)

View File

@@ -8,9 +8,7 @@ sitemap integration, admin CRUD routes, and path collision prevention.
import importlib
import json
import sqlite3
from datetime import date, datetime
from padelnomics.core import utcnow_iso
from datetime import date
from pathlib import Path
import pytest
@@ -21,7 +19,7 @@ from padelnomics.content.routes import (
bake_scenario_cards,
is_reserved_path,
)
from padelnomics.core import execute, fetch_all, fetch_one, slugify
from padelnomics.core import execute, fetch_all, fetch_one, slugify, utcnow_iso
from padelnomics.planner.calculator import calc, validate_state
SCHEMA_PATH = Path(__file__).parent.parent / "src" / "padelnomics" / "migrations" / "schema.sql"

View File

@@ -3,9 +3,8 @@ Tests for the credit system (credits.py).
Pure SQL operations against real in-memory SQLite — no mocking needed.
"""
from padelnomics.core import utcnow_iso
import pytest
from padelnomics.core import utcnow_iso
from padelnomics.credits import (
InsufficientCredits,
add_credits,

View File

@@ -7,16 +7,13 @@ Integration tests exercise full request/response flows via Quart test client.
"""
import sqlite3
from padelnomics.core import utcnow_iso
from pathlib import Path
from unittest.mock import AsyncMock, patch
import pytest
from padelnomics import core
from padelnomics.core import utcnow_iso
from padelnomics.migrations.migrate import migrate
from padelnomics import core
# ── Fixtures & helpers ────────────────────────────────────────────
@@ -294,8 +291,9 @@ class TestLeadUnlockGate:
@pytest.mark.asyncio
async def test_route_imports_is_flag_enabled(self):
"""suppliers/routes.py imports is_flag_enabled (gate is wired up)."""
from padelnomics.suppliers.routes import unlock_lead
import inspect
from padelnomics.suppliers.routes import unlock_lead
src = inspect.getsource(unlock_lead)
assert "is_flag_enabled" in src
assert "lead_unlock" in src

View File

@@ -1,11 +1,10 @@
"""Tests for the SEO metrics module: queries, sync functions, admin routes."""
from datetime import UTC, datetime, timedelta
from padelnomics.core import utcnow_iso
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from padelnomics.core import utcnow_iso
from padelnomics.seo._queries import (
cleanup_old_metrics,
get_article_scorecard,

View File

@@ -8,19 +8,16 @@ supervisor.py lives in src/padelnomics/ (not a uv workspace package), so we
add src/ to sys.path before importing.
"""
import sys
# Load supervisor.py directly by path — avoids clashing with the web app's
# 'padelnomics' namespace (which is the installed web package).
import importlib.util as _ilu
import textwrap
import tomllib
from datetime import UTC, datetime, timedelta
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
# Load supervisor.py directly by path — avoids clashing with the web app's
# 'padelnomics' namespace (which is the installed web package).
import importlib.util as _ilu
_SUP_PATH = Path(__file__).parent.parent.parent / "src" / "padelnomics" / "supervisor.py"
_spec = _ilu.spec_from_file_location("padelnomics_supervisor", _SUP_PATH)
sup = _ilu.module_from_spec(_spec)
@@ -32,7 +29,6 @@ from padelnomics_extract.proxy import (
make_sticky_selector,
)
# ── load_workflows ────────────────────────────────────────────────

View File

@@ -6,12 +6,11 @@ Uses the existing client, db, sign_payload from conftest.
"""
import json
from datetime import UTC, datetime
from padelnomics.core import utcnow_iso
from unittest.mock import AsyncMock, patch
import pytest
from conftest import sign_payload
from padelnomics.core import utcnow_iso
WEBHOOK_PATH = "/billing/webhook/paddle"
SIG_HEADER = "Paddle-Signature"