feat: SEO/GEO admin hub — GSC, Bing, Umami sync + search/funnel/scorecard views
# Conflicts: # CHANGELOG.md # uv.lock # web/src/padelnomics/admin/templates/admin/base_admin.html # web/src/padelnomics/core.py
This commit is contained in:
@@ -7,6 +7,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
||||
## [Unreleased]
|
||||
|
||||
### Added
|
||||
- **SEO/GEO admin hub** — syncs search performance data from Google Search Console (service
|
||||
account auth), Bing Webmaster Tools (API key), and Umami (bearer token) into 3 new SQLite
|
||||
tables (`seo_search_metrics`, `seo_analytics_metrics`, `seo_sync_log`); daily background
|
||||
sync via worker scheduler at 6am UTC; admin dashboard at `/admin/seo` with three HTMX tab
|
||||
views: search performance (top queries, top pages, country/device breakdown), full funnel
|
||||
(impressions → clicks → pageviews → visitors → planner users → leads), and per-article
|
||||
scorecard with attention flags (low CTR, no clicks); manual "Sync Now" button; 12-month
|
||||
data retention with automatic cleanup; all data sources optional (skip silently if not
|
||||
configured)
|
||||
- **Landing zone backup to R2** — append-only landing files (`data/landing/*.json.gz`)
|
||||
synced to Cloudflare R2 every 30 minutes via systemd timer + rclone; extraction state
|
||||
DB (`.state.sqlite`) continuously replicated via Litestream (second DB entry in existing
|
||||
|
||||
@@ -111,6 +111,7 @@
|
||||
- [x] English legal pages (GDPR, proper controller identity)
|
||||
- [x] Cookie consent banner (functional/A/B categories, 1-year cookie)
|
||||
- [x] Virtual office address on imprint
|
||||
- [x] SEO/GEO admin hub — GSC + Bing + Umami sync, search/funnel/scorecard views, daily background sync
|
||||
|
||||
### Other
|
||||
- [x] A/B testing framework (`@ab_test` decorator + Umami `data-tag`)
|
||||
@@ -140,7 +141,7 @@ _Move here when you start working on it._
|
||||
| Publish SEO articles: run `seed_content --generate` on prod (or trigger from admin) | First LinkedIn post |
|
||||
| Wipe 5 test suppliers (`example.com` entries from `seed_dev_data.py`) | |
|
||||
| Verify Resend production API key — test magic link email | |
|
||||
| Submit sitemap to Google Search Console | Set up Google Search Console + Bing Webmaster Tools |
|
||||
| Submit sitemap to Google Search Console | Set up Google Search Console + Bing Webmaster Tools (SEO hub ready — just add env vars) |
|
||||
| Verify Litestream R2 backup running on prod | |
|
||||
|
||||
### Week 1–2 — First Revenue
|
||||
|
||||
226
uv.lock
generated
226
uv.lock
generated
@@ -356,6 +356,65 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cssselect2"
|
||||
version = "0.9.0"
|
||||
@@ -511,6 +570,77 @@ woff = [
|
||||
{ name = "zopfli" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-core"
|
||||
version = "2.30.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-auth" },
|
||||
{ name = "googleapis-common-protos" },
|
||||
{ name = "proto-plus" },
|
||||
{ name = "protobuf" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/22/98/586ec94553b569080caef635f98a3723db36a38eac0e3d7eb3ea9d2e4b9a/google_api_core-2.30.0.tar.gz", hash = "sha256:02edfa9fab31e17fc0befb5f161b3bf93c9096d99aed584625f38065c511ad9b", size = 176959, upload-time = "2026-02-18T20:28:11.926Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl", hash = "sha256:80be49ee937ff9aba0fd79a6eddfde35fe658b9953ab9b79c57dd7061afa8df5", size = 173288, upload-time = "2026-02-18T20:28:10.367Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.190.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-api-core" },
|
||||
{ name = "google-auth" },
|
||||
{ name = "google-auth-httplib2" },
|
||||
{ name = "httplib2" },
|
||||
{ name = "uritemplate" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e4/8d/4ab3e3516b93bb50ed7814738ea61d49cba3f72f4e331dc9518ae2731e92/google_api_python_client-2.190.0.tar.gz", hash = "sha256:5357f34552e3724d80d2604c8fa146766e0a9d6bb0afada886fafed9feafeef6", size = 14111143, upload-time = "2026-02-12T00:38:03.37Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/ad/223d5f4b0b987669ffeb3eadd7e9f85ece633aa7fd3246f1e2f6238e1e05/google_api_python_client-2.190.0-py3-none-any.whl", hash = "sha256:d9b5266758f96c39b8c21d9bbfeb4e58c14dbfba3c931f7c5a8d7fdcd292dd57", size = 14682070, upload-time = "2026-02-12T00:38:00.974Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-auth"
|
||||
version = "2.48.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cryptography" },
|
||||
{ name = "pyasn1-modules" },
|
||||
{ name = "rsa" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0c/41/242044323fbd746615884b1c16639749e73665b718209946ebad7ba8a813/google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce", size = 326522, upload-time = "2026-01-26T19:22:47.157Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-auth-httplib2"
|
||||
version = "0.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-auth" },
|
||||
{ name = "httplib2" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d5/ad/c1f2b1175096a8d04cf202ad5ea6065f108d26be6fc7215876bde4a7981d/google_auth_httplib2-0.3.0.tar.gz", hash = "sha256:177898a0175252480d5ed916aeea183c2df87c1f9c26705d74ae6b951c268b0b", size = 11134, upload-time = "2025-12-15T22:13:51.825Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/99/d5/3c97526c8796d3caf5f4b3bed2b05e8a7102326f00a334e7a438237f3b22/google_auth_httplib2-0.3.0-py3-none-any.whl", hash = "sha256:426167e5df066e3f5a0fc7ea18768c08e7296046594ce4c8c409c2457dd1f776", size = 9529, upload-time = "2025-12-15T22:13:51.048Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "googleapis-common-protos"
|
||||
version = "1.72.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "3.3.1"
|
||||
@@ -607,6 +737,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httplib2"
|
||||
version = "0.31.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyparsing" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c1/1f/e86365613582c027dda5ddb64e1010e57a3d53e99ab8a72093fa13d565ec/httplib2-0.31.2.tar.gz", hash = "sha256:385e0869d7397484f4eab426197a4c020b606edd43372492337c0b4010ae5d24", size = 250800, upload-time = "2026-01-23T11:04:44.165Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/90/fd509079dfcab01102c0fdd87f3a9506894bc70afcf9e9785ef6b2b3aff6/httplib2-0.31.2-py3-none-any.whl", hash = "sha256:dbf0c2fa3862acf3c55c078ea9c0bc4481d7dc5117cae71be9514912cf9f8349", size = 91099, upload-time = "2026-01-23T11:04:42.78Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
@@ -1152,6 +1294,9 @@ source = { editable = "web" }
|
||||
dependencies = [
|
||||
{ name = "aiosqlite" },
|
||||
{ name = "duckdb" },
|
||||
{ name = "google-api-python-client" },
|
||||
{ name = "google-auth" },
|
||||
{ name = "httpx" },
|
||||
{ name = "hypercorn" },
|
||||
{ name = "itsdangerous" },
|
||||
{ name = "jinja2" },
|
||||
@@ -1169,6 +1314,9 @@ dependencies = [
|
||||
requires-dist = [
|
||||
{ name = "aiosqlite", specifier = ">=0.19.0" },
|
||||
{ name = "duckdb", specifier = ">=1.0.0" },
|
||||
{ name = "google-api-python-client", specifier = ">=2.100.0" },
|
||||
{ name = "google-auth", specifier = ">=2.23.0" },
|
||||
{ name = "httpx", specifier = ">=0.27.0" },
|
||||
{ name = "hypercorn", specifier = ">=0.17.0" },
|
||||
{ name = "itsdangerous", specifier = ">=2.1.0" },
|
||||
{ name = "jinja2", specifier = ">=3.1.0" },
|
||||
@@ -1408,6 +1556,33 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proto-plus"
|
||||
version = "1.27.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3a/02/8832cde80e7380c600fbf55090b6ab7b62bd6825dbedde6d6657c15a1f8e/proto_plus-1.27.1.tar.gz", hash = "sha256:912a7460446625b792f6448bade9e55cd4e41e6ac10e27009ef71a7f317fa147", size = 56929, upload-time = "2026-02-02T17:34:49.035Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl", hash = "sha256:e4643061f3a4d0de092d62aa4ad09fa4756b2cbb89d4627f3985018216f9fefc", size = 50480, upload-time = "2026-02-02T17:34:47.339Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "protobuf"
|
||||
version = "6.33.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ptyprocess"
|
||||
version = "0.7.0"
|
||||
@@ -1476,6 +1651,27 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/50/f2/c0e76a0b451ffdf0cf788932e182758eb7558953f4f27f1aff8e2518b653/pyarrow-23.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:527e8d899f14bd15b740cd5a54ad56b7f98044955373a17179d5956ddb93d9ce", size = 28365807, upload-time = "2026-02-16T10:14:03.892Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyasn1"
|
||||
version = "0.6.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyasn1-modules"
|
||||
version = "0.4.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyasn1" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "3.0"
|
||||
@@ -1627,6 +1823,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.3.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyphen"
|
||||
version = "0.17.2"
|
||||
@@ -2039,6 +2244,18 @@ jupyter = [
|
||||
{ name = "ipywidgets" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "4.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyasn1" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruamel-yaml"
|
||||
version = "0.19.1"
|
||||
@@ -2383,6 +2600,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uritemplate"
|
||||
version = "4.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.6.3"
|
||||
|
||||
@@ -18,6 +18,9 @@ dependencies = [
|
||||
"duckdb>=1.0.0",
|
||||
"pyarrow>=23.0.1",
|
||||
"pyyaml>=6.0",
|
||||
"httpx>=0.27.0",
|
||||
"google-api-python-client>=2.100.0",
|
||||
"google-auth>=2.23.0",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
|
||||
@@ -1767,3 +1767,134 @@ async def _rebuild_article(article_id: int):
|
||||
body_html = await bake_scenario_cards(body_html, lang=lang)
|
||||
BUILD_DIR.mkdir(parents=True, exist_ok=True)
|
||||
(BUILD_DIR / f"{article['slug']}.html").write_text(body_html)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# SEO Hub
|
||||
# =============================================================================
|
||||
|
||||
@bp.route("/seo")
@role_required("admin")
async def seo():
    """SEO metrics hub — overview + tabs for search, funnel, scorecard.

    Renders the shell page; the three tab bodies (search / funnel /
    scorecard) load lazily via HTMX from their own endpoints.

    Query params:
        days: lookback window in days, clamped to [1, 730]; defaults to 28.
    """
    from ..seo import get_search_performance, get_sync_status

    # Parse ?days= defensively: a non-numeric value must not 500 the page
    # (the previous int(...) call raised an unhandled ValueError).
    try:
        date_range_days = int(request.args.get("days", "28") or "28")
    except (TypeError, ValueError):
        date_range_days = 28
    date_range_days = max(1, min(date_range_days, 730))

    overview = await get_search_performance(date_range_days=date_range_days)
    sync_status = await get_sync_status()

    return await render_template(
        "admin/seo.html",
        overview=overview,
        sync_status=sync_status,
        date_range_days=date_range_days,
    )
|
||||
|
||||
|
||||
@bp.route("/seo/search")
@role_required("admin")
async def seo_search():
    """HTMX partial: search performance tab.

    Query params:
        days: lookback window in days, clamped to [1, 730]; defaults to 28.
        source: optional search-data source filter ("gsc" / "bing");
            empty means all sources.
    """
    from ..seo import (
        get_country_breakdown,
        get_device_breakdown,
        get_top_pages,
        get_top_queries,
    )

    # Guard against non-numeric ?days= — previously this raised an
    # unhandled ValueError and 500'd the HTMX swap.
    try:
        days = int(request.args.get("days", "28") or "28")
    except (TypeError, ValueError):
        days = 28
    days = max(1, min(days, 730))
    source = request.args.get("source", "") or None

    queries = await get_top_queries(date_range_days=days, source=source)
    pages = await get_top_pages(date_range_days=days, source=source)
    countries = await get_country_breakdown(date_range_days=days)
    devices = await get_device_breakdown(date_range_days=days)

    return await render_template(
        "admin/partials/seo_search.html",
        queries=queries,
        pages=pages,
        countries=countries,
        devices=devices,
        date_range_days=days,
        current_source=source,
    )
|
||||
|
||||
|
||||
@bp.route("/seo/funnel")
@role_required("admin")
async def seo_funnel():
    """HTMX partial: full funnel view (impressions → clicks → … → leads).

    Query params:
        days: lookback window in days, clamped to [1, 730]; defaults to 28.
    """
    from ..seo import get_funnel_metrics

    # Non-numeric ?days= previously raised an unhandled ValueError; fall
    # back to the default window instead of 500-ing the partial.
    try:
        days = int(request.args.get("days", "28") or "28")
    except (TypeError, ValueError):
        days = 28
    days = max(1, min(days, 730))
    funnel = await get_funnel_metrics(date_range_days=days)

    return await render_template(
        "admin/partials/seo_funnel.html",
        funnel=funnel,
        date_range_days=days,
    )
|
||||
|
||||
|
||||
@bp.route("/seo/scorecard")
@role_required("admin")
async def seo_scorecard():
    """HTMX partial: per-article scorecard with attention flags.

    Query params:
        days: lookback window in days, clamped to [1, 730]; defaults to 28.
        template_slug / country / language: optional filters (empty = all).
        sort: column to sort by (whitelisted; defaults to "impressions").
        dir: "asc" or "desc" (defaults to "desc").
    """
    from ..seo import get_article_scorecard

    # Guard against non-numeric ?days= — previously an unhandled ValueError.
    try:
        days = int(request.args.get("days", "28") or "28")
    except (TypeError, ValueError):
        days = 28
    days = max(1, min(days, 730))
    template_slug = request.args.get("template_slug", "") or None
    country_filter = request.args.get("country", "") or None
    language = request.args.get("language", "") or None

    # Normalise sort parameters to known-good values so arbitrary user input
    # never reaches the query layer (defence in depth against ORDER BY
    # injection) and typos fall back to sane defaults.
    allowed_sorts = {
        "impressions", "clicks", "ctr", "position_avg", "pageviews", "published_at",
    }
    sort_by = request.args.get("sort", "impressions")
    if sort_by not in allowed_sorts:
        sort_by = "impressions"
    sort_dir = request.args.get("dir", "desc")
    if sort_dir not in ("asc", "desc"):
        sort_dir = "desc"

    scorecard = await get_article_scorecard(
        date_range_days=days,
        template_slug=template_slug,
        country=country_filter,
        language=language,
        sort_by=sort_by,
        sort_dir=sort_dir,
    )

    return await render_template(
        "admin/partials/seo_scorecard.html",
        scorecard=scorecard,
        date_range_days=days,
        current_template=template_slug,
        current_country=country_filter,
        current_language=language,
        current_sort=sort_by,
        current_dir=sort_dir,
    )
|
||||
|
||||
|
||||
@bp.route("/seo/sync", methods=["POST"])
@role_required("admin")
@csrf_protect
async def seo_sync_now():
    """Manually trigger SEO data sync.

    Accepts a form field ``source`` — one of "gsc", "bing", "umami", or
    "all" (the default) — and enqueues the matching worker task(s) before
    redirecting back to the SEO hub.
    """
    from ..worker import enqueue

    form = await request.form
    source = form.get("source", "all")

    known_sources = ("gsc", "bing", "umami")
    if source == "all":
        # Fan out one worker task per configured data source.
        for name in known_sources:
            await enqueue(f"sync_{name}")
        await flash("All SEO syncs queued.", "success")
    elif source in known_sources:
        await enqueue(f"sync_{source}")
        await flash(f"{source.upper()} sync queued.", "success")
    else:
        await flash("Unknown source.", "error")

    return redirect(url_for("admin.seo"))
|
||||
|
||||
@@ -104,6 +104,12 @@
|
||||
Audiences
|
||||
</a>
|
||||
|
||||
<div class="admin-sidebar__section">Analytics</div>
|
||||
<a href="{{ url_for('admin.seo') }}" class="{% if admin_page == 'seo' %}active{% endif %}">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" d="M2.25 18 9 11.25l4.306 4.306a11.95 11.95 0 0 1 5.814-5.518l2.74-1.22m0 0-5.94-2.281m5.94 2.28-2.28 5.941"/></svg>
|
||||
SEO Hub
|
||||
</a>
|
||||
|
||||
<div class="admin-sidebar__section">System</div>
|
||||
<a href="{{ url_for('admin.tasks') }}" class="{% if admin_page == 'tasks' %}active{% endif %}">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" d="M10.5 6h9.75M10.5 6a1.5 1.5 0 1 1-3 0m3 0a1.5 1.5 0 1 0-3 0M3.75 6H7.5m3 12h9.75m-9.75 0a1.5 1.5 0 0 1-3 0m3 0a1.5 1.5 0 0 0-3 0m-3.75 0H7.5m9-6h3.75m-3.75 0a1.5 1.5 0 0 1-3 0m3 0a1.5 1.5 0 0 0-3 0m-9.75 0h9.75"/></svg>
|
||||
|
||||
@@ -0,0 +1,96 @@
|
||||
<!-- Full Funnel: Search → Analytics → Conversions -->
{# HTMX partial rendered by admin.seo_funnel. Expects `funnel` with the
   counters read below (impressions, clicks, pageviews, visitors,
   planner_users, leads) and the precomputed ratios (ctr, click_to_view,
   visitor_to_planner, planner_to_lead). #}
<style>
.funnel-stage {
  display: flex; align-items: center; gap: 1rem; padding: 0.75rem 1rem;
  border-bottom: 1px solid #F1F5F9;
}
.funnel-stage:last-child { border-bottom: none; }
.funnel-label { width: 120px; flex-shrink: 0; }
.funnel-label span { font-size: 0.8125rem; font-weight: 600; color: #0F172A; }
.funnel-label small { font-size: 0.6875rem; color: #94A3B8; display: block; }
.funnel-bar-wrap { flex: 1; height: 28px; background: #F1F5F9; border-radius: 4px; overflow: hidden; }
.funnel-bar { height: 100%; border-radius: 4px; min-width: 2px; transition: width 0.3s; }
.funnel-value { width: 100px; text-align: right; flex-shrink: 0; }
.funnel-value span { font-size: 0.9375rem; font-weight: 700; color: #0F172A; }
.funnel-value small { font-size: 0.6875rem; color: #94A3B8; display: block; }
.funnel-section-label {
  font-size: 0.5625rem; font-weight: 700; text-transform: uppercase;
  letter-spacing: 0.06em; color: #94A3B8; padding: 0.5rem 1rem 0.25rem;
}
</style>

{# Widest stage scales every bar to 100%; `or 1` guards the divisions below
   against an all-zero funnel ((list | max) binds before `or`). #}
{% set max_val = [funnel.impressions, funnel.clicks, funnel.pageviews, funnel.visitors, funnel.planner_users, funnel.leads] | max or 1 %}

<div class="card">
  <div class="funnel-section-label">Search (GSC + Bing)</div>

  <div class="funnel-stage">
    <div class="funnel-label"><span>Impressions</span><small>Search results shown</small></div>
    <div class="funnel-bar-wrap">
      <div class="funnel-bar" style="width:{{ (funnel.impressions / max_val * 100) | round(1) }}%;background:#3B82F6"></div>
    </div>
    <div class="funnel-value">
      <span>{{ "{:,}".format(funnel.impressions | int) }}</span>
    </div>
  </div>

  <div class="funnel-stage">
    <div class="funnel-label"><span>Clicks</span><small>CTR: {{ "%.1f" | format(funnel.ctr * 100) }}%</small></div>
    <div class="funnel-bar-wrap">
      <div class="funnel-bar" style="width:{{ (funnel.clicks / max_val * 100) | round(1) }}%;background:#2563EB"></div>
    </div>
    <div class="funnel-value">
      <span>{{ "{:,}".format(funnel.clicks | int) }}</span>
    </div>
  </div>

  <div class="funnel-section-label" style="border-top:1px solid #E2E8F0;margin-top:0.25rem;padding-top:0.75rem">Analytics (Umami)</div>

  <div class="funnel-stage">
    {# Step-conversion labels only render when the upstream count is non-zero. #}
    <div class="funnel-label"><span>Pageviews</span><small>{% if funnel.clicks %}{{ "%.0f" | format(funnel.click_to_view * 100) }}% of clicks{% endif %}</small></div>
    <div class="funnel-bar-wrap">
      <div class="funnel-bar" style="width:{{ (funnel.pageviews / max_val * 100) | round(1) }}%;background:#8B5CF6"></div>
    </div>
    <div class="funnel-value">
      <span>{{ "{:,}".format(funnel.pageviews | int) }}</span>
    </div>
  </div>

  <div class="funnel-stage">
    <div class="funnel-label"><span>Visitors</span><small>Unique</small></div>
    <div class="funnel-bar-wrap">
      <div class="funnel-bar" style="width:{{ (funnel.visitors / max_val * 100) | round(1) }}%;background:#7C3AED"></div>
    </div>
    <div class="funnel-value">
      <span>{{ "{:,}".format(funnel.visitors | int) }}</span>
    </div>
  </div>

  <div class="funnel-section-label" style="border-top:1px solid #E2E8F0;margin-top:0.25rem;padding-top:0.75rem">Conversions (App)</div>

  <div class="funnel-stage">
    <div class="funnel-label"><span>Planner Users</span><small>{% if funnel.visitors %}{{ "%.1f" | format(funnel.visitor_to_planner * 100) }}% of visitors{% endif %}</small></div>
    <div class="funnel-bar-wrap">
      <div class="funnel-bar" style="width:{{ (funnel.planner_users / max_val * 100) | round(1) }}%;background:#10B981"></div>
    </div>
    <div class="funnel-value">
      <span>{{ "{:,}".format(funnel.planner_users | int) }}</span>
    </div>
  </div>

  <div class="funnel-stage">
    <div class="funnel-label"><span>Lead Requests</span><small>{% if funnel.planner_users %}{{ "%.1f" | format(funnel.planner_to_lead * 100) }}% of planners{% endif %}</small></div>
    <div class="funnel-bar-wrap">
      <div class="funnel-bar" style="width:{{ (funnel.leads / max_val * 100) | round(1) }}%;background:#059669"></div>
    </div>
    <div class="funnel-value">
      <span>{{ "{:,}".format(funnel.leads | int) }}</span>
    </div>
  </div>
</div>

{# Empty-state hint shown when no stage of the funnel has any data yet. #}
{% if not funnel.impressions and not funnel.pageviews and not funnel.planner_users %}
<div class="card text-center mt-4" style="padding:1.5rem">
  <p class="text-slate text-sm">No funnel data yet. Run a sync to populate search and analytics metrics.</p>
</div>
{% endif %}
|
||||
@@ -0,0 +1,104 @@
|
||||
<!-- Article Scorecard: per-article performance with attention flags -->
{# HTMX partial rendered by admin.seo_scorecard. `scorecard` is a list of
   per-article rows; filter/sort state arrives as current_* variables. #}

<!-- Filters: any change re-fetches this partial into #tab-content -->
<div class="card mb-4" style="padding:0.75rem 1rem;">
  <form class="flex flex-wrap gap-3 items-end"
        hx-get="{{ url_for('admin.seo_scorecard') }}"
        hx-target="#tab-content"
        hx-trigger="change"
        hx-include="this">
    <input type="hidden" name="days" value="{{ date_range_days }}">

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Template</label>
      <select name="template_slug" class="form-input" style="min-width:140px">
        <option value="">All</option>
        {# Deduplicate slugs with map/select/unique. The previous version
           tested `not in seen_templates` against a variable that was never
           defined — Jinja's Undefined raises on `in`, breaking the dropdown
           (and it would never have deduplicated anyway). `select` drops
           empty/None slugs, matching the old `if item.template_slug` guard. #}
        {% for slug in scorecard | map(attribute='template_slug') | select | unique %}
          <option value="{{ slug }}" {% if slug == current_template %}selected{% endif %}>{{ slug }}</option>
        {% endfor %}
      </select>
    </div>

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Language</label>
      <select name="language" class="form-input" style="min-width:80px">
        <option value="">All</option>
        <option value="en" {% if current_language == 'en' %}selected{% endif %}>EN</option>
        <option value="de" {% if current_language == 'de' %}selected{% endif %}>DE</option>
      </select>
    </div>

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Sort</label>
      <select name="sort" class="form-input" style="min-width:120px">
        {% for col, label in [('impressions', 'Impressions'), ('clicks', 'Clicks'), ('ctr', 'CTR'), ('position_avg', 'Position'), ('pageviews', 'Pageviews'), ('published_at', 'Published')] %}
          <option value="{{ col }}" {% if col == current_sort %}selected{% endif %}>{{ label }}</option>
        {% endfor %}
      </select>
    </div>

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Dir</label>
      <select name="dir" class="form-input" style="min-width:80px">
        <option value="desc" {% if current_dir == 'desc' %}selected{% endif %}>Desc</option>
        <option value="asc" {% if current_dir == 'asc' %}selected{% endif %}>Asc</option>
      </select>
    </div>
  </form>
</div>

{% if scorecard %}
<div class="card" style="overflow-x:auto">
  <table class="table">
    <thead>
      <tr>
        <th>Title</th>
        <th style="text-align:right">Impressions</th>
        <th style="text-align:right">Clicks</th>
        <th style="text-align:right">CTR</th>
        <th style="text-align:right">Pos</th>
        <th style="text-align:right">Views</th>
        <th style="text-align:right">Bounce</th>
        <th>Published</th>
        <th>Flags</th>
      </tr>
    </thead>
    <tbody>
      {% for a in scorecard %}
      <tr>
        <td style="max-width:250px">
          <a href="{{ a.url_path }}" target="_blank" class="text-sm" title="{{ a.url_path }}">{{ a.title or a.url_path }}</a>
          {% if a.template_slug %}
            <br><span class="text-xs text-slate">{{ a.template_slug }}</span>
          {% endif %}
        </td>
        <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(a.impressions | int) }}</td>
        <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(a.clicks | int) }}</td>
        <td style="text-align:right" class="mono text-sm">{{ "%.1f" | format((a.ctr or 0) * 100) }}%</td>
        <td style="text-align:right" class="mono text-sm">{{ "%.1f" | format(a.position_avg or 0) }}</td>
        <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(a.pageviews | int) }}</td>
        <td style="text-align:right" class="mono text-sm">
          {# bounce_rate may legitimately be NULL (no analytics data) — show a dash. #}
          {% if a.bounce_rate is not none %}{{ "%.0f" | format(a.bounce_rate * 100) }}%{% else %}-{% endif %}
        </td>
        <td class="mono text-sm">{{ a.published_at[:10] if a.published_at else '-' }}</td>
        <td>
          {% if a.flag_low_ctr %}
            <span class="badge-warning" style="font-size:0.625rem">Low CTR</span>
          {% endif %}
          {% if a.flag_no_clicks %}
            <span class="badge-danger" style="font-size:0.625rem">No Clicks</span>
          {% endif %}
        </td>
      </tr>
      {% endfor %}
    </tbody>
  </table>
</div>
<p class="text-xs text-slate mt-2">{{ scorecard | length }} articles shown</p>
{% else %}
<div class="card text-center" style="padding:2rem">
  <p class="text-slate text-sm">No published articles match the current filters, or no search/analytics data synced yet.</p>
</div>
{% endif %}
|
||||
@@ -0,0 +1,132 @@
|
||||
{# HTMX partial rendered by admin.seo_search: top queries, top pages, and
   country/device breakdowns for the selected date range. `current_source`
   is None (all), 'gsc', or 'bing'. #}
<!-- Source filter -->
<div class="flex gap-2 mb-4">
  <button class="btn-outline btn-sm {% if not current_source %}font-bold{% endif %}"
          hx-get="{{ url_for('admin.seo_search') }}?days={{ date_range_days }}"
          hx-target="#tab-content">All</button>
  <button class="btn-outline btn-sm {% if current_source == 'gsc' %}font-bold{% endif %}"
          hx-get="{{ url_for('admin.seo_search') }}?days={{ date_range_days }}&source=gsc"
          hx-target="#tab-content">GSC</button>
  <button class="btn-outline btn-sm {% if current_source == 'bing' %}font-bold{% endif %}"
          hx-get="{{ url_for('admin.seo_search') }}?days={{ date_range_days }}&source=bing"
          hx-target="#tab-content">Bing</button>
</div>

<div class="grid-2 mb-6" style="gap:1.5rem">
  <!-- Top Queries -->
  <section>
    <p class="text-xs font-semibold text-slate uppercase tracking-wider mb-2">Top Queries</p>
    {% if queries %}
    <div class="card">
      <table class="table">
        <thead>
          <tr>
            <th>Query</th>
            <th style="text-align:right">Impressions</th>
            <th style="text-align:right">Clicks</th>
            <th style="text-align:right">CTR</th>
            <th style="text-align:right">Pos</th>
          </tr>
        </thead>
        <tbody>
          {# Cap the table at 20 rows regardless of how many rows the query returned. #}
          {% for q in queries[:20] %}
          <tr>
            <td class="text-sm">{{ q.query }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(q.impressions | int) }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(q.clicks | int) }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "%.1f" | format((q.ctr or 0) * 100) }}%</td>
            <td style="text-align:right" class="mono text-sm">{{ "%.1f" | format(q.position_avg or 0) }}</td>
          </tr>
          {% endfor %}
        </tbody>
      </table>
    </div>
    {% else %}
    <div class="card text-center" style="padding:1.5rem">
      <p class="text-slate text-sm">No query data yet. Run a sync to populate.</p>
    </div>
    {% endif %}
  </section>

  <!-- Top Pages -->
  <section>
    <p class="text-xs font-semibold text-slate uppercase tracking-wider mb-2">Top Pages</p>
    {% if pages %}
    <div class="card">
      <table class="table">
        <thead>
          <tr>
            <th>Page</th>
            <th style="text-align:right">Impressions</th>
            <th style="text-align:right">Clicks</th>
            <th style="text-align:right">CTR</th>
            <th style="text-align:right">Pos</th>
          </tr>
        </thead>
        <tbody>
          {% for p in pages[:20] %}
          <tr>
            <td class="text-sm" style="max-width:200px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap" title="{{ p.page_url }}">{{ p.page_url }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(p.impressions | int) }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(p.clicks | int) }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "%.1f" | format((p.ctr or 0) * 100) }}%</td>
            <td style="text-align:right" class="mono text-sm">{{ "%.1f" | format(p.position_avg or 0) }}</td>
          </tr>
          {% endfor %}
        </tbody>
      </table>
    </div>
    {% else %}
    <div class="card text-center" style="padding:1.5rem">
      <p class="text-slate text-sm">No page data yet.</p>
    </div>
    {% endif %}
  </section>
</div>

<div class="grid-2" style="gap:1.5rem">
  <!-- Country Breakdown -->
  <section>
    <p class="text-xs font-semibold text-slate uppercase tracking-wider mb-2">By Country</p>
    {% if countries %}
    <div class="card">
      <table class="table">
        <thead><tr><th>Country</th><th style="text-align:right">Impressions</th><th style="text-align:right">Clicks</th></tr></thead>
        <tbody>
          {% for c in countries[:15] %}
          <tr>
            <td class="text-sm">{{ c.country | upper }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(c.impressions | int) }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(c.clicks | int) }}</td>
          </tr>
          {% endfor %}
        </tbody>
      </table>
    </div>
    {% else %}
    <div class="card text-center" style="padding:1rem"><p class="text-slate text-sm">No country data.</p></div>
    {% endif %}
  </section>

  <!-- Device Breakdown (Bing does not report device dimension, hence "GSC only") -->
  <section>
    <p class="text-xs font-semibold text-slate uppercase tracking-wider mb-2">By Device (GSC)</p>
    {% if devices %}
    <div class="card">
      <table class="table">
        <thead><tr><th>Device</th><th style="text-align:right">Impressions</th><th style="text-align:right">Clicks</th></tr></thead>
        <tbody>
          {% for d in devices %}
          <tr>
            <td class="text-sm">{{ d.device | capitalize }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(d.impressions | int) }}</td>
            <td style="text-align:right" class="mono text-sm">{{ "{:,}".format(d.clicks | int) }}</td>
          </tr>
          {% endfor %}
        </tbody>
      </table>
    </div>
    {% else %}
    <div class="card text-center" style="padding:1rem"><p class="text-slate text-sm">No device data (GSC only).</p></div>
    {% endif %}
  </section>
</div>
|
||||
149
web/src/padelnomics/admin/templates/admin/seo.html
Normal file
149
web/src/padelnomics/admin/templates/admin/seo.html
Normal file
@@ -0,0 +1,149 @@
|
||||
{% extends "admin/base_admin.html" %}
{% set admin_page = "seo" %}
{% block title %}SEO Hub - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_head %}
<style>
.seo-tabs { display: flex; gap: 0; border-bottom: 2px solid #E2E8F0; margin-bottom: 1.5rem; }
.seo-tabs button {
  padding: 0.625rem 1.25rem; font-size: 0.8125rem; font-weight: 600;
  color: #64748B; background: none; border: none; cursor: pointer;
  border-bottom: 2px solid transparent; margin-bottom: -2px; transition: all 0.15s;
}
.seo-tabs button:hover { color: #1D4ED8; }
.seo-tabs button.active { color: #1D4ED8; border-bottom-color: #1D4ED8; }
.date-pills { display: flex; gap: 4px; }
.date-pills button {
  padding: 4px 12px; font-size: 0.75rem; font-weight: 600;
  border-radius: 999px; border: 1px solid #E2E8F0; background: #fff;
  color: #64748B; cursor: pointer; transition: all 0.15s;
}
.date-pills button:hover { border-color: #1D4ED8; color: #1D4ED8; }
.date-pills button.active { background: #1D4ED8; color: #fff; border-color: #1D4ED8; }
.sync-row { display: flex; gap: 1rem; align-items: center; flex-wrap: wrap; }
.sync-badge { display: inline-flex; align-items: center; gap: 4px; font-size: 0.6875rem; padding: 2px 8px; border-radius: 4px; }
.sync-ok { background: #F0FDF4; color: #16A34A; }
.sync-fail { background: #FEF2F2; color: #DC2626; }
.sync-none { background: #F8FAFC; color: #94A3B8; }
</style>
{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
  <div>
    <h1 class="text-2xl">SEO &amp; Analytics Hub</h1>
    <p class="text-sm text-slate mt-1">Search performance, funnel metrics, and article scorecard</p>
  </div>
  <div class="flex gap-2 items-center">
    <form method="post" action="{{ url_for('admin.seo_sync_now') }}" class="m-0">
      <input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
      <input type="hidden" name="source" value="all">
      <button type="submit" class="btn btn-sm">Sync Now</button>
    </form>
    <a href="{{ url_for('admin.index') }}" class="btn-outline btn-sm">Dashboard</a>
  </div>
</header>

<!-- Sync Status: one badge per source from seo_sync_log -->
<div class="sync-row mb-4">
  <span class="text-xs font-semibold text-slate uppercase tracking-wider">Last sync:</span>
  {% for s in sync_status %}
  <span class="sync-badge {% if s.status == 'success' %}sync-ok{% elif s.status == 'failed' %}sync-fail{% else %}sync-none{% endif %}">
    {{ s.source | upper }}
    {% if s.status == 'success' %}
      {{ s.completed_at[:16] if s.completed_at else '' }} ({{ s.rows_synced }} rows)
    {% elif s.status == 'failed' %}
      failed
    {% endif %}
  </span>
  {% endfor %}
  {% if not sync_status %}
  <span class="sync-badge sync-none">No syncs yet</span>
  {% endif %}
</div>

<!-- Date range selector: each pill reloads the page with ?days=N -->
<div class="flex justify-between items-center mb-6">
  <div class="date-pills" id="date-pills">
    {% for d, label in [(7, '7d'), (28, '28d'), (90, '3m'), (180, '6m'), (365, '12m')] %}
    <button class="{% if date_range_days == d %}active{% endif %}"
            onclick="setDateRange({{ d }})">{{ label }}</button>
    {% endfor %}
  </div>
</div>

<!-- Overview Cards. None-guard the averages with `or 0` — consistent with
     the (q.ctr or 0) guards in the partials; avg_ctr/avg_position can be
     NULL when no data has been synced yet and would crash the format filter. -->
<div style="display:grid;grid-template-columns:repeat(4,1fr);gap:0.75rem" class="mb-8">
  <div class="card text-center" style="padding:0.75rem">
    <p class="text-xs text-slate">Impressions</p>
    <p class="text-xl font-bold text-navy">{{ "{:,}".format(overview.total_impressions | int) }}</p>
  </div>
  <div class="card text-center" style="padding:0.75rem">
    <p class="text-xs text-slate">Clicks</p>
    <p class="text-xl font-bold text-navy">{{ "{:,}".format(overview.total_clicks | int) }}</p>
  </div>
  <div class="card text-center" style="padding:0.75rem">
    <p class="text-xs text-slate">Avg CTR</p>
    <p class="text-xl font-bold text-navy">{{ "%.1f" | format((overview.avg_ctr or 0) * 100) }}%</p>
  </div>
  <div class="card text-center" style="padding:0.75rem">
    <p class="text-xs text-slate">Avg Position</p>
    <p class="text-xl font-bold text-navy">{{ "%.1f" | format(overview.avg_position or 0) }}</p>
  </div>
</div>

<!-- Tabs: each button swaps its partial into #tab-content via HTMX -->
<div class="seo-tabs" id="seo-tabs">
  <button class="active" data-tab="search"
          hx-get="{{ url_for('admin.seo_search') }}?days={{ date_range_days }}"
          hx-target="#tab-content" hx-swap="innerHTML"
          hx-trigger="click">Search Performance</button>
  <button data-tab="funnel"
          hx-get="{{ url_for('admin.seo_funnel') }}?days={{ date_range_days }}"
          hx-target="#tab-content" hx-swap="innerHTML"
          hx-trigger="click">Full Funnel</button>
  <button data-tab="scorecard"
          hx-get="{{ url_for('admin.seo_scorecard') }}?days={{ date_range_days }}"
          hx-target="#tab-content" hx-swap="innerHTML"
          hx-trigger="click">Article Scorecard</button>
</div>

<!-- Tab Content: loads the search tab on page load -->
<div id="tab-content"
     hx-get="{{ url_for('admin.seo_search') }}?days={{ date_range_days }}"
     hx-trigger="load"
     hx-swap="innerHTML">
  <div class="card text-center" style="padding:2rem">
    <p class="text-slate">Loading...</p>
  </div>
</div>

<script>
// Tab switching: toggle the active class; HTMX performs the content swap.
document.getElementById('seo-tabs').addEventListener('click', function(e) {
  if (e.target.tagName === 'BUTTON') {
    this.querySelectorAll('button').forEach(b => b.classList.remove('active'));
    e.target.classList.add('active');
  }
});

// Date range switching. A full navigation is needed anyway to refresh the
// server-rendered overview cards, so just reload with the new ?days=.
// (The previous version also fired an htmx.ajax request and mutated pill /
// hx-get state immediately before navigating, which aborted that request
// and threw the work away.)
function setDateRange(days) {
  window.location.href = '{{ url_for("admin.seo") }}?days=' + days;
}
</script>
{% endblock %}
|
||||
@@ -54,6 +54,12 @@ class Config:
|
||||
UMAMI_API_TOKEN: str = os.getenv("UMAMI_API_TOKEN", "")
|
||||
UMAMI_WEBSITE_ID: str = "4474414b-58d6-4c6e-89a1-df5ea1f49d70"
|
||||
|
||||
# SEO metrics sync
|
||||
GSC_SERVICE_ACCOUNT_PATH: str = os.getenv("GSC_SERVICE_ACCOUNT_PATH", "")
|
||||
GSC_SITE_URL: str = os.getenv("GSC_SITE_URL", "")
|
||||
BING_WEBMASTER_API_KEY: str = os.getenv("BING_WEBMASTER_API_KEY", "")
|
||||
BING_SITE_URL: str = os.getenv("BING_SITE_URL", "")
|
||||
|
||||
RESEND_API_KEY: str = os.getenv("RESEND_API_KEY", "")
|
||||
EMAIL_FROM: str = _env("EMAIL_FROM", "hello@padelnomics.io")
|
||||
LEADS_EMAIL: str = _env("LEADS_EMAIL", "leads@padelnomics.io")
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
"""Add SEO metrics tables for GSC, Bing, and Umami data sync.
|
||||
|
||||
Three tables:
|
||||
- seo_search_metrics — daily search data per page+query (GSC + Bing)
|
||||
- seo_analytics_metrics — daily page analytics (Umami)
|
||||
- seo_sync_log — tracks sync state per source
|
||||
"""
|
||||
|
||||
|
||||
def up(conn):
    """Create the SEO metrics tables and their indexes (idempotent).

    Tables:
      - seo_search_metrics    — daily search rows per page+query (GSC + Bing)
      - seo_analytics_metrics — daily page analytics (Umami)
      - seo_sync_log          — one row per sync attempt, per source
    """
    ddl_statements = (
        # ── 1. Search metrics (GSC + Bing) ─────────────────────────────────
        """
        CREATE TABLE IF NOT EXISTS seo_search_metrics (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            source TEXT NOT NULL,
            metric_date TEXT NOT NULL,
            page_url TEXT NOT NULL,
            query TEXT,
            country TEXT,
            device TEXT,
            clicks INTEGER NOT NULL DEFAULT 0,
            impressions INTEGER NOT NULL DEFAULT 0,
            ctr REAL,
            position_avg REAL,
            created_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
        """,
        # COALESCE converts NULLs to '' for the unique index (SQLite treats
        # NULL as distinct in UNIQUE constraints, causing duplicate rows).
        """
        CREATE UNIQUE INDEX IF NOT EXISTS idx_seo_search_dedup
        ON seo_search_metrics(
            source, metric_date, page_url,
            COALESCE(query, ''), COALESCE(country, ''), COALESCE(device, '')
        )
        """,
        "CREATE INDEX IF NOT EXISTS idx_seo_search_date"
        " ON seo_search_metrics(metric_date)",
        "CREATE INDEX IF NOT EXISTS idx_seo_search_page"
        " ON seo_search_metrics(page_url)",
        # ── 2. Analytics metrics (Umami) ───────────────────────────────────
        """
        CREATE TABLE IF NOT EXISTS seo_analytics_metrics (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            metric_date TEXT NOT NULL,
            page_url TEXT NOT NULL,
            pageviews INTEGER NOT NULL DEFAULT 0,
            visitors INTEGER NOT NULL DEFAULT 0,
            bounce_rate REAL,
            time_avg_seconds INTEGER,
            created_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
        """,
        """
        CREATE UNIQUE INDEX IF NOT EXISTS idx_seo_analytics_dedup
        ON seo_analytics_metrics(metric_date, page_url)
        """,
        "CREATE INDEX IF NOT EXISTS idx_seo_analytics_date"
        " ON seo_analytics_metrics(metric_date)",
        # ── 3. Sync log ────────────────────────────────────────────────────
        """
        CREATE TABLE IF NOT EXISTS seo_sync_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            source TEXT NOT NULL,
            status TEXT NOT NULL,
            rows_synced INTEGER NOT NULL DEFAULT 0,
            error TEXT,
            started_at TEXT NOT NULL,
            completed_at TEXT,
            duration_ms INTEGER
        )
        """,
        "CREATE INDEX IF NOT EXISTS idx_seo_sync_source"
        " ON seo_sync_log(source, started_at)",
    )
    # Tables are created before the indexes that reference them; every
    # statement uses IF NOT EXISTS so re-running the migration is a no-op.
    for ddl in ddl_statements:
        conn.execute(ddl)
|
||||
36
web/src/padelnomics/seo/__init__.py
Normal file
36
web/src/padelnomics/seo/__init__.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""
|
||||
SEO metrics sync and query module.
|
||||
|
||||
Syncs data from Google Search Console, Bing Webmaster Tools, and Umami
|
||||
into SQLite tables. Query functions support the admin SEO hub views.
|
||||
"""
|
||||
|
||||
from ._bing import sync_bing
|
||||
from ._gsc import sync_gsc
|
||||
from ._queries import (
|
||||
cleanup_old_metrics,
|
||||
get_article_scorecard,
|
||||
get_country_breakdown,
|
||||
get_device_breakdown,
|
||||
get_funnel_metrics,
|
||||
get_search_performance,
|
||||
get_sync_status,
|
||||
get_top_pages,
|
||||
get_top_queries,
|
||||
)
|
||||
from ._umami import sync_umami
|
||||
|
||||
__all__ = [
|
||||
"sync_gsc",
|
||||
"sync_bing",
|
||||
"sync_umami",
|
||||
"get_search_performance",
|
||||
"get_top_queries",
|
||||
"get_top_pages",
|
||||
"get_country_breakdown",
|
||||
"get_device_breakdown",
|
||||
"get_funnel_metrics",
|
||||
"get_article_scorecard",
|
||||
"get_sync_status",
|
||||
"cleanup_old_metrics",
|
||||
]
|
||||
142
web/src/padelnomics/seo/_bing.py
Normal file
142
web/src/padelnomics/seo/_bing.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""Bing Webmaster Tools sync via REST API.
|
||||
|
||||
Uses an API key for auth. Fetches query stats and page stats.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
|
||||
from ..core import config, execute
|
||||
|
||||
_TIMEOUT_SECONDS = 30
|
||||
|
||||
|
||||
def _normalize_url(full_url: str) -> str:
|
||||
"""Strip a full URL to just the path."""
|
||||
parsed = urlparse(full_url)
|
||||
return parsed.path or "/"
|
||||
|
||||
|
||||
def _parse_bing_date(raw) -> datetime | None:
    """Parse Bing's '/Date(1708905600000)/' wrapper (ms since epoch) to a datetime.

    Returns None for missing, non-string, or unrecognized values so callers
    can simply skip the entry.
    """
    if not isinstance(raw, str) or "/Date(" not in raw:
        return None
    ms = int(raw.split("(")[1].split(")")[0])
    return datetime.utcfromtimestamp(ms / 1000)


async def sync_bing(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) -> int:
    """Sync Bing Webmaster query + page stats into seo_search_metrics.

    Skips silently (returns 0) when Bing is not configured. Every attempt is
    recorded in seo_sync_log; on failure the exception is re-raised after
    logging.

    Args:
        days_back: keep only entries newer than this many days (1-90).
        timeout_seconds: HTTP timeout per request (1-120).

    Returns:
        Number of rows upserted.
    """
    assert 1 <= days_back <= 90, "days_back must be 1-90"
    assert 1 <= timeout_seconds <= 120, "timeout_seconds must be 1-120"

    if not config.BING_WEBMASTER_API_KEY or not config.BING_SITE_URL:
        return 0  # Bing not configured — skip silently

    started_at = datetime.utcnow()

    try:
        rows_synced = 0
        cutoff = datetime.utcnow() - timedelta(days=days_back)
        # Both Bing endpoints take the same auth/site parameters.
        api_params = {
            "apikey": config.BING_WEBMASTER_API_KEY,
            "siteUrl": config.BING_SITE_URL,
        }
        async with httpx.AsyncClient(timeout=timeout_seconds) as client:
            # ── Query-level stats ──────────────────────────────────────────
            response = await client.get(
                "https://ssl.bing.com/webmaster/api.svc/json/GetQueryStats",
                params=api_params,
            )
            response.raise_for_status()
            data = response.json()

            # Bing returns {"d": [{"Query": ..., "Date": ..., ...}, ...]}
            entries = data.get("d", []) if isinstance(data, dict) else data
            if not isinstance(entries, list):
                entries = []

            for entry in entries:
                entry_date = _parse_bing_date(entry.get("Date", ""))
                if entry_date is None or entry_date < cutoff:
                    continue

                # Query stats carry no page dimension — stored under page_url '/'.
                await execute(
                    """INSERT OR REPLACE INTO seo_search_metrics
                       (source, metric_date, page_url, query, country, device,
                        clicks, impressions, ctr, position_avg)
                       VALUES ('bing', ?, '/', ?, NULL, NULL, ?, ?, ?, ?)""",
                    (
                        entry_date.strftime("%Y-%m-%d"),
                        entry.get("Query", ""),
                        entry.get("Clicks", 0),
                        entry.get("Impressions", 0),
                        entry.get("AvgCTR", 0.0),
                        entry.get("AvgClickPosition", 0.0),
                    ),
                )
                rows_synced += 1

            # ── Page-level stats ───────────────────────────────────────────
            page_response = await client.get(
                "https://ssl.bing.com/webmaster/api.svc/json/GetPageStats",
                params=api_params,
            )
            page_response.raise_for_status()
            page_data = page_response.json()

            page_entries = page_data.get("d", []) if isinstance(page_data, dict) else page_data
            if not isinstance(page_entries, list):
                page_entries = []

            for entry in page_entries:
                entry_date = _parse_bing_date(entry.get("Date", ""))
                if entry_date is None or entry_date < cutoff:
                    continue

                # Page stats carry no query dimension — stored with query ''.
                await execute(
                    """INSERT OR REPLACE INTO seo_search_metrics
                       (source, metric_date, page_url, query, country, device,
                        clicks, impressions, ctr, position_avg)
                       VALUES ('bing', ?, ?, '', NULL, NULL, ?, ?, NULL, NULL)""",
                    (
                        entry_date.strftime("%Y-%m-%d"),
                        _normalize_url(entry.get("Url", "/")),
                        entry.get("Clicks", 0),
                        entry.get("Impressions", 0),
                    ),
                )
                rows_synced += 1

        duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000)
        await execute(
            """INSERT INTO seo_sync_log
               (source, status, rows_synced, started_at, completed_at, duration_ms)
               VALUES ('bing', 'success', ?, ?, ?, ?)""",
            (rows_synced, started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms),
        )
        return rows_synced

    except Exception as exc:
        # Record the failure for the admin sync-status view, then propagate.
        duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000)
        await execute(
            """INSERT INTO seo_sync_log
               (source, status, rows_synced, error, started_at, completed_at, duration_ms)
               VALUES ('bing', 'failed', 0, ?, ?, ?, ?)""",
            (str(exc), started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms),
        )
        raise
|
||||
142
web/src/padelnomics/seo/_gsc.py
Normal file
142
web/src/padelnomics/seo/_gsc.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""Google Search Console sync via Search Analytics API.
|
||||
|
||||
Uses a service account JSON key file for auth. The google-api-python-client
|
||||
is synchronous, so sync runs in asyncio.to_thread().
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from ..core import config, execute
|
||||
|
||||
# GSC returns max 25K rows per request
|
||||
_ROWS_PER_PAGE = 25_000
|
||||
|
||||
|
||||
def _fetch_gsc_data(
    start_date: str,
    end_date: str,
    max_pages: int,
) -> list[dict]:
    """Synchronous GSC fetch — called via asyncio.to_thread().

    Pages through the Search Analytics API (_ROWS_PER_PAGE rows per request,
    at most `max_pages` requests) and returns flat dicts with keys: date,
    page, query, country, device, clicks, impressions, ctr, position.
    """
    # Lazy imports: google libraries are only needed when GSC is configured.
    from google.oauth2.service_account import Credentials
    from googleapiclient.discovery import build

    key_path = Path(config.GSC_SERVICE_ACCOUNT_PATH)
    assert key_path.exists(), f"GSC service account key not found: {key_path}"

    credentials = Credentials.from_service_account_file(
        str(key_path),
        scopes=["https://www.googleapis.com/auth/webmasters.readonly"],
    )
    service = build("searchconsole", "v1", credentials=credentials)

    collected: list[dict] = []
    offset = 0
    pages_fetched = 0

    while pages_fetched < max_pages:
        pages_fetched += 1
        request_body = {
            "startDate": start_date,
            "endDate": end_date,
            "dimensions": ["date", "page", "query", "country", "device"],
            "rowLimit": _ROWS_PER_PAGE,
            "startRow": offset,
        }
        result = service.searchanalytics().query(
            siteUrl=config.GSC_SITE_URL,
            body=request_body,
        ).execute()

        batch = result.get("rows", [])
        if not batch:
            break

        for item in batch:
            # keys[] order mirrors the "dimensions" list above.
            dims = item["keys"]
            collected.append({
                "date": dims[0],
                "page": dims[1],
                "query": dims[2],
                "country": dims[3],
                "device": dims[4],
                "clicks": item.get("clicks", 0),
                "impressions": item.get("impressions", 0),
                "ctr": item.get("ctr", 0.0),
                "position": item.get("position", 0.0),
            })

        if len(batch) < _ROWS_PER_PAGE:
            break  # partial page means no more data
        offset += _ROWS_PER_PAGE

    return collected
|
||||
|
||||
|
||||
def _normalize_url(full_url: str) -> str:
|
||||
"""Strip a full URL to just the path (no domain).
|
||||
|
||||
Example: 'https://padelnomics.io/en/markets/germany/berlin' → '/en/markets/germany/berlin'
|
||||
"""
|
||||
parsed = urlparse(full_url)
|
||||
return parsed.path or "/"
|
||||
|
||||
|
||||
async def sync_gsc(days_back: int = 3, max_pages: int = 10) -> int:
    """Sync GSC search analytics into seo_search_metrics. Returns rows synced.

    Skips silently (returns 0) when GSC is not configured. Every attempt is
    recorded in seo_sync_log; on failure the exception is re-raised after
    logging.
    """
    # Guard rails — payload values from the worker can set these.
    assert 1 <= days_back <= 90, "days_back must be 1-90"
    assert 1 <= max_pages <= 20, "max_pages must be 1-20"

    if not config.GSC_SERVICE_ACCOUNT_PATH or not config.GSC_SITE_URL:
        return 0  # GSC not configured — skip silently

    started_at = datetime.utcnow()

    # GSC has ~2 day delay; fetch from days_back ago to 2 days ago
    end_date = (datetime.utcnow() - timedelta(days=2)).strftime("%Y-%m-%d")
    start_date = (datetime.utcnow() - timedelta(days=days_back + 2)).strftime("%Y-%m-%d")

    try:
        # google-api-python-client is synchronous — run it off the event loop.
        rows = await asyncio.to_thread(
            _fetch_gsc_data, start_date, end_date, max_pages,
        )

        rows_synced = 0
        for row in rows:
            # Store path-only URLs so they join against article paths in the
            # scorecard query (see get_article_scorecard's join on url_path).
            page_url = _normalize_url(row["page"])
            # INSERT OR REPLACE + the COALESCE-based unique index on
            # seo_search_metrics makes re-syncing the same window idempotent.
            await execute(
                """INSERT OR REPLACE INTO seo_search_metrics
                   (source, metric_date, page_url, query, country, device,
                    clicks, impressions, ctr, position_avg)
                   VALUES ('gsc', ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    row["date"], page_url, row["query"], row["country"],
                    row["device"], row["clicks"], row["impressions"],
                    row["ctr"], row["position"],
                ),
            )
            rows_synced += 1

        duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000)
        await execute(
            """INSERT INTO seo_sync_log
               (source, status, rows_synced, started_at, completed_at, duration_ms)
               VALUES ('gsc', 'success', ?, ?, ?, ?)""",
            (rows_synced, started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms),
        )
        return rows_synced

    except Exception as exc:
        # Record the failure for the admin sync-status view, then propagate
        # so the worker sees the task as failed.
        duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000)
        await execute(
            """INSERT INTO seo_sync_log
               (source, status, rows_synced, error, started_at, completed_at, duration_ms)
               VALUES ('gsc', 'failed', 0, ?, ?, ?, ?)""",
            (str(exc), started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms),
        )
        raise
|
||||
379
web/src/padelnomics/seo/_queries.py
Normal file
379
web/src/padelnomics/seo/_queries.py
Normal file
@@ -0,0 +1,379 @@
|
||||
"""SQL query functions for the admin SEO hub views.
|
||||
|
||||
All heavy lifting happens in SQL. Functions accept filter parameters
|
||||
and return plain dicts/lists.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from ..core import execute, fetch_all, fetch_one
|
||||
|
||||
|
||||
def _date_cutoff(date_range_days: int) -> str:
|
||||
"""Return ISO date string for N days ago."""
|
||||
return (datetime.utcnow() - timedelta(days=date_range_days)).strftime("%Y-%m-%d")
|
||||
|
||||
|
||||
async def get_search_performance(
    date_range_days: int = 28,
    source: str | None = None,
) -> dict:
    """Aggregate search performance: total clicks, impressions, avg CTR, avg position."""
    assert 1 <= date_range_days <= 730

    params: list = [_date_cutoff(date_range_days)]
    source_filter = ""
    if source:
        source_filter = "AND source = ?"
        params.append(source)

    row = await fetch_one(
        f"""SELECT
            COALESCE(SUM(clicks), 0) AS total_clicks,
            COALESCE(SUM(impressions), 0) AS total_impressions,
            CASE WHEN SUM(impressions) > 0
                 THEN CAST(SUM(clicks) AS REAL) / SUM(impressions)
                 ELSE 0 END AS avg_ctr,
            CASE WHEN SUM(impressions) > 0
                 THEN SUM(position_avg * impressions) / SUM(impressions)
                 ELSE 0 END AS avg_position
        FROM seo_search_metrics
        WHERE metric_date >= ? {source_filter}""",
        tuple(params),
    )
    # Defensive zero payload when the driver yields no row.
    if not row:
        return {
            "total_clicks": 0, "total_impressions": 0,
            "avg_ctr": 0, "avg_position": 0,
        }
    return dict(row)
|
||||
|
||||
|
||||
async def get_top_queries(
    date_range_days: int = 28,
    source: str | None = None,
    limit: int = 50,
) -> list[dict]:
    """Top queries by impressions with clicks, CTR, avg position."""
    assert 1 <= date_range_days <= 730
    assert 1 <= limit <= 500

    # Optional source filter; placeholders are appended in SQL order.
    source_filter = ""
    params: list = [_date_cutoff(date_range_days)]
    if source:
        source_filter = "AND source = ?"
        params.append(source)
    params.append(limit)

    result = await fetch_all(
        f"""SELECT
            query,
            SUM(clicks) AS clicks,
            SUM(impressions) AS impressions,
            CASE WHEN SUM(impressions) > 0
                 THEN CAST(SUM(clicks) AS REAL) / SUM(impressions)
                 ELSE 0 END AS ctr,
            CASE WHEN SUM(impressions) > 0
                 THEN SUM(position_avg * impressions) / SUM(impressions)
                 ELSE 0 END AS position_avg
        FROM seo_search_metrics
        WHERE metric_date >= ?
          AND query IS NOT NULL AND query != ''
          {source_filter}
        GROUP BY query
        ORDER BY impressions DESC
        LIMIT ?""",
        tuple(params),
    )
    return list(map(dict, result))
|
||||
|
||||
|
||||
async def get_top_pages(
    date_range_days: int = 28,
    source: str | None = None,
    limit: int = 50,
) -> list[dict]:
    """Top pages by impressions with clicks, CTR, avg position."""
    assert 1 <= date_range_days <= 730
    assert 1 <= limit <= 500

    # Placeholder list built in SQL order: cutoff, optional source, limit.
    extra = [source] if source else []
    bind = [_date_cutoff(date_range_days), *extra, limit]
    source_filter = "AND source = ?" if source else ""

    result = await fetch_all(
        f"""SELECT
            page_url,
            SUM(clicks) AS clicks,
            SUM(impressions) AS impressions,
            CASE WHEN SUM(impressions) > 0
                 THEN CAST(SUM(clicks) AS REAL) / SUM(impressions)
                 ELSE 0 END AS ctr,
            CASE WHEN SUM(impressions) > 0
                 THEN SUM(position_avg * impressions) / SUM(impressions)
                 ELSE 0 END AS position_avg
        FROM seo_search_metrics
        WHERE metric_date >= ?
          {source_filter}
        GROUP BY page_url
        ORDER BY impressions DESC
        LIMIT ?""",
        tuple(bind),
    )
    return [dict(entry) for entry in result]
|
||||
|
||||
|
||||
async def get_country_breakdown(
    date_range_days: int = 28,
) -> list[dict]:
    """Clicks and impressions by country."""
    assert 1 <= date_range_days <= 730

    sql = """SELECT
            country,
            SUM(clicks) AS clicks,
            SUM(impressions) AS impressions
        FROM seo_search_metrics
        WHERE metric_date >= ?
          AND country IS NOT NULL AND country != ''
        GROUP BY country
        ORDER BY impressions DESC
        LIMIT 50"""
    result = await fetch_all(sql, (_date_cutoff(date_range_days),))
    return list(map(dict, result))
|
||||
|
||||
|
||||
async def get_device_breakdown(
    date_range_days: int = 28,
) -> list[dict]:
    """Clicks and impressions by device type (GSC only)."""
    assert 1 <= date_range_days <= 730

    # Only GSC reports a device dimension, hence the source filter.
    sql = """SELECT
            device,
            SUM(clicks) AS clicks,
            SUM(impressions) AS impressions
        FROM seo_search_metrics
        WHERE metric_date >= ?
          AND source = 'gsc'
          AND device IS NOT NULL AND device != ''
        GROUP BY device
        ORDER BY impressions DESC"""
    result = await fetch_all(sql, (_date_cutoff(date_range_days),))
    return list(map(dict, result))
|
||||
|
||||
|
||||
async def get_funnel_metrics(
    date_range_days: int = 28,
) -> dict:
    """Full funnel: search → analytics → conversions.

    Combines search metrics (GSC/Bing), analytics (Umami), and
    business metrics (planner users, leads) from SQLite.

    Returns absolute counts per stage (impressions, clicks, pageviews,
    visitors, planner_users, leads) plus the conversion rate between
    adjacent stages; each rate is 0 when its upstream stage is empty.
    """
    assert 1 <= date_range_days <= 730

    cutoff = _date_cutoff(date_range_days)

    # Search layer — impressions/clicks summed across all sources.
    search = await fetch_one(
        """SELECT
               COALESCE(SUM(impressions), 0) AS impressions,
               COALESCE(SUM(clicks), 0) AS clicks
           FROM seo_search_metrics
           WHERE metric_date >= ?""",
        (cutoff,),
    )

    # Analytics layer — page_url '/' carries the site-wide Umami stats row
    # (see sync_umami), so it is excluded to avoid double counting.
    # NOTE(review): this also drops genuine homepage traffic — confirm intended.
    analytics = await fetch_one(
        """SELECT
               COALESCE(SUM(pageviews), 0) AS pageviews,
               COALESCE(SUM(visitors), 0) AS visitors
           FROM seo_analytics_metrics
           WHERE metric_date >= ?
             AND page_url != '/'""",
        (cutoff,),
    )

    # Business layer (from existing SQLite tables)
    planner_users = await fetch_one(
        """SELECT COUNT(DISTINCT user_id) AS cnt
           FROM scenarios
           WHERE deleted_at IS NULL
             AND created_at >= ?""",
        (cutoff,),
    )

    leads = await fetch_one(
        """SELECT COUNT(*) AS cnt
           FROM lead_requests
           WHERE lead_type = 'quote'
             AND created_at >= ?""",
        (cutoff,),
    )

    # Defensive: treat a missing row from the driver as zero everywhere.
    imp = search["impressions"] if search else 0
    clicks = search["clicks"] if search else 0
    pvs = analytics["pageviews"] if analytics else 0
    vis = analytics["visitors"] if analytics else 0
    planners = planner_users["cnt"] if planner_users else 0
    lead_count = leads["cnt"] if leads else 0

    return {
        "impressions": imp,
        "clicks": clicks,
        "pageviews": pvs,
        "visitors": vis,
        "planner_users": planners,
        "leads": lead_count,
        # Conversion rates between stages
        "ctr": clicks / imp if imp > 0 else 0,
        "click_to_view": pvs / clicks if clicks > 0 else 0,
        "view_to_visitor": vis / pvs if pvs > 0 else 0,
        "visitor_to_planner": planners / vis if vis > 0 else 0,
        "planner_to_lead": lead_count / planners if planners > 0 else 0,
    }
|
||||
|
||||
|
||||
async def get_article_scorecard(
    date_range_days: int = 28,
    template_slug: str | None = None,
    country: str | None = None,
    language: str | None = None,
    sort_by: str = "impressions",
    sort_dir: str = "desc",
    limit: int = 100,
) -> list[dict]:
    """Per-article scorecard joining articles + search + analytics metrics.

    Returns article metadata enriched with search and analytics data,
    plus attention flags for articles needing action:
      - flag_low_ctr:   over 100 impressions but CTR below 2%
      - flag_no_clicks: zero clicks despite being published 30+ days ago
    """
    assert 1 <= date_range_days <= 730
    assert 1 <= limit <= 500
    assert sort_dir in ("asc", "desc")

    # Allowlist sort columns to prevent SQL injection — sort_by/sort_dir are
    # interpolated into the SQL below, so they must never carry raw caller input.
    sort_columns = {
        "impressions", "clicks", "ctr", "position_avg",
        "pageviews", "title", "published_at",
    }
    if sort_by not in sort_columns:
        sort_by = "impressions"

    cutoff = _date_cutoff(date_range_days)

    wheres = ["a.status = 'published'"]
    # Placeholder order must match SQL order: the two subquery cutoffs bind
    # first, then the outer WHERE filters, and the LIMIT binds last.
    params: list = [cutoff, cutoff]

    if template_slug:
        wheres.append("a.template_slug = ?")
        params.append(template_slug)
    if country:
        wheres.append("a.country = ?")
        params.append(country)
    if language:
        wheres.append("a.language = ?")
        params.append(language)

    where_clause = " AND ".join(wheres)
    params.append(limit)

    rows = await fetch_all(
        f"""SELECT
            a.id,
            a.title,
            a.url_path,
            a.template_slug,
            a.country,
            a.language,
            a.published_at,
            COALESCE(s.impressions, 0) AS impressions,
            COALESCE(s.clicks, 0) AS clicks,
            COALESCE(s.ctr, 0) AS ctr,
            COALESCE(s.position_avg, 0) AS position_avg,
            COALESCE(u.pageviews, 0) AS pageviews,
            COALESCE(u.visitors, 0) AS visitors,
            u.bounce_rate,
            u.time_avg_seconds,
            -- Attention flags
            CASE WHEN COALESCE(s.impressions, 0) > 100
                      AND COALESCE(s.ctr, 0) < 0.02
                 THEN 1 ELSE 0 END AS flag_low_ctr,
            CASE WHEN COALESCE(s.clicks, 0) = 0
                      AND a.published_at <= date('now', '-30 days')
                 THEN 1 ELSE 0 END AS flag_no_clicks
        FROM articles a
        LEFT JOIN (
            SELECT page_url,
                   SUM(impressions) AS impressions,
                   SUM(clicks) AS clicks,
                   CASE WHEN SUM(impressions) > 0
                        THEN CAST(SUM(clicks) AS REAL) / SUM(impressions)
                        ELSE 0 END AS ctr,
                   CASE WHEN SUM(impressions) > 0
                        THEN SUM(position_avg * impressions) / SUM(impressions)
                        ELSE 0 END AS position_avg
            FROM seo_search_metrics
            WHERE metric_date >= ?
            GROUP BY page_url
        ) s ON s.page_url = a.url_path
        LEFT JOIN (
            SELECT page_url,
                   SUM(pageviews) AS pageviews,
                   SUM(visitors) AS visitors,
                   AVG(bounce_rate) AS bounce_rate,
                   AVG(time_avg_seconds) AS time_avg_seconds
            FROM seo_analytics_metrics
            WHERE metric_date >= ?
            GROUP BY page_url
        ) u ON u.page_url = a.url_path
        WHERE {where_clause}
        ORDER BY {sort_by} {sort_dir}
        LIMIT ?""",
        tuple(params),
    )
    return [dict(r) for r in rows]
|
||||
|
||||
|
||||
async def get_sync_status() -> list[dict]:
    """Latest sync-log row per source (gsc, bing, umami)."""
    # MAX(id) per source selects each source's most recent attempt.
    latest = await fetch_all(
        """SELECT source, status, rows_synced, error,
                  started_at, completed_at, duration_ms
           FROM seo_sync_log
           WHERE id IN (
               SELECT MAX(id) FROM seo_sync_log GROUP BY source
           )
           ORDER BY source"""
    )
    return list(map(dict, latest))
|
||||
|
||||
|
||||
async def cleanup_old_metrics(retention_days: int = 365) -> int:
    """Delete metrics older than `retention_days`; returns total rows deleted."""
    assert 30 <= retention_days <= 1095

    metric_cutoff = _date_cutoff(retention_days)

    total_deleted = 0
    total_deleted += (await execute(
        "DELETE FROM seo_search_metrics WHERE metric_date < ?", (metric_cutoff,)
    )) or 0
    total_deleted += (await execute(
        "DELETE FROM seo_analytics_metrics WHERE metric_date < ?", (metric_cutoff,)
    )) or 0
    # The sync log is operational noise — keep only the last 30 days.
    total_deleted += (await execute(
        "DELETE FROM seo_sync_log WHERE started_at < ?", (_date_cutoff(30),)
    )) or 0

    return total_deleted
|
||||
116
web/src/padelnomics/seo/_umami.py
Normal file
116
web/src/padelnomics/seo/_umami.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""Umami analytics sync via REST API.
|
||||
|
||||
Uses bearer token auth. Self-hosted instance, no rate limits.
|
||||
Config already exists: UMAMI_API_URL, UMAMI_API_TOKEN, UMAMI_WEBSITE_ID.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import httpx
|
||||
|
||||
from ..core import config, execute
|
||||
|
||||
_TIMEOUT_SECONDS = 15
|
||||
|
||||
|
||||
async def sync_umami(days_back: int = 3, timeout_seconds: int = _TIMEOUT_SECONDS) -> int:
    """Sync Umami per-URL metrics into seo_analytics_metrics. Returns rows synced.

    Skips silently (returns 0) when Umami is not configured. Every attempt is
    recorded in seo_sync_log; on failure the exception is re-raised after
    logging.
    """
    assert 1 <= days_back <= 90, "days_back must be 1-90"
    assert 1 <= timeout_seconds <= 120, "timeout_seconds must be 1-120"

    if not config.UMAMI_API_TOKEN or not config.UMAMI_API_URL:
        return 0  # Umami not configured — skip silently

    started_at = datetime.utcnow()

    try:
        rows_synced = 0
        headers = {"Authorization": f"Bearer {config.UMAMI_API_TOKEN}"}
        base = config.UMAMI_API_URL.rstrip("/")
        website_id = config.UMAMI_WEBSITE_ID

        async with httpx.AsyncClient(timeout=timeout_seconds, headers=headers) as client:
            # Fetch per-URL metrics for each day individually
            # (Umami's metrics endpoint returns totals for the period,
            # so we query one day at a time for daily granularity)
            for day_offset in range(days_back):
                # Start at yesterday (offset 0 → 1 day ago): today is partial.
                day = datetime.utcnow() - timedelta(days=day_offset + 1)
                metric_date = day.strftime("%Y-%m-%d")
                # Umami expects epoch-millisecond bounds.
                # NOTE(review): utcnow() is naive, so .timestamp() interprets
                # it in the host's local timezone — exact only when the host
                # runs UTC; confirm deployment assumption.
                start_ms = int(day.replace(hour=0, minute=0, second=0).timestamp() * 1000)
                end_ms = int(day.replace(hour=23, minute=59, second=59).timestamp() * 1000)

                # Get URL-level metrics
                response = await client.get(
                    f"{base}/api/websites/{website_id}/metrics",
                    params={
                        "startAt": start_ms,
                        "endAt": end_ms,
                        "type": "url",
                        "limit": 500,
                    },
                )
                response.raise_for_status()
                url_metrics = response.json()

                # Expected shape: a list of {"x": <url>, "y": <count>} entries;
                # anything else is skipped for this day.
                if not isinstance(url_metrics, list):
                    continue

                for entry in url_metrics:
                    page_url = entry.get("x", "")
                    pageviews = entry.get("y", 0)

                    if not page_url:
                        continue

                    # Visitors aren't available at URL level here — stored as 0.
                    await execute(
                        """INSERT OR REPLACE INTO seo_analytics_metrics
                           (metric_date, page_url, pageviews, visitors,
                            bounce_rate, time_avg_seconds)
                           VALUES (?, ?, ?, 0, NULL, NULL)""",
                        (metric_date, page_url, pageviews),
                    )
                    rows_synced += 1

                # Try to get overall stats for bounce rate and visit duration
                # (Umami doesn't provide per-URL bounce rate, only site-wide)
                stats_response = await client.get(
                    f"{base}/api/websites/{website_id}/stats",
                    params={"startAt": start_ms, "endAt": end_ms},
                )
                # Best-effort: non-200 just means no site-wide row for this day.
                if stats_response.status_code == 200:
                    stats = stats_response.json()
                    visitors = stats.get("visitors", {}).get("value", 0)
                    bounce_rate = stats.get("bounces", {}).get("value", 0)
                    total_time = stats.get("totaltime", {}).get("value", 0)
                    page_count = stats.get("pageviews", {}).get("value", 1) or 1

                    # Store site-wide stats on the root URL for the day
                    avg_time = int(total_time / max(visitors, 1))
                    br = bounce_rate / max(visitors, 1) if visitors else 0

                    await execute(
                        """INSERT OR REPLACE INTO seo_analytics_metrics
                           (metric_date, page_url, pageviews, visitors,
                            bounce_rate, time_avg_seconds)
                           VALUES (?, '/', ?, ?, ?, ?)""",
                        (metric_date, page_count, visitors, br, avg_time),
                    )

        duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000)
        await execute(
            """INSERT INTO seo_sync_log
               (source, status, rows_synced, started_at, completed_at, duration_ms)
               VALUES ('umami', 'success', ?, ?, ?, ?)""",
            (rows_synced, started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms),
        )
        return rows_synced

    except Exception as exc:
        # Record the failure for the admin sync-status view, then propagate.
        duration_ms = int((datetime.utcnow() - started_at).total_seconds() * 1000)
        await execute(
            """INSERT INTO seo_sync_log
               (source, status, rows_synced, error, started_at, completed_at, duration_ms)
               VALUES ('umami', 'failed', 0, ?, ?, ?, ?)""",
            (str(exc), started_at.isoformat(), datetime.utcnow().isoformat(), duration_ms),
        )
        raise
|
||||
@@ -670,6 +670,45 @@ async def handle_cleanup_tasks(payload: dict) -> None:
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# SEO Metrics Sync
|
||||
# =============================================================================
|
||||
|
||||
@task("sync_gsc")
|
||||
async def handle_sync_gsc(payload: dict) -> None:
|
||||
"""Sync Google Search Console data."""
|
||||
from .seo import sync_gsc
|
||||
days_back = payload.get("days_back", 3)
|
||||
rows = await sync_gsc(days_back=days_back)
|
||||
print(f"[WORKER] GSC sync complete: {rows} rows")
|
||||
|
||||
|
||||
@task("sync_bing")
|
||||
async def handle_sync_bing(payload: dict) -> None:
|
||||
"""Sync Bing Webmaster data."""
|
||||
from .seo import sync_bing
|
||||
days_back = payload.get("days_back", 3)
|
||||
rows = await sync_bing(days_back=days_back)
|
||||
print(f"[WORKER] Bing sync complete: {rows} rows")
|
||||
|
||||
|
||||
@task("sync_umami")
async def handle_sync_umami(payload: dict) -> None:
    """Pull Umami analytics metrics into the local SEO tables.

    ``payload["days_back"]`` sets the lookback window (default 3 days).
    """
    from .seo import sync_umami

    days = payload.get("days_back", 3)
    rows = await sync_umami(days_back=days)
    print(f"[WORKER] Umami sync complete: {rows} rows")
|
||||
|
||||
|
||||
@task("cleanup_seo_metrics")
async def handle_cleanup_seo_metrics(payload: dict) -> None:
    """Purge SEO metric rows older than the 12-month retention window."""
    from .seo import cleanup_old_metrics

    # 365 days implements the documented 12-month retention policy.
    deleted = await cleanup_old_metrics(retention_days=365)
    print(f"[WORKER] Cleaned up {deleted} old SEO metric rows")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Worker Loop
|
||||
# =============================================================================
|
||||
@@ -723,6 +762,7 @@ async def run_scheduler() -> None:
|
||||
await init_db()
|
||||
|
||||
last_credit_refill = None
|
||||
last_seo_sync_date = None
|
||||
|
||||
while True:
|
||||
try:
|
||||
@@ -741,6 +781,17 @@ async def run_scheduler() -> None:
|
||||
last_credit_refill = this_month
|
||||
print(f"[SCHEDULER] Queued monthly credit refill for {this_month}")
|
||||
|
||||
# Daily SEO metrics sync — run once per day after 6am UTC
|
||||
# (GSC data has ~2 day delay, syncing at 6am ensures data is ready)
|
||||
today_date = today.strftime("%Y-%m-%d")
|
||||
if last_seo_sync_date != today_date and today.hour >= 6:
|
||||
await enqueue("sync_gsc")
|
||||
await enqueue("sync_bing")
|
||||
await enqueue("sync_umami")
|
||||
await enqueue("cleanup_seo_metrics")
|
||||
last_seo_sync_date = today_date
|
||||
print(f"[SCHEDULER] Queued SEO metric syncs for {today_date}")
|
||||
|
||||
await asyncio.sleep(3600) # 1 hour
|
||||
|
||||
except Exception as e:
|
||||
|
||||
523
web/tests/test_seo.py
Normal file
523
web/tests/test_seo.py
Normal file
@@ -0,0 +1,523 @@
|
||||
"""Tests for the SEO metrics module: queries, sync functions, admin routes."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from padelnomics.seo._queries import (
|
||||
cleanup_old_metrics,
|
||||
get_article_scorecard,
|
||||
get_country_breakdown,
|
||||
get_device_breakdown,
|
||||
get_funnel_metrics,
|
||||
get_search_performance,
|
||||
get_sync_status,
|
||||
get_top_pages,
|
||||
get_top_queries,
|
||||
)
|
||||
|
||||
from padelnomics import core
|
||||
|
||||
# ── Fixtures ──────────────────────────────────────────────────
|
||||
|
||||
def _today():
|
||||
return datetime.utcnow().strftime("%Y-%m-%d")
|
||||
|
||||
|
||||
def _days_ago(n: int) -> str:
|
||||
return (datetime.utcnow() - timedelta(days=n)).strftime("%Y-%m-%d")
|
||||
|
||||
|
||||
@pytest.fixture
async def seo_data(db):
    """Populate seo_search_metrics and seo_analytics_metrics with sample data."""
    today = _today()
    yesterday = _days_ago(1)

    # Search rows are listed in the exact INSERT column order, so each
    # tuple can be bound directly as the statement parameters.
    search_rows = [
        # GSC search data
        ("gsc", today, "/en/markets/germany/berlin", "padel berlin", "de", "mobile", 50, 500, 0.10, 5.2),
        ("gsc", today, "/en/markets/germany/munich", "padel munich", "de", "desktop", 30, 300, 0.10, 8.1),
        ("gsc", today, "/en/markets/germany/berlin", "padel court cost", "de", "desktop", 10, 200, 0.05, 12.0),
        ("gsc", yesterday, "/en/markets/germany/berlin", "padel berlin", "de", "mobile", 45, 480, 0.09, 5.5),
        # Bing data (no country/device breakdown)
        ("bing", today, "/", "padel business plan", None, None, 5, 100, 0.05, 15.0),
    ]
    for params in search_rows:
        await db.execute(
            """INSERT INTO seo_search_metrics
               (source, metric_date, page_url, query, country, device,
                clicks, impressions, ctr, position_avg)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            params,
        )

    # Umami analytics data, also in INSERT column order.
    analytics_rows = [
        (today, "/en/markets/germany/berlin", 120, 80, 0.35, 45),
        (today, "/en/markets/germany/munich", 60, 40, 0.40, 30),
        (today, "/", 200, 150, 0.50, 20),
    ]
    for params in analytics_rows:
        await db.execute(
            """INSERT INTO seo_analytics_metrics
               (metric_date, page_url, pageviews, visitors, bounce_rate, time_avg_seconds)
               VALUES (?, ?, ?, ?, ?, ?)""",
            params,
        )

    await db.commit()
|
||||
|
||||
|
||||
@pytest.fixture
async def articles_data(db, seo_data):
    """Create articles that match the SEO data URLs."""
    created = datetime.utcnow().isoformat()
    published = _days_ago(10)

    articles = [
        ("Padel in Berlin", "/en/markets/germany/berlin", "city-cost-de", "en"),
        ("Padel in Munich", "/en/markets/germany/munich", "city-cost-de", "en"),
    ]
    for title, url, template, lang in articles:
        # Slug is the final path segment of the URL.
        await db.execute(
            """INSERT INTO articles
               (url_path, slug, title, template_slug, language, status, published_at, created_at)
               VALUES (?, ?, ?, ?, ?, 'published', ?, ?)""",
            (url, url.rsplit("/", 1)[-1], title, template, lang, published, created),
        )
    await db.commit()
|
||||
|
||||
|
||||
@pytest.fixture
async def admin_client(app, db):
    """Test client logged in as a freshly created admin user."""
    now = datetime.utcnow().isoformat()

    # Create the user row and grab its id for the session.
    async with db.execute(
        "INSERT INTO users (email, name, created_at) VALUES (?, ?, ?)",
        ("admin@test.com", "Admin", now),
    ) as cursor:
        admin_id = cursor.lastrowid
    await db.execute(
        "INSERT INTO user_roles (user_id, role) VALUES (?, 'admin')", (admin_id,)
    )
    await db.commit()

    # Pre-seed the session so every request is authenticated as the admin.
    async with app.test_client() as client:
        async with client.session_transaction() as session:
            session["user_id"] = admin_id
        yield client
|
||||
|
||||
|
||||
# ── Query function tests ─────────────────────────────────────
|
||||
|
||||
class TestSearchPerformance:
    """Tests for get_search_performance()."""

    async def test_returns_aggregate_metrics(self, db, seo_data):
        perf = await get_search_performance(date_range_days=28)
        assert perf["total_clicks"] > 0
        assert perf["total_impressions"] > 0
        assert 0 < perf["avg_ctr"] < 1
        assert perf["avg_position"] > 0

    async def test_filter_by_source(self, db, seo_data):
        # Per-source totals must partition the combined totals.
        per_source = {
            name: await get_search_performance(date_range_days=28, source=name)
            for name in ("gsc", "bing")
        }
        combined = await get_search_performance(date_range_days=28)
        assert (
            per_source["gsc"]["total_clicks"] + per_source["bing"]["total_clicks"]
            == combined["total_clicks"]
        )

    async def test_empty_data(self, db):
        perf = await get_search_performance(date_range_days=28)
        assert perf["total_clicks"] == 0
        assert perf["total_impressions"] == 0

    async def test_date_range_filter(self, db, seo_data):
        # A 1-day window excludes yesterday's rows, so totals can only shrink.
        narrow = await get_search_performance(date_range_days=1)
        wide = await get_search_performance(date_range_days=28)
        assert narrow["total_clicks"] <= wide["total_clicks"]
|
||||
|
||||
|
||||
class TestTopQueries:
    """Tests for get_top_queries()."""

    async def test_returns_queries_sorted_by_impressions(self, db, seo_data):
        queries = await get_top_queries(date_range_days=28)
        assert queries
        # Result must be in descending impression order.
        impressions = [q["impressions"] for q in queries]
        assert impressions == sorted(impressions, reverse=True)

    async def test_limit(self, db, seo_data):
        capped = await get_top_queries(date_range_days=28, limit=2)
        assert len(capped) <= 2

    async def test_filter_by_source(self, db, seo_data):
        # "padel business plan" exists only in the bing fixture rows.
        gsc_only = await get_top_queries(source="gsc")
        assert all(q["query"] != "padel business plan" for q in gsc_only)
||||
|
||||
|
||||
class TestTopPages:
    """Tests for get_top_pages()."""

    async def test_returns_pages(self, db, seo_data):
        pages = await get_top_pages(date_range_days=28)
        assert pages
        # The Berlin page carries the most impressions in the fixture,
        # so it should rank first.
        assert pages[0]["page_url"] == "/en/markets/germany/berlin"
|
||||
|
||||
|
||||
class TestCountryBreakdown:
    """Tests for get_country_breakdown()."""

    async def test_returns_countries(self, db, seo_data):
        breakdown = await get_country_breakdown(date_range_days=28)
        assert breakdown
        # Fixture rows are tagged with country "de".
        assert "de" in {row["country"] for row in breakdown}
|
||||
|
||||
|
||||
class TestDeviceBreakdown:
    """Tests for get_device_breakdown()."""

    async def test_returns_devices_gsc_only(self, db, seo_data):
        breakdown = await get_device_breakdown(date_range_days=28)
        assert breakdown
        # Fixture GSC rows include both device types.
        seen = {row["device"] for row in breakdown}
        assert {"mobile", "desktop"} <= seen
|
||||
|
||||
|
||||
class TestFunnelMetrics:
    """Tests for get_funnel_metrics()."""

    async def test_returns_all_stages(self, db, seo_data):
        funnel = await get_funnel_metrics(date_range_days=28)
        # Every stage of the funnel must be present as a key.
        for stage in (
            "impressions", "clicks", "pageviews",
            "visitors", "planner_users", "leads",
        ):
            assert stage in funnel

    async def test_conversion_rates(self, db, seo_data):
        funnel = await get_funnel_metrics(date_range_days=28)
        # Fixture data guarantees both clicks and impressions, so CTR is a
        # positive proper rate.
        assert 0 < funnel["ctr"] <= 1

    async def test_empty_data(self, db):
        funnel = await get_funnel_metrics(date_range_days=28)
        assert funnel["impressions"] == 0
        assert funnel["planner_users"] == 0
|
||||
|
||||
|
||||
class TestArticleScorecard:
    """Tests for get_article_scorecard()."""

    @staticmethod
    def _berlin(scorecard):
        # Shared lookup: the Berlin article row from a scorecard result.
        return next(a for a in scorecard if "berlin" in a["url_path"])

    async def test_joins_articles_with_metrics(self, db, articles_data):
        scorecard = await get_article_scorecard(date_range_days=28)
        assert len(scorecard) == 2
        berlin = self._berlin(scorecard)
        assert berlin["impressions"] > 0
        assert berlin["pageviews"] > 0

    async def test_filter_by_template(self, db, articles_data):
        filtered = await get_article_scorecard(
            date_range_days=28, template_slug="city-cost-de",
        )
        assert len(filtered) == 2
        assert all(a["template_slug"] == "city-cost-de" for a in filtered)

    async def test_sort_by_clicks(self, db, articles_data):
        ranked = await get_article_scorecard(
            date_range_days=28, sort_by="clicks", sort_dir="desc",
        )
        if len(ranked) >= 2:
            assert ranked[0]["clicks"] >= ranked[1]["clicks"]

    async def test_attention_flags(self, db, articles_data):
        """Berlin has >100 impressions and low CTR — should flag."""
        berlin = self._berlin(await get_article_scorecard(date_range_days=28))
        # Berlin: 1180 impressions total, 105 clicks → CTR ~8.9% → no flag.
        # Flags depend on actual data; just check the fields exist.
        assert "flag_low_ctr" in berlin
        assert "flag_no_clicks" in berlin

    async def test_invalid_sort_defaults_to_impressions(self, db, articles_data):
        # An unknown sort column must fall back to impressions, not crash.
        result = await get_article_scorecard(
            date_range_days=28, sort_by="invalid_column",
        )
        assert len(result) >= 0
|
||||
|
||||
|
||||
class TestSyncStatus:
    """Tests for get_sync_status()."""

    async def test_returns_last_sync_per_source(self, db):
        timestamp = datetime.utcnow().isoformat()
        # One successful and one failed sync entry, different sources.
        await db.execute(
            """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms)
            VALUES ('gsc', 'success', 100, ?, ?, 500)""",
            (timestamp, timestamp),
        )
        await db.execute(
            """INSERT INTO seo_sync_log (source, status, rows_synced, started_at, completed_at, duration_ms)
            VALUES ('umami', 'failed', 0, ?, ?, 200)""",
            (timestamp, timestamp),
        )
        await db.commit()

        status = await get_sync_status()
        assert len(status) == 2
        assert {entry["source"] for entry in status} >= {"gsc", "umami"}

    async def test_empty_when_no_syncs(self, db):
        assert await get_sync_status() == []
|
||||
|
||||
|
||||
class TestCleanupOldMetrics:
    """Tests for cleanup_old_metrics()."""

    async def test_deletes_old_data(self, db):
        # One row well outside the 365-day window, one from today.
        stale = (datetime.utcnow() - timedelta(days=400)).strftime("%Y-%m-%d")
        fresh = _today()

        await db.execute(
            """INSERT INTO seo_search_metrics
               (source, metric_date, page_url, clicks, impressions)
               VALUES ('gsc', ?, '/old', 1, 10)""",
            (stale,),
        )
        await db.execute(
            """INSERT INTO seo_search_metrics
               (source, metric_date, page_url, clicks, impressions)
               VALUES ('gsc', ?, '/recent', 1, 10)""",
            (fresh,),
        )
        await db.commit()

        removed = await cleanup_old_metrics(retention_days=365)
        assert removed >= 1

        # Only the in-retention row survives.
        remaining = await core.fetch_all("SELECT * FROM seo_search_metrics")
        assert len(remaining) == 1
        assert remaining[0]["page_url"] == "/recent"
|
||||
|
||||
|
||||
# ── Sync function tests (mocked HTTP) ────────────────────────
|
||||
|
||||
class TestSyncUmami:
    """Tests for sync_umami() with mocked HTTP."""

    async def test_skips_when_not_configured(self, db):
        """With no API token configured, the sync is a silent no-op."""
        original = core.config.UMAMI_API_TOKEN
        core.config.UMAMI_API_TOKEN = ""
        try:
            from padelnomics.seo._umami import sync_umami
            result = await sync_umami(days_back=1)
            assert result == 0
        finally:
            core.config.UMAMI_API_TOKEN = original

    async def test_syncs_url_metrics(self, db):
        """A mocked Umami API yields 2 stored URL rows and a success log entry."""
        from padelnomics.seo._umami import sync_umami

        # Bug fix: the previous version set a fake token/URL on core.config
        # and never restored them, leaking test configuration into every
        # test that ran afterwards. Save and restore both values.
        original_token = core.config.UMAMI_API_TOKEN
        original_url = core.config.UMAMI_API_URL
        core.config.UMAMI_API_TOKEN = "test-token"
        core.config.UMAMI_API_URL = "https://umami.test.io"
        try:
            mock_metrics = [
                {"x": "/en/markets/germany/berlin", "y": 50},
                {"x": "/en/markets/germany/munich", "y": 30},
            ]
            mock_stats = {
                "visitors": {"value": 100},
                "bounces": {"value": 30},
                "totaltime": {"value": 5000},
                "pageviews": {"value": 200},
            }

            mock_response_metrics = MagicMock()
            mock_response_metrics.status_code = 200
            mock_response_metrics.json.return_value = mock_metrics
            mock_response_metrics.raise_for_status = MagicMock()

            mock_response_stats = MagicMock()
            mock_response_stats.status_code = 200
            mock_response_stats.json.return_value = mock_stats
            mock_response_stats.raise_for_status = MagicMock()

            async def mock_get(url, **kwargs):
                # The sync fetches /metrics for per-URL data and a stats
                # endpoint for site totals; dispatch on the URL path.
                if "/metrics" in url:
                    return mock_response_metrics
                return mock_response_stats

            mock_client = AsyncMock()
            mock_client.get = mock_get
            mock_client.__aenter__ = AsyncMock(return_value=mock_client)
            mock_client.__aexit__ = AsyncMock(return_value=None)

            with patch("padelnomics.seo._umami.httpx.AsyncClient", return_value=mock_client):
                result = await sync_umami(days_back=1)

            assert result == 2  # 2 URL metrics

            # Verify data stored
            rows = await core.fetch_all(
                "SELECT * FROM seo_analytics_metrics WHERE page_url != '/'"
            )
            assert len(rows) == 2

            # Verify sync log
            log = await core.fetch_all("SELECT * FROM seo_sync_log WHERE source = 'umami'")
            assert len(log) == 1
            assert log[0]["status"] == "success"
        finally:
            core.config.UMAMI_API_TOKEN = original_token
            core.config.UMAMI_API_URL = original_url
|
||||
|
||||
|
||||
class TestSyncBing:
    """Tests for sync_bing() with mocked HTTP."""

    async def test_skips_when_not_configured(self, db):
        """Without an API key the sync returns 0 and touches nothing."""
        saved = core.config.BING_WEBMASTER_API_KEY
        core.config.BING_WEBMASTER_API_KEY = ""
        try:
            from padelnomics.seo._bing import sync_bing
            assert await sync_bing(days_back=1) == 0
        finally:
            core.config.BING_WEBMASTER_API_KEY = saved
|
||||
|
||||
|
||||
class TestSyncGsc:
    """Tests for sync_gsc() with mocked Google API."""

    async def test_skips_when_not_configured(self, db):
        """Without a service-account path the sync returns 0."""
        saved = core.config.GSC_SERVICE_ACCOUNT_PATH
        core.config.GSC_SERVICE_ACCOUNT_PATH = ""
        try:
            from padelnomics.seo._gsc import sync_gsc
            assert await sync_gsc(days_back=1) == 0
        finally:
            core.config.GSC_SERVICE_ACCOUNT_PATH = saved
|
||||
|
||||
|
||||
# ── Admin route tests ─────────────────────────────────────────
|
||||
|
||||
class TestSeoAdminRoutes:
    """Tests for the SEO hub admin routes."""

    async def test_seo_hub_loads(self, admin_client, db):
        response = await admin_client.get("/admin/seo")
        assert response.status_code == 200
        body = await response.get_data(as_text=True)
        assert "SEO" in body

    async def test_seo_hub_with_data(self, admin_client, db, seo_data):
        response = await admin_client.get("/admin/seo?days=28")
        assert response.status_code == 200

    async def test_seo_search_partial(self, admin_client, db, seo_data):
        response = await admin_client.get("/admin/seo/search?days=28")
        assert response.status_code == 200
        body = await response.get_data(as_text=True)
        assert "Top Queries" in body

    async def test_seo_search_filter_by_source(self, admin_client, db, seo_data):
        response = await admin_client.get("/admin/seo/search?days=28&source=gsc")
        assert response.status_code == 200

    async def test_seo_funnel_partial(self, admin_client, db, seo_data):
        response = await admin_client.get("/admin/seo/funnel?days=28")
        assert response.status_code == 200
        body = await response.get_data(as_text=True)
        assert "Impressions" in body

    async def test_seo_scorecard_partial(self, admin_client, db, articles_data):
        response = await admin_client.get("/admin/seo/scorecard?days=28")
        assert response.status_code == 200
        body = await response.get_data(as_text=True)
        lowered = body.lower()
        assert "Berlin" in body or "scorecard" in lowered or "articles" in lowered

    async def test_seo_scorecard_filter(self, admin_client, db, articles_data):
        response = await admin_client.get(
            "/admin/seo/scorecard?days=28&template_slug=city-cost-de&sort=clicks&dir=desc"
        )
        assert response.status_code == 200

    async def test_seo_sync_requires_auth(self, client, db):
        # Anonymous POST must be rejected: login redirect (302) or 403.
        response = await client.post("/admin/seo/sync")
        assert response.status_code in (302, 403)

    async def test_seo_sync_now(self, admin_client, db):
        """Sync Now enqueues one worker task per data source."""
        async with admin_client.session_transaction() as session:
            session["csrf_token"] = "test"

        response = await admin_client.post(
            "/admin/seo/sync",
            form={"source": "all", "csrf_token": "test"},
        )
        # Redirects back to the SEO hub on success.
        assert response.status_code == 302

        # All three source syncs must have been enqueued.
        queued = await core.fetch_all(
            "SELECT task_name FROM tasks WHERE task_name LIKE 'sync_%'"
        )
        names = {row["task_name"] for row in queued}
        assert {"sync_gsc", "sync_bing", "sync_umami"} <= names

    async def test_seo_sync_single_source(self, admin_client, db):
        async with admin_client.session_transaction() as session:
            session["csrf_token"] = "test"

        response = await admin_client.post(
            "/admin/seo/sync",
            form={"source": "gsc", "csrf_token": "test"},
        )
        assert response.status_code == 302

        queued = await core.fetch_all("SELECT task_name FROM tasks WHERE task_name = 'sync_gsc'")
        assert len(queued) == 1

    async def test_seo_hub_date_range(self, admin_client, db, seo_data):
        # Every supported window renders without error.
        for days in (7, 28, 90, 365):
            response = await admin_client.get(f"/admin/seo?days={days}")
            assert response.status_code == 200

    async def test_seo_sidebar_link(self, admin_client, db):
        response = await admin_client.get("/admin/")
        body = await response.get_data(as_text=True)
        assert "SEO Hub" in body
|
||||
|
||||
|
||||
# ── Assertion boundary tests ─────────────────────────────────
|
||||
|
||||
class TestQueryBounds:
    """Test that query functions validate their bounds.

    Each query helper guards its arguments with ``assert`` statements;
    these tests pin that invalid values raise ``AssertionError`` rather
    than silently producing an empty or wrong result.

    NOTE(review): assert-based validation is stripped under ``python -O``;
    if the app ever runs optimized, these guards (and tests) need to move
    to explicit ``raise`` statements — confirm deployment flags.
    """

    async def test_search_performance_rejects_zero_days(self, db):
        # A zero-length date range is meaningless.
        with pytest.raises(AssertionError):
            await get_search_performance(date_range_days=0)

    async def test_top_queries_rejects_zero_limit(self, db):
        # limit must be positive.
        with pytest.raises(AssertionError):
            await get_top_queries(limit=0)

    async def test_cleanup_rejects_short_retention(self, db):
        # Guards against accidentally wiping recent data with a tiny window.
        with pytest.raises(AssertionError):
            await cleanup_old_metrics(retention_days=7)

    async def test_scorecard_rejects_invalid_sort_dir(self, db):
        # Only recognized sort directions are accepted.
        with pytest.raises(AssertionError):
            await get_article_scorecard(sort_dir="invalid")
|
||||
Reference in New Issue
Block a user