Compare commits
41 Commits
v4
...
v202602282
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e0c3f38c0a | ||
|
|
d1a10ff243 | ||
|
|
34065fa2ac | ||
|
|
b7e44ac5b3 | ||
|
|
c2dfefcc1e | ||
|
|
4b5c237bee | ||
|
|
8c4a4078f9 | ||
|
|
4ac17af503 | ||
|
|
0984657e72 | ||
|
|
c1e1f42aad | ||
|
|
24ec7060b3 | ||
|
|
5c22ea9780 | ||
|
|
aee3733b49 | ||
|
|
51d9aab4a0 | ||
|
|
1fdd2d07a4 | ||
|
|
2214d7a58f | ||
|
|
0f360fd230 | ||
|
|
85b6aa0d0a | ||
|
|
bc7e40b531 | ||
|
|
ef85d3bb36 | ||
|
|
4d45b99cd8 | ||
|
|
e62aad148b | ||
|
|
b5db9d16b9 | ||
|
|
2e149fc1db | ||
|
|
6fb1e990e3 | ||
|
|
6edf8ba65e | ||
|
|
ed0a578050 | ||
|
|
c1cdeec6be | ||
|
|
710624f417 | ||
|
|
6cf98f44d4 | ||
|
|
60659a5ec5 | ||
|
|
beb4195f16 | ||
|
|
88cc857f3a | ||
|
|
9116625884 | ||
|
|
1af65bb46f | ||
|
|
9b0bfc478d | ||
|
|
adf22924f6 | ||
|
|
09665b7786 | ||
|
|
93349923bd | ||
|
|
642041b32b | ||
|
|
bb70a5372b |
@@ -56,9 +56,10 @@ WORKFLOWS_PATH=ENC[AES256_GCM,data:PehxEUMb1K3F1557BY3IqKD7sbJcoaIjnQvboBRJ1g==,
|
||||
ALERT_WEBHOOK_URL=
|
||||
NTFY_TOKEN=
|
||||
#ENC[AES256_GCM,data:BCyQYjRnTx8yW9A=,iv:4OPCP+xzRLUJrpoFewVnbZRKnZH4sAbV76SM//2k5wU=,tag:HxwEp7VFVZUN/VjPiL/+Vw==,type:comment]
|
||||
PROXY_URLS=ENC[AES256_GCM,data:CzRaK0piUQfvuYYsdz0i2MEQIphKi0BhNvHw9alo46aTH+kqEKvoS7dKEKzyU9VJ4TyNweInlVMxB962DsvRoBtnHwo/pUmYtVeEr2881clNgEiZVYRDFRdEbpULcLPDJa3ey1leqAAHlmiL0RQ6Qa57gPCOVBzVG6npGLKO+K8XVIb+BZMs9kEUOlw7iuqTJW5xPN/t4X/jHidEqfTSAl9b4vU4bsYVuY3yQrL+/V5QpTbyXlf+cMq3flpA3zE2Fxhalzg+c/wHMTrCksFwrCkrInW0kY9yPkA7usUWr1xwwaV3wIDoNQsLXpMd/3RztipNvKtOMRhRJOmjzP7BKhCJvvvKTV5p+mBCulFijbMQgArg3BqcFanfw3YZ4wPd4hp8q/vOhE/U9Wu0yrMmyWYFHYGQnFVARlBH7pwn/ez8W4KqRFveEAuev9CE7K7s5RqzPLelSkoa9UuiiULJ+t0LFgKlgxuLtQ8GdFdgsmBCxY/4U/xzvNdC82hD549z5nMWWlaUJm4onPWirT/RYm7j3v6z4mmNImI2W6rCNbvEvsXwWsciquVaBIgReA47p6/GTzZ9VZMyGr4PdzB87BJGAgX1W57WNdPAsRIF49XP2BU72RtRFxsUG8Ha2dc=,iv:a10Vpk7Zv8QqORuEcMlpcvtHO/zjBLaFphWPYBXwysc=,tag:8N66/R+CLqEZ45wj+tCt6w==,type:str]
|
||||
RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:YWM=,iv:iY5+uMazLAFdwyLT7Gr7MaF1QHBIgHuoi6nF2VbSsOA=,tag:dc6AmuJdTQ55gVe16uzs6A==,type:str]
|
||||
PROXY_URLS_FALLBACK=ENC[AES256_GCM,data:95rwI7kKUj1YxLpjChtrM4f2EFUDzQdAg1e1MOHnLwQ9ZY54UNH7v4JcqTsvDk9D+0N/BIdwFSDi7pnCSd6BWFV+cQ==,iv:rm9HdBsibSne7JR6vWl+ao/GHb1rbuVdZZDUWhVbTnE=,tag:NJ2STxmFZPvFayfTrEEYbg==,type:str]
|
||||
PROXY_URLS_RESIDENTIAL=ENC[AES256_GCM,data:lfmlsjXFtL+zo40SNFLiFKaZiYvE7CNH+zRwjMK5pqPfCs0TlMX+Y9e1KmzAS+y/cI69TP5sgMPRBzER0Jn7RvH0KA==,iv:jBN/4/K5L5886G4rSzxt8V8u/57tAuj3R76haltzqeU=,tag:Xe6o9eg2PodfktDqmLgVNA==,type:str]
|
||||
PROXY_URLS_DATACENTER=ENC[AES256_GCM,data:X6xpxz5u8Xh3OXjkIz3UwqH847qLvY9cVWVktW5B+lqhmXAKTzoTzHds8vlRGJf5Up9Yx44XcigbvuK33ZJDSq9ovkAIbY55OK4=,iv:3hHyFD+H9HMzQ/27bPjGr59+7yWmEneUdN9XPQasCig=,tag:oBXsSuV5idB7HqNrNOruwg==,type:str]
|
||||
WEBSHARE_DOWNLOAD_URL=ENC[AES256_GCM,data:1D9VRZ3MCXPQWfiMH8+CLcrxeYnVVcQgZDvt5kltvbSTuSHQ2hHDmZpBkTOMIBJnw4JLZ2JQKHgG4OaYDtsM2VltFPnfwaRgVI9G5PSenR3o4PeQmYO1AqWOmjn19jPxNXRhEXdupP9UT+xQNXoBJsl6RR20XOpMA5AipUHmSjD0UIKXoZLU,iv:uWUkAydac//qrOTPUThuOLKAKXK4xcZmK9qBVFwpqt4=,tag:1vYhukBW9kEuSXCLAiZZmQ==,type:str]
|
||||
CIRCUIT_BREAKER_THRESHOLD=
|
||||
#ENC[AES256_GCM,data:ZcX/OEbrMfKizIQYq3CYGnvzeTEX7KsmQaz2+Jj1rG5tbTy2aljQBIEkjtiwuo8NsNAD+FhIGRGVfBmKe1CAKME1MuiCbgSG,iv:4BSkeD3jZFawP09qECcqyuiWcDnCNSgbIjBATYhazq4=,tag:Ep1d2Uk700MOlWcLWaQ/ig==,type:comment]
|
||||
GSC_SERVICE_ACCOUNT_PATH=
|
||||
@@ -70,7 +71,7 @@ GEONAMES_USERNAME=ENC[AES256_GCM,data:aSkVdLNrhiF6tlg=,iv:eemFGwDIv3EG/P3lVHGZj9
|
||||
CENSUS_API_KEY=ENC[AES256_GCM,data:qqG971573aGq9MiHI2xLlanKKFwjfcNNoMXtm8LNbyh0rMbQN2XukQ==,iv:az2i0ldH75nHGah4DeOxaXmDbVYqmC1c77ptZqFA9BI=,tag:zoDdKj9bR7fgIDo1/dEU2g==,type:str]
|
||||
sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxNWNmUzVNUGdWRnE0ZFpF\nM0JQZWZ3UDdEVzlwTmIxakxOZXBkT2x2ZlNrClRtV2M3S2daSGxUZmFDSWQ2Nmh4\neU51QndFcUxlSE00RFovOVJTcDZmUUUKLS0tIDcvL3hRMDRoMWZZSXljNzA3WG5o\nMWFic21MV0krMzlIaldBTVU0ZDdlTE0K7euGQtA+9lHNws+x7TMCArZamm9att96\nL8cXoUDWe5fNI5+M1bXReqVfNwPTwZsV6j/+ZtYKybklIzWz02Ex4A==\n-----END AGE ENCRYPTED FILE-----\n
|
||||
sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a
|
||||
sops_lastmodified=2026-02-26T14:31:14Z
|
||||
sops_mac=ENC[AES256_GCM,data:iqFuTexTS9U/Nv8xoTpHljTNQTGX9ITcJ3AjhDEtxrh0Z9/lngfBvGtjiKmpwFGlobQw/x+/YLM+u3MhciwXF7qNwFfJ/StN2Y66uF71SxWotbL70Dxl4oWSVL3sU+2NYbw5yP0p+xCbE+rEd5SqAe6K5yyq5X25hz8fIapxlYA=,iv:foqoWQVMipuOAQ0Kp799PaIhCIrxV8T5cC811wIzxR8=,tag:yNfxSV3R21XEXksjmdsKBw==,type:str]
|
||||
sops_lastmodified=2026-02-28T15:50:46Z
|
||||
sops_mac=ENC[AES256_GCM,data:HiLZTLa+p3mqa4hw+tKOK27F/bsJOy4jmDi8MHToi6S7tRfBA/TzcEzXvXUIkkwAixN73NQHvBVeRnbcEsApVpkaxH1OqnjvvyT+B3YFkTEtxczaKGWlCvbqFZNmXYsFvGR9njaWYWsTQPkRIjrroXrSrhr7uxC8F40v7ByxJKo=,iv:qj2IpzWRIh/mM1HtjjkNbyFuhtORKXslVnf/vdEC9Uw=,tag:fr9CZsL74HxRJLXn9eS0xQ==,type:str]
|
||||
sops_unencrypted_suffix=_unencrypted
|
||||
sops_version=3.12.1
|
||||
|
||||
@@ -43,7 +43,10 @@ ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:4sXQk8zklruC525J279TUUatdDJQ43qweuoPhtpI82
|
||||
NTFY_TOKEN=ENC[AES256_GCM,data:YlOxhsRJ8P1y4kk6ugWm41iyRCsM6oAWjvbU9lGcD0A=,iv:JZXOvi3wTOPV9A46c7fMiqbszNCvXkOgh9i/H1hob24=,tag:8xnPimgy7sesOAnxhaXmpg==,type:str]
|
||||
SUPERVISOR_GIT_PULL=ENC[AES256_GCM,data:mg==,iv:KgqMVYj12FjOzWxtA1T0r0pqCDJ6MtHzMjE+4W/W+s4=,tag:czFaOqhHG8nqrQ8AZ8QiGw==,type:str]
|
||||
#ENC[AES256_GCM,data:hzAZvCWc4RTk290=,iv:RsSI4OpAOQGcFVpfXDZ6t705yWmlO0JEWwWF5uQu9As=,tag:UPqFtA2tXiSa0vzJAv8qXg==,type:comment]
|
||||
PROXY_URLS=ENC[AES256_GCM,data:nm4B++SkZZgN3p2xru3WrpVA0X6O8yvb45tH/ovF4006zBy28xqVxbsd44Mz6b5FMinjOXRmGwoI/GDWmdJLzBYdpryQ/FhpbzSUpr1ZOjOz+7P0vn2jfBGAB8ksU3i5kuYglud3EyQGFL+v+uooxwrIUCjfzmmB4vCmf7phssKDsK1CqzmdZ1c54ehSu4bRRdmGp9d0+r+j1SpXb/JbZ8LTqUIhLlZXrHFqkCfN1czhFK9IwMVgR00Q4v2YkjaRBME4lVqwk1NwwatbS9Fq8LlzwuT1uKk+T6ZDkFKC8ZoPW1YRqF13X7hFGFXCNRqABRDZ45lqxYQbBoRrWmH2tfMiAmTrIuRsdPM8bZ/Ol5mXSDhs0HyWX2urX+LD65rIOO0zN/lwjXSwh5mwwBdB61akdzsWRyLZsdafuQUmgGul8y0eGMEbFWaty3bdrtAmqtsvHwxD/Dp/gQWScESXvPd1arn55zaXmefOy+ZLwcmx+FAJPpTMXRaq6Y/Z+D1PZZ+Uhu2D6tsAR4VvqqwlUgpsrAFXk6chJzOry8rmmxoMuIj9mXfjG+BqPFhV2oQsKSuIqFQqd/ZidJLO8ZSxA7L+h1eH4cQjcUd2nfzroG8nnKZ+cA8hQMfLuFiMY1I=,iv:nTaNQlC3px/lnodLphnILWbPVnelaUKKOZAFAaHi8MU=,tag:TYkIX1nrc+PKbvvnWYcvbg==,type:str]
|
||||
PROXY_URLS_RESIDENTIAL=ENC[AES256_GCM,data:x/F0toXDc8stsUNxaepCmxq1+WuacqqPtdc+R5mxTwcAzsKxCdwt8KpBZWMvz7ku4tHDGsKD949QAX2ANXP9oCMTgW0=,iv:6G9gE9/v7GaYj8aqVTmMrpw6AcQK9yMSCAohNdAD1Ws=,tag:2Jimr1ldVSfkh8LPEwdN3w==,type:str]
|
||||
PROXY_URLS_DATACENTER=ENC[AES256_GCM,data:6BfXBYmyHpgZU/kJWpZLf8eH5VowVK1n0r6GzFTNAx/OmyaaS1RZVPC1JPkPBnTwEmo0WHYRW8uiUdkABmH9F5ZqqlsAesyfW7zvU9r7yD+D7w==,iv:3CBn2qCoTueQy8xVcQqZS4E3F0qoFYnNbzTZTpJ1veo=,tag:wC3Ecl4uNTwPiT23ATvRZg==,type:str]
|
||||
WEBSHARE_DOWNLOAD_URL=ENC[AES256_GCM,data:/N77CFf6tJWCk7HrnBOm2Q1ynx7XoblzfbzJySeCjrxqiu4r+CB90aDkaPahlQKI00DUZih3pcy7WhnjdAwI30G5kJZ3P8H8/R0tP7OBK1wPVbsJq8prQJPFOAWewsS4KWNtSURZPYSCxslcBb7DHLX6ZAjv6A5KFOjRK2N8usR9sIabrCWh,iv:G3Ropu/JGytZK/zKsNGFjjSu3Wt6fvHaAqI9RpUHvlI=,tag:fv6xuS94OR+4xfiyKrYELA==,type:str]
|
||||
PROXY_CONCURRENCY=ENC[AES256_GCM,data:vdEZ,iv:+eTNQO+s/SsVDBLg1/+fneMzEEsFkuEFxo/FcVV+mWc=,tag:i/EPwi/jOoWl3xW8H0XMdw==,type:str]
|
||||
RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:L2s=,iv:fV3mCKmK5fxUmIWRePELBDAPTb8JZqasVIhnAl55kYw=,tag:XL+PO6sblz/7WqHC3dtk1w==,type:str]
|
||||
#ENC[AES256_GCM,data:RC+t2vqLwLjapdAUql8rQls=,iv:Kkiz3ND0g0MRAgcPJysIYMzSQS96Rq+3YP5yO7yWfIY=,tag:Y6TbZd81ihIwn+U515qd1g==,type:comment]
|
||||
GSC_SERVICE_ACCOUNT_PATH=ENC[AES256_GCM,data:Vki6yHk+gd4n,iv:rxzKvwrGnAkLcpS41EZ097E87NrIpNZGFfl4iXFvr40=,tag:EZkBJpCq5rSpKYVC4H3JHQ==,type:str]
|
||||
@@ -59,7 +62,7 @@ sops_age__list_1__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb2
|
||||
sops_age__list_1__map_recipient=age1wjepykv3glvsrtegu25tevg7vyn3ngpl607u3yjc9ucay04s045s796msw
|
||||
sops_age__list_2__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFeHhaOURNZnRVMEwxNThu\nUjF4Q0kwUXhTUE1QSzZJbmpubnh3RnpQTmdvCjRmWWxpNkxFUmVGb3NRbnlydW5O\nWEg3ZXJQTU4vcndzS2pUQXY3Q0ttYjAKLS0tIE9IRFJ1c2ZxbGVHa2xTL0swbGN1\nTzgwMThPUDRFTWhuZHJjZUYxOTZrU00KY62qrNBCUQYxwcLMXFEnLkwncxq3BPJB\nKm4NzeHBU87XmPWVrgrKuf+PH1mxJlBsl7Hev8xBTy7l6feiZjLIvQ==\n-----END AGE ENCRYPTED FILE-----\n
|
||||
sops_age__list_2__map_recipient=age1c783ym2q5x9tv7py5d28uc4k44aguudjn03g97l9nzs00dd9tsrqum8h4d
|
||||
sops_lastmodified=2026-02-26T14:32:28Z
|
||||
sops_mac=ENC[AES256_GCM,data:pyHQHwTtjh7OLiMqbqhUjfrmetEtYS7yB342C/TWfDCwEotWLVwnGWlC4+HIl53pw9+3AgoBVRnW0t86e4kG9O8KyHnk68S9qBcpUsybW3lyGPNXmBydv1W9gQHuK8f/4WGIbkhNxyIToKg9ZAmYWFxNhRKSoYKm5P9Uh7B7CF4=,iv:syrX8VdL3JsDsawvFWbX04Ygcr18hjSSHfEwHkyKETk=,tag:qrhWkh/e+21OKGU2+rCeyg==,type:str]
|
||||
sops_lastmodified=2026-02-28T17:03:44Z
|
||||
sops_mac=ENC[AES256_GCM,data:IQ9jpRxVUssaMK+qFcM3nPdzXHkiqp6E+DhEey1TfqUu5GCBNsWeVy9m9A6p9RWhu2NtJV7aKdUeqneuMtD1q5Tnm6L96zuyot2ESnx2N2ssD9ilrDauQxoBJcrJVnGV61CgaCz9458w8BuVUZydn3MoHeRaU7bOBBzQlTI6vZk=,iv:qHqdt3av/KZRQHr/OS/9KdAJUgKlKEDgan7qI3Zzkck=,tag:fOvdO9iRTTF1Siobu2mLqg==,type:str]
|
||||
sops_unencrypted_suffix=_unencrypted
|
||||
sops_version=3.12.1
|
||||
|
||||
@@ -17,9 +17,9 @@ jobs:
|
||||
- run: uv run pytest web/tests/ -x -q -p no:faulthandler
|
||||
- run: uv run ruff check web/src/ web/tests/
|
||||
|
||||
# Creates v<N> tag after tests pass. The on-server supervisor polls for new
|
||||
# tags every 60s and deploys automatically. No SSH keys or deploy credentials
|
||||
# needed in CI — only the built-in github.token.
|
||||
# Creates a v{YYYYMMDDHHMM} tag after tests pass on master.
|
||||
# The on-server supervisor polls for new tags every 60s and deploys
|
||||
# automatically. No SSH keys or deploy credentials needed in CI.
|
||||
tag:
|
||||
needs: [test]
|
||||
runs-on: ubuntu-latest
|
||||
@@ -32,5 +32,6 @@ jobs:
|
||||
run: |
|
||||
git config user.name "CI"
|
||||
git config user.email "ci@noreply"
|
||||
git tag "v${{ github.run_number }}"
|
||||
git push origin "v${{ github.run_number }}"
|
||||
TAG="v$(date -u +%Y%m%d%H%M)"
|
||||
git tag "$TAG"
|
||||
git push origin "$TAG"
|
||||
|
||||
21
CHANGELOG.md
21
CHANGELOG.md
@@ -6,6 +6,27 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### Added
|
||||
- **Affiliate product system** — "Wirecutter for padel" editorial affiliate cards embedded in articles via `[product:slug]` and `[product-group:category]` markers, baked at build time into static HTML. `/go/<slug>` click-tracking redirect (302, GDPR-compliant daily-rotated IP hash). Admin CRUD (`/admin/affiliate`) with live preview, inline status toggle, HTMX search/filter. Click stats dashboard (pure CSS bar chart, top products/articles/retailers). 10 German equipment review article scaffolds seeded.
|
||||
- `web/src/padelnomics/migrations/versions/0026_affiliate_products.py`: `affiliate_products` + `affiliate_clicks` tables; `UNIQUE(slug, language)` constraint mirrors articles schema
|
||||
- `web/src/padelnomics/affiliate.py`: `get_product()`, `get_products_by_category()`, `get_all_products()`, `log_click()`, `hash_ip()`, `get_click_stats()`, `get_click_counts()`, `get_distinct_retailers()`
|
||||
- `web/src/padelnomics/content/routes.py`: `PRODUCT_RE`, `PRODUCT_GROUP_RE`, `bake_product_cards()` — chained after `bake_scenario_cards()` in `generate_articles()` and `preview_article()`
|
||||
- `web/src/padelnomics/app.py`: `/go/<slug>` route with rate limiting (60/min per IP) and referer-based article/language extraction
|
||||
- `web/src/padelnomics/admin/routes.py`: affiliate CRUD routes + `bake_product_cards()` chained in article rebuild flows
|
||||
- New templates: `partials/product_card.html`, `partials/product_group.html`, `admin/affiliate_products.html`, `admin/affiliate_form.html`, `admin/affiliate_dashboard.html`, `admin/partials/affiliate_results.html`, `admin/partials/affiliate_row.html`
|
||||
- `locales/en.json` + `locales/de.json`: 6 new affiliate i18n keys
|
||||
- `data/content/articles/`: 10 new German equipment review scaffolds (rackets, balls, shoes, accessories, gifts)
|
||||
- 26 tests in `web/tests/test_affiliate.py`
|
||||
|
||||
### Added
|
||||
- **Three-tier proxy system** for extraction pipeline: free (Webshare auto-fetched) → datacenter (`PROXY_URLS_DATACENTER`) → residential (`PROXY_URLS_RESIDENTIAL`). Webshare free proxies are now auto-fetched from their download API on each run — no more manually copying stale proxy lists.
|
||||
- `proxy.py`: added `fetch_webshare_proxies()` (stdlib urllib, bounded read + timeout), `load_proxy_tiers()` (assembles N tiers from env), generalised `make_tiered_cycler()` to accept `list[list[str]]` with N-level escalation. Exposes `is_exhausted()`, `active_tier_index()`, `tier_count()`.
|
||||
- `playtomic_availability.py`: both `extract()` and `extract_recheck()` now use `load_proxy_tiers()` + N-tier cycler. `_fetch_venues_parallel` `fallback_urls` param removed. `is_fallback_active()` replaced by `is_exhausted()`.
|
||||
- `playtomic_tenants.py`: uses `load_proxy_tiers()` flattened for simple round-robin.
|
||||
|
||||
### Changed
|
||||
- **Env vars renamed** (breaking): `PROXY_URLS` → removed, `PROXY_URLS_FALLBACK` → removed. New vars: `WEBSHARE_DOWNLOAD_URL`, `PROXY_URLS_DATACENTER`, `PROXY_URLS_RESIDENTIAL`.
|
||||
|
||||
### Added
|
||||
- **Phase 2a — NUTS-1 regional income differentiation** (`opportunity_score`): Munich and Berlin no longer share the same income figure as Chemnitz.
|
||||
- `eurostat.py`: added `nama_10r_2hhinc` dataset config (NUTS-2 cube with NUTS-1 entries); filter params now appended to API URL so the server pre-filters the large cube before download (also makes `ilc_di03` requests smaller).
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Padelnomics — Project Tracker
|
||||
|
||||
> Move tasks across columns as you work. Add new tasks at the top of the relevant column.
|
||||
> Last updated: 2026-02-27 (Phase 2b — EU NUTS-2 spatial join + US state income).
|
||||
> Last updated: 2026-02-28 (Affiliate product system — editorial gear cards + click tracking).
|
||||
|
||||
---
|
||||
|
||||
@@ -132,6 +132,7 @@
|
||||
- [x] **pSEO article noindex** — `noindex` column on articles (migration 0025), `NOINDEX_THRESHOLDS` per-template lambdas in `content/__init__.py`, robots meta tag in `article_detail.html`, sitemap exclusion, pSEO dashboard count card + article row badge; 20 tests
|
||||
- [x] **group_key static article grouping** — migration 0020 adds `group_key TEXT` column; `_sync_static_articles()` auto-upserts `data/content/articles/*.md` on admin page load; `_get_article_list_grouped()` groups by `COALESCE(group_key, url_path)` so EN/DE static cornerstones pair into one row
|
||||
- [x] **Email-gated report PDF** — `reports/` blueprint with email capture gate + PDF download; premium WeasyPrint PDF (full-bleed navy cover, Padelnomics wordmark watermark, gold/teal accents); `make report-pdf` target; EN + DE i18n (26 keys, native German); state-of-padel report moved to `data/content/reports/`
|
||||
- [x] **Affiliate product system** — "Wirecutter for padel" editorial gear cards embedded in articles via `[product:slug]` / `[product-group:category]` markers, baked at build time; `/go/<slug>` click-tracking redirect (302, GDPR daily-rotated IP hash, rate-limited); admin CRUD with live preview, HTMX filter/search, status toggle; click stats dashboard (pure CSS charts); 10 German equipment review article scaffolds; 26 tests
|
||||
|
||||
### SEO & Legal
|
||||
- [x] Sitemap (both language variants, `<lastmod>` on all entries)
|
||||
@@ -243,7 +244,6 @@
|
||||
|
||||
### Marketing & Content
|
||||
- [ ] LinkedIn presence (ongoing — founder posts, thought leadership)
|
||||
- [ ] "Wirecutter for padel" affiliate site (racket reviews, gear guides)
|
||||
- [ ] "The Padel Business Report" newsletter
|
||||
- [ ] Equipment supplier affiliate partnerships (€500–1,000/lead or 5%)
|
||||
- [ ] Padel podcasts (guest appearances)
|
||||
|
||||
27
README.md
27
README.md
@@ -396,18 +396,19 @@ docker compose logs -f app # tail logs
|
||||
|
||||
## CI/CD
|
||||
|
||||
Go to GitLab → padelnomics → Settings → CI/CD → Variables and add:
|
||||
Pull-based deployment via Gitea Actions — no SSH keys or deploy credentials in CI.
|
||||
|
||||
| Variable | Value | Notes |
|
||||
|----------|-------|-------|
|
||||
| SSH_PRIVATE_KEY | Your ed25519 private key | Mask it, type "Variable" |
|
||||
| DEPLOY_HOST | Your Hetzner server IP | e.g. 1.2.3.4 |
|
||||
| DEPLOY_USER | SSH username on the server | e.g. deploy or root |
|
||||
| SSH_KNOWN_HOSTS | Server host key | Run `ssh-keyscan $YOUR_SERVER_IP` |
|
||||
1. Push to master → Gitea Actions runs tests (`.gitea/workflows/ci.yaml`)
|
||||
2. On success, CI creates tag `v<run_number>` using the built-in `github.token`
|
||||
3. On-server supervisor polls for new tags every 60s and deploys automatically
|
||||
|
||||
Server-side one-time setup:
|
||||
1. Add the matching public key to `~/.ssh/authorized_keys` for the deploy user
|
||||
2. Clone the repo to `/opt/padelnomics`
|
||||
3. Create `.env` from `padelnomics/.env.example` with production values
|
||||
4. `chmod +x deploy.sh && ./deploy.sh` for the first deploy
|
||||
5. Point reverse proxy to port 5000
|
||||
**Server-side one-time setup:**
|
||||
```bash
|
||||
bash infra/setup_server.sh # creates padelnomics_service user, keys, dirs
|
||||
ssh root@<server> 'bash -s' < infra/bootstrap_supervisor.sh
|
||||
```
|
||||
|
||||
1. `setup_server.sh` generates an ed25519 SSH deploy key — add the printed public key to Gitea:
|
||||
`git.padelnomics.io → padelnomics → Settings → Deploy Keys → Add key (read-only)`
|
||||
2. Add the printed age public key to `.sops.yaml`, re-encrypt, commit + push
|
||||
3. Run `bootstrap_supervisor.sh` — clones from `git.padelnomics.io:2222`, decrypts secrets, starts systemd supervisor
|
||||
|
||||
88
data/content/articles/beste-padelschlaeger-de.md
Normal file
88
data/content/articles/beste-padelschlaeger-de.md
Normal file
@@ -0,0 +1,88 @@
|
||||
---
|
||||
title: "Die besten Padelschläger 2026: Unser ausführlicher Vergleich"
|
||||
slug: beste-padelschlaeger-de
|
||||
language: de
|
||||
url_path: /beste-padelschlaeger-2026
|
||||
meta_description: "Welcher Padelschläger ist der beste 2026? Wir haben die wichtigsten Modelle für Anfänger, Fortgeschrittene und Profis getestet und verglichen."
|
||||
---
|
||||
|
||||
# Die besten Padelschläger 2026: Unser ausführlicher Vergleich
|
||||
|
||||
<!-- TODO: Einleitung mit Hauptkeyword und USP dieser Seite (200–300 Wörter) -->
|
||||
|
||||
Wer einen neuen Padelschläger kaufen will, steht vor einer unüberschaubaren Auswahl. Mehr als 50 Marken, Hunderte von Modellen — und kein einziges unabhängiges Testlabor. Wir haben die meistverkauften und meistempfohlenen Schläger zusammengetragen und nach drei Kriterien bewertet: Spielgefühl, Haltbarkeit und Preis-Leistungs-Verhältnis.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Top-Empfehlungen
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Testsieger im Detail
|
||||
|
||||
<!-- TODO: Ausführliche Besprechung der Top 3–5 Modelle, je 300–500 Wörter pro Schläger -->
|
||||
|
||||
### Platz 1: [Produktname einfügen]
|
||||
|
||||
[product:platzhalter-schlaeger-1-amazon]
|
||||
|
||||
<!-- TODO: Erfahrungsbericht + Vor- und Nachteile im Prosatext -->
|
||||
|
||||
### Platz 2: [Produktname einfügen]
|
||||
|
||||
[product:platzhalter-schlaeger-2-amazon]
|
||||
|
||||
### Platz 3: [Produktname einfügen]
|
||||
|
||||
[product:platzhalter-schlaeger-3-amazon]
|
||||
|
||||
---
|
||||
|
||||
## So haben wir getestet
|
||||
|
||||
<!-- TODO: Kurze Beschreibung der Testmethodik (2–3 Absätze) -->
|
||||
|
||||
---
|
||||
|
||||
## Kaufberatung: Welcher Schläger passt zu mir?
|
||||
|
||||
<!-- TODO: Entscheidungsbaum / Tabelle nach Spielertyp -->
|
||||
|
||||
| Spielertyp | Empfohlene Form | Empfohlenes Gewicht |
|
||||
|---|---|---|
|
||||
| Anfänger | Rund | 355–365 g |
|
||||
| Allspieler | Tropfen | 360–370 g |
|
||||
| Fortgeschrittener | Diamant | 365–380 g |
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie oft sollte man einen Padelschläger wechseln?</summary>
|
||||
|
||||
<!-- TODO: Antwort (50–100 Wörter) -->
|
||||
|
||||
Bei regelmäßigem Spielen (2–3 Mal pro Woche) empfehlen wir einen Wechsel alle 12 bis 18 Monate. Der größte Qualitätsverlust entsteht nicht durch sichtbare Schäden, sondern durch den Abbau des Schaumstoffkerns, der das Spielgefühl verändert.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Was kostet ein guter Padelschläger?</summary>
|
||||
|
||||
<!-- TODO: Preisklassen-Überblick -->
|
||||
|
||||
Gute Einstiegsschläger gibt es ab 50 Euro. Für Fortgeschrittene empfehlen wir 100–200 Euro, für ambitionierte Spieler 200–350 Euro. Über 400 Euro kostet nur das Pro-Segment, das für die meisten Freizeitspieler überdimensioniert ist.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Runder oder Diamant-Schläger — was ist besser?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Runde Schläger verzeihen mehr Fehlschläge und eignen sich für Anfänger und defensive Spieler. Diamant-Schläger liefern mehr Power und werden von Angriffsspielern bevorzugt. Für die meisten Freizeitspieler ist eine Tropfen- oder runde Form die sicherere Wahl.
|
||||
|
||||
</details>
|
||||
69
data/content/articles/padel-ausruestung-anfaenger-de.md
Normal file
69
data/content/articles/padel-ausruestung-anfaenger-de.md
Normal file
@@ -0,0 +1,69 @@
|
||||
---
|
||||
title: "Padel-Ausrüstung für Anfänger: Was brauche ich wirklich?"
|
||||
slug: padel-ausruestung-anfaenger-de
|
||||
language: de
|
||||
url_path: /padel-ausruestung-anfaenger
|
||||
meta_description: "Was braucht man für Padel? Unser Ausrüstungsguide für Einsteiger — von Schläger und Schuhen bis zur Schutztasche. Was ist unverzichtbar, was ist Luxus?"
|
||||
---
|
||||
|
||||
# Padel-Ausrüstung für Anfänger: Was brauche ich wirklich?
|
||||
|
||||
<!-- TODO: Einleitung — klare Orientierung für Einsteiger -->
|
||||
|
||||
Padel ist im Vergleich zu vielen anderen Sportarten günstig einzusteigen. Wer zum ersten Mal auf den Court geht, braucht eigentlich nur drei Dinge: einen Schläger, die richtigen Schuhe und Bälle. Der Rest ist komfortsteigerndes Zubehör — notwendig wird es erst, wenn man ernsthafter spielt.
|
||||
|
||||
---
|
||||
|
||||
## Die unverzichtbare Grundausstattung
|
||||
|
||||
### 1. Schläger
|
||||
|
||||
[product:platzhalter-anfaenger-schlaeger-amazon]
|
||||
|
||||
<!-- TODO: 1–2 Absätze zum Einstiegsschläger -->
|
||||
|
||||
### 2. Schuhe
|
||||
|
||||
[product:platzhalter-padelschuh-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### 3. Bälle
|
||||
|
||||
[product:platzhalter-ball-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Was kann ich mir zunächst sparen?
|
||||
|
||||
<!-- TODO: Schläger-Tasche, Griffband, Sportbrille — wann sinnvoll? -->
|
||||
|
||||
---
|
||||
|
||||
## Das komplette Anfänger-Set: Unsere Empfehlung
|
||||
|
||||
[product-group:accessory]
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie viel kostet ein komplettes Padel-Starterpaket?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Für rund 150 Euro bekommt man einen soliden Anfängerschläger (60–90 €), passende Padelschuhe (50–70 €) und eine Dose Bälle (6–10 €). Alles darüber hinaus ist optional.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Kann ich mit geliehener Ausrüstung starten?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ja, für die ersten Einheiten ist das sinnvoll. Die meisten Padel-Center verleihen Schläger für 2–5 Euro pro Einheit. Wer mehr als 3–4 Mal spielen will, für den lohnt sich ein eigener Schläger — schon allein wegen des vertrauten Spielgefühls.
|
||||
|
||||
</details>
|
||||
67
data/content/articles/padel-geschenke-de.md
Normal file
67
data/content/articles/padel-geschenke-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padel-Geschenke: Die besten Ideen für Padelbegeisterte"
|
||||
slug: padel-geschenke-de
|
||||
language: de
|
||||
url_path: /padel-geschenke
|
||||
meta_description: "Padel-Geschenke für Geburtstage, Weihnachten oder als Überraschung. Von der günstigen Kleinigkeit bis zum hochwertigen Schläger — für jedes Budget."
|
||||
---
|
||||
|
||||
# Padel-Geschenke: Die besten Ideen für Padelbegeisterte
|
||||
|
||||
<!-- TODO: Einleitung — Padel boomt, Geschenkideen gefragt -->
|
||||
|
||||
Padel ist der am schnellsten wachsende Sport Europas — und viele haben gerade erst damit begonnen. Wer einem Padel-Fan ein Geschenk machen will, steht vor der Frage: Was fehlt ihm noch? Dieser Guide listet die besten Ideen nach Preisklassen, vom kleinen Mitbringsel bis zum Wunschschläger.
|
||||
|
||||
---
|
||||
|
||||
## Geschenke unter 15 Euro
|
||||
|
||||
[product-group:grip]
|
||||
|
||||
<!-- TODO: Griffband, Bälle, kleine Accessoires -->
|
||||
|
||||
---
|
||||
|
||||
## Geschenke unter 50 Euro
|
||||
|
||||
[product-group:accessory]
|
||||
|
||||
<!-- TODO: Sporttasche, Cover, Trainingszubehör -->
|
||||
|
||||
---
|
||||
|
||||
## Geschenke unter 100 Euro
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
[product:platzhalter-schuh-amazon]
|
||||
|
||||
---
|
||||
|
||||
## Das perfekte Geschenk: Ein neuer Schläger
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
<!-- TODO: Hinweis auf Wunschliste / Amazon-Wunschliste-Tipp -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie finde ich heraus, welcher Schläger passt?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Fragen Sie die beschenkte Person nach ihrem aktuellen Modell oder lassen Sie sie aus einer Empfehlungsliste wählen. Schläger sind sehr persönlich — eine Gutscheinkarte für einen Fachhandel ist oft die sicherste Option.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Gibt es Padel-Geschenksets?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Einige Marken bieten Starter-Sets an (Schläger + Bälle + Cover). Diese sind im Vergleich zum Einzelkauf oft günstiger und eignen sich als Komplett-Einstiegsgeschenk für Neuspieler.
|
||||
|
||||
</details>
|
||||
67
data/content/articles/padel-zubehoer-de.md
Normal file
67
data/content/articles/padel-zubehoer-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padel-Zubehör: Das braucht jeder Spieler wirklich"
|
||||
slug: padel-zubehoer-de
|
||||
language: de
|
||||
url_path: /padel-zubehoer
|
||||
meta_description: "Welches Padel-Zubehör lohnt sich wirklich? Von Griffband und Vibrationsdämpfer bis zur Sporttasche — was ist nützlich, was ist Marketing?"
|
||||
---
|
||||
|
||||
# Padel-Zubehör: Das braucht jeder Spieler wirklich
|
||||
|
||||
<!-- TODO: Einleitung — Zubehör gibt es viel, sinnvoll ist wenig -->
|
||||
|
||||
Wer Padel ernsthafter betreibt, wird früh von Empfehlungen überhäuft: Griffband kaufen! Schutzhülle! Vibrationsdämpfer! Nicht alles davon ist sinnvoll — aber einiges tatsächlich unverzichtbar. Dieser Guide hilft dabei, nützliches Zubehör von überteuertem Marketing zu trennen.
|
||||
|
||||
---
|
||||
|
||||
## Das sinnvollste Zubehör im Überblick
|
||||
|
||||
[product-group:accessory]
|
||||
|
||||
---
|
||||
|
||||
## Griffband: Ja, unbedingt
|
||||
|
||||
<!-- TODO: Erklärung, welches Griffband sich lohnt -->
|
||||
|
||||
[product:platzhalter-griffband-amazon]
|
||||
|
||||
---
|
||||
|
||||
## Schläger-Schutzhülle: Ja, wenn man häufig transportiert
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Vibrationsdämpfer: Geschmackssache
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Sporttasche: Erst ab regelmäßigem Spiel
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie oft sollte man das Griffband wechseln?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Bei regelmäßigem Spielen empfehlen wir einen Wechsel alle 4–8 Wochen. Ein abgenutztes Griffband erhöht das Risiko, den Schläger wegzuschleudern, und mindert die Kontrolle.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Brauche ich eine spezielle Padeltasche?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Eine Padeltasche schützt den Schläger vor Beschädigungen beim Transport. Für gelegentliche Spieler reicht ein einfaches Cover. Wer mehrere Schläger trägt oder regelmäßig zum Club fährt, profitiert von einer Sporttasche mit gepolstertem Schlägerfach.
|
||||
|
||||
</details>
|
||||
70
data/content/articles/padelbaelle-vergleich-de.md
Normal file
70
data/content/articles/padelbaelle-vergleich-de.md
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
title: "Beste Padelbälle 2026: Test und Vergleich der populärsten Modelle"
|
||||
slug: padelbaelle-vergleich-de
|
||||
language: de
|
||||
url_path: /padelbaelle-vergleich
|
||||
meta_description: "Welche Padelbälle sind am besten? Wir vergleichen die beliebtesten Modelle nach Druckhaltigkeit, Spielgefühl und Preis-Leistungs-Verhältnis."
|
||||
---
|
||||
|
||||
# Beste Padelbälle 2026: Test und Vergleich der populärsten Modelle
|
||||
|
||||
<!-- TODO: Einleitung — warum Bälle oft unterschätzt werden -->
|
||||
|
||||
Der Ball ist das am häufigsten unterschätzte Equipment im Padel. Dabei entscheidet seine Druckhaltigkeit maßgeblich über das Spielgefühl. Ein Padelball verliert nach 4–6 Stunden intensivem Spiel merklich an Druck — und damit an Tempo, Kontrolle und Spaß.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Empfehlungen
|
||||
|
||||
[product-group:ball]
|
||||
|
||||
---
|
||||
|
||||
## Druckhaltigkeit: Was wirklich zählt
|
||||
|
||||
<!-- TODO: Erklärung des Druckverlusts + Testzeitraum -->
|
||||
|
||||
---
|
||||
|
||||
## Turnier- vs. Freizeitball
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Testsieger im Überblick
|
||||
|
||||
[product:platzhalter-ball-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie lange hält ein Padelball?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ein hochwertiger Padelball ist nach etwa 4–8 Stunden Spielzeit merklich weicher. Im Freizeitbereich merkt man den Unterschied oft erst später. Profis und ambitionierte Spieler wechseln Bälle bereits nach einem Set.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Muss ich WCT- oder FIP-zertifizierte Bälle kaufen?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Für den Freizeiteinsatz nein. Für Turniere und Ligaspiele ja — die meisten Ligen schreiben zugelassene Ballmodelle vor. Im Training können beliebige Qualitätsbälle verwendet werden.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Wie lagere ich Padelbälle richtig?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Kühl und trocken lagern, nicht im Auto lassen. Manche Spieler verwenden Druckbehälter, um den Druckverlust zu verlangsamen — das funktioniert tatsächlich für bereits angebrochene Dosen.
|
||||
|
||||
</details>
|
||||
67
data/content/articles/padelschlaeger-anfaenger-de.md
Normal file
67
data/content/articles/padelschlaeger-anfaenger-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padelschläger für Anfänger 2026: Die 5 besten Einstiegsmodelle"
|
||||
slug: padelschlaeger-anfaenger-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-anfaenger
|
||||
meta_description: "Welcher Padelschläger eignet sich für Anfänger? Unsere Empfehlungen für Einsteiger: verzeihendes Spielgefühl, robuste Verarbeitung, fairer Preis."
|
||||
---
|
||||
|
||||
# Padelschläger für Anfänger 2026: Die 5 besten Einstiegsmodelle
|
||||
|
||||
<!-- TODO: Einleitung, warum Anfängerschläger sich von Profimodellen unterscheiden (150–200 Wörter) -->
|
||||
|
||||
Für den Einstieg ins Padel braucht man keinen teuren Profischaft. Im Gegenteil: Die meisten Hochleistungsschläger sind für Anfänger kontraproduktiv — ihr kleines Sweetspot-Fenster bestraft Fehlschläge, die in der Lernphase normal sind. Ein guter Anfängerschläger ist leicht, hat eine runde Form und verzeiht ungenaue Treffpunkte.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Top-5 für Einsteiger
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Was macht einen guten Anfängerschläger aus?
|
||||
|
||||
<!-- TODO: Erklärung der relevanten Schläger-Eigenschaften (Form, Gewicht, Material) -->
|
||||
|
||||
### Schlägerkopfform: Rund schlägt Diamant
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### Gewicht: Leichter ist nicht immer besser
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### Material: EVA vs. Foam
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Unsere Empfehlung im Detail
|
||||
|
||||
[product:platzhalter-anfaenger-schlaeger-amazon]
|
||||
|
||||
<!-- TODO: Ausführliche Besprechung mit Praxistest -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Ab welchem Preis lohnt sich ein eigener Schläger?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Wer mehr als einmal pro Woche spielt, sollte in einen eigenen Schläger investieren. Leihschläger im Club sind oft abgenutzt und vermitteln ein falsches Spielgefühl. Ab 60–80 Euro gibt es solide Einsteigerschläger.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Kann ich als Anfänger direkt mit einem 150-Euro-Schläger starten?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ja, sofern es sich um ein anfängerfreundliches Modell aus diesem Preisbereich handelt. Preisschilder allein sagen wenig — ein 150-Euro-Diamantschläger kann für Einsteiger schlechter sein als ein 70-Euro-Rundschläger.
|
||||
|
||||
</details>
|
||||
55
data/content/articles/padelschlaeger-defensiv-de.md
Normal file
55
data/content/articles/padelschlaeger-defensiv-de.md
Normal file
@@ -0,0 +1,55 @@
|
||||
---
|
||||
title: "Padelschläger für defensive Spieler: Die besten Kontrollschläger 2026"
|
||||
slug: padelschlaeger-defensiv-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-defensiv
|
||||
meta_description: "Die besten Padelschläger für defensive und kontrollbetonte Spieler. Runde und Tropfenform mit großem Sweetspot für sicheres Spiel vom Grundfeld."
|
||||
---
|
||||
|
||||
# Padelschläger für defensive Spieler: Die besten Kontrollschläger 2026
|
||||
|
||||
<!-- TODO: Einleitung zur defensiven Spielweise und warum der Schläger einen Unterschied macht -->
|
||||
|
||||
Im Padel entscheidet das Grundfeld. Wer vom hinteren Drittel sauber und kontrolliert spielen kann, zwingt den Gegner zu Fehlern. Für diesen Spielstil braucht man einen Schläger mit großem Sweetspot, weichem EVA-Kern und einer runden oder Tropfenform — nicht die auffälligsten Geräte, aber die effektivsten.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Empfehlungen für defensive Spieler
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Warum Kontrolle wichtiger ist als Power
|
||||
|
||||
<!-- TODO: Erklärung Spielstil + Schlägercharakteristik -->
|
||||
|
||||
---
|
||||
|
||||
## Testsieger im Detail
|
||||
|
||||
[product:platzhalter-defensiv-schlaeger-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Was ist der Unterschied zwischen einem Kontroll- und einem Powerschläger?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Kontrollschläger (runde Form, weicher Kern) vergrößern den Sweetspot und ermöglichen feingefühliges Spiel. Powerschläger (Diamantform, harter Kern) bieten mehr Hebelwirkung beim Smash, verzeihen aber weniger Fehlschläge.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Für welche Spielstufe sind Kontrollschläger geeignet?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Kontrollschläger sind für Anfänger, Freizeitspieler und taktisch orientierte Spieler aller Stufen geeignet. Auch viele erfahrene Spieler bevorzugen sie, weil Konsistenz auf Dauer mehr Punkte bringt als gelegentliche Powerschläge.
|
||||
|
||||
</details>
|
||||
67
data/content/articles/padelschlaeger-fortgeschrittene-de.md
Normal file
67
data/content/articles/padelschlaeger-fortgeschrittene-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padelschläger für Fortgeschrittene: Die besten Modelle 2026"
|
||||
slug: padelschlaeger-fortgeschrittene-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-fortgeschrittene
|
||||
meta_description: "Die besten Padelschläger für fortgeschrittene und ambitionierte Spieler. High-End-Modelle mit Carbon, Kevlar und ausgereifter Schlagbalance für Spieler ab 3.0."
|
||||
---
|
||||
|
||||
# Padelschläger für Fortgeschrittene: Die besten Modelle 2026
|
||||
|
||||
<!-- TODO: Einleitung — wann ist man bereit für einen Fortgeschrittenenschläger? -->
|
||||
|
||||
Ab einem gewissen Spielniveau lohnt sich der Griff zu einem anspruchsvolleren Schläger. Wer sauber trifft, kann von einer härteren Bespannung und einer präziseren Balance profitieren. Die Schläger in dieser Liste sind kein Selbstläufer — aber in den richtigen Händen ein echter Vorteil.
|
||||
|
||||
---
|
||||
|
||||
## Top-Schläger für Fortgeschrittene im Überblick
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Carbon, Kevlar, Glasfaser: Was steckt drin?
|
||||
|
||||
<!-- TODO: Materialüberblick mit Vor- und Nachteilen -->
|
||||
|
||||
### Carbon-Rahmen
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### 3K vs. 12K Carbon
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### Kevlar-Einlagen
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Testbericht: Unser Empfehlungsschläger
|
||||
|
||||
[product:platzhalter-fortgeschrittene-schlaeger-amazon]
|
||||
|
||||
<!-- TODO: Praxistest -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Ab welcher Spielstufe lohnt sich ein Fortgeschrittenenschläger?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Wer regelmäßig spielt (2–3 Mal pro Woche), seit mindestens einem Jahr dabei ist und an Taktik und Technik arbeitet, kann von einem hochwertigeren Schläger profitieren. Für gelegentliche Spieler ist der Unterschied zu einem Mittelklassemodell kaum spürbar.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Müssen Fortgeschrittenenschläger teurer sein?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Nicht zwingend. Es gibt ausgezeichnete Modelle im 150–200-Euro-Segment, die professionell verarbeitete Carbon-Elemente enthalten. Alles über 300 Euro richtet sich meist an Spieler mit Wettkampfambitionen.
|
||||
|
||||
</details>
|
||||
55
data/content/articles/padelschlaeger-unter-100-de.md
Normal file
55
data/content/articles/padelschlaeger-unter-100-de.md
Normal file
@@ -0,0 +1,55 @@
|
||||
---
|
||||
title: "Padelschläger unter 100 Euro: Die besten günstigen Modelle 2026"
|
||||
slug: padelschlaeger-unter-100-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-unter-100
|
||||
meta_description: "Gute Padelschläger müssen nicht teuer sein. Die besten Modelle unter 100 Euro — mit echtem Spielgefühl, ohne Kompromisse bei der Verarbeitung."
|
||||
---
|
||||
|
||||
# Padelschläger unter 100 Euro: Die besten günstigen Modelle 2026
|
||||
|
||||
<!-- TODO: Einleitung — Gibt es wirklich gute Schläger für unter 100 Euro? -->
|
||||
|
||||
Wer sagt, dass Padel teuer sein muss? In der 50-100-Euro-Klasse gibt es Schläger, die sich von 200-Euro-Modellen im Freizeitspiel kaum unterscheiden. Der entscheidende Unterschied liegt oft im Material des Rahmens und im Kern — nicht im Spielgefühl.
|
||||
|
||||
---
|
||||
|
||||
## Die besten Schläger unter 100 Euro
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Was bekommt man unter 100 Euro?
|
||||
|
||||
<!-- TODO: Realistische Erwartungen setzen -->
|
||||
|
||||
---
|
||||
|
||||
## Unser Preisklassen-Tipp
|
||||
|
||||
[product:platzhalter-budget-schlaeger-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Sind günstige Padelschläger schlechter verarbeitet?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Nicht zwangsläufig. Im Bereich 60–100 Euro findet man solide Fiberglas-Schläger bekannter Marken. Der Hauptunterschied zu teureren Modellen ist das Rahmenmaterial (kein Carbon) und ein schlichtes Design.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Lohnt es sich, für einen Einsteiger 100 Euro auszugeben?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ja, wenn er weiß, dass er das Spiel ernsthafter betreiben will. Für einen ersten Test reicht auch ein 50-Euro-Schläger — aber wer nach der ersten Saison weiterspielen will, wird früh aufwerten wollen.
|
||||
|
||||
</details>
|
||||
61
data/content/articles/padelschuhe-test-de.md
Normal file
61
data/content/articles/padelschuhe-test-de.md
Normal file
@@ -0,0 +1,61 @@
|
||||
---
|
||||
title: "Padelschuhe Test 2026: Die besten Schuhe für Sand- und Kunstgras"
|
||||
slug: padelschuhe-test-de
|
||||
language: de
|
||||
url_path: /padelschuhe-test
|
||||
meta_description: "Welche Padelschuhe sind am besten? Unser Test der beliebtesten Modelle — für Sand, Kunstgras und Kunststoffbelag mit optimaler Dämpfung und Stabilität."
|
||||
---
|
||||
|
||||
# Padelschuhe Test 2026: Die besten Schuhe für Sand- und Kunstgras
|
||||
|
||||
<!-- TODO: Einleitung — warum normale Tennisschuhe nicht reichen -->
|
||||
|
||||
Padelschuhe werden häufig unterschätzt. Auf dem Sandbelag des Padel-Courts braucht man eine völlig andere Sohle als auf Tennishartplatz oder Hallenboden. Ein falscher Schuh erhöht nicht nur das Verletzungsrisiko — er kostet auch Punkte, weil man in Kurven wegrutscht.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Top-Empfehlungen
|
||||
|
||||
[product-group:shoe]
|
||||
|
||||
---
|
||||
|
||||
## Welche Sohle für welchen Belag?
|
||||
|
||||
<!-- TODO: Sohlentypen und Untergrundtabelle -->
|
||||
|
||||
| Belag | Empfohlene Sohle |
|
||||
|---|---|
|
||||
| Sand (feiner Quarzsand) | Fishbone / Fischgrät |
|
||||
| Kunstgras | Multicourt / Omnidirectional |
|
||||
| Kunststoff/Beton | Glatte Multicourt-Sohle |
|
||||
|
||||
---
|
||||
|
||||
## Testbericht: Bester Allround-Schuh
|
||||
|
||||
[product:platzhalter-padelschuh-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Kann ich Tennisschuhe für Padel verwenden?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Für den gelegentlichen Einstieg ja. Auf Dauer ist es nicht empfehlenswert: Tennisschuhe bieten auf Sand zu wenig Halt, und die Abnutzung ist höher. Nach 3–4 Monaten regelmäßigen Spielens zahlen sich dedizierte Padelschuhe aus.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Wie erkenne ich verschlissene Padelschuhe?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Wenn die Außenfläche der Sohle glatt wird oder das Profil auf unter 2 mm abgenutzt ist, verliert der Schuh seinen Halt. Bei Padel ist das gefährlicher als bei vielen anderen Sportarten, weil häufige Richtungswechsel auf losem Sand stattfinden.
|
||||
|
||||
</details>
|
||||
@@ -59,10 +59,10 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /opt/padelnomics/data:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
healthcheck:
|
||||
@@ -81,10 +81,10 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /opt/padelnomics/data:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
@@ -97,10 +97,10 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /opt/padelnomics/data:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
@@ -114,10 +114,10 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /opt/padelnomics/data:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
healthcheck:
|
||||
@@ -136,10 +136,10 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /opt/padelnomics/data:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
@@ -152,10 +152,10 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /opt/padelnomics/data:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ from pathlib import Path
|
||||
import niquests
|
||||
|
||||
from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging, ua_for_proxy
|
||||
from .proxy import load_fallback_proxy_urls, load_proxy_urls, make_tiered_cycler
|
||||
from .proxy import load_proxy_tiers, make_tiered_cycler
|
||||
from .utils import (
|
||||
compress_jsonl_atomic,
|
||||
flush_partial_batch,
|
||||
@@ -52,6 +52,9 @@ MAX_VENUES_PER_RUN = 20_000
|
||||
MAX_RETRIES_PER_VENUE = 2
|
||||
RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "30"))
|
||||
CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10")
|
||||
# Worker count: defaults to MAX_PROXY_CONCURRENCY (200). Override via PROXY_CONCURRENCY env var.
|
||||
_PROXY_CONCURRENCY = os.environ.get("PROXY_CONCURRENCY", "").strip()
|
||||
MAX_PROXY_CONCURRENCY = 200
|
||||
|
||||
# Parallel mode submits futures in batches so the circuit breaker can stop
|
||||
# new submissions after it opens. Already-inflight futures in the current
|
||||
@@ -76,8 +79,10 @@ def _load_tenant_ids(landing_dir: Path) -> list[str]:
|
||||
if not playtomic_dir.exists():
|
||||
return []
|
||||
|
||||
# Prefer JSONL (new format), fall back to blob (old format)
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/tenants.jsonl.gz"), reverse=True)
|
||||
# Prefer daily partition (YYYY/MM/DD), fall back to older monthly/weekly partitions
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/*/tenants.jsonl.gz"), reverse=True)
|
||||
if not tenant_files:
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/tenants.jsonl.gz"), reverse=True)
|
||||
if not tenant_files:
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/tenants.json.gz"), reverse=True)
|
||||
if not tenant_files:
|
||||
@@ -190,14 +195,13 @@ def _fetch_venues_parallel(
|
||||
start_max_str: str,
|
||||
worker_count: int,
|
||||
cycler: dict,
|
||||
fallback_urls: list[str],
|
||||
on_result=None,
|
||||
) -> tuple[list[dict], int]:
|
||||
"""Fetch availability for multiple venues in parallel.
|
||||
|
||||
Submits futures in batches of PARALLEL_BATCH_SIZE. After each batch
|
||||
completes, checks the circuit breaker: if it opened and there is no
|
||||
fallback configured, stops submitting further batches.
|
||||
completes, checks the circuit breaker: if all proxy tiers are exhausted,
|
||||
stops submitting further batches.
|
||||
|
||||
on_result: optional callable(result: dict) invoked inside the lock for
|
||||
each successful result — used for incremental partial-file flushing.
|
||||
@@ -215,10 +219,10 @@ def _fetch_venues_parallel(
|
||||
|
||||
with ThreadPoolExecutor(max_workers=worker_count) as pool:
|
||||
for batch_start in range(0, len(tenant_ids), PARALLEL_BATCH_SIZE):
|
||||
# Stop submitting new work if circuit is open with no fallback
|
||||
if cycler["is_fallback_active"]() and not fallback_urls:
|
||||
# Stop submitting new work if all proxy tiers are exhausted
|
||||
if cycler["is_exhausted"]():
|
||||
logger.error(
|
||||
"Circuit open with no fallback — stopping after %d/%d venues",
|
||||
"All proxy tiers exhausted — stopping after %d/%d venues",
|
||||
completed_count, len(tenant_ids),
|
||||
)
|
||||
break
|
||||
@@ -294,10 +298,9 @@ def extract(
|
||||
venues_to_process = [tid for tid in all_venues_to_process if tid not in already_done]
|
||||
|
||||
# Set up tiered proxy cycler with circuit breaker
|
||||
proxy_urls = load_proxy_urls()
|
||||
fallback_urls = load_fallback_proxy_urls()
|
||||
worker_count = len(proxy_urls) if proxy_urls else 1
|
||||
cycler = make_tiered_cycler(proxy_urls, fallback_urls, CIRCUIT_BREAKER_THRESHOLD)
|
||||
tiers = load_proxy_tiers()
|
||||
worker_count = min(int(_PROXY_CONCURRENCY), MAX_PROXY_CONCURRENCY) if _PROXY_CONCURRENCY else (MAX_PROXY_CONCURRENCY if tiers else 1)
|
||||
cycler = make_tiered_cycler(tiers, CIRCUIT_BREAKER_THRESHOLD)
|
||||
|
||||
start_min_str = start_min.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
start_max_str = start_max.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
@@ -325,9 +328,9 @@ def extract(
|
||||
venues_errored = 0
|
||||
|
||||
if worker_count > 1:
|
||||
logger.info("Parallel mode: %d workers, %d proxies", worker_count, len(proxy_urls))
|
||||
logger.info("Parallel mode: %d workers, %d tier(s)", worker_count, len(tiers))
|
||||
new_venues_data, venues_errored = _fetch_venues_parallel(
|
||||
venues_to_process, start_min_str, start_max_str, worker_count, cycler, fallback_urls,
|
||||
venues_to_process, start_min_str, start_max_str, worker_count, cycler,
|
||||
on_result=_on_result,
|
||||
)
|
||||
else:
|
||||
@@ -342,9 +345,9 @@ def extract(
|
||||
_on_result(result)
|
||||
else:
|
||||
venues_errored += 1
|
||||
circuit_opened = cycler["record_failure"]()
|
||||
if circuit_opened and not fallback_urls:
|
||||
logger.error("Circuit open with no fallback — writing partial results")
|
||||
cycler["record_failure"]()
|
||||
if cycler["is_exhausted"]():
|
||||
logger.error("All proxy tiers exhausted — writing partial results")
|
||||
break
|
||||
|
||||
if (i + 1) % 100 == 0:
|
||||
@@ -485,14 +488,13 @@ def extract_recheck(
|
||||
start_max_str = window_end.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
|
||||
# Set up tiered proxy cycler with circuit breaker
|
||||
proxy_urls = load_proxy_urls()
|
||||
fallback_urls = load_fallback_proxy_urls()
|
||||
worker_count = len(proxy_urls) if proxy_urls else 1
|
||||
cycler = make_tiered_cycler(proxy_urls, fallback_urls, CIRCUIT_BREAKER_THRESHOLD)
|
||||
tiers = load_proxy_tiers()
|
||||
worker_count = min(int(_PROXY_CONCURRENCY), MAX_PROXY_CONCURRENCY) if _PROXY_CONCURRENCY else (MAX_PROXY_CONCURRENCY if tiers else 1)
|
||||
cycler = make_tiered_cycler(tiers, CIRCUIT_BREAKER_THRESHOLD)
|
||||
|
||||
if worker_count > 1 and len(venues_to_recheck) > 10:
|
||||
venues_data, venues_errored = _fetch_venues_parallel(
|
||||
venues_to_recheck, start_min_str, start_max_str, worker_count, cycler, fallback_urls,
|
||||
venues_to_recheck, start_min_str, start_max_str, worker_count, cycler,
|
||||
)
|
||||
else:
|
||||
venues_data = []
|
||||
@@ -504,9 +506,9 @@ def extract_recheck(
|
||||
cycler["record_success"]()
|
||||
else:
|
||||
venues_errored += 1
|
||||
circuit_opened = cycler["record_failure"]()
|
||||
if circuit_opened and not fallback_urls:
|
||||
logger.error("Circuit open with no fallback — writing partial recheck results")
|
||||
cycler["record_failure"]()
|
||||
if cycler["is_exhausted"]():
|
||||
logger.error("All proxy tiers exhausted — writing partial recheck results")
|
||||
break
|
||||
|
||||
# Write recheck file as JSONL — one venue per line with metadata injected
|
||||
|
||||
@@ -25,12 +25,13 @@ import json
|
||||
import sqlite3
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
|
||||
from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging, ua_for_proxy
|
||||
from .proxy import load_proxy_urls, make_round_robin_cycler
|
||||
from .proxy import load_proxy_tiers, make_round_robin_cycler
|
||||
from .utils import compress_jsonl_atomic, landing_path
|
||||
|
||||
logger = setup_logging("padelnomics.extract.playtomic_tenants")
|
||||
@@ -69,25 +70,31 @@ def _fetch_pages_parallel(pages: list[int], next_proxy) -> list[tuple[int, list[
|
||||
|
||||
def extract(
|
||||
landing_dir: Path,
|
||||
year_month: str,
|
||||
year_month: str, # noqa: ARG001 — unused; tenants uses ISO week partition instead
|
||||
conn: sqlite3.Connection,
|
||||
session: niquests.Session,
|
||||
) -> dict:
|
||||
"""Fetch all Playtomic venues via global pagination. Returns run metrics."""
|
||||
year, month = year_month.split("/")
|
||||
dest_dir = landing_path(landing_dir, "playtomic", year, month)
|
||||
"""Fetch all Playtomic venues via global pagination. Returns run metrics.
|
||||
|
||||
Partitioned by ISO week (e.g. 2026/W09) so each weekly run produces a
|
||||
fresh file. _load_tenant_ids() in playtomic_availability globs across all
|
||||
partitions and picks the most recent one.
|
||||
"""
|
||||
today = datetime.now(UTC)
|
||||
year, month, day = today.strftime("%Y"), today.strftime("%m"), today.strftime("%d")
|
||||
dest_dir = landing_path(landing_dir, "playtomic", year, month, day)
|
||||
dest = dest_dir / "tenants.jsonl.gz"
|
||||
old_blob = dest_dir / "tenants.json.gz"
|
||||
if dest.exists() or old_blob.exists():
|
||||
logger.info("Already have tenants for %s/%s — skipping", year, month)
|
||||
if dest.exists():
|
||||
logger.info("Already have tenants for %s/%s/%s — skipping", year, month, day)
|
||||
return {"files_written": 0, "files_skipped": 1, "bytes_written": 0}
|
||||
|
||||
proxy_urls = load_proxy_urls()
|
||||
next_proxy = make_round_robin_cycler(proxy_urls) if proxy_urls else None
|
||||
batch_size = len(proxy_urls) if proxy_urls else 1
|
||||
tiers = load_proxy_tiers()
|
||||
all_proxies = [url for tier in tiers for url in tier]
|
||||
next_proxy = make_round_robin_cycler(all_proxies) if all_proxies else None
|
||||
batch_size = len(all_proxies) if all_proxies else 1
|
||||
|
||||
if next_proxy:
|
||||
logger.info("Parallel mode: %d pages per batch (%d proxies)", batch_size, len(proxy_urls))
|
||||
logger.info("Parallel mode: %d pages per batch (%d proxies across %d tier(s))", batch_size, len(all_proxies), len(tiers))
|
||||
else:
|
||||
logger.info("Serial mode: 1 page at a time (no proxies)")
|
||||
|
||||
@@ -154,7 +161,7 @@ def extract(
|
||||
"files_written": 1,
|
||||
"files_skipped": 0,
|
||||
"bytes_written": bytes_written,
|
||||
"cursor_value": year_month,
|
||||
"cursor_value": f"{year}/{month}/{day}",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,41 +1,97 @@
|
||||
"""Optional proxy rotation for parallel HTTP fetching.
|
||||
|
||||
Proxies are configured via the PROXY_URLS environment variable (comma-separated).
|
||||
When unset, all functions return None/no-op — extractors fall back to direct requests.
|
||||
Proxies are configured via environment variables. When unset, all functions
|
||||
return None/no-op — extractors fall back to direct requests.
|
||||
|
||||
Tiered proxy with circuit breaker:
|
||||
Primary tier (PROXY_URLS) is used by default — typically cheap datacenter proxies.
|
||||
Fallback tier (PROXY_URLS_FALLBACK) activates once consecutive failures >= threshold.
|
||||
Once the circuit opens it stays open for the duration of the run (no auto-recovery).
|
||||
Three-tier escalation: free → datacenter → residential.
|
||||
Tier 1 (free): WEBSHARE_DOWNLOAD_URL — auto-fetched from Webshare API
|
||||
Tier 2 (datacenter): PROXY_URLS_DATACENTER — comma-separated paid DC proxies
|
||||
Tier 3 (residential): PROXY_URLS_RESIDENTIAL — comma-separated paid residential proxies
|
||||
|
||||
Tiered circuit breaker:
|
||||
Active tier is used until consecutive failures >= threshold, then escalates
|
||||
to the next tier. Once all tiers are exhausted, is_exhausted() returns True.
|
||||
Escalation is permanent for the duration of the run — no auto-recovery.
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MAX_WEBSHARE_PROXIES = 20
|
||||
WEBSHARE_FETCH_TIMEOUT_SECONDS = 10
|
||||
WEBSHARE_MAX_RESPONSE_BYTES = 1024 * 1024 # 1MB
|
||||
|
||||
def load_proxy_urls() -> list[str]:
|
||||
"""Read PROXY_URLS env var (comma-separated). Returns [] if unset.
|
||||
|
||||
Format: http://user:pass@host:port or socks5://host:port
|
||||
def fetch_webshare_proxies(download_url: str, max_proxies: int = MAX_WEBSHARE_PROXIES) -> list[str]:
|
||||
"""Fetch proxy list from the Webshare download API. Returns [] on any error.
|
||||
|
||||
Expected line format: ip:port:username:password
|
||||
Converts to: http://username:password@ip:port
|
||||
|
||||
Bounded: reads at most WEBSHARE_MAX_RESPONSE_BYTES, returns at most max_proxies.
|
||||
"""
|
||||
raw = os.environ.get("PROXY_URLS", "")
|
||||
urls = [u.strip() for u in raw.split(",") if u.strip()]
|
||||
assert max_proxies > 0, f"max_proxies must be positive, got {max_proxies}"
|
||||
assert download_url, "download_url must not be empty"
|
||||
|
||||
try:
|
||||
req = urllib.request.Request(
|
||||
download_url,
|
||||
headers={"User-Agent": "padelnomics-extract/1.0"},
|
||||
)
|
||||
with urllib.request.urlopen(req, timeout=WEBSHARE_FETCH_TIMEOUT_SECONDS) as resp:
|
||||
raw = resp.read(WEBSHARE_MAX_RESPONSE_BYTES).decode("utf-8")
|
||||
except Exception as e:
|
||||
logger.warning("Failed to fetch Webshare proxies: %s", e)
|
||||
return []
|
||||
|
||||
urls = []
|
||||
for line in raw.splitlines():
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
parts = line.split(":")
|
||||
if len(parts) != 4:
|
||||
logger.debug("Skipping malformed proxy line: %r", line)
|
||||
continue
|
||||
ip, port, username, password = parts
|
||||
urls.append(f"http://{username}:{password}@{ip}:{port}")
|
||||
if len(urls) >= max_proxies:
|
||||
break
|
||||
|
||||
logger.info("Fetched %d proxies from Webshare", len(urls))
|
||||
return urls
|
||||
|
||||
|
||||
def load_fallback_proxy_urls() -> list[str]:
|
||||
"""Read PROXY_URLS_FALLBACK env var (comma-separated). Returns [] if unset.
|
||||
def load_proxy_tiers() -> list[list[str]]:
|
||||
"""Assemble proxy tiers in escalation order: free → datacenter → residential.
|
||||
|
||||
Used as the residential/reliable fallback tier when the primary tier fails.
|
||||
Format: http://user:pass@host:port or socks5://host:port
|
||||
Tier 1 (free): fetched from WEBSHARE_DOWNLOAD_URL if set.
|
||||
Tier 2 (datacenter): PROXY_URLS_DATACENTER (comma-separated).
|
||||
Tier 3 (residential): PROXY_URLS_RESIDENTIAL (comma-separated).
|
||||
|
||||
Empty tiers are omitted. Returns [] if no proxies configured anywhere.
|
||||
"""
|
||||
raw = os.environ.get("PROXY_URLS_FALLBACK", "")
|
||||
urls = [u.strip() for u in raw.split(",") if u.strip()]
|
||||
return urls
|
||||
tiers: list[list[str]] = []
|
||||
|
||||
webshare_url = os.environ.get("WEBSHARE_DOWNLOAD_URL", "").strip()
|
||||
if webshare_url:
|
||||
free_proxies = fetch_webshare_proxies(webshare_url)
|
||||
if free_proxies:
|
||||
tiers.append(free_proxies)
|
||||
|
||||
for var in ("PROXY_URLS_DATACENTER", "PROXY_URLS_RESIDENTIAL"):
|
||||
raw = os.environ.get(var, "")
|
||||
urls = [u.strip() for u in raw.split(",") if u.strip()]
|
||||
if urls:
|
||||
tiers.append(urls)
|
||||
|
||||
return tiers
|
||||
|
||||
|
||||
def make_round_robin_cycler(proxy_urls: list[str]):
|
||||
@@ -78,83 +134,96 @@ def make_sticky_selector(proxy_urls: list[str]):
|
||||
return select_proxy
|
||||
|
||||
|
||||
def make_tiered_cycler(
|
||||
primary_urls: list[str],
|
||||
fallback_urls: list[str],
|
||||
threshold: int,
|
||||
) -> dict:
|
||||
"""Thread-safe tiered proxy cycler with circuit breaker.
|
||||
def make_tiered_cycler(tiers: list[list[str]], threshold: int) -> dict:
|
||||
"""Thread-safe N-tier proxy cycler with circuit breaker.
|
||||
|
||||
Uses primary_urls until consecutive failures >= threshold, then switches
|
||||
permanently to fallback_urls for the rest of the run. No auto-recovery —
|
||||
once the circuit opens it stays open to avoid flapping.
|
||||
Uses tiers[0] until consecutive failures >= threshold, then escalates
|
||||
to tiers[1], then tiers[2], etc. Once all tiers are exhausted,
|
||||
is_exhausted() returns True and next_proxy() returns None.
|
||||
|
||||
Failure counter resets on each escalation — the new tier gets a fresh start.
|
||||
Once exhausted, further record_failure() calls are no-ops.
|
||||
|
||||
Returns a dict of callables:
|
||||
next_proxy() -> str | None — returns URL from the active tier
|
||||
record_success() — resets consecutive failure counter
|
||||
record_failure() -> bool — increments counter; True if circuit just opened
|
||||
is_fallback_active() -> bool — whether fallback tier is currently active
|
||||
next_proxy() -> str | None — URL from the active tier, or None
|
||||
record_success() -> None — resets consecutive failure counter
|
||||
record_failure() -> bool — True if just escalated to next tier
|
||||
is_exhausted() -> bool — True if all tiers exhausted
|
||||
active_tier_index() -> int — 0-based index of current tier
|
||||
tier_count() -> int — total number of tiers
|
||||
|
||||
If primary_urls is empty: always returns from fallback_urls (no circuit breaker needed).
|
||||
If both are empty: next_proxy() always returns None.
|
||||
Edge cases:
|
||||
Empty tiers list: next_proxy() always returns None, is_exhausted() True.
|
||||
Single tier: behaves like the primary-only case, is_exhausted() after threshold.
|
||||
"""
|
||||
assert threshold > 0, f"threshold must be positive, got {threshold}"
|
||||
assert isinstance(tiers, list), f"tiers must be a list, got {type(tiers)}"
|
||||
|
||||
lock = threading.Lock()
|
||||
cycles = [itertools.cycle(t) for t in tiers]
|
||||
state = {
|
||||
"active_tier": 0,
|
||||
"consecutive_failures": 0,
|
||||
"fallback_active": False,
|
||||
}
|
||||
|
||||
primary_cycle = itertools.cycle(primary_urls) if primary_urls else None
|
||||
fallback_cycle = itertools.cycle(fallback_urls) if fallback_urls else None
|
||||
|
||||
# No primary proxies — skip circuit breaker, use fallback directly
|
||||
if not primary_urls:
|
||||
state["fallback_active"] = True
|
||||
|
||||
def next_proxy() -> str | None:
|
||||
with lock:
|
||||
if state["fallback_active"]:
|
||||
return next(fallback_cycle) if fallback_cycle else None
|
||||
return next(primary_cycle) if primary_cycle else None
|
||||
idx = state["active_tier"]
|
||||
if idx >= len(cycles):
|
||||
return None
|
||||
return next(cycles[idx])
|
||||
|
||||
def record_success() -> None:
|
||||
with lock:
|
||||
state["consecutive_failures"] = 0
|
||||
|
||||
def record_failure() -> bool:
|
||||
"""Increment failure counter. Returns True if circuit just opened."""
|
||||
"""Increment failure counter. Returns True if just escalated to next tier."""
|
||||
with lock:
|
||||
if state["fallback_active"]:
|
||||
# Already on fallback — don't trip the circuit again
|
||||
idx = state["active_tier"]
|
||||
if idx >= len(tiers):
|
||||
# Already exhausted — no-op
|
||||
return False
|
||||
state["consecutive_failures"] += 1
|
||||
if state["consecutive_failures"] >= threshold:
|
||||
state["fallback_active"] = True
|
||||
if fallback_urls:
|
||||
logger.warning(
|
||||
"Circuit open after %d consecutive failures — "
|
||||
"switching to fallback residential proxies",
|
||||
state["consecutive_failures"],
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
"Circuit open after %d consecutive failures — "
|
||||
"no fallback configured, aborting run",
|
||||
state["consecutive_failures"],
|
||||
)
|
||||
return True
|
||||
return False
|
||||
if state["consecutive_failures"] < threshold:
|
||||
return False
|
||||
# Threshold reached — escalate
|
||||
state["consecutive_failures"] = 0
|
||||
state["active_tier"] += 1
|
||||
new_idx = state["active_tier"]
|
||||
if new_idx < len(tiers):
|
||||
logger.warning(
|
||||
"Circuit open after %d consecutive failures — "
|
||||
"escalating to proxy tier %d/%d",
|
||||
threshold,
|
||||
new_idx + 1,
|
||||
len(tiers),
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
"All %d proxy tier(s) exhausted after %d consecutive failures — "
|
||||
"no more fallbacks",
|
||||
len(tiers),
|
||||
threshold,
|
||||
)
|
||||
return True
|
||||
|
||||
def is_fallback_active() -> bool:
|
||||
def is_exhausted() -> bool:
|
||||
with lock:
|
||||
return state["fallback_active"]
|
||||
return state["active_tier"] >= len(tiers)
|
||||
|
||||
def active_tier_index() -> int:
|
||||
with lock:
|
||||
return state["active_tier"]
|
||||
|
||||
def tier_count() -> int:
|
||||
return len(tiers)
|
||||
|
||||
return {
|
||||
"next_proxy": next_proxy,
|
||||
"record_success": record_success,
|
||||
"record_failure": record_failure,
|
||||
"is_fallback_active": is_fallback_active,
|
||||
"is_exhausted": is_exhausted,
|
||||
"active_tier_index": active_tier_index,
|
||||
"tier_count": tier_count,
|
||||
}
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ schedule = "monthly"
|
||||
|
||||
[playtomic_tenants]
|
||||
module = "padelnomics_extract.playtomic_tenants"
|
||||
schedule = "weekly"
|
||||
schedule = "daily"
|
||||
|
||||
[playtomic_availability]
|
||||
module = "padelnomics_extract.playtomic_availability"
|
||||
|
||||
@@ -192,9 +192,9 @@ def run_workflow(conn, workflow: dict) -> None:
|
||||
entry_fn = getattr(module, entry_name)
|
||||
entry_fn()
|
||||
logger.info("Workflow %s completed successfully", workflow["name"])
|
||||
except Exception:
|
||||
except Exception as exc:
|
||||
logger.exception("Workflow %s failed", workflow["name"])
|
||||
send_alert(f"Workflow '{workflow['name']}' failed")
|
||||
send_alert(f"[extract] {type(exc).__name__}: {str(exc)[:100]}")
|
||||
raise
|
||||
|
||||
|
||||
@@ -233,8 +233,8 @@ def run_due_workflows(conn, workflows: list[dict]) -> bool:
|
||||
# Transform + Export + Deploy
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> bool:
|
||||
"""Run a shell command. Returns True on success."""
|
||||
def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> tuple[bool, str]:
|
||||
"""Run a shell command. Returns (success, error_snippet)."""
|
||||
logger.info("Shell: %s", cmd)
|
||||
result = subprocess.run(
|
||||
cmd, shell=True, capture_output=True, text=True, timeout=timeout_seconds
|
||||
@@ -242,47 +242,56 @@ def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> bo
|
||||
if result.returncode != 0:
|
||||
logger.error("Shell failed (rc=%d): %s\nstdout: %s\nstderr: %s",
|
||||
result.returncode, cmd, result.stdout[-500:], result.stderr[-500:])
|
||||
return False
|
||||
return True
|
||||
raw = (result.stderr or result.stdout).strip()
|
||||
snippet = next((ln.strip() for ln in raw.splitlines() if ln.strip()), raw)[:120]
|
||||
return False, snippet
|
||||
return True, ""
|
||||
|
||||
|
||||
def run_transform() -> None:
|
||||
"""Run SQLMesh — it evaluates model staleness internally."""
|
||||
logger.info("Running SQLMesh transform")
|
||||
ok = run_shell(
|
||||
ok, err = run_shell(
|
||||
"uv run sqlmesh -p transform/sqlmesh_padelnomics plan prod --auto-apply",
|
||||
)
|
||||
if not ok:
|
||||
send_alert("SQLMesh transform failed")
|
||||
send_alert(f"[transform] {err}")
|
||||
|
||||
|
||||
def run_export() -> None:
|
||||
"""Export serving tables to analytics.duckdb."""
|
||||
logger.info("Exporting serving tables")
|
||||
ok = run_shell(
|
||||
ok, err = run_shell(
|
||||
f"DUCKDB_PATH={DUCKDB_PATH} SERVING_DUCKDB_PATH={SERVING_DUCKDB_PATH} "
|
||||
f"uv run python src/padelnomics/export_serving.py"
|
||||
)
|
||||
if not ok:
|
||||
send_alert("Serving export failed")
|
||||
send_alert(f"[export] {err}")
|
||||
|
||||
|
||||
def web_code_changed() -> bool:
|
||||
"""Check if web app code changed since last deploy (after git pull)."""
|
||||
"""Check if web app code or secrets changed since last deploy (after git pull)."""
|
||||
result = subprocess.run(
|
||||
["git", "diff", "--name-only", "HEAD~1", "HEAD", "--", "web/", "Dockerfile"],
|
||||
["git", "diff", "--name-only", "HEAD~1", "HEAD", "--",
|
||||
"web/", "Dockerfile", ".env.prod.sops"],
|
||||
capture_output=True, text=True, timeout=30,
|
||||
)
|
||||
return bool(result.stdout.strip())
|
||||
|
||||
|
||||
def current_deployed_tag() -> str | None:
|
||||
"""Return the tag currently checked out, or None if not on a tag."""
|
||||
"""Return the highest-version tag pointing at HEAD, or None.
|
||||
|
||||
Uses the same sort order as latest_remote_tag() so that when multiple
|
||||
tags point to the same commit (e.g. a date-based tag and a CI integer
|
||||
tag), we always compare apples-to-apples.
|
||||
"""
|
||||
result = subprocess.run(
|
||||
["git", "describe", "--tags", "--exact-match", "HEAD"],
|
||||
["git", "tag", "--list", "--sort=-version:refname", "--points-at", "HEAD", "v*"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return result.stdout.strip() or None
|
||||
tags = result.stdout.strip().splitlines()
|
||||
return tags[0] if tags else None
|
||||
|
||||
|
||||
def latest_remote_tag() -> str | None:
|
||||
@@ -317,7 +326,12 @@ def git_pull_and_sync() -> None:
|
||||
|
||||
logger.info("New tag %s available (current: %s) — deploying", latest, current)
|
||||
run_shell(f"git checkout --detach {latest}")
|
||||
run_shell("sops --input-type dotenv --output-type dotenv -d .env.prod.sops > .env")
|
||||
run_shell("uv sync --all-packages")
|
||||
# Re-exec so the new code is loaded. os.execv replaces this process in-place;
|
||||
# systemd sees it as the same PID and does not restart the unit.
|
||||
logger.info("Deploy complete — re-execing to load new code")
|
||||
os.execv(sys.executable, sys.argv)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -365,11 +379,11 @@ def tick() -> None:
|
||||
# Deploy web app if code changed
|
||||
if os.getenv("SUPERVISOR_GIT_PULL") and web_code_changed():
|
||||
logger.info("Web code changed — deploying")
|
||||
ok = run_shell("./deploy.sh")
|
||||
ok, err = run_shell("./deploy.sh")
|
||||
if ok:
|
||||
send_alert("Deploy succeeded")
|
||||
send_alert("[deploy] ok")
|
||||
else:
|
||||
send_alert("Deploy FAILED — check journalctl -u padelnomics-supervisor")
|
||||
send_alert(f"[deploy] failed: {err}")
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
@@ -386,9 +400,9 @@ def supervisor_loop() -> None:
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Supervisor stopped (KeyboardInterrupt)")
|
||||
break
|
||||
except Exception:
|
||||
except Exception as exc:
|
||||
logger.exception("Supervisor tick failed — backing off %ds", BACKOFF_SECONDS)
|
||||
send_alert("Supervisor tick failed")
|
||||
send_alert(f"[supervisor] {type(exc).__name__}: {str(exc)[:100]}")
|
||||
time.sleep(BACKOFF_SECONDS)
|
||||
else:
|
||||
time.sleep(TICK_INTERVAL_SECONDS)
|
||||
|
||||
@@ -2,22 +2,14 @@
|
||||
-- One row per available 60-minute booking slot per court per venue per day.
|
||||
-- "Available" = the slot was NOT booked at capture time. Missing slots = booked.
|
||||
--
|
||||
-- Reads BOTH morning snapshots and recheck files:
|
||||
-- Morning (new): availability_{date}.jsonl.gz → snapshot_type = 'morning'
|
||||
-- Morning (old): availability_{date}.json.gz → snapshot_type = 'morning'
|
||||
-- Recheck (new): availability_{date}_recheck_{HH}.jsonl.gz → snapshot_type = 'recheck'
|
||||
-- Recheck (old): availability_{date}_recheck_{HH}.json.gz → snapshot_type = 'recheck'
|
||||
-- Reads morning snapshots and recheck files (JSONL format):
|
||||
-- Morning: availability_{date}.jsonl.gz → snapshot_type = 'morning'
|
||||
-- Recheck: availability_{date}_recheck_{HH}.jsonl.gz → snapshot_type = 'recheck'
|
||||
--
|
||||
-- Only 60-min duration slots are kept (canonical hourly rate + occupancy unit).
|
||||
-- Price parsed from strings like "14.56 EUR" or "48 GBP".
|
||||
--
|
||||
-- Supports two morning landing formats (UNION ALL during migration):
|
||||
-- New: availability_{date}.jsonl.gz — one venue per line, columns: tenant_id, slots, date, captured_at_utc
|
||||
-- Old: availability_{date}.json.gz — {"date":..., "venues": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Requires: at least one availability file in the landing zone.
|
||||
-- A seed file (data/landing/playtomic/1970/01/availability_1970-01-01.json.gz)
|
||||
-- with empty venues[] ensures this model runs before real data arrives.
|
||||
-- Source: data/landing/playtomic/{year}/{month}/availability_*.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_availability,
|
||||
@@ -27,7 +19,6 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one venue per JSONL line — no outer UNNEST needed
|
||||
morning_jsonl AS (
|
||||
SELECT
|
||||
date AS snapshot_date,
|
||||
@@ -50,35 +41,6 @@ morning_jsonl AS (
|
||||
WHERE filename NOT LIKE '%_recheck_%'
|
||||
AND tenant_id IS NOT NULL
|
||||
),
|
||||
-- Old format: {"date":..., "venues": [...]} blob — kept for transition
|
||||
morning_blob AS (
|
||||
SELECT
|
||||
af.date AS snapshot_date,
|
||||
af.captured_at_utc,
|
||||
'morning' AS snapshot_type,
|
||||
NULL::INTEGER AS recheck_hour,
|
||||
venue_json ->> 'tenant_id' AS tenant_id,
|
||||
venue_json -> 'slots' AS slots_json
|
||||
FROM (
|
||||
SELECT date, captured_at_utc, venues
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/availability_*.json.gz',
|
||||
format = 'auto',
|
||||
columns = {
|
||||
date: 'VARCHAR',
|
||||
captured_at_utc: 'VARCHAR',
|
||||
venues: 'JSON[]'
|
||||
},
|
||||
filename = true,
|
||||
maximum_object_size = 134217728 -- 128 MB; daily files grow with venue count
|
||||
)
|
||||
WHERE filename NOT LIKE '%_recheck_%'
|
||||
AND venues IS NOT NULL
|
||||
AND json_array_length(venues) > 0
|
||||
) af,
|
||||
LATERAL UNNEST(af.venues) AS t(venue_json)
|
||||
),
|
||||
-- Recheck snapshots (new JSONL format — one venue per line)
|
||||
recheck_jsonl AS (
|
||||
SELECT
|
||||
date AS snapshot_date,
|
||||
@@ -101,43 +63,10 @@ recheck_jsonl AS (
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
),
|
||||
-- Recheck snapshots (old blob format, kept for transition)
|
||||
recheck_blob AS (
|
||||
SELECT
|
||||
rf.date AS snapshot_date,
|
||||
rf.captured_at_utc,
|
||||
'recheck' AS snapshot_type,
|
||||
TRY_CAST(
|
||||
regexp_extract(rf.filename, '_recheck_(\d+)', 1) AS INTEGER
|
||||
) AS recheck_hour,
|
||||
venue_json ->> 'tenant_id' AS tenant_id,
|
||||
venue_json -> 'slots' AS slots_json
|
||||
FROM (
|
||||
SELECT date, captured_at_utc, venues, filename
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/availability_*_recheck_*.json.gz',
|
||||
format = 'auto',
|
||||
columns = {
|
||||
date: 'VARCHAR',
|
||||
captured_at_utc: 'VARCHAR',
|
||||
venues: 'JSON[]'
|
||||
},
|
||||
filename = true,
|
||||
maximum_object_size = 134217728 -- 128 MB; matches morning snapshot limit
|
||||
)
|
||||
WHERE venues IS NOT NULL
|
||||
AND json_array_length(venues) > 0
|
||||
) rf,
|
||||
LATERAL UNNEST(rf.venues) AS t(venue_json)
|
||||
),
|
||||
all_venues AS (
|
||||
SELECT * FROM morning_jsonl
|
||||
UNION ALL
|
||||
SELECT * FROM morning_blob
|
||||
UNION ALL
|
||||
SELECT * FROM recheck_jsonl
|
||||
UNION ALL
|
||||
SELECT * FROM recheck_blob
|
||||
),
|
||||
raw_resources AS (
|
||||
SELECT
|
||||
|
||||
@@ -5,11 +5,7 @@
|
||||
-- DuckDB auto-infers opening_hours as STRUCT, so we access each day by literal
|
||||
-- key (no dynamic access) and UNION ALL to unpivot.
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: tenants.jsonl.gz — one tenant per line, opening_hours is a top-level JSON column
|
||||
-- Old: tenants.json.gz — {"tenants": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz
|
||||
-- Source: data/landing/playtomic/{year}/{month}/{day}/tenants.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_opening_hours,
|
||||
@@ -19,40 +15,18 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one tenant per JSONL line
|
||||
jsonl_venues AS (
|
||||
venues AS (
|
||||
SELECT
|
||||
tenant_id,
|
||||
opening_hours AS oh
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz',
|
||||
@LANDING_DIR || '/playtomic/*/*/*/tenants.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {tenant_id: 'VARCHAR', opening_hours: 'JSON'}
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
AND opening_hours IS NOT NULL
|
||||
),
|
||||
-- Old format: blob
|
||||
blob_venues AS (
|
||||
SELECT
|
||||
tenant ->> 'tenant_id' AS tenant_id,
|
||||
tenant -> 'opening_hours' AS oh
|
||||
FROM (
|
||||
SELECT UNNEST(tenants) AS tenant
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.json.gz',
|
||||
format = 'auto',
|
||||
maximum_object_size = 134217728
|
||||
)
|
||||
)
|
||||
WHERE (tenant ->> 'tenant_id') IS NOT NULL
|
||||
AND (tenant -> 'opening_hours') IS NOT NULL
|
||||
),
|
||||
venues AS (
|
||||
SELECT * FROM jsonl_venues
|
||||
UNION ALL
|
||||
SELECT * FROM blob_venues
|
||||
),
|
||||
-- Unpivot by UNION ALL — 7 literal key accesses
|
||||
unpivoted AS (
|
||||
SELECT tenant_id, 'MONDAY' AS day_of_week, 1 AS day_number,
|
||||
@@ -104,6 +78,4 @@ SELECT
|
||||
FROM unpivoted
|
||||
WHERE opening_time IS NOT NULL
|
||||
AND closing_time IS NOT NULL
|
||||
-- Enforce grain: if both old blob and new JSONL exist for the same month,
|
||||
-- the UNION ALL produces duplicate (tenant_id, day_of_week) pairs — deduplicate.
|
||||
QUALIFY ROW_NUMBER() OVER (PARTITION BY tenant_id, day_of_week ORDER BY tenant_id) = 1
|
||||
|
||||
@@ -2,11 +2,7 @@
|
||||
-- Reads resources array from the landing zone to extract court type, size,
|
||||
-- surface, and booking config.
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: tenants.jsonl.gz — one tenant per line, resources is a top-level JSON column
|
||||
-- Old: tenants.json.gz — {"tenants": [...]} blob (double UNNEST: tenants → resources)
|
||||
--
|
||||
-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz
|
||||
-- Source: data/landing/playtomic/{year}/{month}/{day}/tenants.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_resources,
|
||||
@@ -16,41 +12,18 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one tenant per JSONL line — single UNNEST for resources
|
||||
jsonl_unnested AS (
|
||||
unnested AS (
|
||||
SELECT
|
||||
tenant_id,
|
||||
UPPER(address ->> 'country_code') AS country_code,
|
||||
UNNEST(from_json(resources, '["JSON"]')) AS resource_json
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz',
|
||||
@LANDING_DIR || '/playtomic/*/*/*/tenants.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {tenant_id: 'VARCHAR', address: 'JSON', resources: 'JSON'}
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
AND resources IS NOT NULL
|
||||
),
|
||||
-- Old format: blob — double UNNEST (tenants → resources)
|
||||
blob_unnested AS (
|
||||
SELECT
|
||||
tenant ->> 'tenant_id' AS tenant_id,
|
||||
UPPER(tenant -> 'address' ->> 'country_code') AS country_code,
|
||||
UNNEST(from_json(tenant -> 'resources', '["JSON"]')) AS resource_json
|
||||
FROM (
|
||||
SELECT UNNEST(tenants) AS tenant
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.json.gz',
|
||||
format = 'auto',
|
||||
maximum_object_size = 134217728
|
||||
)
|
||||
)
|
||||
WHERE (tenant ->> 'tenant_id') IS NOT NULL
|
||||
AND (tenant -> 'resources') IS NOT NULL
|
||||
),
|
||||
unnested AS (
|
||||
SELECT * FROM jsonl_unnested
|
||||
UNION ALL
|
||||
SELECT * FROM blob_unnested
|
||||
)
|
||||
SELECT
|
||||
tenant_id,
|
||||
@@ -68,6 +41,4 @@ SELECT
|
||||
FROM unnested
|
||||
WHERE (resource_json ->> 'resource_id') IS NOT NULL
|
||||
AND (resource_json ->> 'sport_id') = 'PADEL'
|
||||
-- Enforce grain: if both old blob and new JSONL exist for the same month,
|
||||
-- the UNION ALL produces duplicate (tenant_id, resource_id) pairs — deduplicate.
|
||||
QUALIFY ROW_NUMBER() OVER (PARTITION BY tenant_id, resource_json ->> 'resource_id' ORDER BY tenant_id) = 1
|
||||
|
||||
@@ -3,11 +3,7 @@
|
||||
-- including address, opening hours, court resources, VAT rate, and facilities.
|
||||
-- Deduplicates on tenant_id (keeps most recent extraction).
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: tenants.jsonl.gz — one tenant JSON object per line (no UNNEST needed)
|
||||
-- Old: tenants.json.gz — {"tenants": [{...}]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz
|
||||
-- Source: data/landing/playtomic/{year}/{month}/{day}/tenants.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_venues,
|
||||
@@ -17,8 +13,7 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one tenant per JSONL line — no UNNEST, access columns directly
|
||||
jsonl_parsed AS (
|
||||
parsed AS (
|
||||
SELECT
|
||||
tenant_id,
|
||||
tenant_name,
|
||||
@@ -45,7 +40,7 @@ jsonl_parsed AS (
|
||||
filename AS source_file,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz',
|
||||
@LANDING_DIR || '/playtomic/*/*/*/tenants.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
filename = true,
|
||||
columns = {
|
||||
@@ -59,49 +54,6 @@ jsonl_parsed AS (
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
),
|
||||
-- Old format: {"tenants": [...]} blob — keep for transition until old files rotate out
|
||||
blob_parsed AS (
|
||||
SELECT
|
||||
tenant ->> 'tenant_id' AS tenant_id,
|
||||
tenant ->> 'tenant_name' AS tenant_name,
|
||||
tenant ->> 'slug' AS slug,
|
||||
tenant ->> 'tenant_type' AS tenant_type,
|
||||
tenant ->> 'tenant_status' AS tenant_status,
|
||||
tenant ->> 'playtomic_status' AS playtomic_status,
|
||||
tenant ->> 'booking_type' AS booking_type,
|
||||
tenant -> 'address' ->> 'street' AS street,
|
||||
tenant -> 'address' ->> 'city' AS city,
|
||||
tenant -> 'address' ->> 'postal_code' AS postal_code,
|
||||
UPPER(tenant -> 'address' ->> 'country_code') AS country_code,
|
||||
tenant -> 'address' ->> 'timezone' AS timezone,
|
||||
tenant -> 'address' ->> 'administrative_area' AS administrative_area,
|
||||
TRY_CAST(tenant -> 'address' -> 'coordinate' ->> 'lat' AS DOUBLE) AS lat,
|
||||
TRY_CAST(tenant -> 'address' -> 'coordinate' ->> 'lon' AS DOUBLE) AS lon,
|
||||
TRY_CAST(tenant ->> 'vat_rate' AS DOUBLE) AS vat_rate,
|
||||
tenant ->> 'default_currency' AS default_currency,
|
||||
TRY_CAST(tenant -> 'booking_settings' ->> 'booking_ahead_limit' AS INTEGER) AS booking_ahead_limit_minutes,
|
||||
tenant -> 'opening_hours' AS opening_hours_json,
|
||||
tenant -> 'resources' AS resources_json,
|
||||
tenant ->> 'created_at' AS created_at,
|
||||
tenant ->> 'is_playtomic_partner' AS is_playtomic_partner_raw,
|
||||
filename AS source_file,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM (
|
||||
SELECT UNNEST(tenants) AS tenant, filename
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.json.gz',
|
||||
format = 'auto',
|
||||
filename = true,
|
||||
maximum_object_size = 134217728
|
||||
)
|
||||
)
|
||||
WHERE (tenant ->> 'tenant_id') IS NOT NULL
|
||||
),
|
||||
parsed AS (
|
||||
SELECT * FROM jsonl_parsed
|
||||
UNION ALL
|
||||
SELECT * FROM blob_parsed
|
||||
),
|
||||
deduped AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY tenant_id ORDER BY source_file DESC) AS rn
|
||||
|
||||
@@ -3,11 +3,7 @@
|
||||
-- Broad coverage (140K+ locations) enables Gemeinde-level market intelligence.
|
||||
-- One row per geoname_id (GeoNames stable numeric identifier).
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: cities_global.jsonl.gz — one city per line, columns directly accessible
|
||||
-- Old: cities_global.json.gz — {"rows": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/geonames/{year}/{month}/cities_global.{jsonl,json}.gz
|
||||
-- Source: data/landing/geonames/{year}/{month}/cities_global.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_population_geonames,
|
||||
@@ -16,74 +12,29 @@ MODEL (
|
||||
grain geoname_id
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one city per JSONL line
|
||||
jsonl_rows AS (
|
||||
SELECT
|
||||
TRY_CAST(geoname_id AS INTEGER) AS geoname_id,
|
||||
city_name,
|
||||
country_code,
|
||||
TRY_CAST(lat AS DOUBLE) AS lat,
|
||||
TRY_CAST(lon AS DOUBLE) AS lon,
|
||||
admin1_code,
|
||||
admin2_code,
|
||||
TRY_CAST(population AS BIGINT) AS population,
|
||||
TRY_CAST(ref_year AS INTEGER) AS ref_year,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/geonames/*/*/cities_global.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {
|
||||
geoname_id: 'INTEGER', city_name: 'VARCHAR', country_code: 'VARCHAR',
|
||||
lat: 'DOUBLE', lon: 'DOUBLE', admin1_code: 'VARCHAR', admin2_code: 'VARCHAR',
|
||||
population: 'BIGINT', ref_year: 'INTEGER'
|
||||
}
|
||||
)
|
||||
WHERE geoname_id IS NOT NULL
|
||||
),
|
||||
-- Old format: {"rows": [...]} blob — kept for transition
|
||||
blob_rows AS (
|
||||
SELECT
|
||||
TRY_CAST(row ->> 'geoname_id' AS INTEGER) AS geoname_id,
|
||||
row ->> 'city_name' AS city_name,
|
||||
row ->> 'country_code' AS country_code,
|
||||
TRY_CAST(row ->> 'lat' AS DOUBLE) AS lat,
|
||||
TRY_CAST(row ->> 'lon' AS DOUBLE) AS lon,
|
||||
row ->> 'admin1_code' AS admin1_code,
|
||||
row ->> 'admin2_code' AS admin2_code,
|
||||
TRY_CAST(row ->> 'population' AS BIGINT) AS population,
|
||||
TRY_CAST(row ->> 'ref_year' AS INTEGER) AS ref_year,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM (
|
||||
SELECT UNNEST(rows) AS row
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/geonames/*/*/cities_global.json.gz',
|
||||
auto_detect = true,
|
||||
maximum_object_size = 40000000
|
||||
)
|
||||
)
|
||||
WHERE (row ->> 'geoname_id') IS NOT NULL
|
||||
),
|
||||
all_rows AS (
|
||||
SELECT * FROM jsonl_rows
|
||||
UNION ALL
|
||||
SELECT * FROM blob_rows
|
||||
)
|
||||
SELECT
|
||||
geoname_id,
|
||||
TRIM(city_name) AS city_name,
|
||||
UPPER(country_code) AS country_code,
|
||||
lat,
|
||||
lon,
|
||||
NULLIF(TRIM(admin1_code), '') AS admin1_code,
|
||||
NULLIF(TRIM(admin2_code), '') AS admin2_code,
|
||||
population,
|
||||
ref_year,
|
||||
extracted_date
|
||||
FROM all_rows
|
||||
WHERE population IS NOT NULL
|
||||
TRY_CAST(geoname_id AS INTEGER) AS geoname_id,
|
||||
TRIM(city_name) AS city_name,
|
||||
UPPER(country_code) AS country_code,
|
||||
TRY_CAST(lat AS DOUBLE) AS lat,
|
||||
TRY_CAST(lon AS DOUBLE) AS lon,
|
||||
NULLIF(TRIM(admin1_code), '') AS admin1_code,
|
||||
NULLIF(TRIM(admin2_code), '') AS admin2_code,
|
||||
TRY_CAST(population AS BIGINT) AS population,
|
||||
TRY_CAST(ref_year AS INTEGER) AS ref_year,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/geonames/*/*/cities_global.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {
|
||||
geoname_id: 'INTEGER', city_name: 'VARCHAR', country_code: 'VARCHAR',
|
||||
lat: 'DOUBLE', lon: 'DOUBLE', admin1_code: 'VARCHAR', admin2_code: 'VARCHAR',
|
||||
population: 'BIGINT', ref_year: 'INTEGER'
|
||||
}
|
||||
)
|
||||
WHERE geoname_id IS NOT NULL
|
||||
AND population IS NOT NULL
|
||||
AND population > 0
|
||||
AND geoname_id IS NOT NULL
|
||||
AND city_name IS NOT NULL
|
||||
AND lat IS NOT NULL
|
||||
AND lon IS NOT NULL
|
||||
|
||||
@@ -2,12 +2,9 @@
|
||||
-- Used as a "racket sport culture" signal in the opportunity score:
|
||||
-- areas with high tennis court density are prime padel adoption markets.
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: courts.jsonl.gz — one OSM element per line; nodes have lat/lon directly,
|
||||
-- ways/relations have center.lat/center.lon (Overpass out center)
|
||||
-- Old: courts.json.gz — {"elements": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/overpass_tennis/{year}/{month}/courts.{jsonl,json}.gz
|
||||
-- Source: data/landing/overpass_tennis/{year}/{month}/courts.jsonl.gz
|
||||
-- Format: one OSM element per line; nodes have lat/lon directly,
|
||||
-- ways/relations have center.lat/center.lon (Overpass out center)
|
||||
|
||||
MODEL (
|
||||
name staging.stg_tennis_courts,
|
||||
@@ -17,8 +14,7 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one OSM element per JSONL line
|
||||
jsonl_elements AS (
|
||||
parsed AS (
|
||||
SELECT
|
||||
type AS osm_type,
|
||||
TRY_CAST(id AS BIGINT) AS osm_id,
|
||||
@@ -47,33 +43,6 @@ jsonl_elements AS (
|
||||
)
|
||||
WHERE type IS NOT NULL
|
||||
),
|
||||
-- Old format: {"elements": [...]} blob — kept for transition
|
||||
blob_elements AS (
|
||||
SELECT
|
||||
elem ->> 'type' AS osm_type,
|
||||
(elem ->> 'id')::BIGINT AS osm_id,
|
||||
TRY_CAST(elem ->> 'lat' AS DOUBLE) AS lat,
|
||||
TRY_CAST(elem ->> 'lon' AS DOUBLE) AS lon,
|
||||
elem -> 'tags' ->> 'name' AS name,
|
||||
elem -> 'tags' ->> 'addr:country' AS country_code,
|
||||
elem -> 'tags' ->> 'addr:city' AS city_tag,
|
||||
filename AS source_file,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM (
|
||||
SELECT UNNEST(elements) AS elem, filename
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/overpass_tennis/*/*/courts.json.gz',
|
||||
format = 'auto',
|
||||
filename = true
|
||||
)
|
||||
)
|
||||
WHERE (elem ->> 'type') IS NOT NULL
|
||||
),
|
||||
parsed AS (
|
||||
SELECT * FROM jsonl_elements
|
||||
UNION ALL
|
||||
SELECT * FROM blob_elements
|
||||
),
|
||||
deduped AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY osm_id ORDER BY extracted_date DESC) AS rn
|
||||
|
||||
@@ -1,22 +1,19 @@
|
||||
"""Create minimal seed files for SQLMesh staging models that require landing data."""
|
||||
"""Create minimal seed files for SQLMesh staging models that require landing data.
|
||||
|
||||
Seeds are empty JSONL gzip files — they satisfy DuckDB's file-not-found check
|
||||
while contributing zero rows to the staging models.
|
||||
"""
|
||||
import gzip
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
seed = {
|
||||
"date": "1970-01-01",
|
||||
"captured_at_utc": "1970-01-01T00:00:00Z",
|
||||
"venue_count": 0,
|
||||
"venues_errored": 0,
|
||||
"venues": [],
|
||||
}
|
||||
morning = Path("data/landing/playtomic/1970/01/availability_1970-01-01.json.gz")
|
||||
recheck = Path("data/landing/playtomic/1970/01/availability_1970-01-01_recheck_00.json.gz")
|
||||
# stg_playtomic_availability requires at least one morning and one recheck file
|
||||
morning = Path("data/landing/playtomic/1970/01/availability_1970-01-01.jsonl.gz")
|
||||
recheck = Path("data/landing/playtomic/1970/01/availability_1970-01-01_recheck_00.jsonl.gz")
|
||||
morning.parent.mkdir(parents=True, exist_ok=True)
|
||||
for p in [morning, recheck]:
|
||||
if not p.exists():
|
||||
with gzip.open(p, "wt") as f:
|
||||
json.dump(seed, f)
|
||||
with gzip.open(p, "wb") as f:
|
||||
pass # empty JSONL — 0 rows, no error
|
||||
print("created", p)
|
||||
else:
|
||||
print("exists ", p)
|
||||
|
||||
@@ -2499,7 +2499,12 @@ async def article_results():
|
||||
@csrf_protect
|
||||
async def article_new():
|
||||
"""Create a manual article."""
|
||||
from ..content.routes import BUILD_DIR, bake_scenario_cards, is_reserved_path
|
||||
from ..content.routes import (
|
||||
BUILD_DIR,
|
||||
bake_product_cards,
|
||||
bake_scenario_cards,
|
||||
is_reserved_path,
|
||||
)
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
@@ -2523,9 +2528,10 @@ async def article_new():
|
||||
await flash(f"URL path '{url_path}' conflicts with a reserved route.", "error")
|
||||
return await render_template("admin/article_form.html", data=dict(form), editing=False)
|
||||
|
||||
# Render markdown → HTML with scenario cards baked in
|
||||
# Render markdown → HTML with scenario + product cards baked in
|
||||
body_html = mistune.html(body)
|
||||
body_html = await bake_scenario_cards(body_html)
|
||||
body_html = await bake_product_cards(body_html, lang=language)
|
||||
|
||||
build_dir = BUILD_DIR / language
|
||||
build_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -2561,7 +2567,12 @@ async def article_new():
|
||||
@csrf_protect
|
||||
async def article_edit(article_id: int):
|
||||
"""Edit a manual article."""
|
||||
from ..content.routes import BUILD_DIR, bake_scenario_cards, is_reserved_path
|
||||
from ..content.routes import (
|
||||
BUILD_DIR,
|
||||
bake_product_cards,
|
||||
bake_scenario_cards,
|
||||
is_reserved_path,
|
||||
)
|
||||
|
||||
article = await fetch_one("SELECT * FROM articles WHERE id = ?", (article_id,))
|
||||
if not article:
|
||||
@@ -2591,6 +2602,7 @@ async def article_edit(article_id: int):
|
||||
if body:
|
||||
body_html = mistune.html(body)
|
||||
body_html = await bake_scenario_cards(body_html)
|
||||
body_html = await bake_product_cards(body_html, lang=language)
|
||||
build_dir = BUILD_DIR / language
|
||||
build_dir.mkdir(parents=True, exist_ok=True)
|
||||
(build_dir / f"{article['slug']}.html").write_text(body_html)
|
||||
@@ -2735,7 +2747,7 @@ async def rebuild_all():
|
||||
|
||||
async def _rebuild_article(article_id: int):
|
||||
"""Re-render a single article from its source."""
|
||||
from ..content.routes import BUILD_DIR, bake_scenario_cards
|
||||
from ..content.routes import BUILD_DIR, bake_product_cards, bake_scenario_cards
|
||||
|
||||
article = await fetch_one("SELECT * FROM articles WHERE id = ?", (article_id,))
|
||||
if not article:
|
||||
@@ -2760,6 +2772,7 @@ async def _rebuild_article(article_id: int):
|
||||
body_html = mistune.html(md_path.read_text())
|
||||
lang = article.get("language", "en") if hasattr(article, "get") else "en"
|
||||
body_html = await bake_scenario_cards(body_html, lang=lang)
|
||||
body_html = await bake_product_cards(body_html, lang=lang)
|
||||
BUILD_DIR.mkdir(parents=True, exist_ok=True)
|
||||
(BUILD_DIR / f"{article['slug']}.html").write_text(body_html)
|
||||
|
||||
@@ -3233,3 +3246,388 @@ async def outreach_import():
|
||||
|
||||
await flash(f"Imported {imported} suppliers. Skipped {skipped} (duplicates or missing data).", "success")
|
||||
return redirect(url_for("admin.outreach"))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Affiliate Product Catalog
|
||||
# =============================================================================
|
||||
|
||||
AFFILIATE_CATEGORIES = ("racket", "ball", "shoe", "bag", "grip", "eyewear", "accessory")
|
||||
AFFILIATE_STATUSES = ("draft", "active", "archived")
|
||||
|
||||
|
||||
def _form_to_product(form) -> dict:
|
||||
"""Parse affiliate product form values into a data dict."""
|
||||
price_str = form.get("price_eur", "").strip()
|
||||
price_cents = None
|
||||
if price_str:
|
||||
try:
|
||||
price_cents = round(float(price_str.replace(",", ".")) * 100)
|
||||
except ValueError:
|
||||
price_cents = None
|
||||
|
||||
rating_str = form.get("rating", "").strip()
|
||||
rating = None
|
||||
if rating_str:
|
||||
try:
|
||||
rating = float(rating_str.replace(",", "."))
|
||||
except ValueError:
|
||||
rating = None
|
||||
|
||||
pros_raw = form.get("pros", "").strip()
|
||||
cons_raw = form.get("cons", "").strip()
|
||||
pros = json.dumps([line.strip() for line in pros_raw.splitlines() if line.strip()])
|
||||
cons = json.dumps([line.strip() for line in cons_raw.splitlines() if line.strip()])
|
||||
|
||||
return {
|
||||
"slug": form.get("slug", "").strip(),
|
||||
"name": form.get("name", "").strip(),
|
||||
"brand": form.get("brand", "").strip(),
|
||||
"category": form.get("category", "accessory").strip(),
|
||||
"retailer": form.get("retailer", "").strip(),
|
||||
"affiliate_url": form.get("affiliate_url", "").strip(),
|
||||
"image_url": form.get("image_url", "").strip(),
|
||||
"price_cents": price_cents,
|
||||
"currency": "EUR",
|
||||
"rating": rating,
|
||||
"pros": pros,
|
||||
"cons": cons,
|
||||
"description": form.get("description", "").strip(),
|
||||
"cta_label": form.get("cta_label", "").strip(),
|
||||
"status": form.get("status", "draft").strip(),
|
||||
"language": form.get("language", "de").strip() or "de",
|
||||
"sort_order": int(form.get("sort_order", "0") or "0"),
|
||||
}
|
||||
|
||||
|
||||
@bp.route("/affiliate")
|
||||
@role_required("admin")
|
||||
async def affiliate_products():
|
||||
"""Affiliate product list — full page."""
|
||||
from ..affiliate import get_all_products, get_click_counts, get_distinct_retailers
|
||||
|
||||
q = request.args.get("q", "").strip()
|
||||
category = request.args.get("category", "").strip()
|
||||
retailer_filter = request.args.get("retailer", "").strip()
|
||||
status_filter = request.args.get("status", "").strip()
|
||||
|
||||
products = await get_all_products(
|
||||
status=status_filter or None,
|
||||
retailer=retailer_filter or None,
|
||||
)
|
||||
if q:
|
||||
q_lower = q.lower()
|
||||
products = [p for p in products if q_lower in p["name"].lower() or q_lower in p["brand"].lower()]
|
||||
if category:
|
||||
products = [p for p in products if p["category"] == category]
|
||||
|
||||
click_counts = await get_click_counts()
|
||||
for p in products:
|
||||
p["click_count"] = click_counts.get(p["id"], 0)
|
||||
|
||||
retailers = await get_distinct_retailers()
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_products.html",
|
||||
admin_page="affiliate",
|
||||
products=products,
|
||||
click_counts=click_counts,
|
||||
retailers=retailers,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
q=q,
|
||||
category=category,
|
||||
retailer_filter=retailer_filter,
|
||||
status_filter=status_filter,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/results")
|
||||
@role_required("admin")
|
||||
async def affiliate_results():
|
||||
"""HTMX partial: filtered product rows."""
|
||||
from ..affiliate import get_all_products, get_click_counts
|
||||
|
||||
q = request.args.get("q", "").strip()
|
||||
category = request.args.get("category", "").strip()
|
||||
retailer_filter = request.args.get("retailer", "").strip()
|
||||
status_filter = request.args.get("status", "").strip()
|
||||
|
||||
products = await get_all_products(
|
||||
status=status_filter or None,
|
||||
retailer=retailer_filter or None,
|
||||
)
|
||||
if q:
|
||||
q_lower = q.lower()
|
||||
products = [p for p in products if q_lower in p["name"].lower() or q_lower in p["brand"].lower()]
|
||||
if category:
|
||||
products = [p for p in products if p["category"] == category]
|
||||
|
||||
click_counts = await get_click_counts()
|
||||
for p in products:
|
||||
p["click_count"] = click_counts.get(p["id"], 0)
|
||||
|
||||
return await render_template(
|
||||
"admin/partials/affiliate_results.html",
|
||||
products=products,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/preview", methods=["POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_preview():
|
||||
"""Render a product card fragment from form data — used by live preview HTMX."""
|
||||
from ..content.routes import _bake_env
|
||||
from ..i18n import get_translations
|
||||
|
||||
form = await request.form
|
||||
data = _form_to_product(form)
|
||||
lang = data["language"] or "de"
|
||||
|
||||
# Convert JSON-string pros/cons to lists for the template
|
||||
product = dict(data)
|
||||
product["pros"] = json.loads(product["pros"]) if product["pros"] else []
|
||||
product["cons"] = json.loads(product["cons"]) if product["cons"] else []
|
||||
|
||||
if not product["name"]:
|
||||
return "<p style='color:#94A3B8;font-size:.875rem;padding:.5rem 0'>Fill in the form to see a preview.</p>"
|
||||
|
||||
tmpl = _bake_env.get_template("partials/product_card.html")
|
||||
html = tmpl.render(product=product, t=get_translations(lang), lang=lang)
|
||||
return html
|
||||
|
||||
|
||||
@bp.route("/affiliate/new", methods=["GET", "POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_new():
|
||||
"""Create an affiliate product."""
|
||||
from ..affiliate import get_distinct_retailers
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
data = _form_to_product(form)
|
||||
|
||||
if not data["slug"] or not data["name"] or not data["affiliate_url"]:
|
||||
await flash("Slug, name, and affiliate URL are required.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data=data,
|
||||
editing=False,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
)
|
||||
|
||||
existing = await fetch_one(
|
||||
"SELECT id FROM affiliate_products WHERE slug = ? AND language = ?",
|
||||
(data["slug"], data["language"]),
|
||||
)
|
||||
if existing:
|
||||
await flash(f"Slug '{data['slug']}' already exists for language '{data['language']}'.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data=data,
|
||||
editing=False,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
)
|
||||
|
||||
await execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url, image_url,
|
||||
price_cents, currency, rating, pros, cons, description, cta_label,
|
||||
status, language, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
data["slug"], data["name"], data["brand"], data["category"],
|
||||
data["retailer"], data["affiliate_url"], data["image_url"],
|
||||
data["price_cents"], data["currency"], data["rating"],
|
||||
data["pros"], data["cons"], data["description"], data["cta_label"],
|
||||
data["status"], data["language"], data["sort_order"],
|
||||
),
|
||||
)
|
||||
await flash(f"Product '{data['name']}' created.", "success")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data={},
|
||||
editing=False,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/<int:product_id>/edit", methods=["GET", "POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_edit(product_id: int):
|
||||
"""Edit an affiliate product."""
|
||||
from ..affiliate import get_distinct_retailers
|
||||
|
||||
product = await fetch_one("SELECT * FROM affiliate_products WHERE id = ?", (product_id,))
|
||||
if not product:
|
||||
await flash("Product not found.", "error")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
data = _form_to_product(form)
|
||||
|
||||
if not data["slug"] or not data["name"] or not data["affiliate_url"]:
|
||||
await flash("Slug, name, and affiliate URL are required.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data={**dict(product), **data},
|
||||
editing=True,
|
||||
product_id=product_id,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
)
|
||||
|
||||
# Check slug collision only if slug or language changed
|
||||
if data["slug"] != product["slug"] or data["language"] != product["language"]:
|
||||
collision = await fetch_one(
|
||||
"SELECT id FROM affiliate_products WHERE slug = ? AND language = ? AND id != ?",
|
||||
(data["slug"], data["language"], product_id),
|
||||
)
|
||||
if collision:
|
||||
await flash(f"Slug '{data['slug']}' already exists for language '{data['language']}'.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data={**dict(product), **data},
|
||||
editing=True,
|
||||
product_id=product_id,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
)
|
||||
|
||||
await execute(
|
||||
"""UPDATE affiliate_products
|
||||
SET slug=?, name=?, brand=?, category=?, retailer=?, affiliate_url=?,
|
||||
image_url=?, price_cents=?, currency=?, rating=?, pros=?, cons=?,
|
||||
description=?, cta_label=?, status=?, language=?, sort_order=?,
|
||||
updated_at=datetime('now')
|
||||
WHERE id=?""",
|
||||
(
|
||||
data["slug"], data["name"], data["brand"], data["category"],
|
||||
data["retailer"], data["affiliate_url"], data["image_url"],
|
||||
data["price_cents"], data["currency"], data["rating"],
|
||||
data["pros"], data["cons"], data["description"], data["cta_label"],
|
||||
data["status"], data["language"], data["sort_order"],
|
||||
product_id,
|
||||
),
|
||||
)
|
||||
await flash(f"Product '{data['name']}' updated.", "success")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
# Render pros/cons JSON arrays as newline-separated text for the form
|
||||
product_dict = dict(product)
|
||||
try:
|
||||
product_dict["pros_text"] = "\n".join(json.loads(product["pros"] or "[]"))
|
||||
product_dict["cons_text"] = "\n".join(json.loads(product["cons"] or "[]"))
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
product_dict["pros_text"] = ""
|
||||
product_dict["cons_text"] = ""
|
||||
if product["price_cents"]:
|
||||
product_dict["price_eur"] = f"{product['price_cents'] / 100:.2f}"
|
||||
else:
|
||||
product_dict["price_eur"] = ""
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data=product_dict,
|
||||
editing=True,
|
||||
product_id=product_id,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/<int:product_id>/delete", methods=["POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_delete(product_id: int):
|
||||
"""Delete an affiliate product."""
|
||||
product = await fetch_one("SELECT name FROM affiliate_products WHERE id = ?", (product_id,))
|
||||
if product:
|
||||
await execute("DELETE FROM affiliate_products WHERE id = ?", (product_id,))
|
||||
await flash(f"Product '{product['name']}' deleted.", "success")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
|
||||
@bp.route("/affiliate/dashboard")
|
||||
@role_required("admin")
|
||||
async def affiliate_dashboard():
|
||||
"""Affiliate click statistics dashboard."""
|
||||
from ..affiliate import get_click_stats
|
||||
|
||||
days_count = int(request.args.get("days", "30") or "30")
|
||||
days_count = max(7, min(days_count, 365))
|
||||
stats = await get_click_stats(days_count)
|
||||
|
||||
# Build estimated revenue: clicks × assumed 3% CR × avg basket €80
|
||||
est_revenue = round(stats["total_clicks"] * 0.03 * 80)
|
||||
|
||||
# Article count (live articles that have been clicked)
|
||||
article_count = len(stats["top_articles"])
|
||||
|
||||
# Retailer bars: compute pct of max for width
|
||||
max_ret_clicks = max((r["click_count"] for r in stats["by_retailer"]), default=1)
|
||||
for r in stats["by_retailer"]:
|
||||
r["pct"] = round(r["click_count"] / max_ret_clicks * 100) if max_ret_clicks else 0
|
||||
total = stats["total_clicks"] or 1
|
||||
r["share_pct"] = round(r["click_count"] / total * 100)
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_dashboard.html",
|
||||
admin_page="affiliate_dashboard",
|
||||
stats=stats,
|
||||
est_revenue=est_revenue,
|
||||
article_count=article_count,
|
||||
days_count=days_count,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/<int:product_id>/toggle", methods=["POST"])
|
||||
@role_required("admin")
|
||||
async def affiliate_toggle(product_id: int):
|
||||
"""Toggle product status: draft → active → archived → draft."""
|
||||
product = await fetch_one(
|
||||
"SELECT id, name, status FROM affiliate_products WHERE id = ?", (product_id,)
|
||||
)
|
||||
if not product:
|
||||
return "", 404
|
||||
|
||||
cycle = {"draft": "active", "active": "archived", "archived": "draft"}
|
||||
new_status = cycle.get(product["status"], "draft")
|
||||
await execute(
|
||||
"UPDATE affiliate_products SET status=?, updated_at=datetime('now') WHERE id=?",
|
||||
(new_status, product_id),
|
||||
)
|
||||
|
||||
product_updated = await fetch_one(
|
||||
"SELECT * FROM affiliate_products WHERE id = ?", (product_id,)
|
||||
)
|
||||
from ..affiliate import get_click_counts
|
||||
click_counts = await get_click_counts()
|
||||
product_dict = dict(product_updated)
|
||||
product_dict["click_count"] = click_counts.get(product_id, 0)
|
||||
|
||||
return await render_template(
|
||||
"admin/partials/affiliate_row.html",
|
||||
product=product_dict,
|
||||
)
|
||||
|
||||
@@ -0,0 +1,121 @@
|
||||
{% extends "admin/base_admin.html" %}
|
||||
{% set admin_page = "affiliate_dashboard" %}
|
||||
|
||||
{% block title %}Affiliate Dashboard - Admin - {{ config.APP_NAME }}{% endblock %}
|
||||
|
||||
{% block admin_content %}
|
||||
<header class="flex justify-between items-center mb-6">
|
||||
<h1 class="text-2xl">Affiliate Dashboard</h1>
|
||||
<div class="flex gap-2">
|
||||
{% for d in [7, 30, 90] %}
|
||||
<a href="?days={{ d }}" class="btn-outline btn-sm {% if days_count == d %}active{% endif %}">{{ d }}d</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{# ── Stats strip ── #}
|
||||
<div style="display:grid;grid-template-columns:repeat(4,1fr);gap:1rem;margin-bottom:1.5rem;">
|
||||
|
||||
<div class="card" style="padding:1.25rem;">
|
||||
<div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Clicks ({{ days_count }}d)</div>
|
||||
<div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">{{ stats.total_clicks | int }}</div>
|
||||
</div>
|
||||
|
||||
<div class="card" style="padding:1.25rem;">
|
||||
<div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Products</div>
|
||||
<div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">{{ stats.active_products or 0 }}</div>
|
||||
<div class="text-xs text-slate">{{ stats.draft_products or 0 }} draft</div>
|
||||
</div>
|
||||
|
||||
<div class="card" style="padding:1.25rem;">
|
||||
<div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Articles (clicked)</div>
|
||||
<div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">{{ article_count }}</div>
|
||||
</div>
|
||||
|
||||
<div class="card" style="padding:1.25rem;">
|
||||
<div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Est. Revenue</div>
|
||||
<div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">~€{{ est_revenue }}</div>
|
||||
<div class="text-xs text-slate">3% CR × €80 basket</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
{# ── Daily bar chart ── #}
|
||||
{% if stats.daily_bars %}
|
||||
<div class="card mb-6" style="padding:1.5rem;">
|
||||
<div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Clicks · Last {{ days_count }} Days</div>
|
||||
<div style="display:flex;align-items:flex-end;gap:2px;height:120px;overflow-x:auto;">
|
||||
{% for bar in stats.daily_bars %}
|
||||
<div title="{{ bar.day }}: {{ bar.click_count }} clicks"
|
||||
style="flex-shrink:0;width:8px;background:#1D4ED8;border-radius:3px 3px 0 0;min-height:2px;height:{{ bar.pct }}%;transition:opacity .15s;"
|
||||
onmouseover="this.style.opacity='.7'" onmouseout="this.style.opacity='1'">
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<div style="display:flex;justify-content:space-between;margin-top:.375rem;">
|
||||
<span class="text-xs text-slate">{{ stats.daily_bars[0].day if stats.daily_bars else '' }}</span>
|
||||
<span class="text-xs text-slate">{{ stats.daily_bars[-1].day if stats.daily_bars else '' }}</span>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div style="display:grid;grid-template-columns:1fr 1fr;gap:1.5rem;margin-bottom:1.5rem;">
|
||||
|
||||
{# ── Top products ── #}
|
||||
<div class="card" style="padding:1.5rem;">
|
||||
<div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Top Products</div>
|
||||
{% if stats.top_products %}
|
||||
{% for p in stats.top_products %}
|
||||
<div style="display:flex;align-items:center;gap:.75rem;padding:.5rem 0;{% if not loop.last %}border-bottom:1px solid #F1F5F9;{% endif %}">
|
||||
<span class="mono text-xs text-slate" style="width:1.5rem;text-align:right;">{{ loop.index }}</span>
|
||||
<span style="flex:1;font-size:.8125rem;color:#0F172A;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;">
|
||||
<a href="{{ url_for('admin.affiliate_edit', product_id=p.id) }}" style="color:inherit;text-decoration:none;">{{ p.name }}</a>
|
||||
</span>
|
||||
<span class="mono" style="font-weight:600;font-size:.875rem;color:#0F172A;">{{ p.click_count }}</span>
|
||||
</div>
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<p class="text-slate text-sm">No clicks yet.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# ── Top articles ── #}
|
||||
<div class="card" style="padding:1.5rem;">
|
||||
<div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Top Articles</div>
|
||||
{% if stats.top_articles %}
|
||||
{% for a in stats.top_articles %}
|
||||
<div style="display:flex;align-items:center;gap:.75rem;padding:.5rem 0;{% if not loop.last %}border-bottom:1px solid #F1F5F9;{% endif %}">
|
||||
<span class="mono text-xs text-slate" style="width:1.5rem;text-align:right;">{{ loop.index }}</span>
|
||||
<span style="flex:1;font-size:.8125rem;color:#0F172A;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;"
|
||||
title="{{ a.article_slug }}">{{ a.article_slug }}</span>
|
||||
<span class="mono" style="font-weight:600;font-size:.875rem;color:#0F172A;">{{ a.click_count }}</span>
|
||||
</div>
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<p class="text-slate text-sm">No clicks with article source yet.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
{# ── Clicks by retailer ── #}
|
||||
{% if stats.by_retailer %}
|
||||
<div class="card" style="padding:1.5rem;">
|
||||
<div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Clicks by Retailer</div>
|
||||
{% for r in stats.by_retailer %}
|
||||
<div style="display:flex;align-items:center;gap:1rem;margin-bottom:.75rem;">
|
||||
<span style="width:140px;font-size:.8125rem;color:#0F172A;flex-shrink:0;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;">
|
||||
{{ r.retailer or 'Unknown' }}
|
||||
</span>
|
||||
<div style="flex:1;background:#F1F5F9;border-radius:4px;height:24px;overflow:hidden;">
|
||||
<div style="width:{{ r.pct }}%;background:#1D4ED8;height:100%;border-radius:4px;min-width:2px;"></div>
|
||||
</div>
|
||||
<span class="mono" style="font-size:.8125rem;font-weight:600;width:60px;text-align:right;flex-shrink:0;">
|
||||
{{ r.click_count }} <span class="text-slate" style="font-weight:400;">({{ r.share_pct }}%)</span>
|
||||
</span>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
220
web/src/padelnomics/admin/templates/admin/affiliate_form.html
Normal file
220
web/src/padelnomics/admin/templates/admin/affiliate_form.html
Normal file
@@ -0,0 +1,220 @@
|
||||
{% extends "admin/base_admin.html" %}
|
||||
{% set admin_page = "affiliate" %}
|
||||
|
||||
{% block title %}{% if editing %}Edit Product{% else %}New Product{% endif %} - Admin - {{ config.APP_NAME }}{% endblock %}
|
||||
|
||||
{% block admin_head %}
|
||||
<script>
|
||||
function slugify(text) {
|
||||
return text.toLowerCase()
|
||||
.replace(/[äöü]/g, c => ({'ä':'ae','ö':'oe','ü':'ue'}[c]))
|
||||
.replace(/[^a-z0-9]+/g, '-')
|
||||
.replace(/^-+|-+$/g, '');
|
||||
}
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
var nameInput = document.getElementById('f-name');
|
||||
var slugInput = document.getElementById('f-slug');
|
||||
if (nameInput && slugInput && !slugInput.value) {
|
||||
nameInput.addEventListener('input', function() {
|
||||
if (!slugInput.dataset.manual) {
|
||||
slugInput.value = slugify(nameInput.value);
|
||||
}
|
||||
});
|
||||
slugInput.addEventListener('input', function() {
|
||||
slugInput.dataset.manual = '1';
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
||||
{% block admin_content %}
|
||||
<header class="flex justify-between items-center mb-6">
|
||||
<div>
|
||||
<a href="{{ url_for('admin.affiliate_products') }}" class="text-slate text-sm" style="text-decoration:none">← Products</a>
|
||||
<h1 class="text-2xl mt-1">{% if editing %}Edit Product{% else %}New Product{% endif %}</h1>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{# HTMX preview trigger — outside the grid so it takes no layout space #}
|
||||
<div style="display:none"
|
||||
hx-post="{{ url_for('admin.affiliate_preview') }}"
|
||||
hx-target="#product-preview"
|
||||
hx-trigger="load, input from:#affiliate-form delay:600ms"
|
||||
hx-include="#affiliate-form"
|
||||
hx-push-url="false">
|
||||
</div>
|
||||
|
||||
<div style="display:grid;grid-template-columns:1fr 380px;gap:2rem;align-items:start" class="affiliate-form-grid">
|
||||
|
||||
{# ── Left: form ── #}
|
||||
<form method="post" id="affiliate-form"
|
||||
action="{% if editing %}{{ url_for('admin.affiliate_edit', product_id=product_id) }}{% else %}{{ url_for('admin.affiliate_new') }}{% endif %}">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
|
||||
<div class="card" style="padding:1.5rem;display:flex;flex-direction:column;gap:1.25rem;">
|
||||
|
||||
{# Name #}
|
||||
<div>
|
||||
<label class="form-label" for="f-name">Name *</label>
|
||||
<input id="f-name" type="text" name="name" value="{{ data.get('name','') }}"
|
||||
class="form-input" placeholder="e.g. Bullpadel Vertex 04" required>
|
||||
</div>
|
||||
|
||||
{# Slug #}
|
||||
<div>
|
||||
<label class="form-label" for="f-slug">Slug *</label>
|
||||
<input id="f-slug" type="text" name="slug" value="{{ data.get('slug','') }}"
|
||||
class="form-input" placeholder="e.g. bullpadel-vertex-04-amazon" required
|
||||
pattern="[a-z0-9][a-z0-9\-]*">
|
||||
<p class="form-hint">Lowercase letters, numbers, hyphens only. Include retailer to disambiguate (e.g. <code>-amazon</code>, <code>-padelnuestro</code>).</p>
|
||||
</div>
|
||||
|
||||
{# Brand + Category row #}
|
||||
<div style="display:grid;grid-template-columns:1fr 1fr;gap:1rem;">
|
||||
<div>
|
||||
<label class="form-label" for="f-brand">Brand</label>
|
||||
<input id="f-brand" type="text" name="brand" value="{{ data.get('brand','') }}"
|
||||
class="form-input" placeholder="e.g. Bullpadel">
|
||||
</div>
|
||||
<div>
|
||||
<label class="form-label" for="f-category">Category</label>
|
||||
<select id="f-category" name="category" class="form-input">
|
||||
{% for cat in categories %}
|
||||
<option value="{{ cat }}" {% if data.get('category','accessory') == cat %}selected{% endif %}>{{ cat | capitalize }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{# Retailer #}
|
||||
<div>
|
||||
<label class="form-label" for="f-retailer">Retailer</label>
|
||||
<input id="f-retailer" type="text" name="retailer" value="{{ data.get('retailer','') }}"
|
||||
class="form-input" placeholder="e.g. Amazon, Padel Nuestro"
|
||||
list="retailers-list">
|
||||
<datalist id="retailers-list">
|
||||
{% for r in retailers %}
|
||||
<option value="{{ r }}">
|
||||
{% endfor %}
|
||||
</datalist>
|
||||
</div>
|
||||
|
||||
{# Affiliate URL #}
|
||||
<div>
|
||||
<label class="form-label" for="f-url">Affiliate URL *</label>
|
||||
<input id="f-url" type="url" name="affiliate_url" value="{{ data.get('affiliate_url','') }}"
|
||||
class="form-input" placeholder="https://www.amazon.de/dp/B0XXXXX?tag=padelnomics-21" required>
|
||||
<p class="form-hint">Full URL with tracking params already baked in.</p>
|
||||
</div>
|
||||
|
||||
{# Image URL #}
|
||||
<div>
|
||||
<label class="form-label" for="f-image">Image URL</label>
|
||||
<input id="f-image" type="text" name="image_url" value="{{ data.get('image_url','') }}"
|
||||
class="form-input" placeholder="/static/images/affiliate/bullpadel-vertex-04.webp">
|
||||
<p class="form-hint">Local path (recommended) or external URL.</p>
|
||||
</div>
|
||||
|
||||
{# Price + Rating row #}
|
||||
<div style="display:grid;grid-template-columns:1fr 1fr;gap:1rem;">
|
||||
<div>
|
||||
<label class="form-label" for="f-price">Price (EUR)</label>
|
||||
<input id="f-price" type="number" name="price_eur" value="{{ data.get('price_eur','') }}"
|
||||
class="form-input" placeholder="149.99" step="0.01" min="0">
|
||||
</div>
|
||||
<div>
|
||||
<label class="form-label" for="f-rating">Rating (0–5)</label>
|
||||
<input id="f-rating" type="number" name="rating" value="{{ data.get('rating','') }}"
|
||||
class="form-input" placeholder="4.3" step="0.1" min="0" max="5">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{# Description #}
|
||||
<div>
|
||||
<label class="form-label" for="f-desc">Short Description</label>
|
||||
<textarea id="f-desc" name="description" rows="3"
|
||||
class="form-input" placeholder="One to two sentences describing the product...">{{ data.get('description','') }}</textarea>
|
||||
</div>
|
||||
|
||||
{# Pros #}
|
||||
<div>
|
||||
<label class="form-label" for="f-pros">Pros <span class="form-hint" style="font-weight:normal">(one per line)</span></label>
|
||||
<textarea id="f-pros" name="pros" rows="4"
|
||||
class="form-input" placeholder="Carbon frame for maximum power Diamond shape for aggressive players">{{ data.get('pros_text', data.get('pros','')) }}</textarea>
|
||||
</div>
|
||||
|
||||
{# Cons #}
|
||||
<div>
|
||||
<label class="form-label" for="f-cons">Cons <span class="form-hint" style="font-weight:normal">(one per line)</span></label>
|
||||
<textarea id="f-cons" name="cons" rows="3"
|
||||
class="form-input" placeholder="Only for advanced players">{{ data.get('cons_text', data.get('cons','')) }}</textarea>
|
||||
</div>
|
||||
|
||||
{# CTA Label #}
|
||||
<div>
|
||||
<label class="form-label" for="f-cta">CTA Label</label>
|
||||
<input id="f-cta" type="text" name="cta_label" value="{{ data.get('cta_label','') }}"
|
||||
class="form-input" placeholder='Leave empty for default "Zum Angebot"'>
|
||||
</div>
|
||||
|
||||
{# Status + Language + Sort #}
|
||||
<div style="display:grid;grid-template-columns:1fr 1fr 1fr;gap:1rem;">
|
||||
<div>
|
||||
<label class="form-label" for="f-status">Status</label>
|
||||
<select id="f-status" name="status" class="form-input">
|
||||
{% for s in statuses %}
|
||||
<option value="{{ s }}" {% if data.get('status','draft') == s %}selected{% endif %}>{{ s | capitalize }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
<div>
|
||||
<label class="form-label" for="f-lang">Language</label>
|
||||
<select id="f-lang" name="language" class="form-input">
|
||||
<option value="de" {% if data.get('language','de') == 'de' %}selected{% endif %}>DE</option>
|
||||
<option value="en" {% if data.get('language','de') == 'en' %}selected{% endif %}>EN</option>
|
||||
</select>
|
||||
</div>
|
||||
<div>
|
||||
<label class="form-label" for="f-sort">Sort Order</label>
|
||||
<input id="f-sort" type="number" name="sort_order" value="{{ data.get('sort_order', 0) }}"
|
||||
class="form-input" min="0">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{# Actions #}
|
||||
<div class="flex gap-3 justify-between" style="margin-top:.5rem">
|
||||
<div class="flex gap-2">
|
||||
<button type="submit" class="btn" formaction="{% if editing %}{{ url_for('admin.affiliate_edit', product_id=product_id) }}{% else %}{{ url_for('admin.affiliate_new') }}{% endif %}">
|
||||
{% if editing %}Save Changes{% else %}Create Product{% endif %}
|
||||
</button>
|
||||
<a href="{{ url_for('admin.affiliate_products') }}" class="btn-outline">Cancel</a>
|
||||
</div>
|
||||
{% if editing %}
|
||||
<form method="post" action="{{ url_for('admin.affiliate_delete', product_id=product_id) }}" style="margin:0">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button type="submit" class="btn-outline"
|
||||
onclick="return confirm('Delete this product? This cannot be undone.')">Delete</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</form>
|
||||
|
||||
{# ── Right: live preview ── #}
|
||||
<div style="position:sticky;top:1.5rem;">
|
||||
<div class="text-xs font-semibold text-slate mb-2" style="text-transform:uppercase;letter-spacing:.06em;">Preview</div>
|
||||
<div id="product-preview" style="border:1px solid #E2E8F0;border-radius:12px;padding:1rem;background:#F8FAFC;min-height:180px;">
|
||||
<p style="color:#94A3B8;font-size:.875rem;text-align:center;margin-top:2rem;">Loading preview…</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<style>
|
||||
@media (max-width: 900px) {
|
||||
.affiliate-form-grid { grid-template-columns: 1fr !important; }
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
@@ -0,0 +1,83 @@
|
||||
{# Admin affiliate product list.
   Context: q, category/categories, retailer_filter/retailers,
   status_filter/statuses, and products (consumed by the included partial).
   Filters re-query via htmx on change/typing (300 ms debounce). #}
{% extends "admin/base_admin.html" %}
{% set admin_page = "affiliate" %}

{% block title %}Affiliate Products - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
<h1 class="text-2xl">Affiliate Products</h1>
<a href="{{ url_for('admin.affiliate_new') }}" class="btn btn-sm">+ New Product</a>
</header>

{# Filters — every control re-fetches the results fragment via htmx #}
<div class="card mb-6" style="padding:1rem 1.25rem">
<form class="flex flex-wrap gap-3 items-end"
hx-get="{{ url_for('admin.affiliate_results') }}"
hx-target="#aff-results"
hx-trigger="change, input delay:300ms"
hx-indicator="#aff-loading">

<div>
<label class="text-xs font-semibold text-slate block mb-1">Search</label>
<input type="text" name="q" value="{{ q }}" placeholder="Name or brand..."
class="form-input" style="min-width:200px">
</div>

<div>
<label class="text-xs font-semibold text-slate block mb-1">Category</label>
<select name="category" class="form-input" style="min-width:120px">
<option value="">All</option>
{% for cat in categories %}
<option value="{{ cat }}" {% if cat == category %}selected{% endif %}>{{ cat | capitalize }}</option>
{% endfor %}
</select>
</div>

<div>
<label class="text-xs font-semibold text-slate block mb-1">Retailer</label>
<select name="retailer" class="form-input" style="min-width:140px">
<option value="">All</option>
{% for r in retailers %}
<option value="{{ r }}" {% if r == retailer_filter %}selected{% endif %}>{{ r }}</option>
{% endfor %}
</select>
</div>

<div>
<label class="text-xs font-semibold text-slate block mb-1">Status</label>
<select name="status" class="form-input" style="min-width:110px">
<option value="">All</option>
{% for s in statuses %}
<option value="{{ s }}" {% if s == status_filter %}selected{% endif %}>{{ s | capitalize }}</option>
{% endfor %}
</select>
</div>

{# Spinner shown while an htmx request is in flight #}
<svg id="aff-loading" class="htmx-indicator search-spinner" width="14" height="14" viewBox="0 0 24 24" fill="none" aria-hidden="true">
<circle cx="12" cy="12" r="10" stroke="#CBD5E1" stroke-width="3"/>
<path d="M12 2a10 10 0 0 1 10 10" stroke="#0EA5E9" stroke-width="3" stroke-linecap="round"/>
</svg>
</form>
</div>

{# Results.
   NOTE(review): hx-target is #aff-results (the wrapper div), while the
   included partial renders only <tr> rows — this only works if the
   admin.affiliate_results endpoint returns the full table markup, not just
   the partial. Verify against the route. #}
<div id="aff-results">
<table class="table">
<thead>
<tr>
<th>Name</th>
<th>Brand</th>
<th>Retailer</th>
<th>Category</th>
<th>Price</th>
<th>Status</th>
<th class="text-right">Clicks</th>
<th class="text-right">Actions</th>
</tr>
</thead>
<tbody>
{% include "admin/partials/affiliate_results.html" %}
</tbody>
</table>
</div>
{% endblock %}
|
||||
@@ -99,6 +99,7 @@
|
||||
'suppliers': 'suppliers',
|
||||
'articles': 'content', 'scenarios': 'content', 'templates': 'content', 'pseo': 'content',
|
||||
'emails': 'email', 'inbox': 'email', 'compose': 'email', 'gallery': 'email', 'audiences': 'email', 'outreach': 'email',
|
||||
'affiliate': 'affiliate', 'affiliate_dashboard': 'affiliate',
|
||||
'billing': 'billing',
|
||||
'seo': 'analytics',
|
||||
'pipeline': 'pipeline',
|
||||
@@ -149,6 +150,11 @@
|
||||
Billing
|
||||
</a>
|
||||
|
||||
<a href="{{ url_for('admin.affiliate_dashboard') }}" class="{% if active_section == 'affiliate' %}active{% endif %}">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" d="M13.5 21v-7.5a.75.75 0 0 1 .75-.75h3a.75.75 0 0 1 .75.75V21m-4.5 0H2.36m11.14 0H18m0 0h3.64m-1.39 0V9.349M3.75 21V9.349m0 0a3.001 3.001 0 0 0 3.75-.615A2.993 2.993 0 0 0 9.75 9.75c.896 0 1.7-.393 2.25-1.016a2.993 2.993 0 0 0 2.25 1.016 2.993 2.993 0 0 0 2.25-1.016 3.001 3.001 0 0 0 3.75.614m-16.5 0a3.004 3.004 0 0 1-.621-4.72l1.189-1.19A1.5 1.5 0 0 1 5.378 3h13.243a1.5 1.5 0 0 1 1.06.44l1.19 1.189a3 3 0 0 1-.621 4.72M6.75 18h3.75a.75.75 0 0 0 .75-.75V13.5a.75.75 0 0 0-.75-.75H6.75a.75.75 0 0 0-.75.75v3.75c0 .414.336.75.75.75Z"/></svg>
|
||||
Affiliate
|
||||
</a>
|
||||
|
||||
<a href="{{ url_for('admin.seo') }}" class="{% if active_section == 'analytics' %}active{% endif %}">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" d="M2.25 18 9 11.25l4.306 4.306a11.95 11.95 0 0 1 5.814-5.518l2.74-1.22m0 0-5.94-2.281m5.94 2.28-2.28 5.941"/></svg>
|
||||
Analytics
|
||||
@@ -196,6 +202,11 @@
|
||||
<a href="{{ url_for('admin.audiences') }}" class="{% if admin_page == 'audiences' %}active{% endif %}">Audiences</a>
|
||||
<a href="{{ url_for('admin.outreach') }}" class="{% if admin_page == 'outreach' %}active{% endif %}">Outreach</a>
|
||||
</nav>
|
||||
{% elif active_section == 'affiliate' %}
|
||||
<nav class="admin-subnav">
|
||||
<a href="{{ url_for('admin.affiliate_dashboard') }}" class="{% if admin_page == 'affiliate_dashboard' %}active{% endif %}">Dashboard</a>
|
||||
<a href="{{ url_for('admin.affiliate_products') }}" class="{% if admin_page == 'affiliate' %}active{% endif %}">Products</a>
|
||||
</nav>
|
||||
{% elif active_section == 'system' %}
|
||||
<nav class="admin-subnav">
|
||||
<a href="{{ url_for('admin.users') }}" class="{% if admin_page == 'users' %}active{% endif %}">Users</a>
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
{# Table-body fragment for the admin affiliate product list.
   Expects `products` (list of product dicts); emits one row per product via
   the affiliate_row partial, or a single empty-state row.
   colspan="8" must stay in sync with the column count of the parent table. #}
{% if products %}
{% for product in products %}
{% include "admin/partials/affiliate_row.html" %}
{% endfor %}
{% else %}
<tr>
<td colspan="8" class="text-slate" style="text-align:center;padding:2rem;">No products found.</td>
</tr>
{% endif %}
|
||||
@@ -0,0 +1,29 @@
|
||||
{# One table row for the admin affiliate product list.
   Expects `product` (dict: id, name, brand, retailer, category, price_cents,
   status, click_count). The status badge is an htmx button that POSTs a
   toggle and swaps the whole row (outerHTML) with the server's response.
   NOTE(review): product.name is interpolated into the onclick JS string
   below — safe only if Jinja autoescape is enabled for this template
   (default for .html); confirm, since a quote in a name would otherwise
   break out of the attribute. #}
<tr id="aff-{{ product.id }}">
<td style="max-width:240px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap" title="{{ product.name }}">
<a href="{{ url_for('admin.affiliate_edit', product_id=product.id) }}" style="color:#0F172A;text-decoration:none;font-weight:500;">{{ product.name }}</a>
</td>
<td class="text-slate">{{ product.brand or '—' }}</td>
<td class="text-slate">{{ product.retailer or '—' }}</td>
<td class="text-slate">{{ product.category }}</td>
<td class="mono">
{# Prices are stored in cents; rendered as whole euros #}
{% if product.price_cents %}{{ "%.0f" | format(product.price_cents / 100) }}€{% else %}—{% endif %}
</td>
<td>
<button hx-post="{{ url_for('admin.affiliate_toggle', product_id=product.id) }}"
hx-target="#aff-{{ product.id }}" hx-swap="outerHTML"
hx-headers='{"X-CSRF-Token": "{{ csrf_token() }}"}'
class="badge {% if product.status == 'active' %}badge-success{% elif product.status == 'draft' %}badge-warning{% else %}badge{% endif %}"
style="cursor:pointer;border:none;">
{{ product.status }}
</button>
</td>
<td class="mono text-right">{{ product.click_count or 0 }}</td>
<td class="text-right" style="white-space:nowrap">
<a href="{{ url_for('admin.affiliate_edit', product_id=product.id) }}" class="btn-outline btn-sm">Edit</a>
{# Plain (non-htmx) delete form with CSRF token and JS confirm #}
<form method="post" action="{{ url_for('admin.affiliate_delete', product_id=product.id) }}" style="display:inline">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
<button type="submit" class="btn-outline btn-sm"
onclick="return confirm('Delete {{ product.name }}?')">Delete</button>
</form>
</td>
</tr>
|
||||
224
web/src/padelnomics/affiliate.py
Normal file
224
web/src/padelnomics/affiliate.py
Normal file
@@ -0,0 +1,224 @@
|
||||
"""
|
||||
Affiliate product catalog: product lookup, click logging, and stats queries.
|
||||
|
||||
All functions are plain async procedures — no classes, no state.
|
||||
|
||||
Design decisions:
|
||||
- IP hashing uses a daily salt (date + SECRET_KEY[:16]) for GDPR compliance.
|
||||
Rotating salt prevents re-identification across days without storing PII.
|
||||
- Products are fetched by (slug, language) with a graceful fallback to any
|
||||
language, so DE cards appear in EN articles rather than nothing.
|
||||
- Stats are computed entirely in SQL — no Python aggregation.
|
||||
"""
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
from datetime import date
|
||||
|
||||
from .core import config, execute, fetch_all, fetch_one
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
VALID_CATEGORIES = ("racket", "ball", "shoe", "bag", "grip", "eyewear", "accessory")
|
||||
VALID_STATUSES = ("draft", "active", "archived")
|
||||
|
||||
|
||||
def hash_ip(ip_address: str) -> str:
    """Hash an IP address with a daily-rotating salt for GDPR-safe storage.

    The digest is SHA256 of ``"{ip}:{YYYY-MM-DD}:{SECRET_KEY[:16]}"``, so a
    given IP maps to the same hash only within one calendar day — clicks
    cannot be correlated across days without the secret.

    Args:
        ip_address: Client IP string; must be non-empty.

    Returns:
        Hex-encoded SHA256 digest (64 chars).

    Raises:
        ValueError: If ip_address is empty. (Explicit check instead of
            ``assert``, which is stripped under ``python -O``.)
    """
    if not ip_address:
        raise ValueError("ip_address must not be empty")
    today = date.today().isoformat()
    salt = config.SECRET_KEY[:16]
    raw = f"{ip_address}:{today}:{salt}"
    return hashlib.sha256(raw.encode()).hexdigest()
|
||||
|
||||
|
||||
async def get_product(slug: str, language: str = "de") -> dict | None:
    """Look up an active product by slug, preferring the requested language.

    If no row exists for (slug, language), fall back to a match in any
    language so that e.g. a DE-only card still renders inside an EN article
    instead of disappearing. Returns None when the slug is unknown entirely.
    """
    assert slug, "slug must not be empty"

    exact = await fetch_one(
        "SELECT * FROM affiliate_products"
        " WHERE slug = ? AND language = ? AND status = 'active'",
        (slug, language),
    )
    if exact:
        return _parse_product(exact)

    # Graceful fallback: show any language rather than nothing
    any_lang = await fetch_one(
        "SELECT * FROM affiliate_products"
        " WHERE slug = ? AND status = 'active' LIMIT 1",
        (slug,),
    )
    if not any_lang:
        return None
    return _parse_product(any_lang)
|
||||
|
||||
|
||||
async def get_products_by_category(category: str, language: str = "de") -> list[dict]:
    """Return active products in *category*, sorted by sort_order then id.

    Prefers rows in the requested language; if none exist, falls back to the
    same category in any language so a product group renders rather than
    vanishing from the page.

    Args:
        category: One of VALID_CATEGORIES.
        language: Preferred two-letter language code.

    Returns:
        Parsed product dicts (possibly empty).

    Raises:
        ValueError: If category is not in VALID_CATEGORIES. (Explicit check
            instead of ``assert``, which is stripped under ``python -O`` —
            category can originate from article markers, i.e. external text.)
    """
    if category not in VALID_CATEGORIES:
        raise ValueError(f"unknown category: {category}")
    rows = await fetch_all(
        "SELECT * FROM affiliate_products"
        " WHERE category = ? AND language = ? AND status = 'active'"
        " ORDER BY sort_order ASC, id ASC",
        (category, language),
    )
    if rows:
        return [_parse_product(r) for r in rows]
    # Fallback: any language for this category
    rows = await fetch_all(
        "SELECT * FROM affiliate_products"
        " WHERE category = ? AND status = 'active'"
        " ORDER BY sort_order ASC, id ASC",
        (category,),
    )
    return [_parse_product(r) for r in rows]
|
||||
|
||||
|
||||
async def get_all_products(
    status: str | None = None,
    retailer: str | None = None,
) -> list[dict]:
    """Admin listing — all products, optionally filtered by status and/or retailer.

    Args:
        status: Optional status filter; must be one of VALID_STATUSES.
        retailer: Optional exact retailer-name filter.

    Returns:
        Parsed product dicts ordered by sort_order, then id.

    Raises:
        ValueError: If status is given but not in VALID_STATUSES. (Explicit
            check instead of ``assert`` — the value comes from a query
            string, and asserts are stripped under ``python -O``.)
    """
    conditions: list[str] = []
    params: list = []
    if status:
        if status not in VALID_STATUSES:
            raise ValueError(f"unknown status: {status}")
        conditions.append("status = ?")
        params.append(status)
    if retailer:
        conditions.append("retailer = ?")
        params.append(retailer)

    # Only trusted fragments are interpolated; all values go through placeholders.
    where = ("WHERE " + " AND ".join(conditions)) if conditions else ""
    rows = await fetch_all(
        f"SELECT * FROM affiliate_products {where} ORDER BY sort_order ASC, id ASC",
        tuple(params),
    )
    return [_parse_product(r) for r in rows]
|
||||
|
||||
|
||||
async def get_click_counts() -> dict[int, int]:
    """Map each product_id that has clicks to its total click count.

    Used by the admin product list. Products with zero clicks are absent
    from the mapping, so callers should read with ``.get(pid, 0)``.
    """
    counted = await fetch_all(
        "SELECT product_id, COUNT(*) AS cnt FROM affiliate_clicks GROUP BY product_id"
    )
    totals: dict[int, int] = {}
    for record in counted:
        totals[record["product_id"]] = record["cnt"]
    return totals
|
||||
|
||||
|
||||
async def log_click(
    product_id: int,
    ip_address: str,
    article_slug: str | None,
    referrer: str | None,
) -> None:
    """Insert a click event, hashing the IP for GDPR compliance.

    Args:
        product_id: Positive id of the clicked product.
        ip_address: Raw client IP; persisted only as a salted daily hash.
        article_slug: Slug of the article the click came from, if known.
        referrer: Raw Referer header value, if any.

    Raises:
        ValueError: If product_id is not positive or ip_address is empty.
            (Explicit checks instead of ``assert``, which is stripped under
            ``python -O`` — these values derive from an incoming request.)
    """
    if product_id <= 0:
        raise ValueError("product_id must be positive")
    if not ip_address:
        raise ValueError("ip_address must not be empty")
    ip = hash_ip(ip_address)
    await execute(
        "INSERT INTO affiliate_clicks (product_id, article_slug, referrer, ip_hash)"
        " VALUES (?, ?, ?, ?)",
        (product_id, article_slug, referrer, ip),
    )
|
||||
|
||||
|
||||
async def get_click_stats(days_count: int = 30) -> dict:
    """Compute click statistics over the last *days_count* days, entirely in SQL.

    Returns a dict with: total_clicks, active_products, draft_products,
    top_products, top_articles, by_retailer, daily_bars (each bar carries a
    0-100 "pct" height for the CSS chart), and the echoed "days" window.

    Raises:
        ValueError: If days_count is outside 1-365. (Explicit check instead
            of ``assert``, which is stripped under ``python -O``.)
    """
    if not 1 <= days_count <= 365:
        raise ValueError(f"days must be 1-365, got {days_count}")

    # SQLite datetime() modifier for the window start; identical in every query.
    window = (f"-{days_count} days",)

    # Total clicks in window
    total_row = await fetch_one(
        "SELECT COUNT(*) AS cnt FROM affiliate_clicks"
        " WHERE clicked_at >= datetime('now', ?)",
        window,
    )
    total = total_row["cnt"] if total_row else 0

    # Active/draft product counts in one table scan.
    # SUM() over an empty table yields NULL, hence the `or 0` below.
    product_counts = await fetch_one(
        "SELECT"
        " SUM(CASE WHEN status='active' THEN 1 ELSE 0 END) AS active_count,"
        " SUM(CASE WHEN status='draft' THEN 1 ELSE 0 END) AS draft_count"
        " FROM affiliate_products"
    )

    # Top products by clicks (LEFT JOIN keeps zero-click products visible)
    top_products = await fetch_all(
        "SELECT p.id, p.name, p.slug, p.retailer, COUNT(c.id) AS click_count"
        " FROM affiliate_products p"
        " LEFT JOIN affiliate_clicks c"
        " ON c.product_id = p.id"
        " AND c.clicked_at >= datetime('now', ?)"
        " GROUP BY p.id"
        " ORDER BY click_count DESC"
        " LIMIT 10",
        window,
    )

    # Top articles by clicks (clicks with no article context are excluded)
    top_articles = await fetch_all(
        "SELECT article_slug, COUNT(*) AS click_count"
        " FROM affiliate_clicks"
        " WHERE clicked_at >= datetime('now', ?)"
        " AND article_slug IS NOT NULL"
        " GROUP BY article_slug"
        " ORDER BY click_count DESC"
        " LIMIT 10",
        window,
    )

    # Clicks by retailer
    by_retailer = await fetch_all(
        "SELECT p.retailer, COUNT(c.id) AS click_count"
        " FROM affiliate_products p"
        " LEFT JOIN affiliate_clicks c"
        " ON c.product_id = p.id"
        " AND c.clicked_at >= datetime('now', ?)"
        " GROUP BY p.retailer"
        " ORDER BY click_count DESC",
        window,
    )

    # Daily click counts for bar chart
    daily = await fetch_all(
        "SELECT date(clicked_at) AS day, COUNT(*) AS click_count"
        " FROM affiliate_clicks"
        " WHERE clicked_at >= datetime('now', ?)"
        " GROUP BY day"
        " ORDER BY day ASC",
        window,
    )

    # Normalize daily counts to percentage heights for the CSS bar chart.
    # default=1 guards the division when there are no clicks at all.
    max_daily = max((r["click_count"] for r in daily), default=1)
    daily_bars = [
        {"day": r["day"], "click_count": r["click_count"],
         "pct": round(r["click_count"] / max_daily * 100)}
        for r in daily
    ]

    return {
        "total_clicks": total,
        # Coalesce the NULL-on-empty-table SUM results to 0.
        "active_products": (product_counts["active_count"] or 0) if product_counts else 0,
        "draft_products": (product_counts["draft_count"] or 0) if product_counts else 0,
        "top_products": [dict(r) for r in top_products],
        "top_articles": [dict(r) for r in top_articles],
        "by_retailer": [dict(r) for r in by_retailer],
        "daily_bars": daily_bars,
        "days": days_count,
    }
|
||||
|
||||
|
||||
async def get_distinct_retailers() -> list[str]:
    """Collect the distinct, non-empty retailer names, alphabetically sorted.

    Feeds the retailer <datalist> suggestions on the admin product form.
    """
    query = (
        "SELECT DISTINCT retailer FROM affiliate_products"
        " WHERE retailer != '' ORDER BY retailer"
    )
    return [record["retailer"] for record in await fetch_all(query)]
|
||||
|
||||
|
||||
def _parse_product(row) -> dict:
|
||||
"""Convert aiosqlite Row to plain dict, parsing JSON pros/cons arrays."""
|
||||
d = dict(row)
|
||||
d["pros"] = json.loads(d.get("pros") or "[]")
|
||||
d["cons"] = json.loads(d.get("cons") or "[]")
|
||||
return d
|
||||
@@ -4,6 +4,10 @@ DuckDB read-only analytics reader.
|
||||
Opens a single long-lived DuckDB connection at startup (read_only=True).
|
||||
All queries run via asyncio.to_thread() to avoid blocking the event loop.
|
||||
|
||||
When export_serving.py atomically renames a new analytics.duckdb into place,
|
||||
_check_and_reopen() detects the inode change and transparently reopens —
|
||||
no app restart required.
|
||||
|
||||
Usage:
|
||||
from .analytics import fetch_analytics, execute_user_query
|
||||
|
||||
@@ -14,6 +18,7 @@ Usage:
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
@@ -21,6 +26,8 @@ from typing import Any
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_conn = None # duckdb.DuckDBPyConnection | None — lazy import
|
||||
_conn_inode: int | None = None
|
||||
_reopen_lock = threading.Lock()
|
||||
_DUCKDB_PATH = os.environ.get("SERVING_DUCKDB_PATH", "data/analytics.duckdb")
|
||||
|
||||
# DuckDB queries run in the asyncio thread pool. Cap them so a slow scan
|
||||
def open_analytics_db() -> None:
    """Open the DuckDB connection. Call once at app startup.

    A missing database file is not an error: the app may boot before the
    first analytics export exists, in which case queries return empty.
    """
    import duckdb

    global _conn, _conn_inode
    path = Path(_DUCKDB_PATH)
    if not path.exists():
        # Database doesn't exist yet — skip silently. Queries will return empty.
        return
    _conn = duckdb.connect(str(path), read_only=True)
    # Remember the inode so _check_and_reopen() can detect an atomic replace.
    _conn_inode = path.stat().st_ino
|
||||
|
||||
|
||||
def close_analytics_db() -> None:
    """Close the DuckDB connection. Call at app shutdown.

    Safe to call when no connection was ever opened (no-op).
    """
    global _conn, _conn_inode
    if _conn is not None:
        _conn.close()
        _conn = None
        # Clear the inode marker so a later open_analytics_db() starts fresh.
        _conn_inode = None
||||
|
||||
|
||||
def _check_and_reopen() -> None:
    """Reopen the connection if analytics.duckdb was atomically replaced (new inode).

    Called at the start of each query. Requires a directory bind mount (not a file
    bind mount) so that os.stat() inside the container sees the new inode after rename.

    Thread-safety: the cheap inode comparison runs unlocked as a fast path;
    the actual reopen is serialized by _reopen_lock with a double-check, so
    concurrent queries trigger at most one reconnect per file swap.
    """
    global _conn, _conn_inode
    import duckdb

    path = Path(_DUCKDB_PATH)
    try:
        current_inode = path.stat().st_ino
    except OSError:
        # File missing (e.g. mid-swap) — keep the current connection for now.
        return

    if current_inode == _conn_inode:
        return  # same file — nothing to do

    with _reopen_lock:
        # Double-check under lock to avoid concurrent reopens.
        try:
            current_inode = path.stat().st_ino
        except OSError:
            return
        if current_inode == _conn_inode:
            return

        old_conn = _conn
        try:
            new_conn = duckdb.connect(str(path), read_only=True)
        except Exception:
            # Keep serving from the old connection rather than failing queries.
            logger.exception("Failed to reopen analytics DB after file change")
            return

        # Publish the new connection before closing the old one so readers
        # never observe a closed handle in _conn.
        _conn = new_conn
        _conn_inode = current_inode
        logger.info("Analytics DB reopened (inode changed to %d)", current_inode)

        if old_conn is not None:
            try:
                old_conn.close()
            except Exception:
                # Best-effort cleanup; a stale read-only handle is harmless.
                pass
|
||||
|
||||
|
||||
async def fetch_analytics(sql: str, params: list | None = None) -> list[dict[str, Any]]:
|
||||
@@ -61,7 +115,11 @@ async def fetch_analytics(sql: str, params: list | None = None) -> list[dict[str
|
||||
return []
|
||||
|
||||
def _run() -> list[dict]:
|
||||
cur = _conn.cursor()
|
||||
_check_and_reopen()
|
||||
conn = _conn
|
||||
if conn is None:
|
||||
return []
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
rel = cur.execute(sql, params or [])
|
||||
cols = [d[0] for d in rel.description]
|
||||
@@ -104,8 +162,12 @@ async def execute_user_query(
|
||||
return [], [], "Analytics database is not available.", 0.0
|
||||
|
||||
def _run() -> tuple[list[str], list[tuple], str | None, float]:
|
||||
_check_and_reopen()
|
||||
conn = _conn
|
||||
if conn is None:
|
||||
return [], [], "Analytics database is not available.", 0.0
|
||||
t0 = time.monotonic()
|
||||
cur = _conn.cursor()
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
rel = cur.execute(sql)
|
||||
cols = [d[0] for d in rel.description]
|
||||
|
||||
@@ -280,6 +280,49 @@ def create_app() -> Quart:
|
||||
except Exception as e:
|
||||
return {"status": "unhealthy", "db": str(e)}, 500
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Affiliate click redirect — language-agnostic, no blueprint prefix
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
    @app.route("/go/<slug>")
    async def affiliate_redirect(slug: str):
        """302 redirect to affiliate URL, logging the click.

        Uses 302 (not 301) so every hit is tracked — browsers don't cache 302s.
        Extracts article_slug and lang from Referer header best-effort.
        Rate limiting only suppresses click logging; the redirect always happens
        so users are never blocked from reaching the retailer.
        """
        from .affiliate import get_product, log_click
        from .core import check_rate_limit

        # Extract lang from Referer path (e.g. /de/blog/... → "de"), default de
        referer = request.headers.get("Referer", "")
        lang = "de"
        article_slug = None
        if referer:
            try:
                from urllib.parse import urlparse
                ref_path = urlparse(referer).path
                parts = ref_path.strip("/").split("/")
                # Heuristic: a two-letter first path segment is a language prefix.
                if parts and len(parts[0]) == 2:
                    lang = parts[0]
                if len(parts) > 1:
                    article_slug = parts[-1] or None
            except Exception:
                # Referer is attacker-controlled; never let parsing break the redirect.
                pass

        product = await get_product(slug, lang)
        if not product:
            abort(404)

        ip = request.remote_addr or "unknown"
        # 60 redirects/min per IP; beyond that we still redirect but stop logging.
        allowed, _info = await check_rate_limit(f"aff:{ip}", limit=60, window=60)
        if not allowed:
            # Still redirect even if rate-limited; just don't log the click
            return redirect(product["affiliate_url"], 302)

        await log_click(product["id"], ip, article_slug, referer or None)
        return redirect(product["affiliate_url"], 302)
|
||||
|
||||
# Legacy 301 redirects — bookmarked/cached URLs before lang prefixes existed
|
||||
@app.route("/terms")
|
||||
async def legacy_terms():
|
||||
|
||||
@@ -315,7 +315,7 @@ async def generate_articles(
|
||||
"""
|
||||
from ..core import execute as db_execute
|
||||
from ..planner.calculator import DEFAULTS, calc, validate_state
|
||||
from .routes import bake_scenario_cards, is_reserved_path
|
||||
from .routes import bake_product_cards, bake_scenario_cards, is_reserved_path
|
||||
|
||||
assert articles_per_day > 0, "articles_per_day must be positive"
|
||||
|
||||
@@ -443,6 +443,7 @@ async def generate_articles(
|
||||
body_html = await bake_scenario_cards(
|
||||
body_html, lang=lang, scenario_overrides=scenario_overrides
|
||||
)
|
||||
body_html = await bake_product_cards(body_html, lang=lang)
|
||||
t_bake += time.perf_counter() - t0
|
||||
|
||||
# Extract FAQ pairs for structured data
|
||||
@@ -584,7 +585,7 @@ async def preview_article(
|
||||
No disk write, no DB insert. Returns {title, url_path, html, meta_description}.
|
||||
"""
|
||||
from ..planner.calculator import DEFAULTS, calc, validate_state
|
||||
from .routes import bake_scenario_cards
|
||||
from .routes import bake_product_cards, bake_scenario_cards
|
||||
|
||||
config = load_template(slug)
|
||||
|
||||
@@ -641,6 +642,7 @@ async def preview_article(
|
||||
body_html = await bake_scenario_cards(
|
||||
body_html, lang=lang, scenario_overrides=scenario_overrides,
|
||||
)
|
||||
body_html = await bake_product_cards(body_html, lang=lang)
|
||||
|
||||
return {
|
||||
"title": title,
|
||||
|
||||
@@ -27,6 +27,8 @@ RESERVED_PREFIXES = (
|
||||
)
|
||||
|
||||
SCENARIO_RE = re.compile(r'\[scenario:([a-z0-9_-]+)(?::([a-z]+))?\]')
|
||||
PRODUCT_RE = re.compile(r'\[product:([a-z0-9_-]+)\]')
|
||||
PRODUCT_GROUP_RE = re.compile(r'\[product-group:([a-z0-9_-]+)\]')
|
||||
|
||||
SECTION_TEMPLATES = {
|
||||
None: "partials/scenario_summary.html",
|
||||
@@ -112,6 +114,53 @@ async def bake_scenario_cards(
|
||||
return html
|
||||
|
||||
|
||||
async def bake_product_cards(html: str, lang: str = "de") -> str:
    """Replace [product:slug] and [product-group:category] markers with rendered HTML.

    Two sequential passes: single-product markers first, then group markers.
    All DB lookups are awaited up front (regex replacement callbacks cannot
    be async); a marker whose slug/category resolves to nothing is left in
    the text untouched.
    """
    from ..affiliate import get_product, get_products_by_category

    t = get_translations(lang)

    # ── Pass 1: [product:slug] ────────────────────────────────────────────────
    slug_hits = list(PRODUCT_RE.finditer(html))
    if slug_hits:
        lookup: dict[str, dict | None] = {}
        for wanted in {m.group(1) for m in slug_hits}:
            lookup[wanted] = await get_product(wanted, lang)

        card_tmpl = _bake_env.get_template("partials/product_card.html")

        def _render_card(match):
            found = lookup.get(match.group(1))
            if not found:
                return match.group(0)  # unknown slug — keep marker as-is
            return card_tmpl.render(product=found, lang=lang, t=t)

        html = PRODUCT_RE.sub(_render_card, html)

    # ── Pass 2: [product-group:category] ─────────────────────────────────────
    group_hits = list(PRODUCT_GROUP_RE.finditer(html))
    if group_hits:
        by_category: dict[str, list] = {}
        for wanted in {m.group(1) for m in group_hits}:
            by_category[wanted] = await get_products_by_category(wanted, lang)

        grid_tmpl = _bake_env.get_template("partials/product_group.html")

        def _render_grid(match):
            members = by_category.get(match.group(1), [])
            if not members:
                return match.group(0)  # empty category — keep marker as-is
            return grid_tmpl.render(
                products=members, category=match.group(1), lang=lang, t=t
            )

        html = PRODUCT_GROUP_RE.sub(_render_grid, html)

    return html
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Markets Hub
|
||||
# =============================================================================
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
{# Affiliate product card — editorial recommendation style.
   Variables: product (dict with parsed pros/cons lists), t (translations), lang.
   Rendered bake-time by bake_product_cards(); no request context available. #}
{%- set price_eur = (product.price_cents / 100) if product.price_cents else none -%}
{%- set cta = product.cta_label if product.cta_label else t.affiliate_cta_buy -%}
<div class="aff-card" style="background:#fff;border:1px solid #E2E8F0;border-radius:16px;padding:1.5rem;margin:1.5rem 0;box-shadow:0 1px 3px rgba(0,0,0,.05);transition:transform .2s,box-shadow .2s;" onmouseover="this.style.transform='translateY(-2px)';this.style.boxShadow='0 8px 24px rgba(0,0,0,.08)'" onmouseout="this.style.transform='';this.style.boxShadow='0 1px 3px rgba(0,0,0,.05)'">
<div style="display:flex;gap:1.25rem;align-items:flex-start;flex-wrap:wrap;">

{# ── Image — falls back to a placeholder glyph when image_url is unset ── #}
<div style="width:160px;flex-shrink:0;aspect-ratio:1;border-radius:12px;background:#F8FAFC;border:1px solid #E2E8F0;overflow:hidden;display:flex;align-items:center;justify-content:center;">
{% if product.image_url %}
<img src="{{ product.image_url }}" alt="{{ product.name }}" style="width:100%;height:100%;object-fit:contain;" loading="lazy">
{% else %}
<svg width="48" height="48" fill="none" stroke="#CBD5E1" stroke-width="1.5" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" d="m2.25 15.75 5.159-5.159a2.25 2.25 0 0 1 3.182 0l5.159 5.159m-1.5-1.5 1.409-1.409a2.25 2.25 0 0 1 3.182 0l2.909 2.909m-18 3.75h16.5a1.5 1.5 0 0 0 1.5-1.5V6a1.5 1.5 0 0 0-1.5-1.5H3.75A1.5 1.5 0 0 0 2.25 6v12a1.5 1.5 0 0 0 1.5 1.5Z"/></svg>
{% endif %}
</div>

{# ── Content ── #}
<div style="flex:1;min-width:0;">

{# Brand + retailer #}
<div style="display:flex;align-items:center;gap:.5rem;margin-bottom:.375rem;">
{% if product.brand %}
<span style="text-transform:uppercase;font-size:.6875rem;font-weight:600;letter-spacing:.06em;color:#64748B;">{{ product.brand }}</span>
{% endif %}
{% if product.retailer %}
<span style="background:#F1F5F9;border-radius:999px;padding:2px 8px;font-size:.625rem;font-weight:600;color:#64748B;letter-spacing:.04em;text-transform:uppercase;">{{ t.affiliate_at_retailer | tformat(retailer=product.retailer) }}</span>
{% endif %}
</div>

{# Name #}
<h3 style="font-family:'Bricolage Grotesque',sans-serif;font-size:1.125rem;font-weight:700;color:#0F172A;letter-spacing:-.01em;margin:0 0 .375rem;">{{ product.name }}</h3>

{# Rating.
   NOTE(review): a half rating is rendered as a full ★ (no half-star glyph),
   so e.g. 3.5 shows four solid stars — confirm this is intended. #}
{% if product.rating %}
{%- set stars_full = product.rating | int -%}
{%- set has_half = (product.rating - stars_full) >= 0.5 -%}
<div style="display:flex;align-items:center;gap:.25rem;margin-bottom:.375rem;">
<span style="color:#D97706;font-size:.9375rem;">
{%- for i in range(stars_full) %}★{% endfor -%}
{%- if has_half %}★{% endif -%}
{%- for i in range(5 - stars_full - (1 if has_half else 0)) %}<span style="color:#E2E8F0;">★</span>{% endfor -%}
</span>
<span style="font-size:.8125rem;color:#64748B;">{{ "%.1f" | format(product.rating) }}</span>
</div>
{% endif %}

{# Price — German decimal-comma formatting #}
{% if price_eur %}
<div style="font-family:'Commit Mono',monospace;font-size:1.25rem;font-weight:700;color:#0F172A;margin-bottom:.5rem;">{{ "%.2f" | format(price_eur) | replace('.', ',') }} €</div>
{% endif %}

{# Description — clamped to two lines #}
{% if product.description %}
<p style="font-size:.875rem;color:#475569;line-height:1.55;margin:.625rem 0;overflow:hidden;display:-webkit-box;-webkit-line-clamp:2;-webkit-box-orient:vertical;">{{ product.description }}</p>
{% endif %}

{# Pros #}
{% if product.pros %}
<ul style="list-style:none;padding:0;margin:.625rem 0 .25rem;">
{% for pro in product.pros %}
<li style="font-size:.8125rem;color:#475569;line-height:1.7;"><span style="color:#16A34A;margin-right:.25rem;">✓</span>{{ pro }}</li>
{% endfor %}
</ul>
{% endif %}

{# Cons #}
{% if product.cons %}
<ul style="list-style:none;padding:0;margin:.25rem 0 .75rem;">
{% for con in product.cons %}
<li style="font-size:.8125rem;color:#475569;line-height:1.7;"><span style="color:#EF4444;margin-right:.25rem;">✗</span>{{ con }}</li>
{% endfor %}
</ul>
{% endif %}

{# CTA — goes through the /go/<slug> click-tracking redirect #}
<a href="/go/{{ product.slug }}" rel="sponsored nofollow noopener" target="_blank"
style="display:block;width:100%;background:#1D4ED8;color:#fff;border-radius:12px;padding:.625rem 1.25rem;font-weight:600;font-size:.875rem;text-align:center;text-decoration:none;box-shadow:0 2px 10px rgba(29,78,216,.25);transition:background .2s,transform .2s;margin-top:.5rem;"
onmouseover="this.style.background='#1E40AF';this.style.transform='translateY(-1px)'"
onmouseout="this.style.background='#1D4ED8';this.style.transform=''">
{{ cta }} →
</a>

{# Disclosure #}
<p style="font-size:.6875rem;color:#94A3B8;font-style:italic;margin:.5rem 0 0;text-align:center;">{{ t.affiliate_disclosure }}</p>

</div>
</div>
</div>
|
||||
@@ -0,0 +1,68 @@
|
||||
{# Affiliate product comparison grid — editorial picks layout.
   Variables: products (list of dicts), category (str), t (translations), lang.
   Rendered bake-time by bake_product_cards(). #}
{% if products %}
<div style="margin:2rem 0;">

{# Section header: picks label + capitalized category name #}
<div style="text-transform:uppercase;font-size:.75rem;font-weight:600;color:#64748B;letter-spacing:.06em;margin-bottom:1rem;padding-bottom:.75rem;border-bottom:2px solid #E2E8F0;">
{{ t.affiliate_our_picks }} · {{ category | capitalize }}
</div>

{# Responsive grid of compact cards #}
<div style="display:grid;grid-template-columns:repeat(auto-fill,minmax(220px,1fr));gap:1rem;">
{% for product in products %}
{# price_cents is stored in cents; NOTE(review): a falsy value (none OR 0)
   yields price_eur = none, so a genuine zero price would be hidden — confirm
   0 is never a legitimate price #}
{%- set price_eur = (product.price_cents / 100) if product.price_cents else none -%}
{# per-product CTA label override, falling back to the i18n default #}
{%- set cta = product.cta_label if product.cta_label else t.affiliate_cta_buy -%}
<div class="aff-card-compact" style="background:#fff;border:1px solid #E2E8F0;border-radius:16px;padding:1rem;display:flex;flex-direction:column;gap:.5rem;transition:transform .2s,box-shadow .2s;" onmouseover="this.style.transform='translateY(-2px)';this.style.boxShadow='0 8px 24px rgba(0,0,0,.08)'" onmouseout="this.style.transform='';this.style.boxShadow=''">

{# Image — square slot; falls back to an inline SVG placeholder #}
<div style="aspect-ratio:1;border-radius:10px;background:#F8FAFC;border:1px solid #E2E8F0;overflow:hidden;display:flex;align-items:center;justify-content:center;">
{% if product.image_url %}
<img src="{{ product.image_url }}" alt="{{ product.name }}" style="width:100%;height:100%;object-fit:contain;" loading="lazy">
{% else %}
<svg width="36" height="36" fill="none" stroke="#CBD5E1" stroke-width="1.5" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" d="m2.25 15.75 5.159-5.159a2.25 2.25 0 0 1 3.182 0l5.159 5.159m-1.5-1.5 1.409-1.409a2.25 2.25 0 0 1 3.182 0l2.909 2.909m-18 3.75h16.5a1.5 1.5 0 0 0 1.5-1.5V6a1.5 1.5 0 0 0-1.5-1.5H3.75A1.5 1.5 0 0 0 2.25 6v12a1.5 1.5 0 0 0 1.5 1.5Z"/></svg>
{% endif %}
</div>

{# Brand (optional) #}
{% if product.brand %}
<span style="text-transform:uppercase;font-size:.625rem;font-weight:600;letter-spacing:.06em;color:#94A3B8;">{{ product.brand }}</span>
{% endif %}

{# Name #}
<h4 style="font-family:'Bricolage Grotesque',sans-serif;font-size:1rem;font-weight:700;color:#0F172A;letter-spacing:-.01em;margin:0;line-height:1.3;">{{ product.name }}</h4>

{# Rating + pros count badge (both optional) #}
<div style="display:flex;align-items:center;gap:.5rem;flex-wrap:wrap;">
{% if product.rating %}
<span style="color:#D97706;font-size:.8125rem;">★</span>
<span style="font-size:.75rem;color:#64748B;">{{ "%.1f" | format(product.rating) }}</span>
{% endif %}
{% if product.pros %}
<span style="font-size:.6875rem;color:#16A34A;background:#F0FDF4;border-radius:999px;padding:1px 6px;">{{ product.pros | length }} {{ t.affiliate_pros_label }}</span>
{% endif %}
</div>

{# Price — decimal comma for the EUR locale #}
{% if price_eur %}
<div style="font-family:'Commit Mono',monospace;font-size:1.0625rem;font-weight:700;color:#0F172A;">{{ "%.2f" | format(price_eur) | replace('.', ',') }} €</div>
{% endif %}

{# CTA — pushed to bottom via margin-top:auto; /go/ redirect logs the click #}
<a href="/go/{{ product.slug }}" rel="sponsored nofollow noopener" target="_blank"
style="display:block;background:#1D4ED8;color:#fff;border-radius:10px;padding:.5rem 1rem;font-weight:600;font-size:.8125rem;text-align:center;text-decoration:none;margin-top:auto;transition:background .2s;"
onmouseover="this.style.background='#1E40AF'"
onmouseout="this.style.background='#1D4ED8'">
{{ cta }} →
</a>

</div>
{% endfor %}
</div>

{# Shared disclosure — one line for the whole grid #}
<p style="font-size:.6875rem;color:#94A3B8;font-style:italic;margin:.75rem 0 0;text-align:center;">{{ t.affiliate_disclosure }}</p>

</div>
{% endif %}
|
||||
@@ -1777,5 +1777,12 @@
|
||||
"report_q1_confirmed_title": "Download bereit",
|
||||
"report_q1_confirmed_body": "Unten auf den Button klicken, um das vollständige Bericht-PDF zu öffnen.",
|
||||
"report_q1_download_btn": "PDF herunterladen",
|
||||
"report_q1_download_note": "PDF öffnet im Browser. Rechtsklick zum Speichern."
|
||||
"report_q1_download_note": "PDF öffnet im Browser. Rechtsklick zum Speichern.",
|
||||
|
||||
"affiliate_cta_buy": "Zum Angebot",
|
||||
"affiliate_disclosure": "Affiliate-Link — wir erhalten eine Provision ohne Mehrkosten für dich.",
|
||||
"affiliate_pros_label": "Vorteile",
|
||||
"affiliate_cons_label": "Nachteile",
|
||||
"affiliate_at_retailer": "bei {retailer}",
|
||||
"affiliate_our_picks": "Unsere Empfehlungen"
|
||||
}
|
||||
@@ -1780,5 +1780,12 @@
|
||||
"report_q1_confirmed_title": "Your download is ready",
|
||||
"report_q1_confirmed_body": "Click below to open the full report PDF.",
|
||||
"report_q1_download_btn": "Download PDF",
|
||||
"report_q1_download_note": "PDF opens in your browser. Right-click to save."
|
||||
"report_q1_download_note": "PDF opens in your browser. Right-click to save.",
|
||||
|
||||
"affiliate_cta_buy": "View offer",
|
||||
"affiliate_disclosure": "Affiliate link — we may earn a commission at no extra cost to you.",
|
||||
"affiliate_pros_label": "Pros",
|
||||
"affiliate_cons_label": "Cons",
|
||||
"affiliate_at_retailer": "at {retailer}",
|
||||
"affiliate_our_picks": "Our picks"
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
"""Migration 0026: Affiliate product catalog + click tracking tables.
|
||||
|
||||
affiliate_products: admin-managed product catalog for editorial affiliate cards.
|
||||
- slug+language uniqueness mirrors articles (same slug can exist in DE + EN
|
||||
with different affiliate URLs, copy, and pros/cons).
|
||||
- retailer: display name (Amazon, Padel Nuestro, etc.); the tracking params
  are already baked into the full affiliate_url.
|
||||
- cta_label: per-product override; empty → use i18n default "Zum Angebot".
|
||||
- status: draft/active/archived — only active products are baked into articles.
|
||||
|
||||
affiliate_clicks: one row per /go/<slug> redirect hit.
|
||||
- ip_hash: SHA256(ip + YYYY-MM-DD + SECRET_KEY[:16]), daily rotation for GDPR.
|
||||
- article_slug: best-effort extraction from Referer header.
|
||||
"""
|
||||
|
||||
|
||||
def up(conn) -> None:
    """Create the affiliate catalog and click-tracking tables plus their indexes.

    Args:
        conn: open sqlite3-compatible connection. Statements are executed
            directly; committing is left to the migration runner.
    """
    ddl_statements = (
        # Admin-managed product catalog. (slug, language) is the natural key,
        # so the same product can carry per-language copy and affiliate URLs.
        """
        CREATE TABLE affiliate_products (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            slug TEXT NOT NULL,
            name TEXT NOT NULL,
            brand TEXT NOT NULL DEFAULT '',
            category TEXT NOT NULL DEFAULT 'accessory',
            retailer TEXT NOT NULL DEFAULT '',
            affiliate_url TEXT NOT NULL,
            image_url TEXT NOT NULL DEFAULT '',
            price_cents INTEGER,
            currency TEXT NOT NULL DEFAULT 'EUR',
            rating REAL,
            pros TEXT NOT NULL DEFAULT '[]',
            cons TEXT NOT NULL DEFAULT '[]',
            description TEXT NOT NULL DEFAULT '',
            cta_label TEXT NOT NULL DEFAULT '',
            status TEXT NOT NULL DEFAULT 'draft',
            language TEXT NOT NULL DEFAULT 'de',
            sort_order INTEGER NOT NULL DEFAULT 0,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT,
            UNIQUE(slug, language)
        )
        """,
        # One row per /go/<slug> redirect hit; ip_hash is a daily-rotated
        # hash, never the raw address.
        """
        CREATE TABLE affiliate_clicks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            product_id INTEGER NOT NULL REFERENCES affiliate_products(id),
            article_slug TEXT,
            referrer TEXT,
            ip_hash TEXT NOT NULL,
            clicked_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
        """,
        # Read paths: products filtered by category+status; clicks by product
        # and by time window (dashboard charts).
        "CREATE INDEX idx_affiliate_products_category_status"
        " ON affiliate_products(category, status)",
        "CREATE INDEX idx_affiliate_clicks_product_id"
        " ON affiliate_clicks(product_id)",
        "CREATE INDEX idx_affiliate_clicks_clicked_at"
        " ON affiliate_clicks(clicked_at)",
    )
    for ddl in ddl_statements:
        conn.execute(ddl)
|
||||
@@ -284,6 +284,184 @@ LEADS = [
|
||||
]
|
||||
|
||||
|
||||
AFFILIATE_PRODUCTS = [
|
||||
# Rackets
|
||||
{
|
||||
"slug": "bullpadel-vertex-04-amazon",
|
||||
"name": "Bullpadel Vertex 04",
|
||||
"brand": "Bullpadel",
|
||||
"category": "racket",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST01?tag=padelnomics-21",
|
||||
"price_cents": 17999,
|
||||
"rating": 4.7,
|
||||
"pros": '["Carbon-Rahmen für maximale Power", "Diamant-Form für aggressive Spieler", "Sehr gute Balance"]',
|
||||
"cons": '["Nur für fortgeschrittene Spieler", "Höherer Preis"]',
|
||||
"description": "Der Vertex 04 ist der Flaggschiff-Schläger von Bullpadel für Power-Spieler.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 1,
|
||||
},
|
||||
{
|
||||
"slug": "head-delta-pro-amazon",
|
||||
"name": "HEAD Delta Pro",
|
||||
"brand": "HEAD",
|
||||
"category": "racket",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST02?tag=padelnomics-21",
|
||||
"price_cents": 14999,
|
||||
"rating": 4.5,
|
||||
"pros": '["Sehr kontrollorientiert", "Ideal für Defensivspieler", "Leicht"]',
|
||||
"cons": '["Weniger Power als Diamant-Formen"]',
|
||||
"description": "Runde Form mit perfekter Kontrolle — ideal für Einsteiger und Defensivspieler.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 2,
|
||||
},
|
||||
{
|
||||
"slug": "adidas-metalbone-30-amazon",
|
||||
"name": "Adidas Metalbone 3.0",
|
||||
"brand": "Adidas",
|
||||
"category": "racket",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST03?tag=padelnomics-21",
|
||||
"price_cents": 18999,
|
||||
"rating": 4.8,
|
||||
"pros": '["Brutale Power", "Hochwertige Verarbeitung", "Sehr beliebt auf Pro-Tour"]',
|
||||
"cons": '["Teuer", "Gewöhnungsbedürftig"]',
|
||||
"description": "Das Flaggschiff von Adidas Padel — getragen von den besten Profis der Welt.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 3,
|
||||
},
|
||||
{
|
||||
"slug": "wilson-bela-pro-v2-amazon",
|
||||
"name": "Wilson Bela Pro v2",
|
||||
"brand": "Wilson",
|
||||
"category": "racket",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST04?tag=padelnomics-21",
|
||||
"price_cents": 16999,
|
||||
"rating": 4.6,
|
||||
"pros": '["Bekannter Signature-Schläger", "Gute Mischung aus Power und Kontrolle"]',
|
||||
"cons": '["Fortgeschrittene bevorzugt"]',
|
||||
"description": "Der Schläger von Fernando Belasteguín — einer der meistgekauften Schläger weltweit.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 4,
|
||||
},
|
||||
# Beginner racket — draft (tests that draft products are excluded from public)
|
||||
{
|
||||
"slug": "dunlop-aero-star-amazon",
|
||||
"name": "Dunlop Aero Star",
|
||||
"brand": "Dunlop",
|
||||
"category": "racket",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST05?tag=padelnomics-21",
|
||||
"price_cents": 8999,
|
||||
"rating": 4.2,
|
||||
"pros": '["Günstig", "Für Einsteiger ideal"]',
|
||||
"cons": '["Wenig Power für Fortgeschrittene"]',
|
||||
"description": "Solider Einsteigerschläger für unter 90 Euro.",
|
||||
"status": "draft",
|
||||
"language": "de",
|
||||
"sort_order": 5,
|
||||
},
|
||||
# Shoes
|
||||
{
|
||||
"slug": "adidas-adipower-ctrl-amazon",
|
||||
"name": "Adidas Adipower Ctrl",
|
||||
"brand": "Adidas",
|
||||
"category": "shoe",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST10?tag=padelnomics-21",
|
||||
"price_cents": 9999,
|
||||
"rating": 4.4,
|
||||
"pros": '["Hervorragender Halt auf Sand", "Leicht und atmungsaktiv"]',
|
||||
"cons": '["Größenfehler möglich — eine Größe größer bestellen"]',
|
||||
"description": "Professioneller Padelschuh mit optimierter Sohle für Sand- und Kunstrasencourts.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 1,
|
||||
},
|
||||
{
|
||||
"slug": "babolat-jet-premura-amazon",
|
||||
"name": "Babolat Jet Premura",
|
||||
"brand": "Babolat",
|
||||
"category": "shoe",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST11?tag=padelnomics-21",
|
||||
"price_cents": 11999,
|
||||
"rating": 4.6,
|
||||
"pros": '["Sehr leicht", "Gute Dämpfung", "Stylisches Design"]',
|
||||
"cons": '["Teurer als Mitbewerber"]',
|
||||
"description": "Ultraleichter Padelschuh von Babolat — ideal für schnelle Spieler.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 2,
|
||||
},
|
||||
# Balls
|
||||
{
|
||||
"slug": "head-padel-pro-balls-amazon",
|
||||
"name": "HEAD Padel Pro Bälle (3er-Dose)",
|
||||
"brand": "HEAD",
|
||||
"category": "ball",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST20?tag=padelnomics-21",
|
||||
"price_cents": 799,
|
||||
"rating": 4.5,
|
||||
"pros": '["Offizieller Turnierball", "Guter Druckerhalt", "Günstig"]',
|
||||
"cons": '["Bei intensivem Spiel nach 4–5 Sessions platter"]',
|
||||
"description": "Offizieller Turnierball von HEAD — der am häufigsten gespielte Padelball in Europa.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 1,
|
||||
},
|
||||
# Grips/Accessories
|
||||
{
|
||||
"slug": "bullpadel-overgrip-3er-amazon",
|
||||
"name": "Bullpadel Overgrip (3er-Pack)",
|
||||
"brand": "Bullpadel",
|
||||
"category": "grip",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST30?tag=padelnomics-21",
|
||||
"price_cents": 499,
|
||||
"rating": 4.3,
|
||||
"pros": '["Günstig", "Guter Halt auch bei Schweiß", "Einfach zu wechseln"]',
|
||||
"cons": '["Hält weniger lang als Originalgriff"]',
|
||||
"description": "Günstiges Overgrip-Set — jeder Padelspieler sollte regelmäßig wechseln.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 1,
|
||||
},
|
||||
{
|
||||
"slug": "nox-padel-bag-amazon",
|
||||
"name": "NOX ML10 Schläger-Tasche",
|
||||
"brand": "NOX",
|
||||
"category": "accessory",
|
||||
"retailer": "Amazon",
|
||||
"affiliate_url": "https://www.amazon.de/dp/B0CXTEST40?tag=padelnomics-21",
|
||||
"price_cents": 5999,
|
||||
"rating": 4.4,
|
||||
"pros": '["Platz für 2 Schläger", "Gepolstertes Schlägerfach", "Robustes Material"]',
|
||||
"cons": '["Kein Schuhfach"]',
|
||||
"description": "Praktische Padelschläger-Tasche mit Platz für 2 Schläger und Zubehör.",
|
||||
"status": "active",
|
||||
"language": "de",
|
||||
"sort_order": 1,
|
||||
},
|
||||
]
|
||||
|
||||
# Article slugs for realistic click referrers
|
||||
_ARTICLE_SLUGS = [
|
||||
"beste-padelschlaeger-2026",
|
||||
"padelschlaeger-anfaenger",
|
||||
"padelschuhe-test",
|
||||
"padelbaelle-vergleich",
|
||||
"padel-zubehoer",
|
||||
]
|
||||
|
||||
|
||||
def main():
|
||||
db_path = DATABASE_PATH
|
||||
if not Path(db_path).exists():
|
||||
@@ -481,6 +659,72 @@ def main():
|
||||
)
|
||||
logger.info(" PadelTech unlocked lead #%s", lead_id)
|
||||
|
||||
# 7. Seed affiliate products
|
||||
logger.info("Seeding %s affiliate products...", len(AFFILIATE_PRODUCTS))
|
||||
product_ids: dict[str, int] = {}
|
||||
for p in AFFILIATE_PRODUCTS:
|
||||
existing = conn.execute(
|
||||
"SELECT id FROM affiliate_products WHERE slug = ? AND language = ?",
|
||||
(p["slug"], p["language"]),
|
||||
).fetchone()
|
||||
if existing:
|
||||
product_ids[p["slug"]] = existing["id"]
|
||||
logger.info(" %s already exists (id=%s)", p["name"], existing["id"])
|
||||
continue
|
||||
cursor = conn.execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, currency, rating, pros, cons, description,
|
||||
status, language, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
p["slug"], p["name"], p["brand"], p["category"], p["retailer"],
|
||||
p["affiliate_url"], p["price_cents"], p["rating"],
|
||||
p["pros"], p["cons"], p["description"],
|
||||
p["status"], p["language"], p["sort_order"],
|
||||
),
|
||||
)
|
||||
product_ids[p["slug"]] = cursor.lastrowid
|
||||
logger.info(" %s -> id=%s (%s)", p["name"], cursor.lastrowid, p["status"])
|
||||
|
||||
# 8. Seed affiliate clicks (realistic 30-day spread for dashboard charts)
|
||||
logger.info("Seeding affiliate clicks...")
|
||||
import random
|
||||
rng = random.Random(42)
|
||||
# click distribution: more on popular rackets, fewer on accessories
|
||||
click_weights = [
|
||||
("bullpadel-vertex-04-amazon", "beste-padelschlaeger-2026", 52),
|
||||
("adidas-metalbone-30-amazon", "beste-padelschlaeger-2026", 41),
|
||||
("head-delta-pro-amazon", "padelschlaeger-anfaenger", 38),
|
||||
("wilson-bela-pro-v2-amazon", "padelschlaeger-anfaenger", 29),
|
||||
("adidas-adipower-ctrl-amazon", "padelschuhe-test", 24),
|
||||
("babolat-jet-premura-amazon", "padelschuhe-test", 18),
|
||||
("head-padel-pro-balls-amazon", "padelbaelle-vergleich", 15),
|
||||
("bullpadel-overgrip-3er-amazon", "padel-zubehoer", 11),
|
||||
("nox-padel-bag-amazon", "padel-zubehoer", 8),
|
||||
]
|
||||
existing_click_count = conn.execute("SELECT COUNT(*) FROM affiliate_clicks").fetchone()[0]
|
||||
if existing_click_count == 0:
|
||||
for slug, article_slug, count in click_weights:
|
||||
pid = product_ids.get(slug)
|
||||
if not pid:
|
||||
continue
|
||||
for _ in range(count):
|
||||
days_ago = rng.randint(0, 29)
|
||||
hours_ago = rng.randint(0, 23)
|
||||
clicked_at = (now - timedelta(days=days_ago, hours=hours_ago)).strftime("%Y-%m-%d %H:%M:%S")
|
||||
ip_hash = f"dev_{slug}_{_:04d}" # stable fake hash (not real SHA256)
|
||||
conn.execute(
|
||||
"""INSERT INTO affiliate_clicks
|
||||
(product_id, article_slug, referrer, ip_hash, clicked_at)
|
||||
VALUES (?, ?, ?, ?, ?)""",
|
||||
(pid, article_slug, f"https://padelnomics.io/de/blog/{article_slug}", ip_hash, clicked_at),
|
||||
)
|
||||
total_clicks = sum(c for _, _, c in click_weights)
|
||||
logger.info(" Inserted %s click events across 9 products", total_clicks)
|
||||
else:
|
||||
logger.info(" Clicks already seeded (%s rows), skipping", existing_click_count)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
|
||||
332
web/tests/test_affiliate.py
Normal file
332
web/tests/test_affiliate.py
Normal file
@@ -0,0 +1,332 @@
|
||||
"""
|
||||
Tests for the affiliate product system.
|
||||
|
||||
Covers: hash_ip determinism, product CRUD, bake_product_cards marker replacement,
|
||||
click redirect (302 + logged), rate limiting, inactive product 404, multi-retailer.
|
||||
"""
|
||||
import json
|
||||
from datetime import date
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from padelnomics.affiliate import (
|
||||
get_all_products,
|
||||
get_click_counts,
|
||||
get_click_stats,
|
||||
get_product,
|
||||
get_products_by_category,
|
||||
hash_ip,
|
||||
log_click,
|
||||
)
|
||||
from padelnomics.content.routes import PRODUCT_GROUP_RE, PRODUCT_RE, bake_product_cards
|
||||
from padelnomics.core import execute, fetch_all
|
||||
|
||||
# ── Helpers ────────────────────────────────────────────────────────────────────
|
||||
|
||||
async def _insert_product(
    slug="test-racket-amazon",
    name="Test Racket",
    brand="TestBrand",
    category="racket",
    retailer="Amazon",
    affiliate_url="https://amazon.de/dp/TEST?tag=test-21",
    status="active",
    language="de",
    price_cents=14999,
    pros=None,
    cons=None,
    sort_order=0,
) -> int:
    """Insert one affiliate product row and return its new id."""
    # pros/cons are stored as JSON text; minimal defaults when not supplied.
    pros_json = json.dumps(pros or ["Gut"])
    cons_json = json.dumps(cons or ["Teuer"])
    params = (
        slug, name, brand, category, retailer, affiliate_url,
        price_cents, status, language, pros_json, cons_json, sort_order,
    )
    return await execute(
        """INSERT INTO affiliate_products
        (slug, name, brand, category, retailer, affiliate_url,
        price_cents, currency, status, language, pros, cons, sort_order)
        VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?)""",
        params,
    )
|
||||
|
||||
|
||||
# ── hash_ip ────────────────────────────────────────────────────────────────────
|
||||
|
||||
def test_hash_ip_deterministic():
    """hash_ip is a pure function of (ip, current day): repeat calls match."""
    first = hash_ip("1.2.3.4")
    second = hash_ip("1.2.3.4")
    assert first == second
    # 64 hex chars == a SHA256 hex digest
    assert len(first) == 64
|
||||
|
||||
|
||||
def test_hash_ip_different_ips_differ():
    """Distinct IPs must not collide on the same day."""
    assert hash_ip("5.6.7.8") != hash_ip("1.2.3.4")
|
||||
|
||||
|
||||
def test_hash_ip_rotates_daily():
    """Same IP on different days hashes differently (GDPR daily rotation)."""
    with patch("padelnomics.affiliate.date") as fake_date:
        fake_date.today.return_value = date(2026, 2, 1)
        day_one = hash_ip("1.2.3.4")
        fake_date.today.return_value = date(2026, 2, 2)
        day_two = hash_ip("1.2.3.4")
    assert day_one != day_two
|
||||
|
||||
|
||||
# ── get_product ────────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_active_by_lang(db):
|
||||
"""get_product returns active product for correct language."""
|
||||
await _insert_product(slug="vertex-amazon", language="de", status="active")
|
||||
product = await get_product("vertex-amazon", "de")
|
||||
assert product is not None
|
||||
assert product["slug"] == "vertex-amazon"
|
||||
assert isinstance(product["pros"], list)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_draft_returns_none(db):
|
||||
"""Draft products are not returned."""
|
||||
await _insert_product(slug="vertex-draft", status="draft")
|
||||
product = await get_product("vertex-draft", "de")
|
||||
assert product is None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_lang_fallback(db):
|
||||
"""Falls back to any language when no match for requested lang."""
|
||||
await _insert_product(slug="vertex-de-only", language="de", status="active")
|
||||
# Request EN but only DE exists — should fall back
|
||||
product = await get_product("vertex-de-only", "en")
|
||||
assert product is not None
|
||||
assert product["language"] == "de"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_not_found(db):
|
||||
"""Returns None for unknown slug."""
|
||||
product = await get_product("nonexistent-slug", "de")
|
||||
assert product is None
|
||||
|
||||
|
||||
# ── get_products_by_category ───────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_products_by_category_sorted(db):
|
||||
"""Returns products sorted by sort_order."""
|
||||
await _insert_product(slug="racket-b", name="Racket B", sort_order=2)
|
||||
await _insert_product(slug="racket-a", name="Racket A", sort_order=1)
|
||||
products = await get_products_by_category("racket", "de")
|
||||
assert len(products) == 2
|
||||
assert products[0]["sort_order"] == 1
|
||||
assert products[1]["sort_order"] == 2
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_products_by_category_inactive_excluded(db):
|
||||
"""Draft and archived products are excluded."""
|
||||
await _insert_product(slug="racket-draft", status="draft")
|
||||
await _insert_product(slug="racket-archived", status="archived")
|
||||
products = await get_products_by_category("racket", "de")
|
||||
assert products == []
|
||||
|
||||
|
||||
# ── get_all_products ───────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_products_no_filter(db):
|
||||
"""Returns all products regardless of status."""
|
||||
await _insert_product(slug="p1", status="active")
|
||||
await _insert_product(slug="p2", status="draft")
|
||||
products = await get_all_products()
|
||||
assert len(products) == 2
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_products_status_filter(db):
|
||||
"""Status filter returns only matching rows."""
|
||||
await _insert_product(slug="p-active", status="active")
|
||||
await _insert_product(slug="p-draft", status="draft")
|
||||
active = await get_all_products(status="active")
|
||||
assert len(active) == 1
|
||||
assert active[0]["slug"] == "p-active"
|
||||
|
||||
|
||||
# ── log_click + get_click_counts ──────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_log_click_inserts_row(db):
|
||||
"""log_click inserts a row into affiliate_clicks."""
|
||||
product_id = await _insert_product(slug="clickable")
|
||||
await log_click(product_id, "1.2.3.4", "beste-padelschlaeger", "https://example.com/de/blog/test")
|
||||
rows = await fetch_all("SELECT * FROM affiliate_clicks WHERE product_id = ?", (product_id,))
|
||||
assert len(rows) == 1
|
||||
assert rows[0]["article_slug"] == "beste-padelschlaeger"
|
||||
# IP hash must not be the raw IP
|
||||
assert rows[0]["ip_hash"] != "1.2.3.4"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_click_counts(db):
|
||||
"""get_click_counts returns dict of product_id → count."""
|
||||
pid = await _insert_product(slug="tracked-product")
|
||||
await log_click(pid, "1.2.3.4", None, None)
|
||||
await log_click(pid, "5.6.7.8", None, None)
|
||||
counts = await get_click_counts()
|
||||
assert counts.get(pid) == 2
|
||||
|
||||
|
||||
# ── get_click_stats ────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_click_stats_structure(db):
|
||||
"""get_click_stats returns expected keys."""
|
||||
stats = await get_click_stats(days_count=30)
|
||||
assert "total_clicks" in stats
|
||||
assert "top_products" in stats
|
||||
assert "daily_bars" in stats
|
||||
assert "by_retailer" in stats
|
||||
|
||||
|
||||
# ── bake_product_cards ────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_replaces_marker(db):
|
||||
"""[product:slug] marker is replaced with rendered HTML."""
|
||||
await _insert_product(slug="vertex-04-amazon", name="Bullpadel Vertex 04", status="active")
|
||||
html = "<p>Intro</p>\n[product:vertex-04-amazon]\n<p>Outro</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert "[product:vertex-04-amazon]" not in result
|
||||
assert "Bullpadel Vertex 04" in result
|
||||
assert "/go/vertex-04-amazon" in result
|
||||
assert "sponsored" in result
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_missing_slug_passthrough(db):
|
||||
"""Unknown slugs pass through unchanged — no product card rendered."""
|
||||
html = "<p>Text</p>\n[product:nonexistent-slug]\n<p>End</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
# Surrounding content is intact; no product HTML injected
|
||||
assert "<p>Text</p>" in result
|
||||
assert "<p>End</p>" in result
|
||||
assert "<article" not in result # no product card rendered
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_group_marker(db):
|
||||
"""[product-group:category] renders a grid of products."""
|
||||
await _insert_product(slug="shoe-1-amazon", name="Test Shoe", category="shoe", status="active")
|
||||
html = "<h2>Shoes</h2>\n[product-group:shoe]\n<p>End</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert "[product-group:shoe]" not in result
|
||||
assert "Test Shoe" in result
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_no_markers(db):
|
||||
"""HTML without markers is returned unchanged."""
|
||||
html = "<p>No markers here.</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert result == html
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_draft_not_shown(db):
|
||||
"""Draft products are not baked into articles."""
|
||||
await _insert_product(slug="draft-product", name="Draft Product", status="draft")
|
||||
html = "[product:draft-product]"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert "Draft Product" not in result
|
||||
|
||||
|
||||
# ── regex patterns ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_product_re_matches():
    """PRODUCT_RE accepts well-formed [product:slug] markers."""
    for marker in ("[product:bullpadel-vertex-04-amazon]", "[product:test-123]"):
        assert PRODUCT_RE.match(marker)
|
||||
|
||||
|
||||
def test_product_group_re_matches():
    """PRODUCT_GROUP_RE accepts well-formed [product-group:category] markers."""
    for marker in ("[product-group:racket]", "[product-group:shoe]"):
        assert PRODUCT_GROUP_RE.match(marker)
|
||||
|
||||
|
||||
# ── multi-retailer ────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_multi_retailer_same_slug_different_lang(db):
|
||||
"""Same slug can exist in DE and EN with different affiliate URLs."""
|
||||
await _insert_product(
|
||||
slug="vertex-04", language="de",
|
||||
affiliate_url="https://amazon.de/dp/TEST?tag=de-21",
|
||||
)
|
||||
await execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, currency, status, language, pros, cons, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
"vertex-04", "Test Racket EN", "TestBrand", "racket", "Amazon UK",
|
||||
"https://amazon.co.uk/dp/TEST?tag=en-21",
|
||||
14999, "active", "en", "[]", "[]", 0,
|
||||
),
|
||||
)
|
||||
de_product = await get_product("vertex-04", "de")
|
||||
en_product = await get_product("vertex-04", "en")
|
||||
assert de_product is not None
|
||||
assert en_product is not None
|
||||
assert de_product["affiliate_url"] != en_product["affiliate_url"]
|
||||
assert "amazon.de" in de_product["affiliate_url"]
|
||||
assert "amazon.co.uk" in en_product["affiliate_url"]
|
||||
|
||||
|
||||
# ── click redirect (e2e via Quart test client) ────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_302(app, db):
|
||||
"""GET /go/<slug> redirects to affiliate_url with 302."""
|
||||
await _insert_product(slug="redirect-test", affiliate_url="https://amazon.de/dp/XYZ?tag=test-21")
|
||||
async with app.test_client() as client:
|
||||
response = await client.get("/go/redirect-test")
|
||||
assert response.status_code == 302
|
||||
assert "amazon.de" in response.headers.get("Location", "")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_logs_click(app, db):
|
||||
"""Successful redirect logs a click in affiliate_clicks."""
|
||||
pid = await _insert_product(slug="logged-test", affiliate_url="https://amazon.de/dp/LOG?tag=test-21")
|
||||
async with app.test_client() as client:
|
||||
await client.get(
|
||||
"/go/logged-test",
|
||||
headers={"Referer": "https://padelnomics.io/de/beste-padelschlaeger-2026"},
|
||||
)
|
||||
rows = await fetch_all("SELECT * FROM affiliate_clicks WHERE product_id = ?", (pid,))
|
||||
assert len(rows) == 1
|
||||
assert rows[0]["article_slug"] == "beste-padelschlaeger-2026"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
async def test_affiliate_redirect_inactive_404(app, db):
    """Draft products return 404 on /go/<slug>."""
    await _insert_product(slug="inactive-test", status="draft")
    async with app.test_client() as client:
        resp = await client.get("/go/inactive-test")
        # Drafts must never leak through the public redirect endpoint.
        assert resp.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
async def test_affiliate_redirect_unknown_404(app, db):
    """Unknown slug returns 404."""
    async with app.test_client() as client:
        resp = await client.get("/go/totally-unknown-xyz")
        assert resp.status_code == 404
|
||||
@@ -24,9 +24,11 @@ sup = _ilu.module_from_spec(_spec)
|
||||
_spec.loader.exec_module(sup)
|
||||
|
||||
from padelnomics_extract.proxy import ( # noqa: E402
|
||||
load_proxy_urls,
|
||||
fetch_webshare_proxies,
|
||||
load_proxy_tiers,
|
||||
make_round_robin_cycler,
|
||||
make_sticky_selector,
|
||||
make_tiered_cycler,
|
||||
)
|
||||
|
||||
# ── load_workflows ────────────────────────────────────────────────
|
||||
@@ -198,28 +200,112 @@ class TestTopologicalWaves:
|
||||
# ── proxy.py ─────────────────────────────────────────────────────
|
||||
|
||||
|
||||
class TestLoadProxyUrls:
    """load_proxy_urls() reads a comma-separated proxy list from $PROXY_URLS."""

    def test_returns_empty_when_unset(self, monkeypatch):
        monkeypatch.delenv("PROXY_URLS", raising=False)
        assert load_proxy_urls() == []

    def test_parses_comma_separated_urls(self, monkeypatch):
        monkeypatch.setenv(
            "PROXY_URLS",
            "http://p1:8080,http://p2:8080,http://p3:8080",
        )
        urls = load_proxy_urls()
        assert urls == ["http://p1:8080", "http://p2:8080", "http://p3:8080"]

    def test_strips_whitespace(self, monkeypatch):
        monkeypatch.setenv("PROXY_URLS", " http://p1:8080 , http://p2:8080 ")
        urls = load_proxy_urls()
        assert urls == ["http://p1:8080", "http://p2:8080"]

    def test_ignores_empty_segments(self, monkeypatch):
        monkeypatch.setenv("PROXY_URLS", "http://p1:8080,,http://p2:8080,")
        urls = load_proxy_urls()
        assert urls == ["http://p1:8080", "http://p2:8080"]


class TestFetchWebshareProxies:
    """fetch_webshare_proxies() downloads and parses an ip:port:user:pass list."""

    @staticmethod
    def _fetch(raw, **kwargs):
        """Run fetch_webshare_proxies against a mocked urlopen that returns *raw*.

        Extracted helper: the mock-response plumbing was previously repeated
        verbatim in four tests.
        """
        with patch("urllib.request.urlopen") as mock_open:
            mock_resp = MagicMock()
            mock_resp.read.return_value = raw.encode("utf-8")
            # urlopen is used as a context manager; make the mock honor that.
            mock_resp.__enter__ = lambda s: s
            mock_resp.__exit__ = MagicMock(return_value=False)
            mock_open.return_value = mock_resp
            return fetch_webshare_proxies("http://example.com/proxy-list", **kwargs)

    def test_parses_ip_port_user_pass_format(self):
        urls = self._fetch("1.2.3.4:1080:user1:pass1\n5.6.7.8:1080:user2:pass2\n")
        assert urls == [
            "http://user1:pass1@1.2.3.4:1080",
            "http://user2:pass2@5.6.7.8:1080",
        ]

    def test_network_error_returns_empty(self):
        import urllib.error

        # A download failure is swallowed and reported as "no proxies".
        with patch("urllib.request.urlopen", side_effect=urllib.error.URLError("timeout")):
            result = fetch_webshare_proxies("http://example.com/proxy-list")
        assert result == []

    def test_malformed_lines_are_skipped(self):
        urls = self._fetch("bad_line\n1.2.3.4:1080:user:pass\nonly:three:parts\n")
        assert urls == ["http://user:pass@1.2.3.4:1080"]

    def test_max_proxies_respected(self):
        lines = "\n".join(f"10.0.0.{i}:1080:u{i}:p{i}" for i in range(10))
        urls = self._fetch(lines, max_proxies=3)
        assert len(urls) == 3

    def test_empty_lines_skipped(self):
        urls = self._fetch("\n\n1.2.3.4:1080:user:pass\n\n")
        assert urls == ["http://user:pass@1.2.3.4:1080"]
|
||||
class TestLoadProxyTiers:
    """load_proxy_tiers() builds the ordered tier list from env configuration."""

    def _clear_proxy_env(self, monkeypatch):
        # Start from a clean slate: drop every tier-related env var.
        for var in ("WEBSHARE_DOWNLOAD_URL", "PROXY_URLS_DATACENTER", "PROXY_URLS_RESIDENTIAL"):
            monkeypatch.delenv(var, raising=False)

    def test_returns_empty_when_all_unset(self, monkeypatch):
        self._clear_proxy_env(monkeypatch)
        assert load_proxy_tiers() == []

    def test_single_datacenter_tier(self, monkeypatch):
        self._clear_proxy_env(monkeypatch)
        monkeypatch.setenv("PROXY_URLS_DATACENTER", "http://dc1:8080,http://dc2:8080")
        # One configured source yields exactly one tier, in configuration order.
        assert load_proxy_tiers() == [["http://dc1:8080", "http://dc2:8080"]]

    def test_residential_only(self, monkeypatch):
        self._clear_proxy_env(monkeypatch)
        monkeypatch.setenv("PROXY_URLS_RESIDENTIAL", "http://res1:8080")
        assert load_proxy_tiers() == [["http://res1:8080"]]

    def test_empty_tiers_skipped(self, monkeypatch):
        self._clear_proxy_env(monkeypatch)
        monkeypatch.setenv("PROXY_URLS_DATACENTER", "")
        monkeypatch.setenv("PROXY_URLS_RESIDENTIAL", "http://res1:8080")
        # A blank env var contributes no tier at all.
        assert load_proxy_tiers() == [["http://res1:8080"]]

    def test_three_tiers_correct_order(self, monkeypatch):
        self._clear_proxy_env(monkeypatch)
        webshare = ["http://user:pass@1.2.3.4:1080"]
        with patch("padelnomics_extract.proxy.fetch_webshare_proxies", return_value=webshare):
            monkeypatch.setenv("WEBSHARE_DOWNLOAD_URL", "http://example.com/list")
            monkeypatch.setenv("PROXY_URLS_DATACENTER", "http://dc1:8080")
            monkeypatch.setenv("PROXY_URLS_RESIDENTIAL", "http://res1:8080")
            tiers = load_proxy_tiers()
        # Cheapest first: free webshare, then datacenter, then residential.
        assert tiers == [webshare, ["http://dc1:8080"], ["http://res1:8080"]]

    def test_webshare_fetch_failure_skips_tier(self, monkeypatch):
        self._clear_proxy_env(monkeypatch)
        with patch("padelnomics_extract.proxy.fetch_webshare_proxies", return_value=[]):
            monkeypatch.setenv("WEBSHARE_DOWNLOAD_URL", "http://example.com/list")
            monkeypatch.setenv("PROXY_URLS_DATACENTER", "http://dc1:8080")
            tiers = load_proxy_tiers()
        # An empty webshare download is dropped; remaining tiers shift up.
        assert tiers == [["http://dc1:8080"]]
|
||||
|
||||
|
||||
class TestRoundRobinCycler:
|
||||
@@ -279,3 +365,138 @@ class TestStickySelectorProxy:
|
||||
fn = make_sticky_selector(urls)
|
||||
for i in range(20):
|
||||
assert fn(f"key_{i}") in urls
|
||||
|
||||
|
||||
class TestTieredCyclerNTier:
    """Behavior of make_tiered_cycler() across an arbitrary number of tiers."""

    def test_starts_on_first_tier(self):
        tiers = [["http://t0a", "http://t0b"], ["http://t1a"]]
        cycler = make_tiered_cycler(tiers, threshold=3)
        # Fresh cycler: tier 0 active, not exhausted, proxies drawn from tier 0.
        assert cycler["active_tier_index"]() == 0
        assert not cycler["is_exhausted"]()
        assert cycler["next_proxy"]() in tiers[0]

    def test_escalates_after_threshold(self):
        cycler = make_tiered_cycler([["http://t0"], ["http://t1"]], threshold=3)
        fail = cycler["record_failure"]
        # Below the threshold nothing changes.
        fail()
        fail()
        assert cycler["active_tier_index"]() == 0
        # The threshold-th failure escalates and reports that it did so.
        assert fail() is True
        assert cycler["active_tier_index"]() == 1
        assert cycler["next_proxy"]() == "http://t1"

    def test_escalates_through_all_tiers(self):
        cycler = make_tiered_cycler([["http://t0"], ["http://t1"], ["http://t2"]], threshold=2)
        fail = cycler["record_failure"]
        for _ in range(2):  # burn through tier 0
            fail()
        assert cycler["active_tier_index"]() == 1
        for _ in range(2):  # burn through tier 1
            fail()
        assert cycler["active_tier_index"]() == 2
        for _ in range(2):  # burn through tier 2 — nothing left
            fail()
        assert cycler["is_exhausted"]()
        assert cycler["next_proxy"]() is None

    def test_success_resets_counter(self):
        cycler = make_tiered_cycler([["http://t0"], ["http://t1"]], threshold=3)
        cycler["record_failure"]()
        cycler["record_failure"]()
        cycler["record_success"]()  # wipes the failure streak
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 0  # still below threshold
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 1  # threshold reached again

    def test_counter_resets_on_escalation(self):
        """After escalating, failure counter resets so new tier gets a fresh start."""
        cycler = make_tiered_cycler([["http://t0"], ["http://t1"], ["http://t2"]], threshold=2)
        for _ in range(2):  # burn through tier 0
            cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 1
        cycler["record_failure"]()  # first failure on tier 1 — no escalation yet
        assert cycler["active_tier_index"]() == 1
        cycler["record_failure"]()  # second failure — move on to tier 2
        assert cycler["active_tier_index"]() == 2

    def test_is_exhausted_false_when_tiers_remain(self):
        cycler = make_tiered_cycler([["http://t0"], ["http://t1"]], threshold=1)
        assert not cycler["is_exhausted"]()
        cycler["record_failure"]()  # hops to tier 1, which still has proxies
        assert not cycler["is_exhausted"]()

    def test_is_exhausted_true_after_all_tiers_fail(self):
        cycler = make_tiered_cycler([["http://t0"]], threshold=1)
        assert not cycler["is_exhausted"]()
        cycler["record_failure"]()
        assert cycler["is_exhausted"]()
        assert cycler["next_proxy"]() is None

    def test_empty_tiers_immediately_exhausted(self):
        cycler = make_tiered_cycler([], threshold=3)
        assert cycler["is_exhausted"]()
        assert cycler["next_proxy"]() is None
        assert cycler["tier_count"]() == 0

    def test_single_tier_cycles_within_tier(self):
        cycler = make_tiered_cycler([["http://p1", "http://p2", "http://p3"]], threshold=10)
        picks = [cycler["next_proxy"]() for _ in range(6)]
        # Round-robin wraps: two full passes over the single tier.
        assert picks == ["http://p1", "http://p2", "http://p3"] * 2

    def test_tier_count_reflects_input(self):
        assert make_tiered_cycler([], threshold=1)["tier_count"]() == 0
        assert make_tiered_cycler([["a"]], threshold=1)["tier_count"]() == 1
        assert make_tiered_cycler([["a"], ["b"], ["c"]], threshold=1)["tier_count"]() == 3

    def test_record_failure_noop_when_exhausted(self):
        cycler = make_tiered_cycler([["http://t0"]], threshold=1)
        cycler["record_failure"]()  # single tier, threshold 1 — exhausted now
        assert cycler["is_exhausted"]()
        # Extra failures must be harmless no-ops, not exceptions.
        assert cycler["record_failure"]() is False
        assert cycler["is_exhausted"]()

    def test_thread_safety(self):
        """Concurrent next_proxy and record calls do not raise or corrupt state."""
        import threading

        cycler = make_tiered_cycler(
            [["http://t0a", "http://t0b"], ["http://t1a", "http://t1b"]], threshold=5
        )
        errors = []
        errors_lock = threading.Lock()

        def hammer():
            try:
                for _ in range(20):
                    cycler["next_proxy"]()
                    cycler["record_failure"]()
                    cycler["record_success"]()
            except Exception as exc:
                with errors_lock:
                    errors.append(exc)

        workers = [threading.Thread(target=hammer) for _ in range(8)]
        for w in workers:
            w.start()
        for w in workers:
            w.join()

        assert errors == [], f"Thread safety errors: {errors}"
|
||||
|
||||
Reference in New Issue
Block a user