Compare commits
93 Commits
v4
...
v202603011
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c5d872ec55 | ||
|
|
75305935bd | ||
|
|
99cb0ac005 | ||
|
|
a15c32d398 | ||
|
|
97c5846d51 | ||
|
|
0d903ec926 | ||
|
|
42c49e383c | ||
|
|
1c0edff3e5 | ||
|
|
8a28b94ec2 | ||
|
|
9b54f2d544 | ||
|
|
08bd2b2989 | ||
|
|
81a57db272 | ||
|
|
bce6b2d340 | ||
|
|
f92d863781 | ||
|
|
a3dd37b1be | ||
|
|
e5cbcf462e | ||
|
|
169092c8ea | ||
|
|
6ae16f6c1f | ||
|
|
8b33daa4f3 | ||
|
|
a898a06575 | ||
|
|
219554b7cb | ||
|
|
1aedf78ec6 | ||
|
|
8f2ffd432b | ||
|
|
c9dec066f7 | ||
|
|
fea4f85da3 | ||
|
|
2590020014 | ||
|
|
a72f7721bb | ||
|
|
849dc8359c | ||
|
|
ec839478c3 | ||
|
|
47acf4d3df | ||
|
|
53117094ee | ||
|
|
6076a0b30f | ||
|
|
8dbbd0df05 | ||
|
|
b1eeb0a0ac | ||
|
|
6aae92fc58 | ||
|
|
86be044116 | ||
|
|
5de0676f44 | ||
|
|
81ec8733c7 | ||
|
|
8a921ee18a | ||
|
|
07d8ea1c0e | ||
|
|
370fc1f70b | ||
|
|
e0c3f38c0a | ||
|
|
f9faa02683 | ||
|
|
109da23902 | ||
|
|
34065fa2ac | ||
|
|
d1a10ff243 | ||
|
|
5f48449d25 | ||
|
|
b7e44ac5b3 | ||
|
|
c2dfefcc1e | ||
|
|
e9d1b74618 | ||
|
|
4b5c237bee | ||
|
|
8c4a4078f9 | ||
|
|
5f756a2ba5 | ||
|
|
4ac17af503 | ||
|
|
0984657e72 | ||
|
|
73547ec876 | ||
|
|
129ca26143 | ||
|
|
9ea4ff55fa | ||
|
|
8a91fc752b | ||
|
|
4783067c6e | ||
|
|
c1e1f42aad | ||
|
|
ecd1cdd27a | ||
|
|
24ec7060b3 | ||
|
|
5c22ea9780 | ||
|
|
aee3733b49 | ||
|
|
51d9aab4a0 | ||
|
|
1fdd2d07a4 | ||
|
|
2214d7a58f | ||
|
|
0f360fd230 | ||
|
|
85b6aa0d0a | ||
|
|
bc7e40b531 | ||
|
|
ef85d3bb36 | ||
|
|
4d45b99cd8 | ||
|
|
e62aad148b | ||
|
|
b5db9d16b9 | ||
|
|
2e149fc1db | ||
|
|
6fb1e990e3 | ||
|
|
6edf8ba65e | ||
|
|
ed0a578050 | ||
|
|
c1cdeec6be | ||
|
|
710624f417 | ||
|
|
6cf98f44d4 | ||
|
|
60659a5ec5 | ||
|
|
beb4195f16 | ||
|
|
88cc857f3a | ||
|
|
9116625884 | ||
|
|
1af65bb46f | ||
|
|
9b0bfc478d | ||
|
|
adf22924f6 | ||
|
|
09665b7786 | ||
|
|
93349923bd | ||
|
|
642041b32b | ||
|
|
bb70a5372b |
@@ -3,6 +3,8 @@ APP_NAME=ENC[AES256_GCM,data:Vic/MJYoxZo8JAI=,iv:n1SEGQaGeZtYMtLmDRFiljDBbNKFvCz
|
||||
SECRET_KEY=ENC[AES256_GCM,data:a3Bhj3gSQaE3llRWBYzpjoFDhhhSsNee67jXJs7+qn4=,iv:yvrx78X5Ut4DBSlmBnIn09ESVc/tuDiwiV4njmjcvko=,tag:cbFUTAEpX+isQD9FCVllsw==,type:str]
|
||||
BASE_URL=ENC[AES256_GCM,data:LcbPDZf9Pwcuv7RxN9xhNfa9Tufi,iv:cOdjW9nNe+BuDXh+dL4b5LFQL2mKBiKV0FaEsDGMAQc=,tag:3uAn3AIwsztIfGpkQLD5Fg==,type:str]
|
||||
DEBUG=ENC[AES256_GCM,data:qrEGkA==,iv:bCyEDWiEzolHo4vabiyYTsqM0eUaBmNbXYYu4wCsaeE=,tag:80gnDNbdZHRWVEYtuA1M2Q==,type:str]
|
||||
#ENC[AES256_GCM,data:YB5h,iv:2HFpvHNebAB9M/44rtPk/QpFV9hNKOlV/099OSjPnOA=,tag:BVj8vGy6K3LW/wb1vcZ+Ug==,type:comment]
|
||||
GITEA_TOKEN=ENC[AES256_GCM,data:aIM7vQXxFbz7FDdXEdwtelvmXAdLgJfWNCSPeK//NlveQrU5cLDt8w==,iv:9qhjk52ZAs+y5WwP5WebMUwHhu6JNdHzAsEOpznrwBw=,tag:WnCDA4hAccMFs6vXVVKqxw==,type:str]
|
||||
#ENC[AES256_GCM,data:YmlGAWpXxRCqam3oTWtGxHDXC+svEXI4HyUxrm/8OcKTuJsYPcL1WcnYqrP5Mf5lU5qPezEXUrrgZy8vjVW6qAbb0IA2PMM4Kg==,iv:dx6Dn99dJgjwyvUp8NAygXjRQ50yKYFeC73Oqt9WvmY=,tag:6JLF2ixSAv39VkKt6+cecQ==,type:comment]
|
||||
ADMIN_EMAILS=ENC[AES256_GCM,data:hlG8b32WlD4ems3VKQ==,iv:wWO08dmX4oLhHulXg4HUG0PjRnFiX19RUTkTvjqIw5I=,tag:KMjXsBt7aE/KqlCfV+fdMg==,type:str]
|
||||
#ENC[AES256_GCM,data:b2wQxnL8Q2Bp,iv:q8ep3yUPzCumpZpljoVL2jbcPdsI5c2piiZ0x5k10Mw=,tag:IbjkT0Mjgu9n+6FGiPVihg==,type:comment]
|
||||
@@ -56,9 +58,10 @@ WORKFLOWS_PATH=ENC[AES256_GCM,data:PehxEUMb1K3F1557BY3IqKD7sbJcoaIjnQvboBRJ1g==,
|
||||
ALERT_WEBHOOK_URL=
|
||||
NTFY_TOKEN=
|
||||
#ENC[AES256_GCM,data:BCyQYjRnTx8yW9A=,iv:4OPCP+xzRLUJrpoFewVnbZRKnZH4sAbV76SM//2k5wU=,tag:HxwEp7VFVZUN/VjPiL/+Vw==,type:comment]
|
||||
PROXY_URLS=ENC[AES256_GCM,data:CzRaK0piUQfvuYYsdz0i2MEQIphKi0BhNvHw9alo46aTH+kqEKvoS7dKEKzyU9VJ4TyNweInlVMxB962DsvRoBtnHwo/pUmYtVeEr2881clNgEiZVYRDFRdEbpULcLPDJa3ey1leqAAHlmiL0RQ6Qa57gPCOVBzVG6npGLKO+K8XVIb+BZMs9kEUOlw7iuqTJW5xPN/t4X/jHidEqfTSAl9b4vU4bsYVuY3yQrL+/V5QpTbyXlf+cMq3flpA3zE2Fxhalzg+c/wHMTrCksFwrCkrInW0kY9yPkA7usUWr1xwwaV3wIDoNQsLXpMd/3RztipNvKtOMRhRJOmjzP7BKhCJvvvKTV5p+mBCulFijbMQgArg3BqcFanfw3YZ4wPd4hp8q/vOhE/U9Wu0yrMmyWYFHYGQnFVARlBH7pwn/ez8W4KqRFveEAuev9CE7K7s5RqzPLelSkoa9UuiiULJ+t0LFgKlgxuLtQ8GdFdgsmBCxY/4U/xzvNdC82hD549z5nMWWlaUJm4onPWirT/RYm7j3v6z4mmNImI2W6rCNbvEvsXwWsciquVaBIgReA47p6/GTzZ9VZMyGr4PdzB87BJGAgX1W57WNdPAsRIF49XP2BU72RtRFxsUG8Ha2dc=,iv:a10Vpk7Zv8QqORuEcMlpcvtHO/zjBLaFphWPYBXwysc=,tag:8N66/R+CLqEZ45wj+tCt6w==,type:str]
|
||||
RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:YWM=,iv:iY5+uMazLAFdwyLT7Gr7MaF1QHBIgHuoi6nF2VbSsOA=,tag:dc6AmuJdTQ55gVe16uzs6A==,type:str]
|
||||
PROXY_URLS_FALLBACK=ENC[AES256_GCM,data:95rwI7kKUj1YxLpjChtrM4f2EFUDzQdAg1e1MOHnLwQ9ZY54UNH7v4JcqTsvDk9D+0N/BIdwFSDi7pnCSd6BWFV+cQ==,iv:rm9HdBsibSne7JR6vWl+ao/GHb1rbuVdZZDUWhVbTnE=,tag:NJ2STxmFZPvFayfTrEEYbg==,type:str]
|
||||
PROXY_URLS_RESIDENTIAL=ENC[AES256_GCM,data:lfmlsjXFtL+zo40SNFLiFKaZiYvE7CNH+zRwjMK5pqPfCs0TlMX+Y9e1KmzAS+y/cI69TP5sgMPRBzER0Jn7RvH0KA==,iv:jBN/4/K5L5886G4rSzxt8V8u/57tAuj3R76haltzqeU=,tag:Xe6o9eg2PodfktDqmLgVNA==,type:str]
|
||||
PROXY_URLS_DATACENTER=ENC[AES256_GCM,data:Eec0X65EMsV2PD3Qvn+JjGqYaHtLupn0k99H918vmuRuAinP3rv/pwEoyKHmygazrUExg7U2PUELycyzq3lU6RIGtO+r0pRAn/n0S8RwdoZS,iv:T+bfbvULwSLRVD/hyW7rDN8tLLBf1FQkwCEbpiuBB+0=,tag:W/YHfl5U2yaA7ZOXgAFw+Q==,type:str]
|
||||
WEBSHARE_DOWNLOAD_URL=ENC[AES256_GCM,data:1D9VRZ3MCXPQWfiMH8+CLcrxeYnVVcQgZDvt5kltvbSTuSHQ2hHDmZpBkTOMIBJnw4JLZ2JQKHgG4OaYDtsM2VltFPnfwaRgVI9G5PSenR3o4PeQmYO1AqWOmjn19jPxNXRhEXdupP9UT+xQNXoBJsl6RR20XOpMA5AipUHmSjD0UIKXoZLU,iv:uWUkAydac//qrOTPUThuOLKAKXK4xcZmK9qBVFwpqt4=,tag:1vYhukBW9kEuSXCLAiZZmQ==,type:str]
|
||||
CIRCUIT_BREAKER_THRESHOLD=
|
||||
#ENC[AES256_GCM,data:ZcX/OEbrMfKizIQYq3CYGnvzeTEX7KsmQaz2+Jj1rG5tbTy2aljQBIEkjtiwuo8NsNAD+FhIGRGVfBmKe1CAKME1MuiCbgSG,iv:4BSkeD3jZFawP09qECcqyuiWcDnCNSgbIjBATYhazq4=,tag:Ep1d2Uk700MOlWcLWaQ/ig==,type:comment]
|
||||
GSC_SERVICE_ACCOUNT_PATH=
|
||||
@@ -70,7 +73,7 @@ GEONAMES_USERNAME=ENC[AES256_GCM,data:aSkVdLNrhiF6tlg=,iv:eemFGwDIv3EG/P3lVHGZj9
|
||||
CENSUS_API_KEY=ENC[AES256_GCM,data:qqG971573aGq9MiHI2xLlanKKFwjfcNNoMXtm8LNbyh0rMbQN2XukQ==,iv:az2i0ldH75nHGah4DeOxaXmDbVYqmC1c77ptZqFA9BI=,tag:zoDdKj9bR7fgIDo1/dEU2g==,type:str]
|
||||
sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxNWNmUzVNUGdWRnE0ZFpF\nM0JQZWZ3UDdEVzlwTmIxakxOZXBkT2x2ZlNrClRtV2M3S2daSGxUZmFDSWQ2Nmh4\neU51QndFcUxlSE00RFovOVJTcDZmUUUKLS0tIDcvL3hRMDRoMWZZSXljNzA3WG5o\nMWFic21MV0krMzlIaldBTVU0ZDdlTE0K7euGQtA+9lHNws+x7TMCArZamm9att96\nL8cXoUDWe5fNI5+M1bXReqVfNwPTwZsV6j/+ZtYKybklIzWz02Ex4A==\n-----END AGE ENCRYPTED FILE-----\n
|
||||
sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a
|
||||
sops_lastmodified=2026-02-26T14:31:14Z
|
||||
sops_mac=ENC[AES256_GCM,data:iqFuTexTS9U/Nv8xoTpHljTNQTGX9ITcJ3AjhDEtxrh0Z9/lngfBvGtjiKmpwFGlobQw/x+/YLM+u3MhciwXF7qNwFfJ/StN2Y66uF71SxWotbL70Dxl4oWSVL3sU+2NYbw5yP0p+xCbE+rEd5SqAe6K5yyq5X25hz8fIapxlYA=,iv:foqoWQVMipuOAQ0Kp799PaIhCIrxV8T5cC811wIzxR8=,tag:yNfxSV3R21XEXksjmdsKBw==,type:str]
|
||||
sops_lastmodified=2026-03-01T13:34:16Z
|
||||
sops_mac=ENC[AES256_GCM,data:JLfGLbNTEcI6M/sUA5Zez6cfEUObgnUBmX52560PzBmeLZt0F5Y5QpeojIBqEDMuNB0hp1nnPI59WClLJtQ12VlHo9TkL3x9uCNUG+KneQrn1bTmJpA3cwNkWTzIm4l+TGbJbd4FpKJ9H0v1w+sqoKOgG8DqbtOeVdUfsVspAso=,iv:UqYxooXkEtx+y7fYzl+GFncpkjz8dcP7o9fp+kFf6w4=,tag:/maSb1aZGo+Ia8eGpB7PYw==,type:str]
|
||||
sops_unencrypted_suffix=_unencrypted
|
||||
sops_version=3.12.1
|
||||
|
||||
@@ -32,10 +32,6 @@ LITESTREAM_R2_BUCKET=ENC[AES256_GCM,data:pAqSkoJzsw==,iv:5J1Js7JPH/j1oTmEBdNXjwd
|
||||
LITESTREAM_R2_ACCESS_KEY_ID=ENC[AES256_GCM,data:e89yGzousImmdO7WVqmRWLJNejDFH5eTaw7G74CyZSw=,iv:bR1jgqSzJlxPA8LMMg2Mc1Lnp01iZgaqa9dgAoV0RpY=,tag:m92xzCP0qaP2onK7ChwA1Q==,type:str]
|
||||
LITESTREAM_R2_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:yzXeb8c/Y0d+EluY7g6buo4BnFvBDEVblOi7doNgOp3siLvfMmPkjdRLqZzA14ET6CW5vef9i51yijPYwuhnbw==,iv:IYQRZ8SsquUQpsHH3X/iovz2wFskR4iHyvr0arY7Ag4=,tag:9G5lpHloacjQbEhSk9T2pw==,type:str]
|
||||
LITESTREAM_R2_ENDPOINT=ENC[AES256_GCM,data:qqDLfsPeiWOfwtgpZeItypnYNmIOD07fV0IPlZfphhUFeY0Z/BRpkVXA7nfqQ2M6PmcYKVIlBiBY,iv:hsEBxxv1+fvUY4v3nhBP8puKlu216eAGZDUNBAjibas=,tag:MvnsJ8W3oSrv4ZrWW/p+dg==,type:str]
|
||||
#ENC[AES256_GCM,data:YGV2exKdGOUkblNZZos=,iv:NuabFM/gNHIzYmDMRZ2tglFYdMPVFuHFGd+AAWvvu6Q=,tag:gZRoNNEmjL9v3nC8j9YkHw==,type:comment]
|
||||
DUCKDB_PATH=ENC[AES256_GCM,data:GgOEQ5B1KeQrVavhoMU/JGXcVu3H,iv:XY8JiaosxaUDv5PwizrZFWuNKMSOeuE3cfVyp51r++8=,tag:RnoDE5+7WQolFLejfRZ//w==,type:str]
|
||||
SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:U2X9KmlgnWXM9uCfhHCJ03HMGCLm,iv:KHHdBTq+ct4AG7Jt4zLog/5jbDC7LvHA6KzWNTDS/Yw=,tag:m5uIG/bS4vaBooSYoYa6SA==,type:str]
|
||||
LANDING_DIR=ENC[AES256_GCM,data:NkEmV8LOwEiN9Sal,iv:mQHBVT6lNoEEEVbl7a5bNN5qoF/LvTyWXQvvkv/z/B0=,tag:IgA5A1nfF91fOBdYxEN71g==,type:str]
|
||||
#ENC[AES256_GCM,data:jvZYm7ceM4jtNRg=,iv:nuv65SDTZiaVukVZ40seBZevpqP8uiKCgJyQcIrY524=,tag:cq6gB3vmJzJWIXCLHaIc9g==,type:comment]
|
||||
REPO_DIR=ENC[AES256_GCM,data:ae8i6PpGFaiYFA/gGIhczg==,iv:nmsIRMPJYocIO6Z2Gz4OIzAOvSpdgDYmUaIr2hInFo0=,tag:EmAYG5NujnHg8lPaO/uAnQ==,type:str]
|
||||
WORKFLOWS_PATH=ENC[AES256_GCM,data:sGU4l68Pbb1thsPyG104mWXWD+zJGTIcR/TqVbPmew==,iv:+xhGkX+ep4kFEAU65ELdDrfjrl/WyuaOi35JI3OB/zM=,tag:brauZhFq8twPXmvhZKjhDQ==,type:str]
|
||||
@@ -43,7 +39,10 @@ ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:4sXQk8zklruC525J279TUUatdDJQ43qweuoPhtpI82
|
||||
NTFY_TOKEN=ENC[AES256_GCM,data:YlOxhsRJ8P1y4kk6ugWm41iyRCsM6oAWjvbU9lGcD0A=,iv:JZXOvi3wTOPV9A46c7fMiqbszNCvXkOgh9i/H1hob24=,tag:8xnPimgy7sesOAnxhaXmpg==,type:str]
|
||||
SUPERVISOR_GIT_PULL=ENC[AES256_GCM,data:mg==,iv:KgqMVYj12FjOzWxtA1T0r0pqCDJ6MtHzMjE+4W/W+s4=,tag:czFaOqhHG8nqrQ8AZ8QiGw==,type:str]
|
||||
#ENC[AES256_GCM,data:hzAZvCWc4RTk290=,iv:RsSI4OpAOQGcFVpfXDZ6t705yWmlO0JEWwWF5uQu9As=,tag:UPqFtA2tXiSa0vzJAv8qXg==,type:comment]
|
||||
PROXY_URLS=ENC[AES256_GCM,data:nm4B++SkZZgN3p2xru3WrpVA0X6O8yvb45tH/ovF4006zBy28xqVxbsd44Mz6b5FMinjOXRmGwoI/GDWmdJLzBYdpryQ/FhpbzSUpr1ZOjOz+7P0vn2jfBGAB8ksU3i5kuYglud3EyQGFL+v+uooxwrIUCjfzmmB4vCmf7phssKDsK1CqzmdZ1c54ehSu4bRRdmGp9d0+r+j1SpXb/JbZ8LTqUIhLlZXrHFqkCfN1czhFK9IwMVgR00Q4v2YkjaRBME4lVqwk1NwwatbS9Fq8LlzwuT1uKk+T6ZDkFKC8ZoPW1YRqF13X7hFGFXCNRqABRDZ45lqxYQbBoRrWmH2tfMiAmTrIuRsdPM8bZ/Ol5mXSDhs0HyWX2urX+LD65rIOO0zN/lwjXSwh5mwwBdB61akdzsWRyLZsdafuQUmgGul8y0eGMEbFWaty3bdrtAmqtsvHwxD/Dp/gQWScESXvPd1arn55zaXmefOy+ZLwcmx+FAJPpTMXRaq6Y/Z+D1PZZ+Uhu2D6tsAR4VvqqwlUgpsrAFXk6chJzOry8rmmxoMuIj9mXfjG+BqPFhV2oQsKSuIqFQqd/ZidJLO8ZSxA7L+h1eH4cQjcUd2nfzroG8nnKZ+cA8hQMfLuFiMY1I=,iv:nTaNQlC3px/lnodLphnILWbPVnelaUKKOZAFAaHi8MU=,tag:TYkIX1nrc+PKbvvnWYcvbg==,type:str]
|
||||
PROXY_URLS_RESIDENTIAL=ENC[AES256_GCM,data:vxRcXQ/8TUTCtr6hKWBD1zVF47GFSfluIHZ8q0tt8SqQOWDdDe2D7Of6boy/kG3lqlpl7TjqMGJ7fLORcr0klKCykQ==,iv:YjegXXtIXm2qr0a3ZHRHxj3L1JoGZ1iQXkVXQupGQ2E=,tag:kahoHRskXbzplZasWOeiig==,type:str]
|
||||
PROXY_URLS_DATACENTER=ENC[AES256_GCM,data:23TgU6oUeO7J+MFkraALQ5/RO38DZ3ib5oYYJr7Lj3KXQSlRsgwA+bJlweI5gcUpFphnPXvmwFGiuL6AeY8LzAQ3bx46dcZa5w9LfKw2PMFt,iv:AGXwYLqWjT5VmU02qqada3PbdjfC0mLK2sPruO0uru8=,tag:Z2IS/JPOqWX+x0LZYwyArA==,type:str]
|
||||
WEBSHARE_DOWNLOAD_URL=ENC[AES256_GCM,data:/N77CFf6tJWCk7HrnBOm2Q1ynx7XoblzfbzJySeCjrxqiu4r+CB90aDkaPahlQKI00DUZih3pcy7WhnjdAwI30G5kJZ3P8H8/R0tP7OBK1wPVbsJq8prQJPFOAWewsS4KWNtSURZPYSCxslcBb7DHLX6ZAjv6A5KFOjRK2N8usR9sIabrCWh,iv:G3Ropu/JGytZK/zKsNGFjjSu3Wt6fvHaAqI9RpUHvlI=,tag:fv6xuS94OR+4xfiyKrYELA==,type:str]
|
||||
PROXY_CONCURRENCY=ENC[AES256_GCM,data:vdEZ,iv:+eTNQO+s/SsVDBLg1/+fneMzEEsFkuEFxo/FcVV+mWc=,tag:i/EPwi/jOoWl3xW8H0XMdw==,type:str]
|
||||
RECHECK_WINDOW_MINUTES=ENC[AES256_GCM,data:L2s=,iv:fV3mCKmK5fxUmIWRePELBDAPTb8JZqasVIhnAl55kYw=,tag:XL+PO6sblz/7WqHC3dtk1w==,type:str]
|
||||
#ENC[AES256_GCM,data:RC+t2vqLwLjapdAUql8rQls=,iv:Kkiz3ND0g0MRAgcPJysIYMzSQS96Rq+3YP5yO7yWfIY=,tag:Y6TbZd81ihIwn+U515qd1g==,type:comment]
|
||||
GSC_SERVICE_ACCOUNT_PATH=ENC[AES256_GCM,data:Vki6yHk+gd4n,iv:rxzKvwrGnAkLcpS41EZ097E87NrIpNZGFfl4iXFvr40=,tag:EZkBJpCq5rSpKYVC4H3JHQ==,type:str]
|
||||
@@ -53,13 +52,17 @@ BING_SITE_URL=ENC[AES256_GCM,data:M33VI97DyxH8gRR3ZUXoXg4QrEv5og==,iv:GxZtwfbBVi
|
||||
#ENC[AES256_GCM,data:OTUMKNkRW0zrupNppXthwE1oieILhNjM+cjx5hFn69g=,iv:48ID2qtSe9ggD2X+G/iUqp3v2uwEc7fZw8lxHIvVXmk=,tag:okBn0Npk1K9dDOFWA/AB1A==,type:comment]
|
||||
GEONAMES_USERNAME=ENC[AES256_GCM,data:UXd/S2TzXPiGmLY=,iv:OMURM5E6SFEsaqroUlH76DEnr7C/ujNk9UQnbWT0hK4=,tag:VsjjS12QDbudiEhdAQ/OCQ==,type:str]
|
||||
CENSUS_API_KEY=ENC[AES256_GCM,data:9RbKlxSD17LqIuuNXaOKSgZ8LnFh9Wbze3XHgpctfV/1TqBMZTIedQ==,iv:WwsmR3HLUEcgUpLliGRaUPhGM9vFNPMGXSAQQ6+9UVc=,tag:R4EMNy5MxxvK0UTaCL0umA==,type:str]
|
||||
#ENC[AES256_GCM,data:SL402gYB8ngjqkrG03FmaA==,iv:I326cYnOWdFnaUwnSfP+s2p9oCDCnqDzUJuPOzSFJc0=,tag:MBW5AqAaq4hTMmNXq1tXKw==,type:comment]
|
||||
R2_LANDING_BUCKET=ENC[AES256_GCM,data:yZXLNQb8yN9nQPdxqmqv61fLWbRYCjjOqQ==,iv:fAwBLC/EuU0lgYOxZSkTagWyeQCdEadjssapxpCEGjA=,tag:VUmuVw76WZAaukp71Desag==,type:str]
|
||||
R2_LANDING_ACCESS_KEY_ID=ENC[AES256_GCM,data:Y6y+U1ayhpFDcoaDjl7hyMVjU3gVvtORAH5gbd+HXbM=,iv:ra9kuch1DT+2tfz140bvxQRIXypsdiUrX1QYQ59gNRI=,tag:Wt85qliUMFvgbvoUrOXT7A==,type:str]
|
||||
R2_LANDING_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:99wB9aKSq2GihW9FOwBSMgHYzNKBHlol2Mf2kg4Ma6Fr4Cr21t/blzPxNQ7YRdeKk6ypFgViXlS4BJz9nC+v0g==,iv:/AmbXtj/uSGcMp+NBhN5tiVb2U56tvO5e1UpG2/ijPo=,tag:Qg2Tt11DUJPyeYcq9iSVnQ==,type:str]
|
||||
sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBaUVk0UEVqdmtsM3VzQnpZ\nZjJDZ1lsM0VqWFpVVXUvNzdQcCtHbVJLNjFnCmhna01vTkVBaFQ5ZVlXeGhYNXdH\ncWJ5Qi9PdkxLaHBhQnR3cmtoblkxdEUKLS0tIDhHamY4NXhxOG9YN1NpbTN1aVRh\nOHVKcEN1d0QwQldVTDlBWUU4SDVDWlUKRJU+CTfTzIx6LLKin9sTXAHPVAfiUerZ\nCqYVFncsCJE3TbMI424urQj7kragPoGl1z4++yqAXNTRxfZIY4KTkg==\n-----END AGE ENCRYPTED FILE-----\n
|
||||
sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a
|
||||
sops_age__list_1__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBmVEticFRVemlzZnlzek4x\nbWJ0d0h5ejJVUk5remo1VkdxNjVpdllqbFhFClc1UXlNd09xVVA5MnltMlN5MWRy\nYUlNRmNybHh1RGdPVC9yWlYrVmRTdkkKLS0tIHBUbU9qSDMrVGVHZDZGSFdpWlBh\nT3NXTGl0SmszaU9hRmU5bXI0cDRoRW8KLvbNYsBEwz+ITKvn7Yn+iNHiRzyyjtQt\no9/HupykJ3WjSdleGz7ZN6UiPGelHp0D/rzSASTYaI1+0i0xZ4PUoQ==\n-----END AGE ENCRYPTED FILE-----\n
|
||||
sops_age__list_1__map_recipient=age1wjepykv3glvsrtegu25tevg7vyn3ngpl607u3yjc9ucay04s045s796msw
|
||||
sops_age__list_2__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFeHhaOURNZnRVMEwxNThu\nUjF4Q0kwUXhTUE1QSzZJbmpubnh3RnpQTmdvCjRmWWxpNkxFUmVGb3NRbnlydW5O\nWEg3ZXJQTU4vcndzS2pUQXY3Q0ttYjAKLS0tIE9IRFJ1c2ZxbGVHa2xTL0swbGN1\nTzgwMThPUDRFTWhuZHJjZUYxOTZrU00KY62qrNBCUQYxwcLMXFEnLkwncxq3BPJB\nKm4NzeHBU87XmPWVrgrKuf+PH1mxJlBsl7Hev8xBTy7l6feiZjLIvQ==\n-----END AGE ENCRYPTED FILE-----\n
|
||||
sops_age__list_2__map_recipient=age1c783ym2q5x9tv7py5d28uc4k44aguudjn03g97l9nzs00dd9tsrqum8h4d
|
||||
sops_lastmodified=2026-02-26T14:32:28Z
|
||||
sops_mac=ENC[AES256_GCM,data:pyHQHwTtjh7OLiMqbqhUjfrmetEtYS7yB342C/TWfDCwEotWLVwnGWlC4+HIl53pw9+3AgoBVRnW0t86e4kG9O8KyHnk68S9qBcpUsybW3lyGPNXmBydv1W9gQHuK8f/4WGIbkhNxyIToKg9ZAmYWFxNhRKSoYKm5P9Uh7B7CF4=,iv:syrX8VdL3JsDsawvFWbX04Ygcr18hjSSHfEwHkyKETk=,tag:qrhWkh/e+21OKGU2+rCeyg==,type:str]
|
||||
sops_lastmodified=2026-03-01T16:31:40Z
|
||||
sops_mac=ENC[AES256_GCM,data:+9Sk7wVRPMDeDf6FkuNAOyUT6/OD8Rk6jtJuy5CGQXdxxCYY12F6dAGF6V5fE0toqfYxhVTJbSqH32qTZM2Tc28n36zCtXNnaTdv9rS4XFfPq+MrhuIIv5bJYwDXDgW4F5TCeCBB09jgUKDRaQVGBn2hO3+k8auaPdqWp2cd+es=,iv:wtN61uo7vixY1/EQteyTMzG73C6Gz8AFu1qodR9JvQw=,tag:Z1izDo6EAS03OhA1bj0ArA==,type:str]
|
||||
sops_unencrypted_suffix=_unencrypted
|
||||
sops_version=3.12.1
|
||||
|
||||
@@ -17,9 +17,9 @@ jobs:
|
||||
- run: uv run pytest web/tests/ -x -q -p no:faulthandler
|
||||
- run: uv run ruff check web/src/ web/tests/
|
||||
|
||||
# Creates v<N> tag after tests pass. The on-server supervisor polls for new
|
||||
# tags every 60s and deploys automatically. No SSH keys or deploy credentials
|
||||
# needed in CI — only the built-in github.token.
|
||||
# Creates a v{YYYYMMDDHHMM} tag after tests pass on master.
|
||||
# The on-server supervisor polls for new tags every 60s and deploys
|
||||
# automatically. No SSH keys or deploy credentials needed in CI.
|
||||
tag:
|
||||
needs: [test]
|
||||
runs-on: ubuntu-latest
|
||||
@@ -32,5 +32,6 @@ jobs:
|
||||
run: |
|
||||
git config user.name "CI"
|
||||
git config user.email "ci@noreply"
|
||||
git tag "v${{ github.run_number }}"
|
||||
git push origin "v${{ github.run_number }}"
|
||||
TAG="v$(date -u +%Y%m%d%H%M)"
|
||||
git tag "$TAG"
|
||||
git push origin "$TAG"
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
stages:
|
||||
- test
|
||||
- tag
|
||||
|
||||
test:
|
||||
stage: test
|
||||
image: python:3.12-slim
|
||||
before_script:
|
||||
- pip install uv
|
||||
script:
|
||||
- uv sync
|
||||
- uv run pytest web/tests/ -x -q -p no:faulthandler
|
||||
- uv run ruff check web/src/ web/tests/
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
|
||||
tag:
|
||||
stage: tag
|
||||
image:
|
||||
name: alpine/git
|
||||
entrypoint: [""]
|
||||
script:
|
||||
- git tag "v${CI_PIPELINE_IID}"
|
||||
- git push "https://gitlab-ci-token:${CI_JOB_TOKEN}@${CI_SERVER_HOST}/${CI_PROJECT_PATH}.git" "v${CI_PIPELINE_IID}"
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
|
||||
# Deployment is handled by the on-server supervisor (src/padelnomics/supervisor.py).
|
||||
# It polls git every 60s, fetches tags, and deploys only when a new passing tag exists.
|
||||
# No CI secrets needed — zero SSH keys, zero deploy credentials.
|
||||
62
CHANGELOG.md
62
CHANGELOG.md
@@ -6,6 +6,68 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### Fixed
|
||||
- **Stale-tier failures no longer exhaust the next proxy tier** — with parallel workers, threads that fetched a proxy just before tier escalation reported failures after the tier changed, immediately blowing through the new tier's circuit breaker before it ever got tried (Rayobyte was skipped entirely). `record_failure(proxy_url)` now checks which tier the proxy belongs to and ignores the circuit breaker when the proxy is from an already-escalated tier.
|
||||
|
||||
- **Proxy URL scheme validation in `load_proxy_tiers()`** — URLs in `PROXY_URLS_DATACENTER` / `PROXY_URLS_RESIDENTIAL` that are missing an `http://` or `https://` scheme are now logged as a warning and skipped, rather than being passed through and causing SSL handshake failures or connection errors at request time. Also fixed a missing `http://` prefix in the dev `.env` `PROXY_URLS_DATACENTER` entry.
|
||||
|
||||
### Changed
|
||||
- **Per-proxy dead tracking in tiered cycler** — `make_tiered_cycler` now accepts a `proxy_failure_limit` parameter (default 3). Individual proxies that hit the limit are marked dead and permanently skipped by `next_proxy()`. If all proxies in the active tier are dead, `next_proxy()` auto-escalates to the next tier without needing the tier-level threshold. `record_failure(proxy_url)` and `record_success(proxy_url)` accept an optional `proxy_url` argument for per-proxy tracking; callers without `proxy_url` are fully backward-compatible. New `dead_proxy_count()` callable exposed for monitoring.
|
||||
- `extract/padelnomics_extract/src/padelnomics_extract/proxy.py`: added per-proxy state (`proxy_failure_counts`, `dead_proxies`), updated `next_proxy`/`record_failure`/`record_success`, added `dead_proxy_count`
|
||||
- `extract/padelnomics_extract/src/padelnomics_extract/playtomic_tenants.py`: `_fetch_page_via_cycler` passes `proxy_url` to `record_success`/`record_failure`
|
||||
- `extract/padelnomics_extract/src/padelnomics_extract/playtomic_availability.py`: `_worker` returns `(proxy_url, result)` tuple; serial loops in `extract` and `extract_recheck` capture `proxy_url` before passing to `record_success`/`record_failure`
|
||||
- `web/tests/test_supervisor.py`: 11 new tests in `TestTieredCyclerDeadProxyTracking` covering dead proxy skipping, auto-escalation, `dead_proxy_count`, backward compat, and thread safety
|
||||
|
||||
### Added
|
||||
- **Visual upgrades for longform articles** — 4 reusable CSS article components added to `input.css` and applied across 6 cornerstone articles (EN + DE):
|
||||
- `article-timeline`: horizontal numbered phase diagram with connecting lines; collapses to vertical stack on mobile. Replaces ASCII art code blocks in build guide articles.
|
||||
- `article-callout` (warning/tip/info variants): left-bordered callout box with icon, title, and body. Replaces `>` blockquotes and bold-text warnings in build and risk guides.
|
||||
- `article-cards`: 2-column card grid with colored accent bars (success/failure/neutral/established/growth/emerging). Replaces sequential bold-text pattern paragraphs in build, risk, and location guides.
|
||||
- `severity` pills: inline colored badge for High/Medium-High/Medium/Low-Medium/Low. Applied to risk overview tables in both risk guide articles.
|
||||
- Articles updated: `padel-hall-build-guide-en`, `padel-halle-bauen-de`, `padel-hall-investment-risks-en`, `padel-halle-risiken-de`, `padel-hall-location-guide-en`, `padel-standort-analyse-de`
|
||||
|
||||
- **Pipeline Transform tab + live extraction status** — new "Transform" tab in the pipeline admin with status cards for SQLMesh transform and export-serving tasks, a "Run Full Pipeline" button, and a recent run history table. The Overview tab now auto-polls every 5 s while an extraction task is pending and stops automatically when quiet. Per-extractor "Run" buttons use HTMX in-place updates instead of redirects. The header "Run Pipeline" button now enqueues the full ELT pipeline (extract → transform → export) instead of extraction only. Three new worker task handlers: `run_transform` (sqlmesh plan prod --auto-apply, 2 h timeout), `run_export` (export_serving.py, 10 min timeout), `run_pipeline` (sequential, stops on first failure). Concurrency guard prevents double-enqueuing the same step.
|
||||
- `web/src/padelnomics/worker.py`: `handle_run_transform`, `handle_run_export`, `handle_run_pipeline`
|
||||
- `web/src/padelnomics/admin/pipeline_routes.py`: `_render_overview_partial()`, `_fetch_pipeline_tasks()`, `_format_duration()`, `pipeline_transform()`, `pipeline_trigger_transform()`; `pipeline_trigger_extract()` now HTMX-aware
|
||||
- `web/src/padelnomics/admin/templates/admin/pipeline.html`: pulse animation on `.status-dot.running`, Transform tab button, rewired header button
|
||||
- `web/src/padelnomics/admin/templates/admin/partials/pipeline_overview.html`: self-polling wrapper, HTMX Run buttons
|
||||
- `web/src/padelnomics/admin/templates/admin/partials/pipeline_transform.html`: new file
|
||||
|
||||
- **Affiliate programs management** — centralised retailer config (`affiliate_programs` table) with URL template + tracking tag + commission %. Products now use a program dropdown + product identifier (e.g. ASIN) instead of manually baking full URLs. URL is assembled at redirect time via `build_affiliate_url()`, so changing a tag propagates instantly to all products. Legacy products (baked `affiliate_url`) continue to work via fallback. Amazon OneLink configured in the Associates dashboard handles geo-redirect to local marketplaces — no per-country programs needed.
|
||||
- `web/src/padelnomics/migrations/versions/0027_affiliate_programs.py`: `affiliate_programs` table, nullable `program_id` + `product_identifier` columns on `affiliate_products`, seeds "Amazon" program, backfills ASINs from existing URLs
|
||||
- `web/src/padelnomics/affiliate.py`: `get_all_programs()`, `get_program()`, `get_program_by_slug()`, `build_affiliate_url()`; `get_product()` JOINs program for redirect assembly; `_parse_product()` extracts `_program` sub-dict
|
||||
- `web/src/padelnomics/app.py`: `/go/<slug>` uses `build_affiliate_url()` — program-based products get URLs assembled at redirect time
|
||||
- `web/src/padelnomics/admin/routes.py`: program CRUD routes (list, new, edit, delete — delete blocked if products reference the program); product form updated to program dropdown + identifier; `retailer` auto-populated from program name
|
||||
- New templates: `admin/affiliate_programs.html`, `admin/affiliate_program_form.html`, `admin/partials/affiliate_program_results.html`
|
||||
- Updated templates: `admin/affiliate_form.html` (program dropdown + JS toggle), `admin/base_admin.html` (Programs subnav tab)
|
||||
- 15 new tests in `web/tests/test_affiliate.py` (41 total)
|
||||
|
||||
### Fixed
|
||||
- **Data Platform admin view showing stale/zero row counts** — Docker web containers were mounting `/opt/padelnomics/data` (stale copy) instead of `/data/padelnomics` (live supervisor output). Fixed volume mount in all 6 containers (blue/green × app/worker/scheduler) and added `LANDING_DIR=/app/data/pipeline/landing` so extraction stats and landing zone file stats are visible to the web app.
|
||||
- **`workflows.toml` never found in dev** — `_REPO_ROOT` in `pipeline_routes.py` used `parents[5]` (one level too far up) instead of `parents[4]`. Workflow schedules now display correctly on the pipeline overview tab in dev.
|
||||
- **Article preview frontmatter bug** — `_rebuild_article()` in `admin/routes.py` now strips YAML frontmatter before passing markdown to `mistune.html()`, preventing raw `title:`, `slug:` etc. from appearing as visible text in article previews.
|
||||
|
||||
### Added
|
||||
- **Affiliate product system** — "Wirecutter for padel" editorial affiliate cards embedded in articles via `[product:slug]` and `[product-group:category]` markers, baked at build time into static HTML. `/go/<slug>` click-tracking redirect (302, GDPR-compliant daily-rotated IP hash). Admin CRUD (`/admin/affiliate`) with live preview, inline status toggle, HTMX search/filter. Click stats dashboard (pure CSS bar chart, top products/articles/retailers). 10 German equipment review article scaffolds seeded.
|
||||
- `web/src/padelnomics/migrations/versions/0026_affiliate_products.py`: `affiliate_products` + `affiliate_clicks` tables; `UNIQUE(slug, language)` constraint mirrors articles schema
|
||||
- `web/src/padelnomics/affiliate.py`: `get_product()`, `get_products_by_category()`, `get_all_products()`, `log_click()`, `hash_ip()`, `get_click_stats()`, `get_click_counts()`, `get_distinct_retailers()`
|
||||
- `web/src/padelnomics/content/routes.py`: `PRODUCT_RE`, `PRODUCT_GROUP_RE`, `bake_product_cards()` — chained after `bake_scenario_cards()` in `generate_articles()` and `preview_article()`
|
||||
- `web/src/padelnomics/app.py`: `/go/<slug>` route with rate limiting (60/min per IP) and referer-based article/language extraction
|
||||
- `web/src/padelnomics/admin/routes.py`: affiliate CRUD routes + `bake_product_cards()` chained in article rebuild flows
|
||||
- New templates: `partials/product_card.html`, `partials/product_group.html`, `admin/affiliate_products.html`, `admin/affiliate_form.html`, `admin/affiliate_dashboard.html`, `admin/partials/affiliate_results.html`, `admin/partials/affiliate_row.html`
|
||||
- `locales/en.json` + `locales/de.json`: 6 new affiliate i18n keys
|
||||
- `data/content/articles/`: 10 new German equipment review scaffolds (rackets, balls, shoes, accessories, gifts)
|
||||
- 26 tests in `web/tests/test_affiliate.py`
|
||||
|
||||
### Added
|
||||
- **Three-tier proxy system** for extraction pipeline: free (Webshare auto-fetched) → datacenter (`PROXY_URLS_DATACENTER`) → residential (`PROXY_URLS_RESIDENTIAL`). Webshare free proxies are now auto-fetched from their download API on each run — no more manually copying stale proxy lists.
|
||||
- `proxy.py`: added `fetch_webshare_proxies()` (stdlib urllib, bounded read + timeout), `load_proxy_tiers()` (assembles N tiers from env), generalised `make_tiered_cycler()` to accept `list[list[str]]` with N-level escalation. Exposes `is_exhausted()`, `active_tier_index()`, `tier_count()`.
|
||||
- `playtomic_availability.py`: both `extract()` and `extract_recheck()` now use `load_proxy_tiers()` + N-tier cycler. `_fetch_venues_parallel` `fallback_urls` param removed. `is_fallback_active()` replaced by `is_exhausted()`.
|
||||
- `playtomic_tenants.py`: uses `load_proxy_tiers()` flattened for simple round-robin.
|
||||
|
||||
### Changed
|
||||
- **Env vars renamed** (breaking): `PROXY_URLS` → removed, `PROXY_URLS_FALLBACK` → removed. New vars: `WEBSHARE_DOWNLOAD_URL`, `PROXY_URLS_DATACENTER`, `PROXY_URLS_RESIDENTIAL`.
|
||||
|
||||
### Added
|
||||
- **Phase 2a — NUTS-1 regional income differentiation** (`opportunity_score`): Munich and Berlin no longer share the same income figure as Chemnitz.
|
||||
- `eurostat.py`: added `nama_10r_2hhinc` dataset config (NUTS-2 cube with NUTS-1 entries); filter params now appended to API URL so the server pre-filters the large cube before download (also makes `ilc_di03` requests smaller).
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Padelnomics — Project Tracker
|
||||
|
||||
> Move tasks across columns as you work. Add new tasks at the top of the relevant column.
|
||||
> Last updated: 2026-02-27 (Phase 2b — EU NUTS-2 spatial join + US state income).
|
||||
> Last updated: 2026-02-28 (Affiliate programs management — centralised retailer config + URL template assembly).
|
||||
|
||||
---
|
||||
|
||||
@@ -132,6 +132,8 @@
|
||||
- [x] **pSEO article noindex** — `noindex` column on articles (migration 0025), `NOINDEX_THRESHOLDS` per-template lambdas in `content/__init__.py`, robots meta tag in `article_detail.html`, sitemap exclusion, pSEO dashboard count card + article row badge; 20 tests
|
||||
- [x] **group_key static article grouping** — migration 0020 adds `group_key TEXT` column; `_sync_static_articles()` auto-upserts `data/content/articles/*.md` on admin page load; `_get_article_list_grouped()` groups by `COALESCE(group_key, url_path)` so EN/DE static cornerstones pair into one row
|
||||
- [x] **Email-gated report PDF** — `reports/` blueprint with email capture gate + PDF download; premium WeasyPrint PDF (full-bleed navy cover, Padelnomics wordmark watermark, gold/teal accents); `make report-pdf` target; EN + DE i18n (26 keys, native German); state-of-padel report moved to `data/content/reports/`
|
||||
- [x] **Affiliate product system** — "Wirecutter for padel" editorial gear cards embedded in articles via `[product:slug]` / `[product-group:category]` markers, baked at build time; `/go/<slug>` click-tracking redirect (302, GDPR daily-rotated IP hash, rate-limited); admin CRUD with live preview, HTMX filter/search, status toggle; click stats dashboard (pure CSS charts); 10 German equipment review article scaffolds; 26 tests
|
||||
- [x] **Affiliate programs management** — `affiliate_programs` table centralises retailer configs (URL template, tracking tag, commission %); product form uses program dropdown + product identifier (ASIN etc.); `build_affiliate_url()` assembles at redirect time; legacy baked-URL products still work; admin CRUD (delete blocked if products reference program); Amazon OneLink for multi-marketplace; article frontmatter preview bug fixed; 41 tests
|
||||
|
||||
### SEO & Legal
|
||||
- [x] Sitemap (both language variants, `<lastmod>` on all entries)
|
||||
@@ -243,7 +245,6 @@
|
||||
|
||||
### Marketing & Content
|
||||
- [ ] LinkedIn presence (ongoing — founder posts, thought leadership)
|
||||
- [ ] "Wirecutter for padel" affiliate site (racket reviews, gear guides)
|
||||
- [ ] "The Padel Business Report" newsletter
|
||||
- [ ] Equipment supplier affiliate partnerships (€500–1,000/lead or 5%)
|
||||
- [ ] Padel podcasts (guest appearances)
|
||||
|
||||
27
README.md
27
README.md
@@ -396,18 +396,19 @@ docker compose logs -f app # tail logs
|
||||
|
||||
## CI/CD
|
||||
|
||||
Go to GitLab → padelnomics → Settings → CI/CD → Variables and add:
|
||||
Pull-based deployment via Gitea Actions — no SSH keys or deploy credentials in CI.
|
||||
|
||||
| Variable | Value | Notes |
|
||||
|----------|-------|-------|
|
||||
| SSH_PRIVATE_KEY | Your ed25519 private key | Mask it, type "Variable" |
|
||||
| DEPLOY_HOST | Your Hetzner server IP | e.g. 1.2.3.4 |
|
||||
| DEPLOY_USER | SSH username on the server | e.g. deploy or root |
|
||||
| SSH_KNOWN_HOSTS | Server host key | Run `ssh-keyscan $YOUR_SERVER_IP` |
|
||||
1. Push to master → Gitea Actions runs tests (`.gitea/workflows/ci.yaml`)
|
||||
2. On success, CI creates tag `v<run_number>` using the built-in `github.token`
|
||||
3. On-server supervisor polls for new tags every 60s and deploys automatically
|
||||
|
||||
Server-side one-time setup:
|
||||
1. Add the matching public key to `~/.ssh/authorized_keys` for the deploy user
|
||||
2. Clone the repo to `/opt/padelnomics`
|
||||
3. Create `.env` from `padelnomics/.env.example` with production values
|
||||
4. `chmod +x deploy.sh && ./deploy.sh` for the first deploy
|
||||
5. Point reverse proxy to port 5000
|
||||
**Server-side one-time setup:**
|
||||
```bash
|
||||
bash infra/setup_server.sh # creates padelnomics_service user, keys, dirs
|
||||
ssh root@<server> 'bash -s' < infra/bootstrap_supervisor.sh
|
||||
```
|
||||
|
||||
1. `setup_server.sh` generates an ed25519 SSH deploy key — add the printed public key to Gitea:
|
||||
`git.padelnomics.io → padelnomics → Settings → Deploy Keys → Add key (read-only)`
|
||||
2. Add the printed age public key to `.sops.yaml`, re-encrypt, commit + push
|
||||
3. Run `bootstrap_supervisor.sh` — clones from `git.padelnomics.io:2222`, decrypts secrets, starts systemd supervisor
|
||||
|
||||
88
data/content/articles/beste-padelschlaeger-de.md
Normal file
88
data/content/articles/beste-padelschlaeger-de.md
Normal file
@@ -0,0 +1,88 @@
|
||||
---
|
||||
title: "Die besten Padelschläger 2026: Unser ausführlicher Vergleich"
|
||||
slug: beste-padelschlaeger-de
|
||||
language: de
|
||||
url_path: /beste-padelschlaeger-2026
|
||||
meta_description: "Welcher Padelschläger ist der beste 2026? Wir haben die wichtigsten Modelle für Anfänger, Fortgeschrittene und Profis getestet und verglichen."
|
||||
---
|
||||
|
||||
# Die besten Padelschläger 2026: Unser ausführlicher Vergleich
|
||||
|
||||
<!-- TODO: Einleitung mit Hauptkeyword und USP dieser Seite (200–300 Wörter) -->
|
||||
|
||||
Wer einen neuen Padelschläger kaufen will, steht vor einer unüberschaubaren Auswahl. Mehr als 50 Marken, Hunderte von Modellen — und kein einziges unabhängiges Testlabor. Wir haben die meistverkauften und meistempfohlenen Schläger zusammengetragen und nach drei Kriterien bewertet: Spielgefühl, Haltbarkeit und Preis-Leistungs-Verhältnis.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Top-Empfehlungen
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Testsieger im Detail
|
||||
|
||||
<!-- TODO: Ausführliche Besprechung der Top 3–5 Modelle, je 300–500 Wörter pro Schläger -->
|
||||
|
||||
### Platz 1: [Produktname einfügen]
|
||||
|
||||
[product:platzhalter-schlaeger-1-amazon]
|
||||
|
||||
<!-- TODO: Erfahrungsbericht + Vor- und Nachteile im Prosatext -->
|
||||
|
||||
### Platz 2: [Produktname einfügen]
|
||||
|
||||
[product:platzhalter-schlaeger-2-amazon]
|
||||
|
||||
### Platz 3: [Produktname einfügen]
|
||||
|
||||
[product:platzhalter-schlaeger-3-amazon]
|
||||
|
||||
---
|
||||
|
||||
## So haben wir getestet
|
||||
|
||||
<!-- TODO: Kurze Beschreibung der Testmethodik (2–3 Absätze) -->
|
||||
|
||||
---
|
||||
|
||||
## Kaufberatung: Welcher Schläger passt zu mir?
|
||||
|
||||
<!-- TODO: Entscheidungsbaum / Tabelle nach Spielertyp -->
|
||||
|
||||
| Spielertyp | Empfohlene Form | Empfohlenes Gewicht |
|
||||
|---|---|---|
|
||||
| Anfänger | Rund | 355–365 g |
|
||||
| Allspieler | Tropfen | 360–370 g |
|
||||
| Fortgeschrittener | Diamant | 365–380 g |
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie oft sollte man einen Padelschläger wechseln?</summary>
|
||||
|
||||
<!-- TODO: Antwort (50–100 Wörter) -->
|
||||
|
||||
Bei regelmäßigem Spielen (2–3 Mal pro Woche) empfehlen wir einen Wechsel alle 12 bis 18 Monate. Der größte Qualitätsverlust entsteht nicht durch sichtbare Schäden, sondern durch den Abbau des Schaumstoffkerns, der das Spielgefühl verändert.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Was kostet ein guter Padelschläger?</summary>
|
||||
|
||||
<!-- TODO: Preisklassen-Überblick -->
|
||||
|
||||
Gute Einstiegsschläger gibt es ab 50 Euro. Für Fortgeschrittene empfehlen wir 100–200 Euro, für ambitionierte Spieler 200–350 Euro. Über 400 Euro kostet nur das Pro-Segment, das für die meisten Freizeitspieler überdimensioniert ist.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Runder oder Diamant-Schläger — was ist besser?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Runde Schläger verzeihen mehr Fehlschläge und eignen sich für Anfänger und defensive Spieler. Diamant-Schläger liefern mehr Power und werden von Angriffsspielern bevorzugt. Für die meisten Freizeitspieler ist eine Tropfen- oder runde Form die sicherere Wahl.
|
||||
|
||||
</details>
|
||||
69
data/content/articles/padel-ausruestung-anfaenger-de.md
Normal file
69
data/content/articles/padel-ausruestung-anfaenger-de.md
Normal file
@@ -0,0 +1,69 @@
|
||||
---
|
||||
title: "Padel-Ausrüstung für Anfänger: Was brauche ich wirklich?"
|
||||
slug: padel-ausruestung-anfaenger-de
|
||||
language: de
|
||||
url_path: /padel-ausruestung-anfaenger
|
||||
meta_description: "Was braucht man für Padel? Unser Ausrüstungsguide für Einsteiger — von Schläger und Schuhen bis zur Schutztasche. Was ist unverzichtbar, was ist Luxus?"
|
||||
---
|
||||
|
||||
# Padel-Ausrüstung für Anfänger: Was brauche ich wirklich?
|
||||
|
||||
<!-- TODO: Einleitung — klare Orientierung für Einsteiger -->
|
||||
|
||||
Padel ist im Vergleich zu vielen anderen Sportarten günstig einzusteigen. Wer zum ersten Mal auf den Court geht, braucht eigentlich nur drei Dinge: einen Schläger, die richtigen Schuhe und Bälle. Der Rest ist komfortsteigerndes Zubehör — notwendig wird es erst, wenn man ernsthafter spielt.
|
||||
|
||||
---
|
||||
|
||||
## Die unverzichtbare Grundausstattung
|
||||
|
||||
### 1. Schläger
|
||||
|
||||
[product:platzhalter-anfaenger-schlaeger-amazon]
|
||||
|
||||
<!-- TODO: 1–2 Absätze zum Einstiegsschläger -->
|
||||
|
||||
### 2. Schuhe
|
||||
|
||||
[product:platzhalter-padelschuh-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### 3. Bälle
|
||||
|
||||
[product:platzhalter-ball-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Was kann ich mir zunächst sparen?
|
||||
|
||||
<!-- TODO: Schläger-Tasche, Griffband, Sportbrille — wann sinnvoll? -->
|
||||
|
||||
---
|
||||
|
||||
## Das komplette Anfänger-Set: Unsere Empfehlung
|
||||
|
||||
[product-group:accessory]
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie viel kostet ein komplettes Padel-Starterpaket?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Für rund 150 Euro bekommt man einen soliden Anfängerschläger (60–90 €), passende Padelschuhe (50–70 €) und eine Dose Bälle (6–10 €). Alles darüber hinaus ist optional.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Kann ich mit geliehener Ausrüstung starten?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ja, für die ersten Einheiten ist das sinnvoll. Die meisten Padel-Center verleihen Schläger für 2–5 Euro pro Einheit. Wer mehr als 3–4 Mal spielen will, für den lohnt sich ein eigener Schläger — schon allein wegen des vertrauten Spielgefühls.
|
||||
|
||||
</details>
|
||||
@@ -91,6 +91,8 @@ Die Bilanz am ersten Betriebstag: Aktiva (Anlagevermögen nach CAPEX, Anfangsliq
|
||||
|
||||
## KfW-Förderprogramme für Padelhallen
|
||||
|
||||
Abschnitt 9 des Gliederungsrahmens verlangt: Welche Förderprogramme wurden geprüft? Hier ist die Antwort, die Ihr Businessplan liefern muss.
|
||||
|
||||
Die KfW bietet mehrere Programme, die für Padelhallen-Projekte relevant sein können. Wichtig: KfW-Kredite werden nicht direkt bei der KfW beantragt, sondern über die Hausbank. Die Hausbank leitet den Antrag weiter und trägt einen Teil des Ausfallrisikos mit — was erklärt, warum sie ein starkes Eigeninteresse an der Qualität des Businessplans hat.
|
||||
|
||||
**KfW Unternehmerkredit (037/047)**
|
||||
@@ -129,7 +131,7 @@ Was passiert, wenn die Auslastung 10 Prozentpunkte unter Plan liegt? Wenn die Ba
|
||||
|
||||
### 4. Unvollständiger CAPEX
|
||||
|
||||
Häufig unterschätzt: Nebenkosten des Baus (Architektenhonorar, Baunebenkosten, Baugenehmigungsgebühren), Working Capital für die Anlaufphase (3–6 Monate Betriebskosten als Puffer), Kosten der Betriebsaufnahme (Marketing, Erstausstattung, Versicherungen vor Eröffnung), Unvorhergesehenes (Bankstandard: 10 Prozent Contingency auf den Rohbau). Wer diese Positionen vergisst, finanziert sich zu knapp — und die Bank bemerkt es.
|
||||
Häufig unterschätzt: Nebenkosten des Baus (Architektenhonorar, Baunebenkosten, Baugenehmigungsgebühren), Working Capital für die Anlaufphase (3–6 Monate Betriebskosten als Puffer), Kosten der Betriebsaufnahme (Marketing, Erstausstattung, Versicherungen vor Eröffnung), Unvorhergesehenes (Mindestpuffer: 10 Prozent auf den Rohbau — bei Sportstättenumbauten realistisch eher 15–20 Prozent). Wer diese Positionen vergisst, finanziert sich zu knapp — und die Bank bemerkt es.
|
||||
|
||||
### 5. KfW nicht adressiert
|
||||
|
||||
@@ -148,7 +150,7 @@ Fragen, die Sie sich vor der Bürgschaftsübernahme stellen sollten:
|
||||
- Gibt es Vermögenswerte, die ich herauslösen kann (z.B. durch Schenkung an Ehepartner vor Gründung — hier unbedingt Rechtsberatung einholen, da Anfechtungsrisiken bestehen)?
|
||||
- Wie viele Monate Verlustbetrieb kann ich aus eigenen Mitteln abfedern?
|
||||
|
||||
Wer diese Fragen beantwortet hat, hat das Projekt ernst genommen.
|
||||
Wer diese Fragen beantwortet hat, hat das Projekt ernst genommen. Das spüren Banken.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ The formula:
|
||||
DSCR = operating cash flow ÷ annual debt service (interest + principal)
|
||||
```
|
||||
|
||||
The standard in German SME lending: **1.2 to 1.5x**. For every €1 of debt service, the project needs to generate €1.20–1.50 of cash flow. Below 1.2x, you'll either face rejection or be asked to inject more equity. A plan that doesn't make the DSCR calculation transparent forces the loan officer to do the math himself — and he'll be more conservative than you.
|
||||
The standard in German SME lending: **1.2 to 1.5x**. For every €1 of debt service, the project needs to generate €1.20–1.50 of cash flow. Below 1.2x, you'll either face rejection or be asked to inject more equity. A plan that doesn't make the DSCR calculation transparent forces the loan officer to do the math himself — and they'll be more conservative than you.
|
||||
|
||||
The other hard constraint is **equity contribution** (*Eigenkapitalquote*): banks typically expect the founder to put in 20–30% of total investment. KfW subsidy programs can partly substitute for equity (more on that below), but they never replace it entirely. Coming to the table with 10% equity rarely works.
|
||||
|
||||
@@ -89,6 +89,8 @@ The balance sheet on Day 1: assets (fixed assets after CAPEX, opening cash) vers
|
||||
|
||||
## KfW Subsidy Programs for Padel Hall Projects
|
||||
|
||||
Section 9 of the business plan framework above asks which financing programs have been evaluated. Here's the answer your plan needs to provide.
|
||||
|
||||
KfW (Germany's state development bank) offers several programs relevant to padel hall construction and launch. One crucial operational detail: KfW loans are not applied for directly at KfW. They're applied for through your *Hausbank* (house bank), which passes the application to KfW and shares a portion of the default risk. This is precisely why your Hausbank cares so much about the quality of your business plan — they're on the hook too.
|
||||
|
||||
**KfW Unternehmerkredit (programs 037/047)**
|
||||
@@ -109,7 +111,7 @@ Each German state (*Bundesland*) runs its own SME and startup lending programs t
|
||||
- Hamburg: IFB Hamburg
|
||||
- Saxony: Sächsische Aufbaubank (SAB)
|
||||
|
||||
These programs are overlooked in the majority of business plans we've reviewed — despite the fact that combining them with KfW can meaningfully reduce the equity burden.
|
||||
These programs are overlooked in the majority of business plans we've reviewed — even though combining them with KfW can meaningfully reduce the equity burden.
|
||||
|
||||
---
|
||||
|
||||
@@ -129,7 +131,7 @@ What happens if utilization comes in 10 percentage points below plan? If constru
|
||||
|
||||
### 4. Incomplete CAPEX
|
||||
|
||||
Frequently underestimated items: architect and engineering fees, permitting fees and costs of the *Baugenehmigung* (building permit), working capital for the ramp-up period (3–6 months of operating costs), pre-opening expenses (marketing, initial inventory, pre-opening insurance), and contingency (the industry standard is 10% of raw construction costs). Forget these, and you're underfunded from Day 1.
|
||||
Frequently underestimated items: architect and engineering fees, permitting fees and costs of the *Baugenehmigung* (building permit), working capital for the ramp-up period (3–6 months of operating costs), pre-opening expenses (marketing, initial inventory, pre-opening insurance), and contingency (minimum 10% of raw construction costs — 15–20% is more realistic for sports hall conversions). Forget these, and you're underfunded from Day 1.
|
||||
|
||||
### 5. No mention of KfW or subsidy programs
|
||||
|
||||
@@ -148,7 +150,7 @@ Questions worth answering before you proceed:
|
||||
- Are there assets that could be structured outside the exposure (specialist legal advice is essential here, as pre-signing asset transfers can be challenged under German insolvency law)?
|
||||
- How many months of operating losses could I absorb from personal resources?
|
||||
|
||||
A founder who has worked through these questions has taken the project seriously. That comes across in a bank conversation.
|
||||
A founder who has worked through these questions has taken the project seriously. Banks can tell.
|
||||
|
||||
---
|
||||
|
||||
|
||||
67
data/content/articles/padel-geschenke-de.md
Normal file
67
data/content/articles/padel-geschenke-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padel-Geschenke: Die besten Ideen für Padelbegeisterte"
|
||||
slug: padel-geschenke-de
|
||||
language: de
|
||||
url_path: /padel-geschenke
|
||||
meta_description: "Padel-Geschenke für Geburtstage, Weihnachten oder als Überraschung. Von der günstigen Kleinigkeit bis zum hochwertigen Schläger — für jedes Budget."
|
||||
---
|
||||
|
||||
# Padel-Geschenke: Die besten Ideen für Padelbegeisterte
|
||||
|
||||
<!-- TODO: Einleitung — Padel boomt, Geschenkideen gefragt -->
|
||||
|
||||
Padel ist der am schnellsten wachsende Sport Europas — und viele haben gerade erst damit begonnen. Wer einem Padel-Fan ein Geschenk machen will, steht vor der Frage: Was fehlt ihm noch? Dieser Guide listet die besten Ideen nach Preisklassen, vom kleinen Mitbringsel bis zum Wunschschläger.
|
||||
|
||||
---
|
||||
|
||||
## Geschenke unter 15 Euro
|
||||
|
||||
[product-group:grip]
|
||||
|
||||
<!-- TODO: Griffband, Bälle, kleine Accessoires -->
|
||||
|
||||
---
|
||||
|
||||
## Geschenke unter 50 Euro
|
||||
|
||||
[product-group:accessory]
|
||||
|
||||
<!-- TODO: Sporttasche, Cover, Trainingszubehör -->
|
||||
|
||||
---
|
||||
|
||||
## Geschenke unter 100 Euro
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
[product:platzhalter-schuh-amazon]
|
||||
|
||||
---
|
||||
|
||||
## Das perfekte Geschenk: Ein neuer Schläger
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
<!-- TODO: Hinweis auf Wunschliste / Amazon-Wunschliste-Tipp -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie finde ich heraus, welcher Schläger passt?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Fragen Sie die beschenkte Person nach ihrem aktuellen Modell oder lassen Sie sie aus einer Empfehlungsliste wählen. Schläger sind sehr persönlich — eine Gutscheinkarte für einen Fachhandel ist oft die sicherste Option.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Gibt es Padel-Geschenksets?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Einige Marken bieten Starter-Sets an (Schläger + Bälle + Cover). Diese sind im Vergleich zum Einzelkauf oft günstiger und eignen sich als Komplett-Einstiegsgeschenk für Neuspieler.
|
||||
|
||||
</details>
|
||||
@@ -17,21 +17,54 @@ This guide walks through all five phases and 23 steps between your initial marke
|
||||
|
||||
## The 5 Phases at a Glance
|
||||
|
||||
```
|
||||
Phase 1 Phase 2 Phase 3 Phase 4 Phase 5
|
||||
Feasibility → Planning & → Construction → Pre- → Operations &
|
||||
& Concept Design / Conversion Opening Optimization
|
||||
|
||||
Month 1–3 Month 3–6 Month 6–12 Month 10–13 Ongoing
|
||||
|
||||
Steps 1–5 Steps 6–11 Steps 12–16 Steps 17–20 Steps 21–23
|
||||
```
|
||||
<div class="article-timeline">
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">1</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Feasibility & Concept</div>
|
||||
<div class="article-timeline__subtitle">Market research, concept, site scouting</div>
|
||||
<div class="article-timeline__meta">Month 1–3 · Steps 1–5</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">2</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Planning & Design</div>
|
||||
<div class="article-timeline__subtitle">Architect, permits, financing</div>
|
||||
<div class="article-timeline__meta">Month 3–6 · Steps 6–11</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">3</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Construction</div>
|
||||
<div class="article-timeline__subtitle">Build, courts, IT systems</div>
|
||||
<div class="article-timeline__meta">Month 6–12 · Steps 12–16</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">4</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Pre-Opening</div>
|
||||
<div class="article-timeline__subtitle">Hiring, marketing, soft launch</div>
|
||||
<div class="article-timeline__meta">Month 10–13 · Steps 17–20</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">5</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Operations</div>
|
||||
<div class="article-timeline__subtitle">Revenue streams, optimization</div>
|
||||
<div class="article-timeline__meta">Ongoing · Steps 21–23</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Feasibility and Concept (Months 1–3)
|
||||
|
||||
This is the most important phase and the one where projects most often go wrong in one of two directions: either stopping too early because the first obstacle looks daunting, or moving too fast because enthusiasm outpaces analysis. Rigorous work here prevents expensive corrections later.
|
||||
This is the most important phase — and where projects most often go wrong in one of two directions: stopping too early because the first obstacle looks daunting, or moving too fast because enthusiasm outpaces analysis. Rigorous work here prevents expensive corrections later.
|
||||
|
||||
### Step 1: Market Research
|
||||
|
||||
@@ -49,7 +82,7 @@ Good market research won't guarantee success, but it will protect you from the m
|
||||
|
||||
Your market research should drive your concept. How many courts? Which customer segments — competitive recreational players, club training, corporate wellness, broad community use? What service level — a pure booking facility or a full-concept venue with lounge, bar, pro shop, and coaching program?
|
||||
|
||||
Every decision here cascades into investment requirements, operating costs, and revenue potential. Nail this down before moving to site selection.
|
||||
Every decision here cascades into investment requirements, operating costs, and revenue potential. Nail the concept before moving to site selection.
|
||||
|
||||
### Step 3: Location Scouting
|
||||
|
||||
@@ -105,7 +138,12 @@ Deliverables from this phase:
|
||||
- **MEP design (mechanical, electrical, plumbing):** Heating, ventilation, air conditioning, electrical, drainage — typically the most expensive trade package in a sports hall conversion
|
||||
- **Fire safety strategy**
|
||||
|
||||
> **The most expensive planning mistake in padel hall builds:** underestimating HVAC complexity and budget. Large indoor courts need precise temperature and humidity control — not just for player comfort, but for playing surface longevity and air quality. Courts installed in a poorly climate-controlled building will degrade faster and generate complaints. Budget for it properly from the start, not as a value-engineering target.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">The most expensive planning mistake in padel hall builds</span>
|
||||
<p>Underestimating HVAC complexity and budget. Large indoor courts need precise temperature and humidity control — not just for player comfort, but for playing surface longevity and air quality. Courts installed in a poorly climate-controlled building will degrade faster and generate complaints. Budget for it properly from the start, not as a value-engineering target.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
### Step 8: Court Supplier Selection
|
||||
|
||||
@@ -125,7 +163,7 @@ Approach lenders with your full business plan. Typical capital structure for pad
|
||||
- 50–70% debt (bank loan)
|
||||
- 30–50% equity (own funds, silent partners, shareholder loans)
|
||||
|
||||
What lenders will require: a credible financial model, collateral, your track record, and — almost universally for single-asset leisure facilities — personal guarantees from principal shareholders. See the companion article on investment risks for a full treatment of personal guarantee exposure.
|
||||
What lenders will require: a credible financial model, collateral, your track record, and — almost universally for single-asset leisure facilities — personal guarantees from principal shareholders. The companion article on investment risks covers personal guarantee exposure in full.
|
||||
|
||||
Investigate public funding programs: development bank loans, regional sports infrastructure grants, and municipal co-investment schemes can reduce either equity requirements or interest burden. This research is worth several hours of your time.
|
||||
|
||||
@@ -160,7 +198,12 @@ Courts are installed after the building envelope is weathertight. This is a hard
|
||||
|
||||
Glass panels, artificial turf, and court metalwork must not be exposed to construction dust, moisture, and site traffic. Projects that try to accelerate schedules by installing courts before the building is properly enclosed regularly end up with surface contamination, glass damage, and voided manufacturer warranties.
|
||||
|
||||
> **The most common construction mistake on padel hall projects:** rushing court installation sequencing under schedule pressure. The pressure to hit an opening date is real — but installing courts into an unenclosed building is one of the most reliable ways to add cost and delay, not reduce them. Hold the sequence.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">The most common construction mistake on padel hall projects</span>
|
||||
<p>Rushing court installation sequencing under schedule pressure. The pressure to hit an opening date is real — but installing courts into an unenclosed building is one of the most reliable ways to add cost and delay, not reduce them. Hold the sequence.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Allow two to four weeks for court installation per batch, depending on the manufacturer's crew capacity. Build this explicitly into your master program.
|
||||
|
||||
@@ -174,7 +217,12 @@ Decide early: which booking platform, which point-of-sale system, and whether yo
|
||||
|
||||
Access control systems must be coordinated with the electrical design. Adding them in the final stages of construction is possible but costs more.
|
||||
|
||||
> **The most common pre-opening mistake:** the booking system isn't fully configured, tested, and working on day one. A broken booking flow, failed test payments, or a QR code that leads to an error page on opening day kills your launch momentum in a way that's difficult to recover from. Test the system end-to-end — including real bookings, real payments, and real cancellations — two to four weeks before opening.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">The most common pre-opening mistake</span>
|
||||
<p>The booking system isn't fully configured, tested, and working on day one. A broken booking flow, failed test payments, or a QR code that leads to an error page on opening day kills your launch momentum in a way that's difficult to recover from. Test the system end-to-end — including real bookings, real payments, and real cancellations — two to four weeks before opening.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
### Step 16: Inspections and Certifications
|
||||
|
||||
@@ -248,13 +296,38 @@ Court bookings are your core revenue, but rarely your only opportunity:
|
||||
|
||||
Patterns emerge when you observe padel hall projects across a market over time.
|
||||
|
||||
**Projects that go over budget** almost always cut at the wrong place early — too little HVAC budget, no construction contingency, a cheap general contractor without adequate contractual protection. The savings on the way in become much larger costs on the way out.
|
||||
<div class="article-cards">
|
||||
<div class="article-card article-card--failure">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projects that go over budget</span>
|
||||
<p class="article-card__body">Almost always cut at the wrong place early — too little HVAC budget, no construction contingency, a cheap general contractor without adequate contractual protection. The savings on the way in become much larger costs on the way out.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--failure">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projects that slip their schedule</span>
|
||||
<p class="article-card__body">Consistently underestimate the regulatory process. Permits, noise assessments, and change-of-use applications take time that money cannot buy once you've started too late. Start conversations with authorities before you need the approvals.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--failure">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projects that open weakly</span>
|
||||
<p class="article-card__body">Started marketing too late and tested the booking system too late. An empty calendar on day one and a broken booking page create impressions that stick longer than the opening week.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projects that succeed long-term</span>
|
||||
<p class="article-card__body">Treat all three phases — planning, build, and opening — with equal rigor, and invest early and consistently in community and repeat customers.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
**Projects that slip their schedule** consistently underestimate the regulatory process. Permits, noise assessments, and change-of-use applications take time that money cannot buy once you've started too late. Start conversations with authorities before you need the approvals, not when you need them.
|
||||
|
||||
**Projects that open weakly** started marketing too late and tested the booking system too late. An empty calendar on day one and a broken booking page create impressions that stick longer than the opening week.
|
||||
|
||||
**Projects that succeed long-term** treat all three phases — planning, build, and opening — with equal rigor, and invest early and consistently in community and repeat customers.
|
||||
Building a padel hall is complex, but it is a solved problem. The failures are nearly always the same failures. So are the successes.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -9,11 +9,11 @@ cornerstone: C2
|
||||
|
||||
# How Much Does It Cost to Open a Padel Hall in Germany? Complete 2026 CAPEX Breakdown
|
||||
|
||||
Anyone who has started researching padel hall investment in Germany has encountered the same frustrating non-answer: "it depends." And it genuinely does — total project costs for a six-court indoor facility range from **€930,000 to €1.9 million**, a span wide enough to make planning feel impossible.
|
||||
Anyone researching padel hall investment in Germany hits the same frustrating non-answer: "it depends." And it genuinely does — total project costs for a six-court indoor facility range from **€930,000 to €1.9 million**, a span wide enough to make planning feel impossible.
|
||||
|
||||
But that range is not noise. It reflects specific, quantifiable decisions: whether you're fitting out an existing warehouse or building from scratch, whether you're in Munich or Leipzig, whether you want panorama glass courts or standard construction. Once you understand where the variance lives, the numbers become plannable.
|
||||
|
||||
This article gives you the complete picture: itemized CAPEX, city-by-city rent and booking rates, a full operating cost breakdown, a three-year P&L projection, and the key metrics your bank will want to see. All figures are based on real German market data from 2025–2026. By the end, you should be able to build a credible first-pass financial model for your specific scenario — and walk into a lender conversation with confidence.
|
||||
This article gives you the complete picture: itemized CAPEX, city-by-city rent and booking rates, a full operating cost breakdown, a three-year P&L projection, and the key metrics your bank will want to see. All figures are based on real German market data from 2025–2026. By the end, you'll have everything you need to build a credible first-pass financial model for your specific scenario — and walk into a lender conversation with confidence.
|
||||
|
||||
---
|
||||
|
||||
@@ -21,7 +21,7 @@ This article gives you the complete picture: itemized CAPEX, city-by-city rent a
|
||||
|
||||
The single largest driver of CAPEX variance is construction. Converting a suitable existing warehouse — one that already has the necessary ceiling height (8–9 m clear) and adequate structural load — costs vastly less than a ground-up build or a complete gut-renovation. This line item alone accounts for €400,000 to €800,000 of the total budget.
|
||||
|
||||
Location adds another layer of variance. The same 2,000 sqm hall costs 40–60% more to rent in Munich than in Leipzig. That gap shows up not just in annual OPEX but in the lease deposit and the working capital reserve you need to fund the ramp-up — both of which are part of your initial CAPEX.
|
||||
Location adds another layer of variance. The same 2,000 sqm hall costs 40–60% more to rent in Munich than in Leipzig across comparable market tiers — at the extremes, the gap is considerably wider. That difference runs through every budget line: not just annual rent, but the lease deposit and working capital reserve needed at launch, both part of your initial CAPEX.
|
||||
|
||||
For a **six-court indoor facility** with solid but not extravagant fit-out, the realistic planning figure is **€1.2–1.5 million all-in**. Projects that come in below that typically either benefited from an exceptional real estate deal or — more often — undercounted one of the three most expensive items: construction, HVAC, and the operating reserve.
|
||||
|
||||
@@ -56,6 +56,8 @@ For a **six-court indoor facility** with solid but not extravagant fit-out, the
|
||||
|
||||
## Commercial Rent by German City
|
||||
|
||||
Construction and courts consume most of your initial budget. What determines long-term viability is what you pay every month: rent.
|
||||
|
||||
A six-court facility with changing rooms, a reception area, and a lounge requires **1,500–2,500 sqm** of floor space. Current industrial/warehouse lease rates across major German cities:
|
||||
|
||||
| City | Rent €/sqm/month | Typical monthly cost (2,000 sqm) |
|
||||
@@ -77,7 +79,7 @@ One structural note: German commercial landlords typically require lease terms o
|
||||
|
||||
## Court Hire Rates: What the Market Will Bear
|
||||
|
||||
Booking rates vary significantly by city and time slot. The following figures are drawn from platform data and direct market surveys:
|
||||
Revenue potential tracks location almost as closely as rent does. The following booking rates are drawn from platform data and direct market surveys:
|
||||
|
||||
| City | Off-Peak (€/hr) | Peak (€/hr) | Confidence |
|
||||
|---|---|---|---|
|
||||
@@ -113,6 +115,8 @@ Operating cost projections are where business plans most often diverge from real
|
||||
| Admin, accounting, legal | €20,000 | €22,000 | €24,000 |
|
||||
| **Total OPEX** | **€490,000** | **€530,000** | **€566,000** |
|
||||
|
||||
Note: the rent line reflects a well-positioned facility in a mid-tier city. For Munich or Berlin, adjust upward using the city rent table above — and recalibrate your revenue assumptions accordingly.
|
||||
|
||||
**Staffing** is the line that most first-time operators get wrong. Five FTEs is a genuine minimum for professional operations — reception, court management, a coach, administration. In Germany, employer social security contributions add roughly 20% on top of gross wages. €200k in Year 1 for a five-person team is lean, not generous.
|
||||
|
||||
**Energy** depends heavily on the building envelope. An older warehouse with poor insulation and an oversized, inefficient HVAC installation can run 30–50% higher than the figures shown here. Commissioning a quick energy audit before signing the lease is cheap insurance.
|
||||
@@ -167,13 +171,13 @@ On an €800k loan at 5% over 10 years, annual debt service is approximately €
|
||||
|
||||
## What Lenders Actually Look For
|
||||
|
||||
A padel hall is an unusual asset class for most bank credit officers. What moves a credit committee is not enthusiasm for the sport — it is the rigor of the financial documentation.
|
||||
A padel hall is an unfamiliar asset class for most bank credit officers. They have no mental model for court utilization rates or booking yield — and that is actually an opportunity. What moves a credit committee is not enthusiasm for the sport. It is the rigor of the financial documentation. Arrive with clean numbers and you stand out from the start.
|
||||
|
||||
**DSCR of 1.2–1.5x minimum.** Lenders want operating cash flow to cover debt service with a 20–50% buffer. The base case in this model clears that bar easily; your job is to show it holds under stress scenarios too.
|
||||
|
||||
**Signed lease agreement.** Without a lease in place, the credit assessment stays hypothetical. A long-term lease with indexed escalation is a positive signal to lenders — it translates future revenue into something closer to contracted income.
|
||||
**Signed lease agreement.** Without a lease in place, the credit assessment stays hypothetical. A long-term lease with indexed escalation is a positive signal — it converts uncertain future revenue into something closer to contracted income on the credit committee's worksheet.
|
||||
|
||||
**Monthly cashflow model for Year 1.** Lenders do not expect monthly forecasts to be accurate. They use them to assess whether you have thought through the ramp-up — the timing of fit-out completion, the month of first bookings, the staffing build-out. A monthly model signals operational seriousness.
|
||||
**Monthly cash flow model for Year 1.** Lenders do not expect monthly forecasts to be accurate. They use them to assess whether you have thought through the ramp-up — the timing of fit-out completion, the month of first bookings, the staffing build-out. A monthly model signals operational seriousness.
|
||||
|
||||
**Sensitivity analysis.** Show three scenarios: base case (45–60% utilization), downside (35%), and stress (25%). If your project only works at optimistic assumptions, that is important information — for you, not just for the bank.
|
||||
|
||||
@@ -183,8 +187,8 @@ A dedicated article on structuring a padel hall business plan and navigating Ger
|
||||
|
||||
## Bottom Line
|
||||
|
||||
Opening a padel hall in Germany in 2026 is a real capital commitment: €930k on the low end, €1.9M at the top, with €1.2–1.5M as the honest planning figure for a solid six-court operation. The economics, modeled carefully, are genuinely attractive — payback in 3–5 years, 60%+ cash-on-cash return at maturity, and a market that continues to grow.
|
||||
Opening a padel hall in Germany in 2026 is a real capital commitment: €930k on the low end, €1.9M at the top, with €1.2–1.5M as the honest planning figure for a solid six-court operation. The economics, done right, are genuinely attractive — payback in 3–5 years, 60%+ cash-on-cash return at maturity, and a market that continues to grow.
|
||||
|
||||
The investors who succeed in this space are not the ones who found a cheaper build. They are the ones who understood the numbers precisely enough to make the right location and concept decisions early — and to structure their financing before the costs escalated.
|
||||
The investors who succeed here are not the ones who found a cheaper build. They are the ones who understood the numbers precisely enough to make the right location and concept decisions early — and to structure their financing before the costs escalated.
|
||||
|
||||
**Next step:** Use the Padelnomics Financial Planner to model your specific scenario — your city, your financing mix, your pricing assumptions. The model above is the starting point. Your hall deserves a projection built around your actual numbers.
|
||||
**Next step:** Use the Padelnomics Financial Planner to model your specific scenario — your city, your financing mix, your pricing assumptions. The figures in this article are your starting point; your hall deserves a projection built around your actual numbers.
|
||||
|
||||
@@ -121,6 +121,8 @@ Every state has a development bank: Investitionsbank Schleswig-Holstein, Thürin
|
||||
|
||||
## Personal Guarantee Reality: Don't Avoid This Conversation
|
||||
|
||||
Once the debt structure is in place, there is one more item that belongs in every financing conversation — one that too often goes unaddressed until the term sheet arrives.
|
||||
|
||||
German banks financing a padel hall through a standalone project company will almost always require **persönliche Bürgschaft** (personal guarantee) from the founders. This means your personal assets — home, savings, existing investments — are at risk if the business fails.
|
||||
|
||||
Three ways to limit this exposure:
|
||||
|
||||
@@ -21,20 +21,20 @@ This article covers the 14 risks that don't get enough airtime in investor discu
|
||||
|
||||
| # | Risk | Category | Severity |
|
||||
|---|------|----------|----------|
|
||||
| 1 | Trend / fad risk | Strategic | High |
|
||||
| 2 | Construction cost overruns | Construction & Development | High |
|
||||
| 3 | Construction delays | Construction & Development | High |
|
||||
| 4 | Landlord risk: sale, insolvency, non-renewal | Property & Lease | High |
|
||||
| 5 | New competitor in your catchment | Competition | Medium–High |
|
||||
| 6 | Key-person dependency | Operations | Medium |
|
||||
| 7 | Staff retention and wage pressure | Operations | Medium |
|
||||
| 8 | Court surface and maintenance cycles | Operations | Medium |
|
||||
| 9 | Energy price volatility | Financial | Medium |
|
||||
| 10 | Interest rate risk | Financial | Medium |
|
||||
| 11 | Personal guarantee exposure | Financial | High |
|
||||
| 12 | Customer concentration | Financial | Medium |
|
||||
| 13 | Noise complaints and regulatory restrictions | Regulatory & Legal | Medium |
|
||||
| 14 | Booking platform dependency | Regulatory & Legal | Low–Medium |
|
||||
| 1 | Trend / fad risk | Strategic | <span class="severity severity--high">High</span> |
|
||||
| 2 | Construction cost overruns | Construction & Development | <span class="severity severity--high">High</span> |
|
||||
| 3 | Construction delays | Construction & Development | <span class="severity severity--high">High</span> |
|
||||
| 4 | Landlord risk: sale, insolvency, non-renewal | Property & Lease | <span class="severity severity--high">High</span> |
|
||||
| 5 | New competitor in your catchment | Competition | <span class="severity severity--medium-high">Medium–High</span> |
|
||||
| 6 | Key-person dependency | Operations | <span class="severity severity--medium">Medium</span> |
|
||||
| 7 | Staff retention and wage pressure | Operations | <span class="severity severity--medium">Medium</span> |
|
||||
| 8 | Court surface and maintenance cycles | Operations | <span class="severity severity--medium">Medium</span> |
|
||||
| 9 | Energy price volatility | Financial | <span class="severity severity--medium">Medium</span> |
|
||||
| 10 | Interest rate risk | Financial | <span class="severity severity--medium">Medium</span> |
|
||||
| 11 | Personal guarantee exposure | Financial | <span class="severity severity--high">High</span> |
|
||||
| 12 | Customer concentration | Financial | <span class="severity severity--medium">Medium</span> |
|
||||
| 13 | Noise complaints and regulatory restrictions | Regulatory & Legal | <span class="severity severity--medium">Medium</span> |
|
||||
| 14 | Booking platform dependency | Regulatory & Legal | <span class="severity severity--low-medium">Low–Medium</span> |
|
||||
|
||||
---
|
||||
|
||||
@@ -50,7 +50,7 @@ Squash followed a strikingly similar pattern in the 1980s: grassroots boom, infr
|
||||
|
||||
The counterargument has real merit: padel requires permanent, fixed courts. That infrastructure creates genuine stickiness that squash never had — players build habits, drive to a venue, become regulars. Padel is also demonstrably more accessible and social than squash, which supports long-term participation. German player numbers show no plateau effect yet.
|
||||
|
||||
Even so: if utilization falls from 65% to 35% in year five because hype fades, your model breaks. That scenario is largely unhedgeable — but it can be modeled. What does your P&L look like at 40% utilization sustained for two years? Can your financing structure survive it? If you haven't answered that question, you're not done with your business plan.
|
||||
Even so — if utilization falls from 65% to 35% in year five because hype fades, your model breaks. That scenario is largely unhedgeable — but it can be modeled. What does your P&L look like at 40% utilization sustained for two years? Can your financing structure survive it? If you haven't answered that question, you're not done with your business plan.
|
||||
|
||||
---
|
||||
|
||||
@@ -91,7 +91,7 @@ When a new competitor opens ten minutes away in year three, you feel it in utili
|
||||
|
||||
Padel has no real moat. No patents, no network effects, no meaningful switching costs. What you have is location, the community you've built, and service quality — genuine advantages, but ones that require continuous investment to maintain.
|
||||
|
||||
**The right move is to model this explicitly.** What does your P&L look like when a competitor opens in year three and takes 20% of your demand? What operational responses are available — pricing, loyalty programs, corporate contracts, additional programming? Having thought through the competitive response in advance means you won't be improvising when it happens.
|
||||
**Model this explicitly.** What does your P&L look like when a competitor opens in year three and takes 20% of your demand? What operational responses are available — pricing, loyalty programs, corporate contracts, additional programming? Thinking through the competitive response in advance means you won't be improvising when it happens.
|
||||
|
||||
---
|
||||
|
||||
@@ -111,7 +111,7 @@ Good facility managers, coaches who combine technical skill with genuine hospita
|
||||
|
||||
Courts need replacing. Artificial turf has a lifespan of five to eight years. Glass panels and framework require regular inspection and periodic replacement. If this isn't in your long-term financial model, you're looking at a significant unplanned capital call in year six or seven. Budget a per-court annual refurbishment reserve — and set it conservatively above zero.
|
||||
|
||||
**A note on F&B:** Running a café or bar inside your facility is an entirely different business — different skills, thin margins, and separate regulatory requirements. If food and beverage is part of your concept, outsourcing to a dedicated operator deserves serious consideration before you commit to running it in-house.
|
||||
**A note on F&B:** Running a café or bar inside your facility is an entirely different business — different skills, thin margins, and separate regulatory requirements. If food and beverage is part of your concept, outsourcing to a dedicated operator deserves serious consideration before committing to running it in-house.
|
||||
|
||||
---
|
||||
|
||||
@@ -137,9 +137,12 @@ Your costs will increase three to five percent per year. Whether you can pass th
|
||||
|
||||
## The Risk No One Talks About: Personal Guarantees
|
||||
|
||||
**This section gets skipped in almost every padel hall investment conversation. That's a serious mistake.**
|
||||
|
||||
Banks financing a single-asset leisure facility without corporate backing will almost universally require personal guarantees from the principal shareholders. Not as an unusual request — as standard terms for this type of deal.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">This section gets skipped in almost every padel hall investment conversation. That's a serious mistake.</span>
|
||||
<p>Banks financing a single-asset leisure facility without corporate backing will almost universally require personal guarantees from the principal shareholders. Not as an unusual request — as standard terms for this type of deal.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Here is what that means in practice:
|
||||
|
||||
@@ -180,13 +183,36 @@ Building a parallel booking capability — even a simple direct booking option
|
||||
|
||||
The investors who succeed long-term in padel aren't the ones who found a risk-free opportunity. There isn't one. They're the ones who went in with their eyes open.
|
||||
|
||||
**They modeled the bad scenarios before assuming the good ones.** A business plan that shows only the base case isn't a planning tool — it's wishful thinking. Explicit downside modeling — 40% utilization, six-month delay, new competitor in year three — is the baseline, not an optional exercise.
|
||||
|
||||
**They built structural buffers into the plan.** Liquid reserves covering at least six months of fixed costs. Construction contingency treated as a budget line, not a hedge. These aren't comfort margins; they're operational requirements.
|
||||
|
||||
**They got the contractual foundations right from the start.** Lease terms. Financing conditions. Guarantee scope. The cost of good legal and financial advice at the planning stage is trivial relative to the downside exposure it addresses.
|
||||
|
||||
**They planned for competition.** Not by hoping it wouldn't come, but by building a product — community, quality, service — that gives existing customers a reason to stay when someone cheaper opens nearby.
|
||||
<div class="article-cards">
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Model the bad scenarios first</span>
|
||||
<p class="article-card__body">A business plan showing only the base case isn't a planning tool — it's wishful thinking. Explicit downside modeling — 40% utilization, six-month delay, new competitor in year three — is the baseline, not an optional exercise.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Build structural buffers in</span>
|
||||
<p class="article-card__body">Liquid reserves covering at least six months of fixed costs. Construction contingency treated as a budget line, not a hedge. These aren't comfort margins; they're operational requirements.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Get the contractual foundations right</span>
|
||||
<p class="article-card__body">Lease terms. Financing conditions. Guarantee scope. The cost of good legal and financial advice at the planning stage is trivial relative to the downside exposure it addresses.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Plan for competition</span>
|
||||
<p class="article-card__body">Not by hoping it won't come, but by building a product — community, quality, service — that gives existing customers a reason to stay when someone cheaper opens nearby.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -111,7 +111,7 @@ Key checks before committing to a site:
|
||||
|
||||
## The Site Scoring Framework: From 8 Criteria to a Decision
|
||||
|
||||
Anyone evaluating multiple sites in parallel needs a comparison tool. A weighted scoring matrix works well: each criterion is rated 1–5 and multiplied by a weighting factor.
|
||||
Any investor evaluating multiple sites in parallel needs a comparison tool. A weighted scoring matrix works well: each criterion is rated 1–5 and multiplied by a weighting factor.
|
||||
|
||||
A suggested weighting:
|
||||
|
||||
@@ -148,11 +148,29 @@ The matrix also reveals where trade-offs are being made explicitly, which makes
|
||||
|
||||
The 8 criteria above evaluate specific sites. But before shortlisting sites, it is worth stepping back to read the stage of the overall market — because the right operational strategy differs fundamentally depending on where a city sits in its padel development cycle.
|
||||
|
||||
**Established markets**: Booking platforms show consistent peak-hour sell-out across most venues. Waiting lists are common. Demand is validated beyond doubt. The challenge here is elevated rent, elevated build costs, and entrenched operators who have already captured community loyalty. New entrants need a genuine differentiation angle — a superior facility specification, a better location within the city, or an F&B and coaching product that existing venues don't offer. Entry costs are high; returns, if execution is strong, are also high. Munich is the canonical German example.
|
||||
|
||||
**Growth markets**: Demand is clearly building — booking availability tightens at weekends, new facilities are announced regularly, and the sport is gaining local media visibility. Supply hasn't caught up, so identifiable gaps still exist in specific districts or the surrounding hinterland. The risk profile is lower than in emerging markets, but the window for securing good real estate at reasonable rent is narrowing. The premium for moving decisively goes to those who arrive before the obvious sites are taken.
|
||||
|
||||
**Emerging markets**: Limited current supply, a small but growing player base, and padel not yet mainstream enough to generate organic walk-in demand. Entry costs — rent especially — are lower. The constraint is that demand must be actively created rather than captured. Operators who succeed here invest in community: beginner programs, local leagues, school partnerships, conversions from tennis clubs. The time to first profitability is longer, but the competitive position built in the first two years is often decisive for the long term.
|
||||
<div class="article-cards">
|
||||
<div class="article-card article-card--established">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Established markets</span>
|
||||
<p class="article-card__body">Booking platforms show consistent peak-hour sell-out. Demand is validated. The challenge: elevated rent, high build costs, entrenched operators. New entrants need a genuine differentiation angle — superior spec, better location, or F&B and coaching that existing venues don't offer. Entry costs are high; returns, if execution is strong, are also high. Munich is the canonical German example.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--growth">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Growth markets</span>
|
||||
<p class="article-card__body">Demand is clearly building — booking availability tightens at weekends, new facilities are announced regularly. Supply hasn't caught up; identifiable gaps still exist. The risk profile is lower, but the window for securing good real estate at reasonable rent is narrowing. The premium goes to those who arrive before the obvious sites are taken.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--emerging">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Emerging markets</span>
|
||||
<p class="article-card__body">Limited supply, a small but growing player base, padel not yet mainstream. Entry costs — rent especially — are lower. The constraint: demand must be actively created rather than captured. Operators who succeed invest in community: beginner programs, local leagues, school partnerships. Time to profitability is longer, but the competitive position built in the first two years is often decisive.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Before committing to a site search in any city, calibrate where it sits on this spectrum. The 8-criteria framework then tells you whether a specific site works; market maturity tells you what kind of operator and strategy is required to make it work at all.
|
||||
|
||||
|
||||
@@ -17,15 +17,48 @@ Dieser Leitfaden zeigt Ihnen alle 5 Phasen und 23 Schritte, die zwischen Ihrer e
|
||||
|
||||
## Die 5 Phasen im Überblick
|
||||
|
||||
```
|
||||
Phase 1 Phase 2 Phase 3 Phase 4 Phase 5
|
||||
Machbarkeit → Planung & → Bau / → Voreröff- → Betrieb &
|
||||
& Konzept Design Umbau nung Optimierung
|
||||
|
||||
Monat 1–3 Monat 3–6 Monat 6–12 Monat 10–13 laufend
|
||||
|
||||
Schritte 1–5 Schritte 6–11 Schritte 12–16 Schritte 17–20 Schritte 21–23
|
||||
```
|
||||
<div class="article-timeline">
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">1</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Machbarkeit & Konzept</div>
|
||||
<div class="article-timeline__subtitle">Marktanalyse, Konzept, Standortsuche</div>
|
||||
<div class="article-timeline__meta">Monat 1–3 · Schritte 1–5</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">2</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Planung & Design</div>
|
||||
<div class="article-timeline__subtitle">Architekt, Genehmigungen, Finanzierung</div>
|
||||
<div class="article-timeline__meta">Monat 3–6 · Schritte 6–11</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">3</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Bau / Umbau</div>
|
||||
<div class="article-timeline__subtitle">Rohbau, Courts, IT-Systeme</div>
|
||||
<div class="article-timeline__meta">Monat 6–12 · Schritte 12–16</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">4</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Voreröffnung</div>
|
||||
<div class="article-timeline__subtitle">Personal, Marketing, Soft Launch</div>
|
||||
<div class="article-timeline__meta">Monat 10–13 · Schritte 17–20</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-timeline__phase">
|
||||
<div class="article-timeline__num">5</div>
|
||||
<div class="article-timeline__card">
|
||||
<div class="article-timeline__title">Betrieb & Optimierung</div>
|
||||
<div class="article-timeline__subtitle">Einnahmen, Community, Optimierung</div>
|
||||
<div class="article-timeline__meta">laufend · Schritte 21–23</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
@@ -104,7 +137,12 @@ Was in dieser Phase entsteht:
|
||||
- MEP-Planung (Haustechnik): Heizung, Lüftung, Klimaanlage, Elektro, Sanitär — das sind bei Sporthallen oft die kostenintensivsten Gewerke
|
||||
- Brandschutzkonzept
|
||||
|
||||
**Häufiger Fehler in dieser Phase:** Die Haustechnik wird unterschätzt. Eine große Innenhalle braucht präzise Temperatur- und Feuchtigkeitskontrolle — für die Spielqualität, für die Langlebigkeit des Belags und für das Wohlbefinden der Spieler. Eine schlechte HVAC-Anlage ist eine Dauerbaustelle.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">Häufiger Fehler in dieser Phase</span>
|
||||
<p>Die Haustechnik wird unterschätzt. Eine große Innenhalle braucht präzise Temperatur- und Feuchtigkeitskontrolle — für die Spielqualität, für die Langlebigkeit des Belags und für das Wohlbefinden der Spieler. Eine schlechte HVAC-Anlage ist eine Dauerbaustelle.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
### Schritt 8: Courtlieferant auswählen
|
||||
|
||||
@@ -122,7 +160,7 @@ Mit dem detaillierten Businessplan gehen Sie zu Banken und ggf. Fördermittelgeb
|
||||
- 50–70 Prozent Fremdkapital (Bankdarlehen)
|
||||
- 30–50 Prozent Eigenkapital (eigene Mittel, stille Beteiligungen, Gesellschafterdarlehen)
|
||||
|
||||
Was Banken sehen wollen: belastbares Finanzmodell, Sicherheiten, Ihr persönliches Track Record, und — fast immer — eine persönliche Bürgschaft. (Mehr dazu im separaten Artikel zu Investitionsrisiken.)
|
||||
Was Banken sehen wollen: belastbares Finanzmodell, Sicherheiten, Ihr persönlicher Track Record, und — fast immer — eine persönliche Bürgschaft. Der separate Artikel zu Investitionsrisiken behandelt das Thema Bürgschaftsexposition ausführlich.
|
||||
|
||||
Klären Sie Förderprogramme: KfW-Mittel, Landesförderbanken und kommunale Sportförderprogramme können den Eigenkapitalbedarf oder die Zinsbelastung reduzieren. Diese Recherche lohnt sich.
|
||||
|
||||
@@ -155,7 +193,12 @@ Verhandeln Sie Festpreise, wo möglich. Lesen Sie die Risikoverteilung in den Ve
|
||||
|
||||
Courts werden nach Fertigstellung der Gebäudehülle montiert — das ist eine harte Reihenfolge, keine Empfehlung. Glaselemente dürfen nicht Feuchtigkeit, Staub und Baustellenverkehr ausgesetzt werden, bevor das Gebäude dicht ist.
|
||||
|
||||
**Ein häufiger und vermeidbarer Fehler:** Projekte, die unter Zeitdruck stehen, versuchen, Court-Montage vorzuziehen. Das Ergebnis sind beschädigte Oberflächen, Glasschäden, Verschmutzungen im Belag und Gewährleistungsprobleme mit dem Hersteller. Halten Sie die Reihenfolge ein — konsequent.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">Ein häufiger und vermeidbarer Fehler</span>
|
||||
<p>Projekte unter Zeitdruck versuchen, die Court-Montage vorzuziehen. Das Ergebnis sind beschädigte Oberflächen, Glasschäden, Verschmutzungen im Belag und Gewährleistungsprobleme mit dem Hersteller. Halten Sie die Reihenfolge ein — konsequent.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Die Montage von Courts dauert je nach Hersteller und Parallelkapazität zwei bis vier Wochen pro Charge. Planen Sie das in den Gesamtablauf ein.
|
||||
|
||||
@@ -169,7 +212,12 @@ Frühzeitig entscheiden: Playtomic, Matchi, ein anderes System oder eine Hybridl
|
||||
|
||||
Zugangskontrolle (falls gewünscht) muss mit der Elektroplanung koordiniert werden. Wer das in der letzten Bauphase ergänzen möchte, zahlt dafür.
|
||||
|
||||
**Der häufigste Fehler kurz vor der Eröffnung:** Am Tag der Eröffnung ist das Buchungssystem noch nicht richtig konfiguriert, Testzahlungen schlagen fehl, der QR-Code am Eingang führt auf eine Fehlerseite. Der Eröffnungsbuzz ist ein einmaliges Gut. Testen Sie das System zwei bis vier Wochen vorher vollständig — inklusive echter Buchungen, echter Zahlungen und echter Stornierungen.
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">Der häufigste Fehler kurz vor der Eröffnung</span>
|
||||
<p>Am Tag der Eröffnung ist das Buchungssystem noch nicht richtig konfiguriert, Testzahlungen schlagen fehl, der QR-Code am Eingang führt auf eine Fehlerseite. Der Eröffnungsbuzz ist ein einmaliges Gut. Testen Sie das System zwei bis vier Wochen vorher vollständig — inklusive echter Buchungen, echter Zahlungen und echter Stornierungen.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
### Schritt 16: Abnahmen und Zertifizierungen
|
||||
|
||||
@@ -243,13 +291,38 @@ Die Court-Buchung ist Ihr Kernangebot — aber nicht die einzige Einnahmequelle:
|
||||
|
||||
Wer Dutzende Padelhallenprojekte in Europa beobachtet, sieht Muster auf beiden Seiten:
|
||||
|
||||
**Die Projekte, die über Budget laufen**, haben fast immer früh an der falschen Stelle gespart — zu wenig Haustechnikbudget, kein Baukostenpuffer, zu günstiger Generalunternehmer ohne ausreichende Vertragsabsicherung.
|
||||
<div class="article-cards">
|
||||
<div class="article-card article-card--failure">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projekte, die über Budget laufen</span>
|
||||
<p class="article-card__body">Haben fast immer früh an der falschen Stelle gespart — zu wenig Haustechnikbudget, kein Baukostenpuffer, zu günstiger Generalunternehmer ohne ausreichende Vertragsabsicherung.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--failure">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projekte, die terminlich entgleisen</span>
|
||||
<p class="article-card__body">Haben die behördlichen Prozesse unterschätzt. Genehmigungen, Lärmschutzgutachten, Nutzungsänderungen brauchen Zeit — und diese Zeit lässt sich nicht kaufen, sobald man zu spät damit anfängt.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--failure">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projekte, die schwach starten</span>
|
||||
<p class="article-card__body">Haben das Marketing zu spät begonnen und das Buchungssystem zu spät getestet. Ein leerer Kalender am Eröffnungstag und eine kaputte Buchungsseite erzeugen Eindrücke, die sich festsetzen.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Projekte, die langfristig erfolgreich sind</span>
|
||||
<p class="article-card__body">Behandeln alle drei Phasen — Planung, Bau, Eröffnung — mit derselben Sorgfalt und investieren früh in Community und Stammkundschaft.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
**Die Projekte, die terminlich entgleisen**, haben die behördlichen Prozesse unterschätzt. Genehmigungen, Lärmschutzgutachten, Nutzungsänderungen brauchen Zeit — und diese Zeit lässt sich nicht kaufen, sobald man zu spät damit anfängt.
|
||||
|
||||
**Die Projekte, die schwach starten**, haben das Marketing zu spät begonnen und das Buchungssystem zu spät getestet. Ein leerer Kalender am Eröffnungstag und eine kaputte Buchungsseite erzeugen Eindrücke, die sich festsetzen.
|
||||
|
||||
**Die Projekte, die langfristig erfolgreich sind**, haben alle drei Phasen — Planung, Bau, Eröffnung — mit derselben Sorgfalt behandelt und früh in Community und Stammkundschaft investiert.
|
||||
Eine Padelhalle zu bauen ist komplex — aber kein ungelöstes Problem. Die Fehler, die Projekte scheitern lassen, sind fast immer dieselben. Genauso wie die Entscheidungen, die sie gelingen lassen.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -159,6 +159,8 @@ Der Kapitaldienstdeckungsgrad (DSCR) auf den Bankkredit (€700k, 5 %, 10 Jahre
|
||||
|
||||
## Das persönliche Risiko: Bürgschaften offen ansprechen
|
||||
|
||||
Steht die Fremdkapitalstruktur, bleibt eine Frage, die in fast jedem Finanzierungsgespräch zu spät gestellt wird — und die zu oft erst auf dem Konditionenblatt der Bank auftaucht.
|
||||
|
||||
Banken werden für eine Padelhalle, die eine eigenständige Projektgesellschaft ist, fast immer eine **persönliche Bürgschaft** des Gründers fordern. Das bedeutet: Ihre privaten Vermögenswerte — Eigenheim, Ersparnisse, Beteiligungen — haften im Zweifelsfall.
|
||||
|
||||
Es gibt drei Wege, dieses Risiko zu begrenzen:
|
||||
|
||||
@@ -9,7 +9,7 @@ cornerstone: C2
|
||||
|
||||
# Padel Halle Kosten 2026: Die komplette CAPEX-Aufstellung
|
||||
|
||||
Wer ernsthaft über eine Padelhalle nachdenkt, bekommt auf die Frage nach den Kosten zunächst eine frustrierende Antwort: "Das kommt drauf an." Und ja — die Spanne ist tatsächlich enorm. Je nach Standort, Konzept und Bausubstanz liegen die Gesamtinvestitionskosten für eine sechsstellige Anlage zwischen **€930.000 und €1,9 Millionen**. Diese Streuung ist kein Zufall, sondern Ausdruck ganz konkreter Entscheidungen, die Sie als Investor treffen werden.
|
||||
Wer eine Padelhalle plant, bekommt auf die Kostenfrage zunächst eine frustrierende Antwort: „Das kommt drauf an." Und ja — die Spanne ist tatsächlich enorm. Je nach Standort, Konzept und Bausubstanz liegen die Gesamtinvestitionskosten für eine 6-Court-Anlage zwischen **€930.000 und €1,9 Millionen**. Diese Streuung ist kein Zufall, sondern Ausdruck ganz konkreter Entscheidungen, die Sie als Investor treffen werden.
|
||||
|
||||
Dieser Artikel schlüsselt die vollständige Investition auf — von der Bausubstanz über Platztechnik und Ausstattung bis hin zu Betriebskosten, Standortmieten und einer belastbaren 3-Jahres-Ergebnisprognose. Alle Zahlen basieren auf realen deutschen Marktdaten aus 2025/2026. Das Ziel: Sie sollen nach der Lektüre in der Lage sein, eine erste realistische Wirtschaftlichkeitsrechnung für Ihre konkrete Situation aufzustellen — und wissen, welche Fragen Sie Ihrer Bank stellen müssen.
|
||||
|
||||
@@ -19,7 +19,7 @@ Dieser Artikel schlüsselt die vollständige Investition auf — von der Bausubs
|
||||
|
||||
Warum liegen €930.000 und €1,9 Millionen so weit auseinander? Der größte Einzeltreiber ist der bauliche Aufwand. Wer eine bestehende Gewerbehalle — etwa einen ehemaligen Produktions- oder Logistikbau — kostengünstig anmieten und mit minimalem Umbau bespielen kann, landet am unteren Ende der Spanne. Wer dagegen auf grüner Wiese baut oder ein Gebäude von Grund auf saniert, zahlt entsprechend mehr.
|
||||
|
||||
Dazu kommt der Standortfaktor. In München oder Berlin kostet dasselbe Objekt in der Miete 40–60 % mehr als in Leipzig oder Kassel. Das drückt sich nicht nur in der laufenden OPEX aus, sondern auch in der Kaution und dem nötigen Working-Capital-Puffer — beides Teil der initialen CAPEX.
|
||||
Dazu kommt der Standortfaktor. In München oder Berlin kostet dasselbe Objekt in vergleichbaren Marktsegmenten 40–60 % mehr als in Leipzig oder Kassel — an den Extremen fällt der Abstand erheblich größer aus. Das schlägt sich nicht nur in der laufenden OPEX nieder, sondern auch in der Kaution und dem nötigen Working-Capital-Puffer — beides Teil der initialen CAPEX.
|
||||
|
||||
Realistischer Planungsansatz für eine **6-Court-Innenhalle** mit solider Ausstattung: **€1,2–1,5 Millionen Gesamtinvestition**. Wer mit deutlich weniger kalkuliert, unterschätzt in der Regel einen der drei teuersten Posten: Bau/Umbau, Lüftungstechnik oder den Kapitalpuffer für den Anlauf.
|
||||
|
||||
@@ -56,6 +56,8 @@ Die folgende Tabelle zeigt die typischen Bandbreiten für eine sechsstellige Inn
|
||||
|
||||
## Hallenmiete in Deutschland: Was Sie nach Standort zahlen
|
||||
|
||||
Bau und Courts binden den größten Teil des Startkapitals. Was über die langfristige Wirtschaftlichkeit entscheidet, zahlen Sie monatlich: die Miete.
|
||||
|
||||
Eine 6-Court-Halle benötigt je nach Konzept (Nebenräume, Lounge, Pro Shop) eine Fläche von **1.500 bis 2.500 qm**. Auf Basis aktueller Gewerberaummieten für Industrie- und Hallenflächen in deutschen Städten ergibt sich folgende Einschätzung:
|
||||
|
||||
| Stadt | Miete €/qm/Monat | Typische Monatsmiete (2.000 qm) |
|
||||
@@ -69,15 +71,15 @@ Eine 6-Court-Halle benötigt je nach Konzept (Nebenräume, Lounge, Pro Shop) ein
|
||||
| Stuttgart | €7–10 | €14.000–€20.000 |
|
||||
| Leipzig | €4–7 | €8.000–€14.000 |
|
||||
|
||||
In Hochpreislagen Berlins (Mitte, Prenzlauer Berg) oder Münchens (Schwabing, Maxvorstadt) liegen die Preise auch für Gewerbehallen teils noch darüber. Die in der OPEX-Tabelle verwendete Jahresmiete von €120.000 entspricht einer Monatsmiete von €10.000 — das ist ein realistischer Wert für eine mittelgroße deutsche Stadt mit einem Standort leicht außerhalb der Innenstadt.
|
||||
In Hochpreislagen Berlins (Mitte, Prenzlauer Berg) oder Münchens (Schwabing, Maxvorstadt) liegen die Preise auch für Gewerbehallen teils noch darüber. Die in der OPEX-Tabelle verwendete Jahresmiete von €120.000 entspricht einer Monatsmiete von €10.000 — das ist ein realistischer Wert für eine mittelgroße deutsche Stadt mit einem Standort leicht außerhalb der Innenstadt. Für München oder Berlin kalkulieren Sie mit den Werten aus der Stadtübersicht oben — und passen Sie die Erlösannahme entsprechend an.
|
||||
|
||||
Ein Hinweis zur Mietstruktur: Viele Vermieter verlangen bei Hallenflächen eine Laufzeit von mindestens 5–10 Jahren, oft mit Verlängerungsoptionen. Das bindet Sie, schafft aber auch Planungssicherheit für die Finanzierung. Banken bewerten einen langen Mietvertrag mit festen Konditionen positiv.
|
||||
Ein Hinweis zur Mietstruktur: Viele Vermieter verlangen bei Hallenflächen eine Laufzeit von mindestens 5–10 Jahren, oft mit Verlängerungsoptionen. Das bindet Sie, schafft aber auch Planungssicherheit für die Finanzierung. Ein langfristiger Mietvertrag mit indexierter Staffelung ist für die Bank ein echtes Positivsignal — er macht aus unsicheren künftigen Einnahmen etwas, das im Kreditbescheid wie planbarer Cashflow aussieht.
|
||||
|
||||
---
|
||||
|
||||
## Platzbuchungspreise: Was der Markt trägt
|
||||
|
||||
Die Mietpreise sind das Fundament Ihrer Ertragsrechnung. Hier die aktuellen Marktpreise nach Stadt, basierend auf Plattformdaten und direkten Hallenerhebungen:
|
||||
Das Ertragspotenzial folgt der Standortlogik ähnlich eng wie die Mietkosten. Hier die aktuellen Marktpreise nach Stadt, basierend auf Plattformdaten und direkten Hallenerhebungen:
|
||||
|
||||
| Stadt | Nebenzeiten (€/Std.) | Hauptzeiten (€/Std.) | Datenbasis |
|
||||
|---|---|---|---|
|
||||
@@ -167,7 +169,7 @@ Bei einem Darlehen von €800.000 (z. B. KfW oder Hausbank), 5 % Zinsen und 10 J
|
||||
|
||||
## Was Banken wirklich wollen
|
||||
|
||||
Eine Padelhalle ist für die meisten Bankberater ein ungewohntes Investitionsobjekt. Was zählt, ist nicht die Begeisterung für Padel — sondern die Qualität Ihrer Zahlengrundlage.
|
||||
Eine Padelhalle ist für die meisten Bankberater unbekanntes Terrain. Auslastungsquoten und Erlöse pro Court sind keine Größen, mit denen Kreditausschüsse täglich arbeiten — das ist Ihr Vorteil. Wer mit sauberen Zahlen und strukturierter Dokumentation ins Gespräch geht, fällt sofort positiv auf. Was den Kreditausschuss bewegt, ist nicht die Begeisterung für den Sport, sondern die Belastbarkeit der Unterlagen.
|
||||
|
||||
**Debt Service Coverage Ratio (DSCR) 1,2–1,5x:** Die Bank will sehen, dass Ihr operativer Cashflow den Schuldendienst mit einem Puffer von 20–50 % abdeckt. Mit einem EBITDA von €310.000 im ersten Jahr und einem Schuldendienst von €102.000 liegt der DSCR bei 3,0 — auf dem Papier sehr solide. Aber: Banken werden nachfragen, wie empfindlich dieses Ergebnis auf niedrigere Auslastung reagiert.
|
||||
|
||||
@@ -185,6 +187,6 @@ Wie Sie einen vollständigen Businessplan strukturieren und welche Unterlagen Ba
|
||||
|
||||
Die Kosten für eine Padelhalle sind real und erheblich — €930.000 bis €1,9 Millionen, realistischer Mittelpunkt €1,2–1,5 Millionen. Wer diese Zahlen kennt und versteht, wo die Hebel sitzen, kann daraus ein belastbares Investitionsmodell bauen. Wer mit Schätzungen aus zweiter Hand ins Bankgespräch geht, verliert Zeit und Glaubwürdigkeit.
|
||||
|
||||
Die Wirtschaftlichkeit stimmt: Bei konservativen Annahmen und solider Betriebsführung ist die Amortisation in 3–5 Jahren realistisch. Der deutsche Padel-Markt wächst weiter — aber mit wachsendem Angebot steigen auch die Erwartungen der Spieler und die Anforderungen an Konzept, Lage und Service.
|
||||
Richtig aufgesetzt, stimmt die Wirtschaftlichkeit: Bei konservativen Annahmen und solider Betriebsführung ist die Amortisation in 3–5 Jahren realistisch. Der deutsche Padel-Markt wächst weiter — aber mit wachsendem Angebot steigen auch die Erwartungen der Spieler und die Anforderungen an Konzept, Lage und Service.
|
||||
|
||||
**Nächster Schritt:** Nutzen Sie den Padelnomics Financial Planner, um Ihre spezifische Konstellation durchzurechnen — mit Ihrem Standort, Ihrer Finanzierungsstruktur und Ihren Preisannahmen. Das Modell oben ist der Einstieg. Ihre Halle verdient eine maßgeschneiderte Kalkulation.
|
||||
**Nächster Schritt:** Nutzen Sie den Padelnomics Financial Planner, um Ihre spezifische Konstellation durchzurechnen — mit Ihrem Standort, Ihrer Finanzierungsstruktur und Ihren Preisannahmen. Die Zahlen in diesem Artikel sind Ihr Ausgangspunkt — Ihre Halle verdient eine Kalkulation, die auf Ihren tatsächlichen Rahmenbedingungen aufbaut.
|
||||
|
||||
@@ -21,20 +21,20 @@ Dieser Artikel zeigt Ihnen die 14 Risiken, über die in Investorenrunden zu weni
|
||||
|
||||
| # | Risiko | Kategorie | Schwere |
|
||||
|---|--------|-----------|---------|
|
||||
| 1 | Trend-/Modeerscheinung | Strategisch | Hoch |
|
||||
| 2 | Baukostenüberschreitungen | Bau & Entwicklung | Hoch |
|
||||
| 3 | Verzögerungen während des Baus | Bau & Entwicklung | Hoch |
|
||||
| 4 | Vermieterproblem: Verkauf, Insolvenz, keine Verlängerung | Immobilie & Mietvertrag | Hoch |
|
||||
| 5 | Neue Konkurrenz im Einzugsgebiet | Wettbewerb | Mittel–Hoch |
|
||||
| 6 | Schlüsselpersonen-Abhängigkeit | Betrieb | Mittel |
|
||||
| 7 | Fachkräftemangel und Lohndruck | Betrieb | Mittel |
|
||||
| 8 | Instandhaltungszyklen für Belag, Glas, Kunstrasen | Betrieb | Mittel |
|
||||
| 9 | Energiepreisvolatilität | Finanzen | Mittel |
|
||||
| 10 | Zinsänderungsrisiko | Finanzen | Mittel |
|
||||
| 11 | Persönliche Bürgschaft | Finanzen | Hoch |
|
||||
| 12 | Kundenkonzentration | Finanzen | Mittel |
|
||||
| 13 | Lärmbeschwerden und behördliche Auflagen | Regulatorisch & Rechtlich | Mittel |
|
||||
| 14 | Buchungsplattform-Abhängigkeit | Regulatorisch & Rechtlich | Niedrig–Mittel |
|
||||
| 1 | Trend-/Modeerscheinung | Strategisch | <span class="severity severity--high">Hoch</span> |
|
||||
| 2 | Baukostenüberschreitungen | Bau & Entwicklung | <span class="severity severity--high">Hoch</span> |
|
||||
| 3 | Verzögerungen während des Baus | Bau & Entwicklung | <span class="severity severity--high">Hoch</span> |
|
||||
| 4 | Vermieterproblem: Verkauf, Insolvenz, keine Verlängerung | Immobilie & Mietvertrag | <span class="severity severity--high">Hoch</span> |
|
||||
| 5 | Neue Konkurrenz im Einzugsgebiet | Wettbewerb | <span class="severity severity--medium-high">Mittel–Hoch</span> |
|
||||
| 6 | Schlüsselpersonen-Abhängigkeit | Betrieb | <span class="severity severity--medium">Mittel</span> |
|
||||
| 7 | Fachkräftemangel und Lohndruck | Betrieb | <span class="severity severity--medium">Mittel</span> |
|
||||
| 8 | Instandhaltungszyklen für Belag, Glas, Kunstrasen | Betrieb | <span class="severity severity--medium">Mittel</span> |
|
||||
| 9 | Energiepreisvolatilität | Finanzen | <span class="severity severity--medium">Mittel</span> |
|
||||
| 10 | Zinsänderungsrisiko | Finanzen | <span class="severity severity--medium">Mittel</span> |
|
||||
| 11 | Persönliche Bürgschaft | Finanzen | <span class="severity severity--high">Hoch</span> |
|
||||
| 12 | Kundenkonzentration | Finanzen | <span class="severity severity--medium">Mittel</span> |
|
||||
| 13 | Lärmbeschwerden und behördliche Auflagen | Regulatorisch & Rechtlich | <span class="severity severity--medium">Mittel</span> |
|
||||
| 14 | Buchungsplattform-Abhängigkeit | Regulatorisch & Rechtlich | <span class="severity severity--low-medium">Niedrig–Mittel</span> |
|
||||
|
||||
---
|
||||
|
||||
@@ -89,7 +89,7 @@ Wenn in Jahr drei ein neuer Wettbewerber 10 Fahrminuten entfernt aufmacht, ist I
|
||||
|
||||
Einen echten Burggraben gibt es im Padel-Geschäft kaum. Keine Patente, keine Netzwerkeffekte, keine Wechselkosten. Was bleibt, ist: Standort, Gemeinschaft, Servicequalität und die Beziehung zu Stammkunden. Das sind reale Vorteile — aber sie müssen aktiv aufgebaut und gepflegt werden.
|
||||
|
||||
**Was Sie jetzt schon tun können:** Modellieren Sie im Businessplan explizit das Szenario "neuer Wettbewerber in Jahr drei". Was ändert sich? Wie reagieren Sie? Welche Maßnahmen senken die Auslastungsschwelle für Profitabilität?
|
||||
**Rechnen Sie das durch.** Modellieren Sie im Businessplan explizit das Szenario „neuer Wettbewerber in Jahr drei". Was ändert sich? Wie reagieren Sie? Welche Maßnahmen senken die Auslastungsschwelle für Profitabilität?
|
||||
|
||||
---
|
||||
|
||||
@@ -133,9 +133,14 @@ Ihre Kosten steigen jedes Jahr um drei bis fünf Prozent. Können Sie diese Stei
|
||||
|
||||
## Sonderbox: Persönliche Bürgschaft — das unterschätzte Risiko Nr. 1
|
||||
|
||||
**Dieses Thema wird in fast jedem Gespräch über Padelhallen-Investitionen ausgelassen. Das ist ein Fehler.**
|
||||
<div class="article-callout article-callout--warning">
|
||||
<div class="article-callout__body">
|
||||
<span class="article-callout__title">Dieses Thema wird in fast jedem Gespräch über Padelhallen-Investitionen ausgelassen. Das ist ein Fehler.</span>
|
||||
<p>Banken, die einer Einzelanlage ohne Konzernrückhalt Kapital bereitstellen, verlangen in der Praxis fast immer eine persönliche Bürgschaft des oder der Hauptgesellschafter.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Banken, die einer Einzelanlage ohne Konzernrückhalt Kapital bereitstellen, verlangen in der Praxis fast immer eine persönliche Bürgschaft des oder der Hauptgesellschafter. Das bedeutet: Wenn das Unternehmen in Zahlungsschwierigkeiten gerät, haftet nicht die GmbH allein — Sie haften persönlich. Mit dem Eigenheim. Mit dem Ersparten. Mit dem Depot.
|
||||
Das bedeutet: Wenn das Unternehmen in Zahlungsschwierigkeiten gerät, haftet nicht die GmbH allein — Sie haften persönlich. Mit dem Eigenheim. Mit dem Ersparten. Mit dem Depot.
|
||||
|
||||
Die Struktur sieht dann typischerweise so aus:
|
||||
|
||||
@@ -176,13 +181,36 @@ Mittel- bis langfristig sollten Sie eine eigene Buchungsfähigkeit aufbauen —
|
||||
|
||||
Niemand kann alle Risiken eliminieren. Aber die Investoren, die langfristig erfolgreich sind, tun Folgendes:
|
||||
|
||||
**Sie rechnen mit den schlechten Szenarien, bevor sie das Gute annehmen.** Ein Businessplan, der nur das Base-Case zeigt, ist kein Werkzeug — er ist Wunschdenken. Rechnen Sie explizit durch: Was passiert bei 40 Prozent Auslastung? Bei einem Bauverzug von sechs Monaten? Bei einem neuen Wettbewerber in Jahr drei?
|
||||
|
||||
**Sie bauen Puffer ein, nicht als Komfortpolster, sondern als betriebliche Notwendigkeit.** Liquide Reserven von mindestens sechs Monaten Fixkosten sind kein Luxus.
|
||||
|
||||
**Sie sichern Mietverträge und Finanzierungskonditionen von Anfang an sorgfältig ab.** Die Kosten für gute Rechts- und Finanzberatung sind verglichen mit dem Downside verschwindend gering.
|
||||
|
||||
**Sie planen für Wettbewerb.** Nicht indem sie auf keine Konkurrenz hoffen, sondern indem sie ein Produkt aufbauen, das Stammkunden bindet — durch Qualität, Community und Dienstleistung.
|
||||
<div class="article-cards">
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Schlechte Szenarien zuerst durchrechnen</span>
|
||||
<p class="article-card__body">Ein Businessplan, der nur das Base-Case zeigt, ist kein Werkzeug — er ist Wunschdenken. Was passiert bei 40 Prozent Auslastung? Bei sechs Monaten Bauverzug? Bei einem neuen Wettbewerber in Jahr drei?</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Puffer als betriebliche Notwendigkeit</span>
|
||||
<p class="article-card__body">Liquide Reserven von mindestens sechs Monaten Fixkosten sind kein Luxus, sondern Pflicht. Baukostenpuffer ist eine Budgetlinie — kein optionales Polster.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Verträge von Anfang an absichern</span>
|
||||
<p class="article-card__body">Mietvertrag, Finanzierungskonditionen, Bürgschaftsumfang. Die Kosten für gute Rechts- und Finanzberatung in der Planungsphase sind verglichen mit dem Downside verschwindend gering.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--success">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Für Wettbewerb planen</span>
|
||||
<p class="article-card__body">Nicht indem man auf keine Konkurrenz hofft, sondern indem man ein Produkt aufbaut, das Stammkunden bindet — durch Qualität, Community und Dienstleistung.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -138,11 +138,29 @@ Das Ergebnis ist ein Gesamtscore pro Standort, der einen strukturierten Vergleic
|
||||
|
||||
Die acht Kriterien oben bewerten konkrete Objekte. Bevor Sie aber mit der Objektsuche beginnen, lohnt ein Schritt zurück: In welcher Entwicklungsphase befindet sich der Markt in Ihrer Zielstadt? Die Antwort bestimmt, welche Betreiberstrategie überhaupt Aussicht auf Erfolg hat.
|
||||
|
||||
**Etablierte Märkte**: Buchungsplattformen zeigen durchgehende Vollauslastung zu Stoßzeiten, Wartelisten sind verbreitet, und die Nachfrage ist über jeden Zweifel hinaus belegt. Die Herausforderung liegt nicht mehr in der Nachfrage — sie liegt im Wettbewerb. Etablierte Betreiber haben Markenloyalität aufgebaut, günstige Flächen sind längst vergeben, und Bau- sowie Mietkosten spiegeln die Nachfragesituation wider. Wer in einem solchen Markt neu eintritt, braucht einen echten Differenzierungsansatz: eine bessere Standortlage innerhalb der Stadt, ein überlegenes Hallenprofil oder ein Gastronomie- und Coaching-Angebot, das die bestehenden Anlagen nicht haben. Das Eintrittsinvestment ist hoch — das Ertragspotenzial bei konsequenter Umsetzung aber auch. München ist das paradigmatische Beispiel für Deutschland.
|
||||
|
||||
**Wachstumsmärkte**: Die Nachfrage wächst sichtbar — Buchungszeiten füllen sich an Wochenenden, neue Anlagen werden regelmäßig eröffnet, und der Sport erreicht lokale Medienöffentlichkeit. Das Angebot hat die Nachfrage noch nicht vollständig eingeholt; in bestimmten Stadtteilen oder im Umland sind Versorgungslücken erkennbar. Das Risikoprofil ist geringer als in Frühmärkten, aber das Fenster für attraktive Flächen zu vertretbaren Konditionen schließt sich. Wer wartet, bis der Markt offensichtlich attraktiv ist, zahlt für dieses Wissen einen Aufpreis — in Form höherer Mieten, weniger Auswahl und mehr Konkurrenz beim Eintritt.
|
||||
|
||||
**Frühmärkte**: Geringes aktuelles Angebot, eine kleine aber wachsende Spielerbasis und ein noch nicht hinreichend bekannter Sport — die Rahmenbedingungen für günstigen Markteintritt sind vorhanden, aber Nachfrage muss aktiv aufgebaut werden, nicht abgeschöpft. Mietkosten sind niedriger, Standortauswahl größer. Der limitierende Faktor ist Geduld und Marketingfähigkeit: Anfängerkurse, Vereinskooperationen, lokale Ligen und die Konversion bestehender Tennisclubs sind die Instrumente, mit denen Betreiber in Frühmärkten Community und damit Auslastung aufbauen. Der Weg zur ersten Profitabilität ist länger — aber die Wettbewerbsposition, die in den ersten zwei Betriebsjahren aufgebaut wird, erweist sich oft als strukturell dauerhaft.
|
||||
<div class="article-cards">
|
||||
<div class="article-card article-card--established">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Etablierte Märkte</span>
|
||||
<p class="article-card__body">Buchungsplattformen zeigen durchgehende Vollauslastung zu Stoßzeiten, Wartelisten sind verbreitet. Die Herausforderung liegt im Wettbewerb: Etablierte Betreiber haben Markenloyalität aufgebaut, günstige Flächen sind vergeben. Neueintretende Betreiber brauchen einen echten Differenzierungsansatz. Das Eintrittsinvestment ist hoch — das Ertragspotenzial bei konsequenter Umsetzung ebenfalls. München ist das paradigmatische Beispiel.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--growth">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Wachstumsmärkte</span>
|
||||
<p class="article-card__body">Die Nachfrage wächst sichtbar — Buchungszeiten füllen sich, neue Anlagen werden eröffnet. Das Angebot hat die Nachfrage noch nicht eingeholt; Versorgungslücken sind erkennbar. Das Fenster für attraktive Flächen zu vertretbaren Konditionen schließt sich. Wer wartet, zahlt den Aufpreis des offensichtlich attraktiven Markts.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="article-card article-card--emerging">
|
||||
<div class="article-card__accent"></div>
|
||||
<div class="article-card__inner">
|
||||
<span class="article-card__title">Frühmärkte</span>
|
||||
<p class="article-card__body">Geringes Angebot, kleine aber wachsende Spielerbasis. Mietkosten niedriger, Standortauswahl größer — aber Nachfrage muss aktiv aufgebaut werden. Anfängerkurse, Vereinskooperationen, lokale Ligen und Konversion von Tennisclubs sind die zentralen Instrumente. Der Weg zur Profitabilität ist länger; die aufgebaute Wettbewerbsposition erweist sich oft als dauerhaft.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Bevor Sie in einer Stadt konkret nach Objekten suchen, sollten Sie deren Marktreife einordnen. Der Kriterienkatalog zeigt, ob ein bestimmtes Objekt geeignet ist; die Marktreife zeigt, welches Betreiberprofil und welche Strategie überhaupt die Voraussetzung für Erfolg ist.
|
||||
|
||||
|
||||
67
data/content/articles/padel-zubehoer-de.md
Normal file
67
data/content/articles/padel-zubehoer-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padel-Zubehör: Das braucht jeder Spieler wirklich"
|
||||
slug: padel-zubehoer-de
|
||||
language: de
|
||||
url_path: /padel-zubehoer
|
||||
meta_description: "Welches Padel-Zubehör lohnt sich wirklich? Von Griffband und Vibrationsdämpfer bis zur Sporttasche — was ist nützlich, was ist Marketing?"
|
||||
---
|
||||
|
||||
# Padel-Zubehör: Das braucht jeder Spieler wirklich
|
||||
|
||||
<!-- TODO: Einleitung — Zubehör gibt es viel, sinnvoll ist wenig -->
|
||||
|
||||
Wer Padel ernsthafter betreibt, wird früh von Empfehlungen überhäuft: Griffband kaufen! Schutzhülle! Vibrationsdämpfer! Nicht alles davon ist sinnvoll — aber einiges tatsächlich unverzichtbar. Dieser Guide hilft dabei, nützliches Zubehör von überteuertem Marketing zu trennen.
|
||||
|
||||
---
|
||||
|
||||
## Das sinnvollste Zubehör im Überblick
|
||||
|
||||
[product-group:accessory]
|
||||
|
||||
---
|
||||
|
||||
## Griffband: Ja, unbedingt
|
||||
|
||||
<!-- TODO: Erklärung, welches Griffband sich lohnt -->
|
||||
|
||||
[product:platzhalter-griffband-amazon]
|
||||
|
||||
---
|
||||
|
||||
## Schläger-Schutzhülle: Ja, wenn man häufig transportiert
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Vibrationsdämpfer: Geschmackssache
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Sporttasche: Erst ab regelmäßigem Spiel
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie oft sollte man das Griffband wechseln?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Bei regelmäßigem Spielen empfehlen wir einen Wechsel alle 4–8 Wochen. Ein abgenutztes Griffband erhöht das Risiko, den Schläger wegzuschleudern, und mindert die Kontrolle.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Brauche ich eine spezielle Padeltasche?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Eine Padeltasche schützt den Schläger vor Beschädigungen beim Transport. Für gelegentliche Spieler reicht ein einfaches Cover. Wer mehrere Schläger trägt oder regelmäßig zum Club fährt, profitiert von einer Sporttasche mit gepolstertem Schlägerfach.
|
||||
|
||||
</details>
|
||||
70
data/content/articles/padelbaelle-vergleich-de.md
Normal file
70
data/content/articles/padelbaelle-vergleich-de.md
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
title: "Beste Padelbälle 2026: Test und Vergleich der populärsten Modelle"
|
||||
slug: padelbaelle-vergleich-de
|
||||
language: de
|
||||
url_path: /padelbaelle-vergleich
|
||||
meta_description: "Welche Padelbälle sind am besten? Wir vergleichen die beliebtesten Modelle nach Druckhaltigkeit, Spielgefühl und Preis-Leistungs-Verhältnis."
|
||||
---
|
||||
|
||||
# Beste Padelbälle 2026: Test und Vergleich der populärsten Modelle
|
||||
|
||||
<!-- TODO: Einleitung — warum Bälle oft unterschätzt werden -->
|
||||
|
||||
Der Ball ist das am häufigsten unterschätzte Equipment im Padel. Dabei entscheidet seine Druckhaltigkeit maßgeblich über das Spielgefühl. Ein Padelball verliert nach 4–6 Stunden intensivem Spiel merklich an Druck — und damit an Tempo, Kontrolle und Spaß.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Empfehlungen
|
||||
|
||||
[product-group:ball]
|
||||
|
||||
---
|
||||
|
||||
## Druckhaltigkeit: Was wirklich zählt
|
||||
|
||||
<!-- TODO: Erklärung des Druckverlusts + Testzeitraum -->
|
||||
|
||||
---
|
||||
|
||||
## Turnier- vs. Freizeitball
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Testsieger im Überblick
|
||||
|
||||
[product:platzhalter-ball-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Wie lange hält ein Padelball?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ein hochwertiger Padelball ist nach etwa 4–8 Stunden Spielzeit merklich weicher. Im Freizeitbereich merkt man den Unterschied oft erst später. Profis und ambitionierte Spieler wechseln Bälle bereits nach einem Set.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Muss ich WCT- oder FIP-zertifizierte Bälle kaufen?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Für den Freizeiteinsatz nein. Für Turniere und Ligaspiele ja — die meisten Ligen schreiben zugelassene Ballmodelle vor. Im Training können beliebige Qualitätsbälle verwendet werden.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Wie lagere ich Padelbälle richtig?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Kühl und trocken lagern, nicht im Auto lassen. Manche Spieler verwenden Druckbehälter, um den Druckverlust zu verlangsamen — das funktioniert tatsächlich für bereits angebrochene Dosen.
|
||||
|
||||
</details>
|
||||
67
data/content/articles/padelschlaeger-anfaenger-de.md
Normal file
67
data/content/articles/padelschlaeger-anfaenger-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padelschläger für Anfänger 2026: Die 5 besten Einstiegsmodelle"
|
||||
slug: padelschlaeger-anfaenger-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-anfaenger
|
||||
meta_description: "Welcher Padelschläger eignet sich für Anfänger? Unsere Empfehlungen für Einsteiger: verzeihendes Spielgefühl, robuste Verarbeitung, fairer Preis."
|
||||
---
|
||||
|
||||
# Padelschläger für Anfänger 2026: Die 5 besten Einstiegsmodelle
|
||||
|
||||
<!-- TODO: Einleitung, warum Anfängerschläger sich von Profimodellen unterscheiden (150–200 Wörter) -->
|
||||
|
||||
Für den Einstieg ins Padel braucht man keinen teuren Profischläger. Im Gegenteil: Die meisten Hochleistungsschläger sind für Anfänger kontraproduktiv — ihr kleines Sweetspot-Fenster bestraft Fehlschläge, die in der Lernphase normal sind. Ein guter Anfängerschläger ist leicht, hat eine runde Form und verzeiht ungenaue Treffpunkte.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Top-5 für Einsteiger
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Was macht einen guten Anfängerschläger aus?
|
||||
|
||||
<!-- TODO: Erklärung der relevanten Schläger-Eigenschaften (Form, Gewicht, Material) -->
|
||||
|
||||
### Schlägerkopfform: Rund schlägt Diamant
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### Gewicht: Leichter ist nicht immer besser
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### Material: EVA vs. Foam
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Unsere Empfehlung im Detail
|
||||
|
||||
[product:platzhalter-anfaenger-schlaeger-amazon]
|
||||
|
||||
<!-- TODO: Ausführliche Besprechung mit Praxistest -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Ab welchem Preis lohnt sich ein eigener Schläger?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Wer mehr als einmal pro Woche spielt, sollte in einen eigenen Schläger investieren. Leihschläger im Club sind oft abgenutzt und vermitteln ein falsches Spielgefühl. Ab 60–80 Euro gibt es solide Einsteigerschläger.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Kann ich als Anfänger direkt mit einem 150-Euro-Schläger starten?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ja, sofern es sich um ein anfängerfreundliches Modell aus diesem Preisbereich handelt. Preisschilder allein sagen wenig — ein 150-Euro-Diamantschläger kann für Einsteiger schlechter sein als ein 70-Euro-Rundschläger.
|
||||
|
||||
</details>
|
||||
55
data/content/articles/padelschlaeger-defensiv-de.md
Normal file
55
data/content/articles/padelschlaeger-defensiv-de.md
Normal file
@@ -0,0 +1,55 @@
|
||||
---
|
||||
title: "Padelschläger für defensive Spieler: Die besten Kontrollschläger 2026"
|
||||
slug: padelschlaeger-defensiv-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-defensiv
|
||||
meta_description: "Die besten Padelschläger für defensive und kontrollbetonte Spieler. Runde und Tropfenform mit großem Sweetspot für sicheres Spiel vom Grundfeld."
|
||||
---
|
||||
|
||||
# Padelschläger für defensive Spieler: Die besten Kontrollschläger 2026
|
||||
|
||||
<!-- TODO: Einleitung zur defensiven Spielweise und warum der Schläger einen Unterschied macht -->
|
||||
|
||||
Im Padel entscheidet das Grundfeld. Wer vom hinteren Drittel sauber und kontrolliert spielen kann, zwingt den Gegner zu Fehlern. Für diesen Spielstil braucht man einen Schläger mit großem Sweetspot, weichem EVA-Kern und einer runden oder Tropfenform — nicht die auffälligsten Geräte, aber die effektivsten.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Empfehlungen für defensive Spieler
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Warum Kontrolle wichtiger ist als Power
|
||||
|
||||
<!-- TODO: Erklärung Spielstil + Schlägercharakteristik -->
|
||||
|
||||
---
|
||||
|
||||
## Testsieger im Detail
|
||||
|
||||
[product:platzhalter-defensiv-schlaeger-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Was ist der Unterschied zwischen einem Kontroll- und einem Powerschläger?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Kontrollschläger (runde Form, weicher Kern) vergrößern den Sweetspot und ermöglichen feingefühliges Spiel. Powerschläger (Diamantform, harter Kern) bieten mehr Hebelwirkung beim Smash, verzeihen aber weniger Fehlschläge.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Für welche Spielstufe sind Kontrollschläger geeignet?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Kontrollschläger sind für Anfänger, Freizeitspieler und taktisch orientierte Spieler aller Stufen geeignet. Auch viele erfahrene Spieler bevorzugen sie, weil Konsistenz auf Dauer mehr Punkte bringt als gelegentliche Powerschläge.
|
||||
|
||||
</details>
|
||||
67
data/content/articles/padelschlaeger-fortgeschrittene-de.md
Normal file
67
data/content/articles/padelschlaeger-fortgeschrittene-de.md
Normal file
@@ -0,0 +1,67 @@
|
||||
---
|
||||
title: "Padelschläger für Fortgeschrittene: Die besten Modelle 2026"
|
||||
slug: padelschlaeger-fortgeschrittene-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-fortgeschrittene
|
||||
meta_description: "Die besten Padelschläger für fortgeschrittene und ambitionierte Spieler. High-End-Modelle mit Carbon, Kevlar und ausgereifter Schlagbalance für Spieler ab 3.0."
|
||||
---
|
||||
|
||||
# Padelschläger für Fortgeschrittene: Die besten Modelle 2026
|
||||
|
||||
<!-- TODO: Einleitung — wann ist man bereit für einen Fortgeschrittenenschläger? -->
|
||||
|
||||
Ab einem gewissen Spielniveau lohnt sich der Griff zu einem anspruchsvolleren Schläger. Wer sauber trifft, kann von einer härteren Bespannung und einer präziseren Balance profitieren. Die Schläger in dieser Liste sind kein Selbstläufer — aber in den richtigen Händen ein echter Vorteil.
|
||||
|
||||
---
|
||||
|
||||
## Top-Schläger für Fortgeschrittene im Überblick
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Carbon, Kevlar, Glasfaser: Was steckt drin?
|
||||
|
||||
<!-- TODO: Materialüberblick mit Vor- und Nachteilen -->
|
||||
|
||||
### Carbon-Rahmen
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### 3K vs. 12K Carbon
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
### Kevlar-Einlagen
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Testbericht: Unser Empfehlungsschläger
|
||||
|
||||
[product:platzhalter-fortgeschrittene-schlaeger-amazon]
|
||||
|
||||
<!-- TODO: Praxistest -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Ab welcher Spielstufe lohnt sich ein Fortgeschrittenenschläger?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Wer regelmäßig spielt (2–3 Mal pro Woche), seit mindestens einem Jahr dabei ist und an Taktik und Technik arbeitet, kann von einem hochwertigeren Schläger profitieren. Für gelegentliche Spieler ist der Unterschied zu einem Mittelklassemodell kaum spürbar.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Müssen Fortgeschrittenenschläger teurer sein?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Nicht zwingend. Es gibt ausgezeichnete Modelle im 150–200-Euro-Segment, die professionell verarbeitete Carbon-Elemente enthalten. Alles über 300 Euro richtet sich meist an Spieler mit Wettkampfambitionen.
|
||||
|
||||
</details>
|
||||
55
data/content/articles/padelschlaeger-unter-100-de.md
Normal file
55
data/content/articles/padelschlaeger-unter-100-de.md
Normal file
@@ -0,0 +1,55 @@
|
||||
---
|
||||
title: "Padelschläger unter 100 Euro: Die besten günstigen Modelle 2026"
|
||||
slug: padelschlaeger-unter-100-de
|
||||
language: de
|
||||
url_path: /padelschlaeger-unter-100
|
||||
meta_description: "Gute Padelschläger müssen nicht teuer sein. Die besten Modelle unter 100 Euro — mit echtem Spielgefühl, ohne Kompromisse bei der Verarbeitung."
|
||||
---
|
||||
|
||||
# Padelschläger unter 100 Euro: Die besten günstigen Modelle 2026
|
||||
|
||||
<!-- TODO: Einleitung — Gibt es wirklich gute Schläger für unter 100 Euro? -->
|
||||
|
||||
Wer sagt, dass Padel teuer sein muss? In der 50–100-Euro-Klasse gibt es Schläger, die sich von 200-Euro-Modellen im Freizeitspiel kaum unterscheiden. Der entscheidende Unterschied liegt oft im Material des Rahmens und im Kern — nicht im Spielgefühl.
|
||||
|
||||
---
|
||||
|
||||
## Die besten Schläger unter 100 Euro
|
||||
|
||||
[product-group:racket]
|
||||
|
||||
---
|
||||
|
||||
## Was bekommt man unter 100 Euro?
|
||||
|
||||
<!-- TODO: Realistische Erwartungen setzen -->
|
||||
|
||||
---
|
||||
|
||||
## Unser Preisklassen-Tipp
|
||||
|
||||
[product:platzhalter-budget-schlaeger-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Sind günstige Padelschläger schlechter verarbeitet?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Nicht zwangsläufig. Im Bereich 60–100 Euro findet man solide Fiberglas-Schläger bekannter Marken. Der Hauptunterschied zu teureren Modellen ist das Rahmenmaterial (kein Carbon) und ein schlichtes Design.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Lohnt es sich, für einen Einsteiger 100 Euro auszugeben?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Ja, wenn er weiß, dass er das Spiel ernsthafter betreiben will. Für einen ersten Test reicht auch ein 50-Euro-Schläger — aber wer nach der ersten Saison weiterspielen will, wird früh aufwerten wollen.
|
||||
|
||||
</details>
|
||||
61
data/content/articles/padelschuhe-test-de.md
Normal file
61
data/content/articles/padelschuhe-test-de.md
Normal file
@@ -0,0 +1,61 @@
|
||||
---
|
||||
title: "Padelschuhe Test 2026: Die besten Schuhe für Sand- und Kunstgras"
|
||||
slug: padelschuhe-test-de
|
||||
language: de
|
||||
url_path: /padelschuhe-test
|
||||
meta_description: "Welche Padelschuhe sind am besten? Unser Test der beliebtesten Modelle — für Sand, Kunstgras und Kunststoffbelag mit optimaler Dämpfung und Stabilität."
|
||||
---
|
||||
|
||||
# Padelschuhe Test 2026: Die besten Schuhe für Sand- und Kunstgras
|
||||
|
||||
<!-- TODO: Einleitung — warum normale Tennisschuhe nicht reichen -->
|
||||
|
||||
Padelschuhe werden häufig unterschätzt. Auf dem Sandbelag des Padel-Courts braucht man eine völlig andere Sohle als auf Tennishartplatz oder Hallenboden. Ein falscher Schuh erhöht nicht nur das Verletzungsrisiko — er kostet auch Punkte, weil man in Kurven wegrutscht.
|
||||
|
||||
---
|
||||
|
||||
## Unsere Top-Empfehlungen
|
||||
|
||||
[product-group:shoe]
|
||||
|
||||
---
|
||||
|
||||
## Welche Sohle für welchen Belag?
|
||||
|
||||
<!-- TODO: Sohlentypen und Untergrundtabelle -->
|
||||
|
||||
| Belag | Empfohlene Sohle |
|
||||
|---|---|
|
||||
| Sand (feiner Quarzsand) | Fishbone / Fischgrät |
|
||||
| Kunstgras | Multicourt / Omnidirectional |
|
||||
| Kunststoff/Beton | Glatte Multicourt-Sohle |
|
||||
|
||||
---
|
||||
|
||||
## Testbericht: Bester Allround-Schuh
|
||||
|
||||
[product:platzhalter-padelschuh-amazon]
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
---
|
||||
|
||||
## Häufige Fragen
|
||||
|
||||
<details>
|
||||
<summary>Kann ich Tennisschuhe für Padel verwenden?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Für den gelegentlichen Einstieg ja. Auf Dauer ist es nicht empfehlenswert: Tennisschuhe bieten auf Sand zu wenig Halt, und die Abnutzung ist höher. Nach 3–4 Monaten regelmäßigen Spielens zahlen sich dedizierte Padelschuhe aus.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Wie erkenne ich verschlissene Padelschuhe?</summary>
|
||||
|
||||
<!-- TODO -->
|
||||
|
||||
Wenn die Außenfläche der Sohle glatt wird oder das Profil auf unter 2 mm abgenutzt ist, verliert der Schuh seinen Halt. Bei Padel ist das gefährlicher als bei vielen anderen Sportarten, weil häufige Richtungswechsel auf losem Sand stattfinden.
|
||||
|
||||
</details>
|
||||
@@ -59,10 +59,11 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
- LANDING_DIR=/app/data/pipeline/landing
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /data/padelnomics:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
healthcheck:
|
||||
@@ -81,10 +82,11 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
- LANDING_DIR=/app/data/pipeline/landing
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /data/padelnomics:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
@@ -97,10 +99,11 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
- LANDING_DIR=/app/data/pipeline/landing
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /data/padelnomics:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
@@ -114,10 +117,11 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
- LANDING_DIR=/app/data/pipeline/landing
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /data/padelnomics:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
healthcheck:
|
||||
@@ -136,10 +140,11 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
- LANDING_DIR=/app/data/pipeline/landing
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /data/padelnomics:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
@@ -152,10 +157,11 @@ services:
|
||||
env_file: ./.env
|
||||
environment:
|
||||
- DATABASE_PATH=/app/data/app.db
|
||||
- SERVING_DUCKDB_PATH=/app/data/analytics.duckdb
|
||||
- SERVING_DUCKDB_PATH=/app/data/pipeline/analytics.duckdb
|
||||
- LANDING_DIR=/app/data/pipeline/landing
|
||||
volumes:
|
||||
- app-data:/app/data
|
||||
- /data/padelnomics/analytics.duckdb:/app/data/analytics.duckdb:ro
|
||||
- /data/padelnomics:/app/data/pipeline:ro
|
||||
networks:
|
||||
- net
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ extract-census-usa = "padelnomics_extract.census_usa:main"
|
||||
extract-census-usa-income = "padelnomics_extract.census_usa_income:main"
|
||||
extract-ons-uk = "padelnomics_extract.ons_uk:main"
|
||||
extract-geonames = "padelnomics_extract.geonames:main"
|
||||
extract-gisco = "padelnomics_extract.gisco:main"
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
|
||||
@@ -11,9 +11,12 @@ from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from .utils import end_run, open_state_db, start_run
|
||||
|
||||
load_dotenv()
|
||||
|
||||
LANDING_DIR = Path(os.environ.get("LANDING_DIR", "data/landing"))
|
||||
|
||||
HTTP_TIMEOUT_SECONDS = 30
|
||||
|
||||
@@ -7,7 +7,7 @@ A graphlib.TopologicalSorter schedules them: tasks with no unmet dependencies
|
||||
run immediately in parallel; each completion may unlock new tasks.
|
||||
|
||||
Current dependency graph:
|
||||
- All 8 non-availability extractors have no dependencies (run in parallel)
|
||||
- All 9 non-availability extractors have no dependencies (run in parallel)
|
||||
- playtomic_availability depends on playtomic_tenants (starts as soon as
|
||||
tenants finishes, even if other extractors are still running)
|
||||
"""
|
||||
@@ -26,6 +26,8 @@ from .eurostat_city_labels import EXTRACTOR_NAME as EUROSTAT_CITY_LABELS_NAME
|
||||
from .eurostat_city_labels import extract as extract_eurostat_city_labels
|
||||
from .geonames import EXTRACTOR_NAME as GEONAMES_NAME
|
||||
from .geonames import extract as extract_geonames
|
||||
from .gisco import EXTRACTOR_NAME as GISCO_NAME
|
||||
from .gisco import extract as extract_gisco
|
||||
from .ons_uk import EXTRACTOR_NAME as ONS_UK_NAME
|
||||
from .ons_uk import extract as extract_ons_uk
|
||||
from .overpass import EXTRACTOR_NAME as OVERPASS_NAME
|
||||
@@ -50,6 +52,7 @@ EXTRACTORS: dict[str, tuple] = {
|
||||
CENSUS_USA_INCOME_NAME: (extract_census_usa_income, []),
|
||||
ONS_UK_NAME: (extract_ons_uk, []),
|
||||
GEONAMES_NAME: (extract_geonames, []),
|
||||
GISCO_NAME: (extract_gisco, []),
|
||||
TENANTS_NAME: (extract_tenants, []),
|
||||
AVAILABILITY_NAME: (extract_availability, [TENANTS_NAME]),
|
||||
}
|
||||
|
||||
95
extract/padelnomics_extract/src/padelnomics_extract/gisco.py
Normal file
95
extract/padelnomics_extract/src/padelnomics_extract/gisco.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""GISCO NUTS-2 boundary GeoJSON extractor.
|
||||
|
||||
Downloads NUTS-2 boundary polygons from Eurostat GISCO. The file is stored
|
||||
uncompressed because DuckDB's ST_Read cannot read gzipped files.
|
||||
|
||||
NUTS classification revises approximately every 7 years (current: 2021).
|
||||
The partition path is fixed to the revision year, not the run date, making
|
||||
the source version explicit. Cursor tracking still uses year_month to avoid
|
||||
re-downloading on every monthly run.
|
||||
|
||||
Landing: {LANDING_DIR}/gisco/2024/01/nuts2_boundaries.geojson (~5 MB, uncompressed)
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
|
||||
from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging
|
||||
from .utils import get_last_cursor
|
||||
|
||||
logger = setup_logging("padelnomics.extract.gisco")
|
||||
|
||||
EXTRACTOR_NAME = "gisco"
|
||||
|
||||
# NUTS 2021 revision, 20M scale (1:20,000,000), WGS84 (EPSG:4326), LEVL_2 only.
|
||||
# 20M resolution gives simplified polygons that are fast for point-in-polygon
|
||||
# matching without sacrificing accuracy at the NUTS-2 boundary level.
|
||||
GISCO_URL = (
|
||||
"https://gisco-services.ec.europa.eu/distribution/v2/nuts/geojson/"
|
||||
"NUTS_RG_20M_2021_4326_LEVL_2.geojson"
|
||||
)
|
||||
|
||||
# Fixed partition: NUTS boundaries are a static reference file, not time-series data.
|
||||
# The 2024/01 partition reflects when this NUTS 2021 dataset was first ingested.
|
||||
DEST_REL = Path("gisco/2024/01/nuts2_boundaries.geojson")
|
||||
|
||||
_GISCO_TIMEOUT_SECONDS = HTTP_TIMEOUT_SECONDS * 4 # ~5 MB; generous for slow upstreams
|
||||
|
||||
|
||||
def extract(
    landing_dir: Path,
    year_month: str,
    conn: sqlite3.Connection,
    session: niquests.Session,
) -> dict:
    """Download the NUTS-2 GeoJSON. Skips if already run this month or the file exists.

    Args:
        landing_dir: Root of the landing zone; the file lands under ``DEST_REL``.
        year_month: Cursor value for this run (used to skip repeat monthly runs).
        conn: SQLite state-DB connection used for the cursor lookup.
        session: HTTP session used for the download.

    Returns:
        Stats dict with ``files_written`` / ``files_skipped`` / ``bytes_written``
        and, when the file is present or freshly written, ``cursor_value``.

    Raises:
        RuntimeError: if the payload is suspiciously small or does not look like
            GeoJSON. These were plain ``assert`` statements before, which are
            stripped under ``python -O`` and would have let a truncated or HTML
            error response land silently.
    """
    last_cursor = get_last_cursor(conn, EXTRACTOR_NAME)
    if last_cursor == year_month:
        logger.info("already ran for %s — skipping", year_month)
        return {"files_written": 0, "files_skipped": 1, "bytes_written": 0}

    dest = landing_dir / DEST_REL
    if dest.exists():
        # Static reference file on a fixed partition: once present, never re-download.
        logger.info("file already exists (skipping download): %s", dest)
        return {
            "files_written": 0,
            "files_skipped": 1,
            "bytes_written": 0,
            "cursor_value": year_month,
        }

    dest.parent.mkdir(parents=True, exist_ok=True)
    logger.info("GET %s", GISCO_URL)
    resp = session.get(GISCO_URL, timeout=_GISCO_TIMEOUT_SECONDS)
    resp.raise_for_status()

    content = resp.content
    # Sanity checks: raise explicitly (not assert) so they survive `python -O`.
    if len(content) <= 100_000:
        raise RuntimeError(
            f"GeoJSON too small ({len(content)} bytes) — download may have failed"
        )
    if b'"FeatureCollection"' not in content:
        raise RuntimeError("Response does not look like GeoJSON")

    # Write uncompressed — ST_Read requires a plain file, not .gz.
    # tmp-write-then-rename keeps readers from ever seeing a partial file.
    tmp = dest.with_suffix(".geojson.tmp")
    tmp.write_bytes(content)
    tmp.rename(dest)

    size_mb = len(content) / 1_000_000
    logger.info("written %s (%.1f MB)", dest, size_mb)

    return {
        "files_written": 1,
        "files_skipped": 0,
        "bytes_written": len(content),
        "cursor_value": year_month,
    }
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: run the GISCO extractor through the shared runner."""
    run_extractor(EXTRACTOR_NAME, extract)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -33,7 +33,7 @@ from pathlib import Path
|
||||
import niquests
|
||||
|
||||
from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging, ua_for_proxy
|
||||
from .proxy import load_fallback_proxy_urls, load_proxy_urls, make_tiered_cycler
|
||||
from .proxy import load_proxy_tiers, make_tiered_cycler
|
||||
from .utils import (
|
||||
compress_jsonl_atomic,
|
||||
flush_partial_batch,
|
||||
@@ -52,6 +52,9 @@ MAX_VENUES_PER_RUN = 20_000
|
||||
MAX_RETRIES_PER_VENUE = 2
|
||||
RECHECK_WINDOW_MINUTES = int(os.environ.get("RECHECK_WINDOW_MINUTES", "30"))
|
||||
CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10")
|
||||
# Worker count: defaults to MAX_PROXY_CONCURRENCY (200). Override via PROXY_CONCURRENCY env var.
|
||||
_PROXY_CONCURRENCY = os.environ.get("PROXY_CONCURRENCY", "").strip()
|
||||
MAX_PROXY_CONCURRENCY = 200
|
||||
|
||||
# Parallel mode submits futures in batches so the circuit breaker can stop
|
||||
# new submissions after it opens. Already-inflight futures in the current
|
||||
@@ -76,8 +79,10 @@ def _load_tenant_ids(landing_dir: Path) -> list[str]:
|
||||
if not playtomic_dir.exists():
|
||||
return []
|
||||
|
||||
# Prefer JSONL (new format), fall back to blob (old format)
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/tenants.jsonl.gz"), reverse=True)
|
||||
# Prefer daily partition (YYYY/MM/DD), fall back to older monthly/weekly partitions
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/*/tenants.jsonl.gz"), reverse=True)
|
||||
if not tenant_files:
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/tenants.jsonl.gz"), reverse=True)
|
||||
if not tenant_files:
|
||||
tenant_files = sorted(playtomic_dir.glob("*/*/tenants.json.gz"), reverse=True)
|
||||
if not tenant_files:
|
||||
@@ -190,14 +195,13 @@ def _fetch_venues_parallel(
|
||||
start_max_str: str,
|
||||
worker_count: int,
|
||||
cycler: dict,
|
||||
fallback_urls: list[str],
|
||||
on_result=None,
|
||||
) -> tuple[list[dict], int]:
|
||||
"""Fetch availability for multiple venues in parallel.
|
||||
|
||||
Submits futures in batches of PARALLEL_BATCH_SIZE. After each batch
|
||||
completes, checks the circuit breaker: if it opened and there is no
|
||||
fallback configured, stops submitting further batches.
|
||||
completes, checks the circuit breaker: if all proxy tiers are exhausted,
|
||||
stops submitting further batches.
|
||||
|
||||
on_result: optional callable(result: dict) invoked inside the lock for
|
||||
each successful result — used for incremental partial-file flushing.
|
||||
@@ -209,16 +213,17 @@ def _fetch_venues_parallel(
|
||||
completed_count = 0
|
||||
lock = threading.Lock()
|
||||
|
||||
def _worker(tenant_id: str) -> dict | None:
|
||||
def _worker(tenant_id: str) -> tuple[str | None, dict | None]:
|
||||
proxy_url = cycler["next_proxy"]()
|
||||
return _fetch_venue_availability(tenant_id, start_min_str, start_max_str, proxy_url)
|
||||
result = _fetch_venue_availability(tenant_id, start_min_str, start_max_str, proxy_url)
|
||||
return proxy_url, result
|
||||
|
||||
with ThreadPoolExecutor(max_workers=worker_count) as pool:
|
||||
for batch_start in range(0, len(tenant_ids), PARALLEL_BATCH_SIZE):
|
||||
# Stop submitting new work if circuit is open with no fallback
|
||||
if cycler["is_fallback_active"]() and not fallback_urls:
|
||||
# Stop submitting new work if all proxy tiers are exhausted
|
||||
if cycler["is_exhausted"]():
|
||||
logger.error(
|
||||
"Circuit open with no fallback — stopping after %d/%d venues",
|
||||
"All proxy tiers exhausted — stopping after %d/%d venues",
|
||||
completed_count, len(tenant_ids),
|
||||
)
|
||||
break
|
||||
@@ -227,17 +232,17 @@ def _fetch_venues_parallel(
|
||||
batch_futures = {pool.submit(_worker, tid): tid for tid in batch}
|
||||
|
||||
for future in as_completed(batch_futures):
|
||||
result = future.result()
|
||||
proxy_url, result = future.result()
|
||||
with lock:
|
||||
completed_count += 1
|
||||
if result is not None:
|
||||
venues_data.append(result)
|
||||
cycler["record_success"]()
|
||||
cycler["record_success"](proxy_url)
|
||||
if on_result is not None:
|
||||
on_result(result)
|
||||
else:
|
||||
venues_errored += 1
|
||||
cycler["record_failure"]()
|
||||
cycler["record_failure"](proxy_url)
|
||||
|
||||
if completed_count % 500 == 0:
|
||||
logger.info(
|
||||
@@ -294,10 +299,9 @@ def extract(
|
||||
venues_to_process = [tid for tid in all_venues_to_process if tid not in already_done]
|
||||
|
||||
# Set up tiered proxy cycler with circuit breaker
|
||||
proxy_urls = load_proxy_urls()
|
||||
fallback_urls = load_fallback_proxy_urls()
|
||||
worker_count = len(proxy_urls) if proxy_urls else 1
|
||||
cycler = make_tiered_cycler(proxy_urls, fallback_urls, CIRCUIT_BREAKER_THRESHOLD)
|
||||
tiers = load_proxy_tiers()
|
||||
worker_count = min(int(_PROXY_CONCURRENCY), MAX_PROXY_CONCURRENCY) if _PROXY_CONCURRENCY else (MAX_PROXY_CONCURRENCY if tiers else 1)
|
||||
cycler = make_tiered_cycler(tiers, CIRCUIT_BREAKER_THRESHOLD)
|
||||
|
||||
start_min_str = start_min.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
start_max_str = start_max.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
@@ -325,26 +329,27 @@ def extract(
|
||||
venues_errored = 0
|
||||
|
||||
if worker_count > 1:
|
||||
logger.info("Parallel mode: %d workers, %d proxies", worker_count, len(proxy_urls))
|
||||
logger.info("Parallel mode: %d workers, %d tier(s)", worker_count, len(tiers))
|
||||
new_venues_data, venues_errored = _fetch_venues_parallel(
|
||||
venues_to_process, start_min_str, start_max_str, worker_count, cycler, fallback_urls,
|
||||
venues_to_process, start_min_str, start_max_str, worker_count, cycler,
|
||||
on_result=_on_result,
|
||||
)
|
||||
else:
|
||||
logger.info("Serial mode: 1 worker, %d venues", len(venues_to_process))
|
||||
for i, tenant_id in enumerate(venues_to_process):
|
||||
proxy_url = cycler["next_proxy"]()
|
||||
result = _fetch_venue_availability(
|
||||
tenant_id, start_min_str, start_max_str, cycler["next_proxy"](),
|
||||
tenant_id, start_min_str, start_max_str, proxy_url,
|
||||
)
|
||||
if result is not None:
|
||||
new_venues_data.append(result)
|
||||
cycler["record_success"]()
|
||||
cycler["record_success"](proxy_url)
|
||||
_on_result(result)
|
||||
else:
|
||||
venues_errored += 1
|
||||
circuit_opened = cycler["record_failure"]()
|
||||
if circuit_opened and not fallback_urls:
|
||||
logger.error("Circuit open with no fallback — writing partial results")
|
||||
cycler["record_failure"](proxy_url)
|
||||
if cycler["is_exhausted"]():
|
||||
logger.error("All proxy tiers exhausted — writing partial results")
|
||||
break
|
||||
|
||||
if (i + 1) % 100 == 0:
|
||||
@@ -485,28 +490,28 @@ def extract_recheck(
|
||||
start_max_str = window_end.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
|
||||
# Set up tiered proxy cycler with circuit breaker
|
||||
proxy_urls = load_proxy_urls()
|
||||
fallback_urls = load_fallback_proxy_urls()
|
||||
worker_count = len(proxy_urls) if proxy_urls else 1
|
||||
cycler = make_tiered_cycler(proxy_urls, fallback_urls, CIRCUIT_BREAKER_THRESHOLD)
|
||||
tiers = load_proxy_tiers()
|
||||
worker_count = min(int(_PROXY_CONCURRENCY), MAX_PROXY_CONCURRENCY) if _PROXY_CONCURRENCY else (MAX_PROXY_CONCURRENCY if tiers else 1)
|
||||
cycler = make_tiered_cycler(tiers, CIRCUIT_BREAKER_THRESHOLD)
|
||||
|
||||
if worker_count > 1 and len(venues_to_recheck) > 10:
|
||||
venues_data, venues_errored = _fetch_venues_parallel(
|
||||
venues_to_recheck, start_min_str, start_max_str, worker_count, cycler, fallback_urls,
|
||||
venues_to_recheck, start_min_str, start_max_str, worker_count, cycler,
|
||||
)
|
||||
else:
|
||||
venues_data = []
|
||||
venues_errored = 0
|
||||
for tid in venues_to_recheck:
|
||||
result = _fetch_venue_availability(tid, start_min_str, start_max_str, cycler["next_proxy"]())
|
||||
proxy_url = cycler["next_proxy"]()
|
||||
result = _fetch_venue_availability(tid, start_min_str, start_max_str, proxy_url)
|
||||
if result is not None:
|
||||
venues_data.append(result)
|
||||
cycler["record_success"]()
|
||||
cycler["record_success"](proxy_url)
|
||||
else:
|
||||
venues_errored += 1
|
||||
circuit_opened = cycler["record_failure"]()
|
||||
if circuit_opened and not fallback_urls:
|
||||
logger.error("Circuit open with no fallback — writing partial recheck results")
|
||||
cycler["record_failure"](proxy_url)
|
||||
if cycler["is_exhausted"]():
|
||||
logger.error("All proxy tiers exhausted — writing partial recheck results")
|
||||
break
|
||||
|
||||
# Write recheck file as JSONL — one venue per line with metadata injected
|
||||
|
||||
@@ -10,11 +10,11 @@ API notes (discovered 2026-02):
|
||||
- `size=100` is the maximum effective page size
|
||||
- ~14K venues globally as of Feb 2026
|
||||
|
||||
Parallel mode: when PROXY_URLS is set, fires batch_size = len(proxy_urls)
|
||||
pages concurrently. Each page gets its own fresh session + proxy. Pages beyond
|
||||
the last one return empty lists (safe — just triggers the done condition).
|
||||
Without proxies, falls back to single-threaded with THROTTLE_SECONDS between
|
||||
pages.
|
||||
Parallel mode: when proxy tiers are configured, fires BATCH_SIZE pages
|
||||
concurrently. Each page gets its own fresh session + proxy from the tiered
|
||||
cycler. On failure the cycler escalates through free → datacenter →
|
||||
residential tiers. Without proxies, falls back to single-threaded with
|
||||
THROTTLE_SECONDS between pages.
|
||||
|
||||
Rate: 1 req / 2 s per IP (see docs/data-sources-inventory.md §1.2).
|
||||
|
||||
@@ -22,15 +22,17 @@ Landing: {LANDING_DIR}/playtomic/{year}/{month}/tenants.jsonl.gz
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
|
||||
from ._shared import HTTP_TIMEOUT_SECONDS, run_extractor, setup_logging, ua_for_proxy
|
||||
from .proxy import load_proxy_urls, make_round_robin_cycler
|
||||
from .proxy import load_proxy_tiers, make_tiered_cycler
|
||||
from .utils import compress_jsonl_atomic, landing_path
|
||||
|
||||
logger = setup_logging("padelnomics.extract.playtomic_tenants")
|
||||
@@ -41,6 +43,9 @@ PLAYTOMIC_TENANTS_URL = "https://api.playtomic.io/v1/tenants"
|
||||
THROTTLE_SECONDS = 2
|
||||
PAGE_SIZE = 100
|
||||
MAX_PAGES = 500 # safety bound — ~50K venues max, well above current ~14K
|
||||
BATCH_SIZE = 20 # concurrent pages per batch (fixed, independent of proxy count)
|
||||
CIRCUIT_BREAKER_THRESHOLD = int(os.environ.get("CIRCUIT_BREAKER_THRESHOLD") or "10")
|
||||
MAX_PAGE_ATTEMPTS = 5 # max retries per individual page before giving up
|
||||
|
||||
|
||||
def _fetch_one_page(proxy_url: str | None, page: int) -> tuple[int, list[dict]]:
|
||||
@@ -60,34 +65,79 @@ def _fetch_one_page(proxy_url: str | None, page: int) -> tuple[int, list[dict]]:
|
||||
return (page, tenants)
|
||||
|
||||
|
||||
def _fetch_pages_parallel(pages: list[int], next_proxy) -> list[tuple[int, list[dict]]]:
|
||||
"""Fetch multiple pages concurrently. Returns [(page_num, tenants_list), ...]."""
|
||||
def _fetch_page_via_cycler(cycler: dict, page: int) -> tuple[int, list[dict]]:
|
||||
"""Fetch a single page, retrying across proxy tiers via the circuit breaker.
|
||||
|
||||
On each attempt, pulls the next proxy from the active tier. Records
|
||||
success/failure so the circuit breaker can escalate tiers. Raises
|
||||
RuntimeError if all tiers are exhausted or MAX_PAGE_ATTEMPTS is exceeded.
|
||||
"""
|
||||
last_exc: Exception | None = None
|
||||
for attempt in range(MAX_PAGE_ATTEMPTS):
|
||||
proxy_url = cycler["next_proxy"]()
|
||||
if proxy_url is None: # all tiers exhausted
|
||||
raise RuntimeError(f"All proxy tiers exhausted fetching page {page}")
|
||||
try:
|
||||
result = _fetch_one_page(proxy_url, page)
|
||||
cycler["record_success"](proxy_url)
|
||||
return result
|
||||
except Exception as exc:
|
||||
last_exc = exc
|
||||
logger.warning(
|
||||
"Page %d attempt %d/%d failed (proxy=%s): %s",
|
||||
page,
|
||||
attempt + 1,
|
||||
MAX_PAGE_ATTEMPTS,
|
||||
proxy_url,
|
||||
exc,
|
||||
)
|
||||
cycler["record_failure"](proxy_url)
|
||||
if cycler["is_exhausted"]():
|
||||
raise RuntimeError(f"All proxy tiers exhausted fetching page {page}") from exc
|
||||
raise RuntimeError(f"Page {page} failed after {MAX_PAGE_ATTEMPTS} attempts") from last_exc
|
||||
|
||||
|
||||
def _fetch_pages_parallel(pages: list[int], cycler: dict) -> list[tuple[int, list[dict]]]:
|
||||
"""Fetch multiple pages concurrently using the tiered cycler.
|
||||
|
||||
Returns [(page_num, tenants_list), ...]. Raises if any page exhausts all tiers.
|
||||
"""
|
||||
with ThreadPoolExecutor(max_workers=len(pages)) as pool:
|
||||
futures = [pool.submit(_fetch_one_page, next_proxy(), p) for p in pages]
|
||||
futures = [pool.submit(_fetch_page_via_cycler, cycler, p) for p in pages]
|
||||
return [f.result() for f in as_completed(futures)]
|
||||
|
||||
|
||||
def extract(
|
||||
landing_dir: Path,
|
||||
year_month: str,
|
||||
year_month: str, # noqa: ARG001 — unused; tenants uses daily partition instead
|
||||
conn: sqlite3.Connection,
|
||||
session: niquests.Session,
|
||||
) -> dict:
|
||||
"""Fetch all Playtomic venues via global pagination. Returns run metrics."""
|
||||
year, month = year_month.split("/")
|
||||
dest_dir = landing_path(landing_dir, "playtomic", year, month)
|
||||
"""Fetch all Playtomic venues via global pagination. Returns run metrics.
|
||||
|
||||
Partitioned by day (e.g. 2026/03/01) so each daily run produces a
|
||||
fresh file. _load_tenant_ids() in playtomic_availability globs across all
|
||||
partitions and picks the most recent one.
|
||||
"""
|
||||
today = datetime.now(UTC)
|
||||
year, month, day = today.strftime("%Y"), today.strftime("%m"), today.strftime("%d")
|
||||
dest_dir = landing_path(landing_dir, "playtomic", year, month, day)
|
||||
dest = dest_dir / "tenants.jsonl.gz"
|
||||
old_blob = dest_dir / "tenants.json.gz"
|
||||
if dest.exists() or old_blob.exists():
|
||||
logger.info("Already have tenants for %s/%s — skipping", year, month)
|
||||
if dest.exists():
|
||||
logger.info("Already have tenants for %s/%s/%s — skipping", year, month, day)
|
||||
return {"files_written": 0, "files_skipped": 1, "bytes_written": 0}
|
||||
|
||||
proxy_urls = load_proxy_urls()
|
||||
next_proxy = make_round_robin_cycler(proxy_urls) if proxy_urls else None
|
||||
batch_size = len(proxy_urls) if proxy_urls else 1
|
||||
tiers = load_proxy_tiers()
|
||||
cycler = make_tiered_cycler(tiers, CIRCUIT_BREAKER_THRESHOLD) if tiers else None
|
||||
batch_size = BATCH_SIZE if cycler else 1
|
||||
|
||||
if next_proxy:
|
||||
logger.info("Parallel mode: %d pages per batch (%d proxies)", batch_size, len(proxy_urls))
|
||||
if cycler:
|
||||
logger.info(
|
||||
"Parallel mode: %d pages/batch, %d tier(s), threshold=%d",
|
||||
batch_size,
|
||||
cycler["tier_count"](),
|
||||
CIRCUIT_BREAKER_THRESHOLD,
|
||||
)
|
||||
else:
|
||||
logger.info("Serial mode: 1 page at a time (no proxies)")
|
||||
|
||||
@@ -97,15 +147,33 @@ def extract(
|
||||
done = False
|
||||
|
||||
while not done and page < MAX_PAGES:
|
||||
if cycler and cycler["is_exhausted"]():
|
||||
logger.error(
|
||||
"All proxy tiers exhausted — stopping at page %d (%d venues collected)",
|
||||
page,
|
||||
len(all_tenants),
|
||||
)
|
||||
break
|
||||
|
||||
batch_end = min(page + batch_size, MAX_PAGES)
|
||||
pages_to_fetch = list(range(page, batch_end))
|
||||
|
||||
if next_proxy and len(pages_to_fetch) > 1:
|
||||
if cycler and len(pages_to_fetch) > 1:
|
||||
logger.info(
|
||||
"Fetching pages %d-%d in parallel (%d workers, total so far: %d)",
|
||||
page, batch_end - 1, len(pages_to_fetch), len(all_tenants),
|
||||
page,
|
||||
batch_end - 1,
|
||||
len(pages_to_fetch),
|
||||
len(all_tenants),
|
||||
)
|
||||
results = _fetch_pages_parallel(pages_to_fetch, next_proxy)
|
||||
try:
|
||||
results = _fetch_pages_parallel(pages_to_fetch, cycler)
|
||||
except RuntimeError:
|
||||
logger.error(
|
||||
"Proxy tiers exhausted mid-batch — writing partial results (%d venues)",
|
||||
len(all_tenants),
|
||||
)
|
||||
break
|
||||
else:
|
||||
# Serial: reuse the shared session, throttle between pages
|
||||
page_num = pages_to_fetch[0]
|
||||
@@ -119,7 +187,7 @@ def extract(
|
||||
)
|
||||
results = [(page_num, tenants)]
|
||||
|
||||
# Process pages in order so the done-detection on < PAGE_SIZE is deterministic
|
||||
# Process pages in order so done-detection on < PAGE_SIZE is deterministic
|
||||
for p, tenants in sorted(results):
|
||||
new_count = 0
|
||||
for tenant in tenants:
|
||||
@@ -130,7 +198,11 @@ def extract(
|
||||
new_count += 1
|
||||
|
||||
logger.info(
|
||||
"page=%d got=%d new=%d total=%d", p, len(tenants), new_count, len(all_tenants),
|
||||
"page=%d got=%d new=%d total=%d",
|
||||
p,
|
||||
len(tenants),
|
||||
new_count,
|
||||
len(all_tenants),
|
||||
)
|
||||
|
||||
# Last page — fewer than PAGE_SIZE results means we've exhausted the list
|
||||
@@ -139,7 +211,7 @@ def extract(
|
||||
break
|
||||
|
||||
page = batch_end
|
||||
if not next_proxy:
|
||||
if not cycler:
|
||||
time.sleep(THROTTLE_SECONDS)
|
||||
|
||||
# Write each tenant as a JSONL line, then compress atomically
|
||||
@@ -154,7 +226,7 @@ def extract(
|
||||
"files_written": 1,
|
||||
"files_skipped": 0,
|
||||
"bytes_written": bytes_written,
|
||||
"cursor_value": year_month,
|
||||
"cursor_value": f"{year}/{month}/{day}",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,41 +1,103 @@
|
||||
"""Optional proxy rotation for parallel HTTP fetching.
|
||||
|
||||
Proxies are configured via the PROXY_URLS environment variable (comma-separated).
|
||||
When unset, all functions return None/no-op — extractors fall back to direct requests.
|
||||
Proxies are configured via environment variables. When unset, all functions
|
||||
return None/no-op — extractors fall back to direct requests.
|
||||
|
||||
Tiered proxy with circuit breaker:
|
||||
Primary tier (PROXY_URLS) is used by default — typically cheap datacenter proxies.
|
||||
Fallback tier (PROXY_URLS_FALLBACK) activates once consecutive failures >= threshold.
|
||||
Once the circuit opens it stays open for the duration of the run (no auto-recovery).
|
||||
Three-tier escalation: free → datacenter → residential.
|
||||
Tier 1 (free): WEBSHARE_DOWNLOAD_URL — auto-fetched from Webshare API
|
||||
Tier 2 (datacenter): PROXY_URLS_DATACENTER — comma-separated paid DC proxies
|
||||
Tier 3 (residential): PROXY_URLS_RESIDENTIAL — comma-separated paid residential proxies
|
||||
|
||||
Tiered circuit breaker:
|
||||
Active tier is used until consecutive failures >= threshold, then escalates
|
||||
to the next tier. Once all tiers are exhausted, is_exhausted() returns True.
|
||||
Escalation is permanent for the duration of the run — no auto-recovery.
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MAX_WEBSHARE_PROXIES = 20
|
||||
WEBSHARE_FETCH_TIMEOUT_SECONDS = 10
|
||||
WEBSHARE_MAX_RESPONSE_BYTES = 1024 * 1024 # 1MB
|
||||
|
||||
def load_proxy_urls() -> list[str]:
|
||||
"""Read PROXY_URLS env var (comma-separated). Returns [] if unset.
|
||||
|
||||
Format: http://user:pass@host:port or socks5://host:port
|
||||
def fetch_webshare_proxies(download_url: str, max_proxies: int = MAX_WEBSHARE_PROXIES) -> list[str]:
|
||||
"""Fetch proxy list from the Webshare download API. Returns [] on any error.
|
||||
|
||||
Expected line format: ip:port:username:password
|
||||
Converts to: http://username:password@ip:port
|
||||
|
||||
Bounded: reads at most WEBSHARE_MAX_RESPONSE_BYTES, returns at most max_proxies.
|
||||
"""
|
||||
raw = os.environ.get("PROXY_URLS", "")
|
||||
urls = [u.strip() for u in raw.split(",") if u.strip()]
|
||||
assert max_proxies > 0, f"max_proxies must be positive, got {max_proxies}"
|
||||
assert download_url, "download_url must not be empty"
|
||||
|
||||
try:
|
||||
req = urllib.request.Request(
|
||||
download_url,
|
||||
headers={"User-Agent": "padelnomics-extract/1.0"},
|
||||
)
|
||||
with urllib.request.urlopen(req, timeout=WEBSHARE_FETCH_TIMEOUT_SECONDS) as resp:
|
||||
raw = resp.read(WEBSHARE_MAX_RESPONSE_BYTES).decode("utf-8")
|
||||
except Exception as e:
|
||||
logger.warning("Failed to fetch Webshare proxies: %s", e)
|
||||
return []
|
||||
|
||||
urls = []
|
||||
for line in raw.splitlines():
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
parts = line.split(":")
|
||||
if len(parts) != 4:
|
||||
logger.debug("Skipping malformed proxy line: %r", line)
|
||||
continue
|
||||
ip, port, username, password = parts
|
||||
urls.append(f"http://{username}:{password}@{ip}:{port}")
|
||||
if len(urls) >= max_proxies:
|
||||
break
|
||||
|
||||
logger.info("Fetched %d proxies from Webshare", len(urls))
|
||||
return urls
|
||||
|
||||
|
||||
def load_fallback_proxy_urls() -> list[str]:
|
||||
"""Read PROXY_URLS_FALLBACK env var (comma-separated). Returns [] if unset.
|
||||
def load_proxy_tiers() -> list[list[str]]:
|
||||
"""Assemble proxy tiers in escalation order: free → datacenter → residential.
|
||||
|
||||
Used as the residential/reliable fallback tier when the primary tier fails.
|
||||
Format: http://user:pass@host:port or socks5://host:port
|
||||
Tier 1 (free): fetched from WEBSHARE_DOWNLOAD_URL if set.
|
||||
Tier 2 (datacenter): PROXY_URLS_DATACENTER (comma-separated).
|
||||
Tier 3 (residential): PROXY_URLS_RESIDENTIAL (comma-separated).
|
||||
|
||||
Empty tiers are omitted. Returns [] if no proxies configured anywhere.
|
||||
"""
|
||||
raw = os.environ.get("PROXY_URLS_FALLBACK", "")
|
||||
urls = [u.strip() for u in raw.split(",") if u.strip()]
|
||||
return urls
|
||||
tiers: list[list[str]] = []
|
||||
|
||||
webshare_url = os.environ.get("WEBSHARE_DOWNLOAD_URL", "").strip()
|
||||
if webshare_url:
|
||||
free_proxies = fetch_webshare_proxies(webshare_url)
|
||||
if free_proxies:
|
||||
tiers.append(free_proxies)
|
||||
|
||||
for var in ("PROXY_URLS_DATACENTER", "PROXY_URLS_RESIDENTIAL"):
|
||||
raw = os.environ.get(var, "")
|
||||
urls = [u.strip() for u in raw.split(",") if u.strip()]
|
||||
valid = []
|
||||
for url in urls:
|
||||
if not url.startswith(("http://", "https://")):
|
||||
logger.warning("%s contains URL without scheme, skipping: %s", var, url[:60])
|
||||
continue
|
||||
valid.append(url)
|
||||
if valid:
|
||||
tiers.append(valid)
|
||||
|
||||
return tiers
|
||||
|
||||
|
||||
def make_round_robin_cycler(proxy_urls: list[str]):
|
||||
@@ -78,83 +140,181 @@ def make_sticky_selector(proxy_urls: list[str]):
|
||||
return select_proxy
|
||||
|
||||
|
||||
def make_tiered_cycler(
|
||||
primary_urls: list[str],
|
||||
fallback_urls: list[str],
|
||||
threshold: int,
|
||||
) -> dict:
|
||||
"""Thread-safe tiered proxy cycler with circuit breaker.
|
||||
def make_tiered_cycler(tiers: list[list[str]], threshold: int, proxy_failure_limit: int = 3) -> dict:
|
||||
"""Thread-safe N-tier proxy cycler with circuit breaker and per-proxy dead tracking.
|
||||
|
||||
Uses primary_urls until consecutive failures >= threshold, then switches
|
||||
permanently to fallback_urls for the rest of the run. No auto-recovery —
|
||||
once the circuit opens it stays open to avoid flapping.
|
||||
Uses tiers[0] until consecutive failures >= threshold, then escalates
|
||||
to tiers[1], then tiers[2], etc. Once all tiers are exhausted,
|
||||
is_exhausted() returns True and next_proxy() returns None.
|
||||
|
||||
Failure counter resets on each escalation — the new tier gets a fresh start.
|
||||
Once exhausted, further record_failure() calls are no-ops.
|
||||
|
||||
Per-proxy dead tracking (when proxy_failure_limit > 0):
|
||||
Individual proxies are marked dead after proxy_failure_limit failures and
|
||||
skipped by next_proxy(). If all proxies in the active tier are dead,
|
||||
next_proxy() auto-escalates to the next tier. Both mechanisms coexist:
|
||||
per-proxy dead tracking removes broken individuals; tier-level threshold
|
||||
catches systemic failure even before any single proxy hits the limit.
|
||||
|
||||
Stale-failure protection:
|
||||
With parallel workers, some threads may fetch a proxy just before the tier
|
||||
escalates and report failure after. record_failure(proxy_url) checks which
|
||||
tier the proxy belongs to and ignores the tier-level circuit breaker if the
|
||||
proxy is from an already-escalated tier. This prevents in-flight failures
|
||||
from a dead tier instantly exhausting the freshly-escalated one.
|
||||
|
||||
Returns a dict of callables:
|
||||
next_proxy() -> str | None — returns URL from the active tier
|
||||
record_success() — resets consecutive failure counter
|
||||
record_failure() -> bool — increments counter; True if circuit just opened
|
||||
is_fallback_active() -> bool — whether fallback tier is currently active
|
||||
next_proxy() -> str | None — URL from active tier (skips dead), or None
|
||||
record_success(proxy_url=None) -> None — resets consecutive failure counter
|
||||
record_failure(proxy_url=None) -> bool — True if just escalated to next tier
|
||||
is_exhausted() -> bool — True if all tiers exhausted
|
||||
active_tier_index() -> int — 0-based index of current tier
|
||||
tier_count() -> int — total number of tiers
|
||||
dead_proxy_count() -> int — number of individual proxies marked dead
|
||||
|
||||
If primary_urls is empty: always returns from fallback_urls (no circuit breaker needed).
|
||||
If both are empty: next_proxy() always returns None.
|
||||
Edge cases:
|
||||
Empty tiers list: next_proxy() always returns None, is_exhausted() True.
|
||||
Single tier: behaves like the primary-only case, is_exhausted() after threshold.
|
||||
"""
|
||||
assert threshold > 0, f"threshold must be positive, got {threshold}"
|
||||
assert isinstance(tiers, list), f"tiers must be a list, got {type(tiers)}"
|
||||
assert proxy_failure_limit >= 0, f"proxy_failure_limit must be >= 0, got {proxy_failure_limit}"
|
||||
|
||||
lock = threading.Lock()
|
||||
state = {
|
||||
"consecutive_failures": 0,
|
||||
"fallback_active": False,
|
||||
# Reverse map: proxy URL -> tier index. Used in record_failure to ignore
|
||||
# "in-flight" failures from workers that fetched a proxy before escalation —
|
||||
# those failures belong to the old tier and must not count against the new one.
|
||||
proxy_to_tier_idx: dict[str, int] = {
|
||||
url: tier_idx
|
||||
for tier_idx, tier in enumerate(tiers)
|
||||
for url in tier
|
||||
}
|
||||
|
||||
primary_cycle = itertools.cycle(primary_urls) if primary_urls else None
|
||||
fallback_cycle = itertools.cycle(fallback_urls) if fallback_urls else None
|
||||
|
||||
# No primary proxies — skip circuit breaker, use fallback directly
|
||||
if not primary_urls:
|
||||
state["fallback_active"] = True
|
||||
lock = threading.Lock()
|
||||
cycles = [itertools.cycle(t) for t in tiers]
|
||||
state = {
|
||||
"active_tier": 0,
|
||||
"consecutive_failures": 0,
|
||||
"proxy_failure_counts": {}, # proxy_url -> int
|
||||
"dead_proxies": set(), # proxy URLs marked dead
|
||||
}
|
||||
|
||||
def next_proxy() -> str | None:
|
||||
with lock:
|
||||
if state["fallback_active"]:
|
||||
return next(fallback_cycle) if fallback_cycle else None
|
||||
return next(primary_cycle) if primary_cycle else None
|
||||
# Try each remaining tier (bounded: at most len(tiers) escalations)
|
||||
for _ in range(len(tiers) + 1):
|
||||
idx = state["active_tier"]
|
||||
if idx >= len(cycles):
|
||||
return None
|
||||
|
||||
def record_success() -> None:
|
||||
with lock:
|
||||
state["consecutive_failures"] = 0
|
||||
tier_proxies = tiers[idx]
|
||||
tier_len = len(tier_proxies)
|
||||
|
||||
def record_failure() -> bool:
|
||||
"""Increment failure counter. Returns True if circuit just opened."""
|
||||
with lock:
|
||||
if state["fallback_active"]:
|
||||
# Already on fallback — don't trip the circuit again
|
||||
return False
|
||||
state["consecutive_failures"] += 1
|
||||
if state["consecutive_failures"] >= threshold:
|
||||
state["fallback_active"] = True
|
||||
if fallback_urls:
|
||||
# Find a live proxy in this tier (bounded: try each proxy at most once)
|
||||
for _ in range(tier_len):
|
||||
candidate = next(cycles[idx])
|
||||
if candidate not in state["dead_proxies"]:
|
||||
return candidate
|
||||
|
||||
# All proxies in this tier are dead — auto-escalate
|
||||
state["consecutive_failures"] = 0
|
||||
state["active_tier"] += 1
|
||||
new_idx = state["active_tier"]
|
||||
if new_idx < len(tiers):
|
||||
logger.warning(
|
||||
"Circuit open after %d consecutive failures — "
|
||||
"switching to fallback residential proxies",
|
||||
state["consecutive_failures"],
|
||||
"All proxies in tier %d are dead — auto-escalating to tier %d/%d",
|
||||
idx + 1,
|
||||
new_idx + 1,
|
||||
len(tiers),
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
"Circuit open after %d consecutive failures — "
|
||||
"no fallback configured, aborting run",
|
||||
state["consecutive_failures"],
|
||||
"All proxies in all %d tier(s) are dead — no more fallbacks",
|
||||
len(tiers),
|
||||
)
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_fallback_active() -> bool:
|
||||
return None # safety fallback
|
||||
|
||||
def record_success(proxy_url: str | None = None) -> None:
|
||||
with lock:
|
||||
return state["fallback_active"]
|
||||
state["consecutive_failures"] = 0
|
||||
if proxy_url is not None:
|
||||
state["proxy_failure_counts"][proxy_url] = 0
|
||||
|
||||
def record_failure(proxy_url: str | None = None) -> bool:
|
||||
"""Increment failure counter. Returns True if just escalated to next tier."""
|
||||
with lock:
|
||||
# Per-proxy dead tracking (additional to tier-level circuit breaker)
|
||||
if proxy_url is not None and proxy_failure_limit > 0:
|
||||
count = state["proxy_failure_counts"].get(proxy_url, 0) + 1
|
||||
state["proxy_failure_counts"][proxy_url] = count
|
||||
if count >= proxy_failure_limit and proxy_url not in state["dead_proxies"]:
|
||||
state["dead_proxies"].add(proxy_url)
|
||||
logger.warning(
|
||||
"Proxy %s marked dead after %d consecutive failures",
|
||||
proxy_url,
|
||||
count,
|
||||
)
|
||||
|
||||
# Tier-level circuit breaker (existing behavior)
|
||||
idx = state["active_tier"]
|
||||
if idx >= len(tiers):
|
||||
# Already exhausted — no-op
|
||||
return False
|
||||
|
||||
# Ignore failures from proxies that belong to an already-escalated tier.
|
||||
# With parallel workers, some threads fetch a proxy just before escalation
|
||||
# and report back after — those stale failures must not penalise the new tier.
|
||||
if proxy_url is not None:
|
||||
proxy_tier = proxy_to_tier_idx.get(proxy_url)
|
||||
if proxy_tier is not None and proxy_tier < idx:
|
||||
return False
|
||||
|
||||
state["consecutive_failures"] += 1
|
||||
if state["consecutive_failures"] < threshold:
|
||||
return False
|
||||
# Threshold reached — escalate
|
||||
state["consecutive_failures"] = 0
|
||||
state["active_tier"] += 1
|
||||
new_idx = state["active_tier"]
|
||||
if new_idx < len(tiers):
|
||||
logger.warning(
|
||||
"Circuit open after %d consecutive failures — "
|
||||
"escalating to proxy tier %d/%d",
|
||||
threshold,
|
||||
new_idx + 1,
|
||||
len(tiers),
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
"All %d proxy tier(s) exhausted after %d consecutive failures — "
|
||||
"no more fallbacks",
|
||||
len(tiers),
|
||||
threshold,
|
||||
)
|
||||
return True
|
||||
|
||||
def is_exhausted() -> bool:
|
||||
with lock:
|
||||
return state["active_tier"] >= len(tiers)
|
||||
|
||||
def active_tier_index() -> int:
|
||||
with lock:
|
||||
return state["active_tier"]
|
||||
|
||||
def tier_count() -> int:
|
||||
return len(tiers)
|
||||
|
||||
def dead_proxy_count() -> int:
|
||||
with lock:
|
||||
return len(state["dead_proxies"])
|
||||
|
||||
return {
|
||||
"next_proxy": next_proxy,
|
||||
"record_success": record_success,
|
||||
"record_failure": record_failure,
|
||||
"is_fallback_active": is_fallback_active,
|
||||
"is_exhausted": is_exhausted,
|
||||
"active_tier_index": active_tier_index,
|
||||
"tier_count": tier_count,
|
||||
"dead_proxy_count": dead_proxy_count,
|
||||
}
|
||||
|
||||
|
||||
@@ -21,9 +21,13 @@ schedule = "monthly"
|
||||
module = "padelnomics_extract.eurostat"
|
||||
schedule = "monthly"
|
||||
|
||||
[geonames]
|
||||
module = "padelnomics_extract.geonames"
|
||||
schedule = "monthly"
|
||||
|
||||
[playtomic_tenants]
|
||||
module = "padelnomics_extract.playtomic_tenants"
|
||||
schedule = "weekly"
|
||||
schedule = "daily"
|
||||
|
||||
[playtomic_availability]
|
||||
module = "padelnomics_extract.playtomic_availability"
|
||||
@@ -35,3 +39,23 @@ module = "padelnomics_extract.playtomic_availability"
|
||||
entry = "main_recheck"
|
||||
schedule = "0,30 6-23 * * *"
|
||||
depends_on = ["playtomic_availability"]
|
||||
|
||||
[census_usa]
|
||||
module = "padelnomics_extract.census_usa"
|
||||
schedule = "monthly"
|
||||
|
||||
[census_usa_income]
|
||||
module = "padelnomics_extract.census_usa_income"
|
||||
schedule = "monthly"
|
||||
|
||||
[eurostat_city_labels]
|
||||
module = "padelnomics_extract.eurostat_city_labels"
|
||||
schedule = "monthly"
|
||||
|
||||
[ons_uk]
|
||||
module = "padelnomics_extract.ons_uk"
|
||||
schedule = "monthly"
|
||||
|
||||
[gisco]
|
||||
module = "padelnomics_extract.gisco"
|
||||
schedule = "monthly"
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
"""Download NUTS-2 boundary GeoJSON from Eurostat GISCO.
|
||||
|
||||
One-time (or on NUTS revision) download of NUTS-2 boundary polygons used for
|
||||
spatial income resolution in dim_locations. Stored uncompressed because DuckDB's
|
||||
ST_Read function cannot read gzipped files.
|
||||
|
||||
NUTS classification changes approximately every 7 years. Current revision: 2021.
|
||||
|
||||
Output: {LANDING_DIR}/gisco/2024/01/nuts2_boundaries.geojson (~5MB, uncompressed)
|
||||
|
||||
Usage:
|
||||
uv run python scripts/download_gisco_nuts.py [--landing-dir data/landing]
|
||||
|
||||
Idempotent: skips download if the file already exists.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import niquests
|
||||
|
||||
# NUTS 2021 revision, 20M scale (1:20,000,000), WGS84 (EPSG:4326), LEVL_2 only.
|
||||
# 20M resolution gives simplified polygons that are fast for point-in-polygon
|
||||
# matching without sacrificing accuracy at the NUTS-2 boundary level.
|
||||
GISCO_URL = (
|
||||
"https://gisco-services.ec.europa.eu/distribution/v2/nuts/geojson/"
|
||||
"NUTS_RG_20M_2021_4326_LEVL_2.geojson"
|
||||
)
|
||||
|
||||
# Fixed partition: NUTS boundaries are a static reference file, not time-series data.
|
||||
# Use the NUTS revision year as the partition to make the source version explicit.
|
||||
DEST_REL_PATH = "gisco/2024/01/nuts2_boundaries.geojson"
|
||||
|
||||
HTTP_TIMEOUT_SECONDS = 120
|
||||
|
||||
|
||||
def download_nuts_boundaries(landing_dir: Path) -> None:
|
||||
dest = landing_dir / DEST_REL_PATH
|
||||
if dest.exists():
|
||||
print(f"Already exists (skipping): {dest}")
|
||||
return
|
||||
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
print(f"Downloading NUTS-2 boundaries from GISCO...")
|
||||
print(f" URL: {GISCO_URL}")
|
||||
|
||||
with niquests.Session() as session:
|
||||
resp = session.get(GISCO_URL, timeout=HTTP_TIMEOUT_SECONDS)
|
||||
resp.raise_for_status()
|
||||
|
||||
content = resp.content
|
||||
assert len(content) > 100_000, (
|
||||
f"GeoJSON too small ({len(content)} bytes) — download may have failed"
|
||||
)
|
||||
assert b'"FeatureCollection"' in content, "Response does not look like GeoJSON"
|
||||
|
||||
# Write uncompressed — ST_Read requires a plain file
|
||||
tmp = dest.with_suffix(".geojson.tmp")
|
||||
tmp.write_bytes(content)
|
||||
tmp.rename(dest)
|
||||
|
||||
size_mb = len(content) / 1_000_000
|
||||
print(f" Written: {dest} ({size_mb:.1f} MB)")
|
||||
print("Done. Run SQLMesh plan to rebuild stg_nuts2_boundaries.")
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("--landing-dir", default="data/landing", type=Path)
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.landing_dir.is_dir():
|
||||
print(f"Error: landing dir does not exist: {args.landing_dir}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
download_nuts_boundaries(args.landing_dir)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -17,14 +17,12 @@ Usage:
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import tomllib
|
||||
import urllib.request
|
||||
from collections import defaultdict
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from datetime import UTC, datetime
|
||||
@@ -192,9 +190,9 @@ def run_workflow(conn, workflow: dict) -> None:
|
||||
entry_fn = getattr(module, entry_name)
|
||||
entry_fn()
|
||||
logger.info("Workflow %s completed successfully", workflow["name"])
|
||||
except Exception:
|
||||
except Exception as exc:
|
||||
logger.exception("Workflow %s failed", workflow["name"])
|
||||
send_alert(f"Workflow '{workflow['name']}' failed")
|
||||
send_alert(f"[extract] {type(exc).__name__}: {str(exc)[:100]}")
|
||||
raise
|
||||
|
||||
|
||||
@@ -233,8 +231,8 @@ def run_due_workflows(conn, workflows: list[dict]) -> bool:
|
||||
# Transform + Export + Deploy
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> bool:
|
||||
"""Run a shell command. Returns True on success."""
|
||||
def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> tuple[bool, str]:
|
||||
"""Run a shell command. Returns (success, error_snippet)."""
|
||||
logger.info("Shell: %s", cmd)
|
||||
result = subprocess.run(
|
||||
cmd, shell=True, capture_output=True, text=True, timeout=timeout_seconds
|
||||
@@ -242,47 +240,88 @@ def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> bo
|
||||
if result.returncode != 0:
|
||||
logger.error("Shell failed (rc=%d): %s\nstdout: %s\nstderr: %s",
|
||||
result.returncode, cmd, result.stdout[-500:], result.stderr[-500:])
|
||||
return False
|
||||
return True
|
||||
raw = (result.stderr or result.stdout).strip()
|
||||
snippet = next((ln.strip() for ln in raw.splitlines() if ln.strip()), raw)[:120]
|
||||
return False, snippet
|
||||
return True, ""
|
||||
|
||||
|
||||
def run_transform() -> None:
|
||||
"""Run SQLMesh — it evaluates model staleness internally."""
|
||||
logger.info("Running SQLMesh transform")
|
||||
ok = run_shell(
|
||||
ok, err = run_shell(
|
||||
"uv run sqlmesh -p transform/sqlmesh_padelnomics plan prod --auto-apply",
|
||||
)
|
||||
if not ok:
|
||||
send_alert("SQLMesh transform failed")
|
||||
send_alert(f"[transform] {err}")
|
||||
|
||||
|
||||
def run_export() -> None:
|
||||
"""Export serving tables to analytics.duckdb."""
|
||||
logger.info("Exporting serving tables")
|
||||
ok = run_shell(
|
||||
ok, err = run_shell(
|
||||
f"DUCKDB_PATH={DUCKDB_PATH} SERVING_DUCKDB_PATH={SERVING_DUCKDB_PATH} "
|
||||
f"uv run python src/padelnomics/export_serving.py"
|
||||
)
|
||||
if not ok:
|
||||
send_alert("Serving export failed")
|
||||
send_alert(f"[export] {err}")
|
||||
|
||||
|
||||
_last_seen_head: str | None = None
|
||||
|
||||
|
||||
def web_code_changed() -> bool:
|
||||
"""Check if web app code changed since last deploy (after git pull)."""
|
||||
"""True on the first tick after a commit that changed web app code or secrets.
|
||||
|
||||
Compares the current HEAD to the HEAD from the previous tick. On first call
|
||||
after process start (e.g. after os.execv reloads new code), falls back to
|
||||
HEAD~1 so the just-deployed commit is evaluated exactly once.
|
||||
|
||||
Records HEAD before returning so the same commit never triggers twice.
|
||||
"""
|
||||
global _last_seen_head
|
||||
result = subprocess.run(
|
||||
["git", "diff", "--name-only", "HEAD~1", "HEAD", "--", "web/", "Dockerfile"],
|
||||
["git", "rev-parse", "HEAD"], capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
return False
|
||||
current_head = result.stdout.strip()
|
||||
|
||||
if _last_seen_head is None:
|
||||
# Fresh process — use HEAD~1 as base (evaluates the newly deployed tag).
|
||||
base_result = subprocess.run(
|
||||
["git", "rev-parse", "HEAD~1"], capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
base = base_result.stdout.strip() if base_result.returncode == 0 else current_head
|
||||
else:
|
||||
base = _last_seen_head
|
||||
|
||||
_last_seen_head = current_head # advance now — won't fire again for this HEAD
|
||||
|
||||
if base == current_head:
|
||||
return False
|
||||
|
||||
diff = subprocess.run(
|
||||
["git", "diff", "--name-only", base, current_head, "--",
|
||||
"web/", "Dockerfile", ".env.prod.sops"],
|
||||
capture_output=True, text=True, timeout=30,
|
||||
)
|
||||
return bool(result.stdout.strip())
|
||||
return bool(diff.stdout.strip())
|
||||
|
||||
|
||||
def current_deployed_tag() -> str | None:
|
||||
"""Return the tag currently checked out, or None if not on a tag."""
|
||||
"""Return the highest-version tag pointing at HEAD, or None.
|
||||
|
||||
Uses the same sort order as latest_remote_tag() so that when multiple
|
||||
tags point to the same commit (e.g. a date-based tag and a CI integer
|
||||
tag), we always compare apples-to-apples.
|
||||
"""
|
||||
result = subprocess.run(
|
||||
["git", "describe", "--tags", "--exact-match", "HEAD"],
|
||||
["git", "tag", "--list", "--sort=-version:refname", "--points-at", "HEAD", "v*"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return result.stdout.strip() or None
|
||||
tags = result.stdout.strip().splitlines()
|
||||
return tags[0] if tags else None
|
||||
|
||||
|
||||
def latest_remote_tag() -> str | None:
|
||||
@@ -317,7 +356,12 @@ def git_pull_and_sync() -> None:
|
||||
|
||||
logger.info("New tag %s available (current: %s) — deploying", latest, current)
|
||||
run_shell(f"git checkout --detach {latest}")
|
||||
run_shell("sops --input-type dotenv --output-type dotenv -d .env.prod.sops > .env")
|
||||
run_shell("uv sync --all-packages")
|
||||
# Re-exec so the new code is loaded. os.execv replaces this process in-place;
|
||||
# systemd sees it as the same PID and does not restart the unit.
|
||||
logger.info("Deploy complete — re-execing to load new code")
|
||||
os.execv(sys.executable, sys.argv)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -365,11 +409,11 @@ def tick() -> None:
|
||||
# Deploy web app if code changed
|
||||
if os.getenv("SUPERVISOR_GIT_PULL") and web_code_changed():
|
||||
logger.info("Web code changed — deploying")
|
||||
ok = run_shell("./deploy.sh")
|
||||
ok, err = run_shell("./deploy.sh")
|
||||
if ok:
|
||||
send_alert("Deploy succeeded")
|
||||
send_alert("[deploy] ok")
|
||||
else:
|
||||
send_alert("Deploy FAILED — check journalctl -u padelnomics-supervisor")
|
||||
send_alert(f"[deploy] failed: {err}")
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
@@ -386,9 +430,9 @@ def supervisor_loop() -> None:
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Supervisor stopped (KeyboardInterrupt)")
|
||||
break
|
||||
except Exception:
|
||||
except Exception as exc:
|
||||
logger.exception("Supervisor tick failed — backing off %ds", BACKOFF_SECONDS)
|
||||
send_alert("Supervisor tick failed")
|
||||
send_alert(f"[supervisor] {type(exc).__name__}: {str(exc)[:100]}")
|
||||
time.sleep(BACKOFF_SECONDS)
|
||||
else:
|
||||
time.sleep(TICK_INTERVAL_SECONDS)
|
||||
|
||||
@@ -19,8 +19,10 @@
|
||||
-- 4. Country-level income (global fallback from stg_income / ilc_di03)
|
||||
--
|
||||
-- Distance calculations use ST_Distance_Sphere (DuckDB spatial extension).
|
||||
-- A bounding-box pre-filter (~0.5°, ≈55km) reduces the cross-join before the
|
||||
-- exact sphere distance is computed.
|
||||
-- Spatial joins use BETWEEN predicates (not ABS()) to enable DuckDB's IEJoin
|
||||
-- (interval join) optimization: O((N+M) log M) vs O(N×M) nested-loop.
|
||||
-- Country pre-filters restrict the left side to ~20K rows for padel/tennis CTEs
|
||||
-- (~8 countries each), down from ~140K global locations.
|
||||
|
||||
MODEL (
|
||||
name foundation.dim_locations,
|
||||
@@ -147,6 +149,8 @@ padel_courts AS (
|
||||
WHERE lat IS NOT NULL AND lon IS NOT NULL
|
||||
),
|
||||
-- Nearest padel court distance per location (bbox pre-filter → exact sphere distance)
|
||||
-- BETWEEN enables DuckDB IEJoin (O((N+M) log M)) vs ABS() nested-loop (O(N×M)).
|
||||
-- Country pre-filter reduces left side from ~140K to ~20K rows (padel is ~8 countries).
|
||||
nearest_padel AS (
|
||||
SELECT
|
||||
l.geoname_id,
|
||||
@@ -158,9 +162,12 @@ nearest_padel AS (
|
||||
) AS nearest_padel_court_km
|
||||
FROM locations l
|
||||
JOIN padel_courts p
|
||||
-- ~55km bounding box pre-filter to limit cross-join before sphere calc
|
||||
ON ABS(l.lat - p.lat) < 0.5
|
||||
AND ABS(l.lon - p.lon) < 0.5
|
||||
-- ~55km bounding box pre-filter; BETWEEN triggers IEJoin optimization
|
||||
ON l.lat BETWEEN p.lat - 0.5 AND p.lat + 0.5
|
||||
AND l.lon BETWEEN p.lon - 0.5 AND p.lon + 0.5
|
||||
WHERE l.country_code IN (
|
||||
SELECT DISTINCT country_code FROM padel_courts WHERE country_code IS NOT NULL
|
||||
)
|
||||
GROUP BY l.geoname_id
|
||||
),
|
||||
-- Padel venues within 5km of each location (counts as "local padel supply")
|
||||
@@ -170,24 +177,35 @@ padel_local AS (
|
||||
COUNT(*) AS padel_venue_count
|
||||
FROM locations l
|
||||
JOIN padel_courts p
|
||||
ON ABS(l.lat - p.lat) < 0.05 -- ~5km bbox pre-filter
|
||||
AND ABS(l.lon - p.lon) < 0.05
|
||||
WHERE ST_Distance_Sphere(
|
||||
-- ~5km bbox pre-filter; BETWEEN triggers IEJoin optimization
|
||||
ON l.lat BETWEEN p.lat - 0.05 AND p.lat + 0.05
|
||||
AND l.lon BETWEEN p.lon - 0.05 AND p.lon + 0.05
|
||||
WHERE l.country_code IN (
|
||||
SELECT DISTINCT country_code FROM padel_courts WHERE country_code IS NOT NULL
|
||||
)
|
||||
AND ST_Distance_Sphere(
|
||||
ST_Point(l.lon, l.lat),
|
||||
ST_Point(p.lon, p.lat)
|
||||
) / 1000.0 <= 5.0
|
||||
GROUP BY l.geoname_id
|
||||
),
|
||||
-- Tennis courts within 25km of each location (sports culture proxy)
|
||||
-- Country pre-filter reduces left side from ~140K to ~20K rows (tennis courts are European only).
|
||||
tennis_nearby AS (
|
||||
SELECT
|
||||
l.geoname_id,
|
||||
COUNT(*) AS tennis_courts_within_25km
|
||||
FROM locations l
|
||||
JOIN staging.stg_tennis_courts t
|
||||
ON ABS(l.lat - t.lat) < 0.23 -- ~25km bbox pre-filter
|
||||
AND ABS(l.lon - t.lon) < 0.23
|
||||
WHERE ST_Distance_Sphere(
|
||||
-- ~25km bbox pre-filter; BETWEEN triggers IEJoin optimization
|
||||
ON l.lat BETWEEN t.lat - 0.23 AND t.lat + 0.23
|
||||
AND l.lon BETWEEN t.lon - 0.23 AND t.lon + 0.23
|
||||
WHERE l.country_code IN (
|
||||
SELECT DISTINCT country_code
|
||||
FROM staging.stg_tennis_courts
|
||||
WHERE country_code IS NOT NULL
|
||||
)
|
||||
AND ST_Distance_Sphere(
|
||||
ST_Point(l.lon, l.lat),
|
||||
ST_Point(t.lon, t.lat)
|
||||
) / 1000.0 <= 25.0
|
||||
|
||||
@@ -2,22 +2,14 @@
|
||||
-- One row per available 60-minute booking slot per court per venue per day.
|
||||
-- "Available" = the slot was NOT booked at capture time. Missing slots = booked.
|
||||
--
|
||||
-- Reads BOTH morning snapshots and recheck files:
|
||||
-- Morning (new): availability_{date}.jsonl.gz → snapshot_type = 'morning'
|
||||
-- Morning (old): availability_{date}.json.gz → snapshot_type = 'morning'
|
||||
-- Recheck (new): availability_{date}_recheck_{HH}.jsonl.gz → snapshot_type = 'recheck'
|
||||
-- Recheck (old): availability_{date}_recheck_{HH}.json.gz → snapshot_type = 'recheck'
|
||||
-- Reads morning snapshots and recheck files (JSONL format):
|
||||
-- Morning: availability_{date}.jsonl.gz → snapshot_type = 'morning'
|
||||
-- Recheck: availability_{date}_recheck_{HH}.jsonl.gz → snapshot_type = 'recheck'
|
||||
--
|
||||
-- Only 60-min duration slots are kept (canonical hourly rate + occupancy unit).
|
||||
-- Price parsed from strings like "14.56 EUR" or "48 GBP".
|
||||
--
|
||||
-- Supports two morning landing formats (UNION ALL during migration):
|
||||
-- New: availability_{date}.jsonl.gz — one venue per line, columns: tenant_id, slots, date, captured_at_utc
|
||||
-- Old: availability_{date}.json.gz — {"date":..., "venues": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Requires: at least one availability file in the landing zone.
|
||||
-- A seed file (data/landing/playtomic/1970/01/availability_1970-01-01.json.gz)
|
||||
-- with empty venues[] ensures this model runs before real data arrives.
|
||||
-- Source: data/landing/playtomic/{year}/{month}/availability_*.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_availability,
|
||||
@@ -27,7 +19,6 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one venue per JSONL line — no outer UNNEST needed
|
||||
morning_jsonl AS (
|
||||
SELECT
|
||||
date AS snapshot_date,
|
||||
@@ -50,35 +41,6 @@ morning_jsonl AS (
|
||||
WHERE filename NOT LIKE '%_recheck_%'
|
||||
AND tenant_id IS NOT NULL
|
||||
),
|
||||
-- Old format: {"date":..., "venues": [...]} blob — kept for transition
|
||||
morning_blob AS (
|
||||
SELECT
|
||||
af.date AS snapshot_date,
|
||||
af.captured_at_utc,
|
||||
'morning' AS snapshot_type,
|
||||
NULL::INTEGER AS recheck_hour,
|
||||
venue_json ->> 'tenant_id' AS tenant_id,
|
||||
venue_json -> 'slots' AS slots_json
|
||||
FROM (
|
||||
SELECT date, captured_at_utc, venues
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/availability_*.json.gz',
|
||||
format = 'auto',
|
||||
columns = {
|
||||
date: 'VARCHAR',
|
||||
captured_at_utc: 'VARCHAR',
|
||||
venues: 'JSON[]'
|
||||
},
|
||||
filename = true,
|
||||
maximum_object_size = 134217728 -- 128 MB; daily files grow with venue count
|
||||
)
|
||||
WHERE filename NOT LIKE '%_recheck_%'
|
||||
AND venues IS NOT NULL
|
||||
AND json_array_length(venues) > 0
|
||||
) af,
|
||||
LATERAL UNNEST(af.venues) AS t(venue_json)
|
||||
),
|
||||
-- Recheck snapshots (new JSONL format — one venue per line)
|
||||
recheck_jsonl AS (
|
||||
SELECT
|
||||
date AS snapshot_date,
|
||||
@@ -101,43 +63,10 @@ recheck_jsonl AS (
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
),
|
||||
-- Recheck snapshots (old blob format, kept for transition)
|
||||
recheck_blob AS (
|
||||
SELECT
|
||||
rf.date AS snapshot_date,
|
||||
rf.captured_at_utc,
|
||||
'recheck' AS snapshot_type,
|
||||
TRY_CAST(
|
||||
regexp_extract(rf.filename, '_recheck_(\d+)', 1) AS INTEGER
|
||||
) AS recheck_hour,
|
||||
venue_json ->> 'tenant_id' AS tenant_id,
|
||||
venue_json -> 'slots' AS slots_json
|
||||
FROM (
|
||||
SELECT date, captured_at_utc, venues, filename
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/availability_*_recheck_*.json.gz',
|
||||
format = 'auto',
|
||||
columns = {
|
||||
date: 'VARCHAR',
|
||||
captured_at_utc: 'VARCHAR',
|
||||
venues: 'JSON[]'
|
||||
},
|
||||
filename = true,
|
||||
maximum_object_size = 134217728 -- 128 MB; matches morning snapshot limit
|
||||
)
|
||||
WHERE venues IS NOT NULL
|
||||
AND json_array_length(venues) > 0
|
||||
) rf,
|
||||
LATERAL UNNEST(rf.venues) AS t(venue_json)
|
||||
),
|
||||
all_venues AS (
|
||||
SELECT * FROM morning_jsonl
|
||||
UNION ALL
|
||||
SELECT * FROM morning_blob
|
||||
UNION ALL
|
||||
SELECT * FROM recheck_jsonl
|
||||
UNION ALL
|
||||
SELECT * FROM recheck_blob
|
||||
),
|
||||
raw_resources AS (
|
||||
SELECT
|
||||
|
||||
@@ -5,11 +5,7 @@
|
||||
-- DuckDB auto-infers opening_hours as STRUCT, so we access each day by literal
|
||||
-- key (no dynamic access) and UNION ALL to unpivot.
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: tenants.jsonl.gz — one tenant per line, opening_hours is a top-level JSON column
|
||||
-- Old: tenants.json.gz — {"tenants": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz
|
||||
-- Source: data/landing/playtomic/{year}/{month}/{day}/tenants.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_opening_hours,
|
||||
@@ -19,40 +15,18 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one tenant per JSONL line
|
||||
jsonl_venues AS (
|
||||
venues AS (
|
||||
SELECT
|
||||
tenant_id,
|
||||
opening_hours AS oh
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz',
|
||||
@LANDING_DIR || '/playtomic/*/*/*/tenants.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {tenant_id: 'VARCHAR', opening_hours: 'JSON'}
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
AND opening_hours IS NOT NULL
|
||||
),
|
||||
-- Old format: blob
|
||||
blob_venues AS (
|
||||
SELECT
|
||||
tenant ->> 'tenant_id' AS tenant_id,
|
||||
tenant -> 'opening_hours' AS oh
|
||||
FROM (
|
||||
SELECT UNNEST(tenants) AS tenant
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.json.gz',
|
||||
format = 'auto',
|
||||
maximum_object_size = 134217728
|
||||
)
|
||||
)
|
||||
WHERE (tenant ->> 'tenant_id') IS NOT NULL
|
||||
AND (tenant -> 'opening_hours') IS NOT NULL
|
||||
),
|
||||
venues AS (
|
||||
SELECT * FROM jsonl_venues
|
||||
UNION ALL
|
||||
SELECT * FROM blob_venues
|
||||
),
|
||||
-- Unpivot by UNION ALL — 7 literal key accesses
|
||||
unpivoted AS (
|
||||
SELECT tenant_id, 'MONDAY' AS day_of_week, 1 AS day_number,
|
||||
@@ -104,6 +78,4 @@ SELECT
|
||||
FROM unpivoted
|
||||
WHERE opening_time IS NOT NULL
|
||||
AND closing_time IS NOT NULL
|
||||
-- Enforce grain: if both old blob and new JSONL exist for the same month,
|
||||
-- the UNION ALL produces duplicate (tenant_id, day_of_week) pairs — deduplicate.
|
||||
QUALIFY ROW_NUMBER() OVER (PARTITION BY tenant_id, day_of_week ORDER BY tenant_id) = 1
|
||||
|
||||
@@ -2,11 +2,7 @@
|
||||
-- Reads resources array from the landing zone to extract court type, size,
|
||||
-- surface, and booking config.
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: tenants.jsonl.gz — one tenant per line, resources is a top-level JSON column
|
||||
-- Old: tenants.json.gz — {"tenants": [...]} blob (double UNNEST: tenants → resources)
|
||||
--
|
||||
-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz
|
||||
-- Source: data/landing/playtomic/{year}/{month}/{day}/tenants.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_resources,
|
||||
@@ -16,41 +12,18 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one tenant per JSONL line — single UNNEST for resources
|
||||
jsonl_unnested AS (
|
||||
unnested AS (
|
||||
SELECT
|
||||
tenant_id,
|
||||
UPPER(address ->> 'country_code') AS country_code,
|
||||
UNNEST(from_json(resources, '["JSON"]')) AS resource_json
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz',
|
||||
@LANDING_DIR || '/playtomic/*/*/*/tenants.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {tenant_id: 'VARCHAR', address: 'JSON', resources: 'JSON'}
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
AND resources IS NOT NULL
|
||||
),
|
||||
-- Old format: blob — double UNNEST (tenants → resources)
|
||||
blob_unnested AS (
|
||||
SELECT
|
||||
tenant ->> 'tenant_id' AS tenant_id,
|
||||
UPPER(tenant -> 'address' ->> 'country_code') AS country_code,
|
||||
UNNEST(from_json(tenant -> 'resources', '["JSON"]')) AS resource_json
|
||||
FROM (
|
||||
SELECT UNNEST(tenants) AS tenant
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.json.gz',
|
||||
format = 'auto',
|
||||
maximum_object_size = 134217728
|
||||
)
|
||||
)
|
||||
WHERE (tenant ->> 'tenant_id') IS NOT NULL
|
||||
AND (tenant -> 'resources') IS NOT NULL
|
||||
),
|
||||
unnested AS (
|
||||
SELECT * FROM jsonl_unnested
|
||||
UNION ALL
|
||||
SELECT * FROM blob_unnested
|
||||
)
|
||||
SELECT
|
||||
tenant_id,
|
||||
@@ -68,6 +41,4 @@ SELECT
|
||||
FROM unnested
|
||||
WHERE (resource_json ->> 'resource_id') IS NOT NULL
|
||||
AND (resource_json ->> 'sport_id') = 'PADEL'
|
||||
-- Enforce grain: if both old blob and new JSONL exist for the same month,
|
||||
-- the UNION ALL produces duplicate (tenant_id, resource_id) pairs — deduplicate.
|
||||
QUALIFY ROW_NUMBER() OVER (PARTITION BY tenant_id, resource_json ->> 'resource_id' ORDER BY tenant_id) = 1
|
||||
|
||||
@@ -3,11 +3,7 @@
|
||||
-- including address, opening hours, court resources, VAT rate, and facilities.
|
||||
-- Deduplicates on tenant_id (keeps most recent extraction).
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: tenants.jsonl.gz — one tenant JSON object per line (no UNNEST needed)
|
||||
-- Old: tenants.json.gz — {"tenants": [{...}]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/playtomic/{year}/{month}/tenants.{jsonl,json}.gz
|
||||
-- Source: data/landing/playtomic/{year}/{month}/{day}/tenants.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_playtomic_venues,
|
||||
@@ -17,8 +13,7 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one tenant per JSONL line — no UNNEST, access columns directly
|
||||
jsonl_parsed AS (
|
||||
parsed AS (
|
||||
SELECT
|
||||
tenant_id,
|
||||
tenant_name,
|
||||
@@ -45,7 +40,7 @@ jsonl_parsed AS (
|
||||
filename AS source_file,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.jsonl.gz',
|
||||
@LANDING_DIR || '/playtomic/*/*/*/tenants.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
filename = true,
|
||||
columns = {
|
||||
@@ -59,49 +54,6 @@ jsonl_parsed AS (
|
||||
)
|
||||
WHERE tenant_id IS NOT NULL
|
||||
),
|
||||
-- Old format: {"tenants": [...]} blob — keep for transition until old files rotate out
|
||||
blob_parsed AS (
|
||||
SELECT
|
||||
tenant ->> 'tenant_id' AS tenant_id,
|
||||
tenant ->> 'tenant_name' AS tenant_name,
|
||||
tenant ->> 'slug' AS slug,
|
||||
tenant ->> 'tenant_type' AS tenant_type,
|
||||
tenant ->> 'tenant_status' AS tenant_status,
|
||||
tenant ->> 'playtomic_status' AS playtomic_status,
|
||||
tenant ->> 'booking_type' AS booking_type,
|
||||
tenant -> 'address' ->> 'street' AS street,
|
||||
tenant -> 'address' ->> 'city' AS city,
|
||||
tenant -> 'address' ->> 'postal_code' AS postal_code,
|
||||
UPPER(tenant -> 'address' ->> 'country_code') AS country_code,
|
||||
tenant -> 'address' ->> 'timezone' AS timezone,
|
||||
tenant -> 'address' ->> 'administrative_area' AS administrative_area,
|
||||
TRY_CAST(tenant -> 'address' -> 'coordinate' ->> 'lat' AS DOUBLE) AS lat,
|
||||
TRY_CAST(tenant -> 'address' -> 'coordinate' ->> 'lon' AS DOUBLE) AS lon,
|
||||
TRY_CAST(tenant ->> 'vat_rate' AS DOUBLE) AS vat_rate,
|
||||
tenant ->> 'default_currency' AS default_currency,
|
||||
TRY_CAST(tenant -> 'booking_settings' ->> 'booking_ahead_limit' AS INTEGER) AS booking_ahead_limit_minutes,
|
||||
tenant -> 'opening_hours' AS opening_hours_json,
|
||||
tenant -> 'resources' AS resources_json,
|
||||
tenant ->> 'created_at' AS created_at,
|
||||
tenant ->> 'is_playtomic_partner' AS is_playtomic_partner_raw,
|
||||
filename AS source_file,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM (
|
||||
SELECT UNNEST(tenants) AS tenant, filename
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/playtomic/*/*/tenants.json.gz',
|
||||
format = 'auto',
|
||||
filename = true,
|
||||
maximum_object_size = 134217728
|
||||
)
|
||||
)
|
||||
WHERE (tenant ->> 'tenant_id') IS NOT NULL
|
||||
),
|
||||
parsed AS (
|
||||
SELECT * FROM jsonl_parsed
|
||||
UNION ALL
|
||||
SELECT * FROM blob_parsed
|
||||
),
|
||||
deduped AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY tenant_id ORDER BY source_file DESC) AS rn
|
||||
|
||||
@@ -3,11 +3,7 @@
|
||||
-- Broad coverage (140K+ locations) enables Gemeinde-level market intelligence.
|
||||
-- One row per geoname_id (GeoNames stable numeric identifier).
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: cities_global.jsonl.gz — one city per line, columns directly accessible
|
||||
-- Old: cities_global.json.gz — {"rows": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/geonames/{year}/{month}/cities_global.{jsonl,json}.gz
|
||||
-- Source: data/landing/geonames/{year}/{month}/cities_global.jsonl.gz
|
||||
|
||||
MODEL (
|
||||
name staging.stg_population_geonames,
|
||||
@@ -16,74 +12,29 @@ MODEL (
|
||||
grain geoname_id
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one city per JSONL line
|
||||
jsonl_rows AS (
|
||||
SELECT
|
||||
TRY_CAST(geoname_id AS INTEGER) AS geoname_id,
|
||||
city_name,
|
||||
country_code,
|
||||
TRY_CAST(lat AS DOUBLE) AS lat,
|
||||
TRY_CAST(lon AS DOUBLE) AS lon,
|
||||
admin1_code,
|
||||
admin2_code,
|
||||
TRY_CAST(population AS BIGINT) AS population,
|
||||
TRY_CAST(ref_year AS INTEGER) AS ref_year,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/geonames/*/*/cities_global.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {
|
||||
geoname_id: 'INTEGER', city_name: 'VARCHAR', country_code: 'VARCHAR',
|
||||
lat: 'DOUBLE', lon: 'DOUBLE', admin1_code: 'VARCHAR', admin2_code: 'VARCHAR',
|
||||
population: 'BIGINT', ref_year: 'INTEGER'
|
||||
}
|
||||
)
|
||||
WHERE geoname_id IS NOT NULL
|
||||
),
|
||||
-- Old format: {"rows": [...]} blob — kept for transition
|
||||
blob_rows AS (
|
||||
SELECT
|
||||
TRY_CAST(row ->> 'geoname_id' AS INTEGER) AS geoname_id,
|
||||
row ->> 'city_name' AS city_name,
|
||||
row ->> 'country_code' AS country_code,
|
||||
TRY_CAST(row ->> 'lat' AS DOUBLE) AS lat,
|
||||
TRY_CAST(row ->> 'lon' AS DOUBLE) AS lon,
|
||||
row ->> 'admin1_code' AS admin1_code,
|
||||
row ->> 'admin2_code' AS admin2_code,
|
||||
TRY_CAST(row ->> 'population' AS BIGINT) AS population,
|
||||
TRY_CAST(row ->> 'ref_year' AS INTEGER) AS ref_year,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM (
|
||||
SELECT UNNEST(rows) AS row
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/geonames/*/*/cities_global.json.gz',
|
||||
auto_detect = true,
|
||||
maximum_object_size = 40000000
|
||||
)
|
||||
)
|
||||
WHERE (row ->> 'geoname_id') IS NOT NULL
|
||||
),
|
||||
all_rows AS (
|
||||
SELECT * FROM jsonl_rows
|
||||
UNION ALL
|
||||
SELECT * FROM blob_rows
|
||||
)
|
||||
SELECT
|
||||
geoname_id,
|
||||
TRIM(city_name) AS city_name,
|
||||
UPPER(country_code) AS country_code,
|
||||
lat,
|
||||
lon,
|
||||
NULLIF(TRIM(admin1_code), '') AS admin1_code,
|
||||
NULLIF(TRIM(admin2_code), '') AS admin2_code,
|
||||
population,
|
||||
ref_year,
|
||||
extracted_date
|
||||
FROM all_rows
|
||||
WHERE population IS NOT NULL
|
||||
TRY_CAST(geoname_id AS INTEGER) AS geoname_id,
|
||||
TRIM(city_name) AS city_name,
|
||||
UPPER(country_code) AS country_code,
|
||||
TRY_CAST(lat AS DOUBLE) AS lat,
|
||||
TRY_CAST(lon AS DOUBLE) AS lon,
|
||||
NULLIF(TRIM(admin1_code), '') AS admin1_code,
|
||||
NULLIF(TRIM(admin2_code), '') AS admin2_code,
|
||||
TRY_CAST(population AS BIGINT) AS population,
|
||||
TRY_CAST(ref_year AS INTEGER) AS ref_year,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/geonames/*/*/cities_global.jsonl.gz',
|
||||
format = 'newline_delimited',
|
||||
columns = {
|
||||
geoname_id: 'INTEGER', city_name: 'VARCHAR', country_code: 'VARCHAR',
|
||||
lat: 'DOUBLE', lon: 'DOUBLE', admin1_code: 'VARCHAR', admin2_code: 'VARCHAR',
|
||||
population: 'BIGINT', ref_year: 'INTEGER'
|
||||
}
|
||||
)
|
||||
WHERE geoname_id IS NOT NULL
|
||||
AND population IS NOT NULL
|
||||
AND population > 0
|
||||
AND geoname_id IS NOT NULL
|
||||
AND city_name IS NOT NULL
|
||||
AND lat IS NOT NULL
|
||||
AND lon IS NOT NULL
|
||||
|
||||
@@ -2,12 +2,9 @@
|
||||
-- Used as a "racket sport culture" signal in the opportunity score:
|
||||
-- areas with high tennis court density are prime padel adoption markets.
|
||||
--
|
||||
-- Supports two landing formats (UNION ALL during migration):
|
||||
-- New: courts.jsonl.gz — one OSM element per line; nodes have lat/lon directly,
|
||||
-- ways/relations have center.lat/center.lon (Overpass out center)
|
||||
-- Old: courts.json.gz — {"elements": [...]} blob (UNNEST required)
|
||||
--
|
||||
-- Source: data/landing/overpass_tennis/{year}/{month}/courts.{jsonl,json}.gz
|
||||
-- Source: data/landing/overpass_tennis/{year}/{month}/courts.jsonl.gz
|
||||
-- Format: one OSM element per line; nodes have lat/lon directly,
|
||||
-- ways/relations have center.lat/center.lon (Overpass out center)
|
||||
|
||||
MODEL (
|
||||
name staging.stg_tennis_courts,
|
||||
@@ -17,8 +14,7 @@ MODEL (
|
||||
);
|
||||
|
||||
WITH
|
||||
-- New format: one OSM element per JSONL line
|
||||
jsonl_elements AS (
|
||||
parsed AS (
|
||||
SELECT
|
||||
type AS osm_type,
|
||||
TRY_CAST(id AS BIGINT) AS osm_id,
|
||||
@@ -47,33 +43,6 @@ jsonl_elements AS (
|
||||
)
|
||||
WHERE type IS NOT NULL
|
||||
),
|
||||
-- Old format: {"elements": [...]} blob — kept for transition
|
||||
blob_elements AS (
|
||||
SELECT
|
||||
elem ->> 'type' AS osm_type,
|
||||
(elem ->> 'id')::BIGINT AS osm_id,
|
||||
TRY_CAST(elem ->> 'lat' AS DOUBLE) AS lat,
|
||||
TRY_CAST(elem ->> 'lon' AS DOUBLE) AS lon,
|
||||
elem -> 'tags' ->> 'name' AS name,
|
||||
elem -> 'tags' ->> 'addr:country' AS country_code,
|
||||
elem -> 'tags' ->> 'addr:city' AS city_tag,
|
||||
filename AS source_file,
|
||||
CURRENT_DATE AS extracted_date
|
||||
FROM (
|
||||
SELECT UNNEST(elements) AS elem, filename
|
||||
FROM read_json(
|
||||
@LANDING_DIR || '/overpass_tennis/*/*/courts.json.gz',
|
||||
format = 'auto',
|
||||
filename = true
|
||||
)
|
||||
)
|
||||
WHERE (elem ->> 'type') IS NOT NULL
|
||||
),
|
||||
parsed AS (
|
||||
SELECT * FROM jsonl_elements
|
||||
UNION ALL
|
||||
SELECT * FROM blob_elements
|
||||
),
|
||||
deduped AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY osm_id ORDER BY extracted_date DESC) AS rn
|
||||
|
||||
@@ -1,22 +1,19 @@
|
||||
"""Create minimal seed files for SQLMesh staging models that require landing data."""
|
||||
"""Create minimal seed files for SQLMesh staging models that require landing data.
|
||||
|
||||
Seeds are empty JSONL gzip files — they satisfy DuckDB's file-not-found check
|
||||
while contributing zero rows to the staging models.
|
||||
"""
|
||||
import gzip
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
seed = {
|
||||
"date": "1970-01-01",
|
||||
"captured_at_utc": "1970-01-01T00:00:00Z",
|
||||
"venue_count": 0,
|
||||
"venues_errored": 0,
|
||||
"venues": [],
|
||||
}
|
||||
morning = Path("data/landing/playtomic/1970/01/availability_1970-01-01.json.gz")
|
||||
recheck = Path("data/landing/playtomic/1970/01/availability_1970-01-01_recheck_00.json.gz")
|
||||
# stg_playtomic_availability requires at least one morning and one recheck file
|
||||
morning = Path("data/landing/playtomic/1970/01/availability_1970-01-01.jsonl.gz")
|
||||
recheck = Path("data/landing/playtomic/1970/01/availability_1970-01-01_recheck_00.jsonl.gz")
|
||||
morning.parent.mkdir(parents=True, exist_ok=True)
|
||||
for p in [morning, recheck]:
|
||||
if not p.exists():
|
||||
with gzip.open(p, "wt") as f:
|
||||
json.dump(seed, f)
|
||||
with gzip.open(p, "wb") as f:
|
||||
pass # empty JSONL — 0 rows, no error
|
||||
print("created", p)
|
||||
else:
|
||||
print("exists ", p)
|
||||
|
||||
@@ -6,7 +6,9 @@ Operational visibility for the data extraction and transformation pipeline:
|
||||
/admin/pipeline/overview → HTMX tab: extraction status, serving freshness, landing stats
|
||||
/admin/pipeline/extractions → HTMX tab: filterable extraction run history
|
||||
/admin/pipeline/extractions/<id>/mark-stale → POST: mark stuck "running" row as failed
|
||||
/admin/pipeline/extract/trigger → POST: enqueue full extraction run
|
||||
/admin/pipeline/extract/trigger → POST: enqueue extraction run (HTMX-aware)
|
||||
/admin/pipeline/transform → HTMX tab: SQLMesh + export status, run history
|
||||
/admin/pipeline/transform/trigger → POST: enqueue transform/export/pipeline step
|
||||
/admin/pipeline/catalog → HTMX tab: data catalog (tables, columns, sample data)
|
||||
/admin/pipeline/catalog/<table> → HTMX partial: table detail (columns + sample)
|
||||
/admin/pipeline/query → HTMX tab: SQL query editor
|
||||
@@ -18,6 +20,7 @@ Data sources:
|
||||
- analytics.duckdb (DuckDB read-only via analytics.execute_user_query)
|
||||
- LANDING_DIR/ (filesystem scan for file sizes + dates)
|
||||
- infra/supervisor/workflows.toml (schedule definitions — tomllib, stdlib)
|
||||
- app.db tasks table (run_transform, run_export, run_pipeline task rows)
|
||||
"""
|
||||
import asyncio
|
||||
import json
|
||||
@@ -49,7 +52,7 @@ _LANDING_DIR = os.environ.get("LANDING_DIR", "data/landing")
|
||||
_SERVING_DUCKDB_PATH = os.environ.get("SERVING_DUCKDB_PATH", "data/analytics.duckdb")
|
||||
|
||||
# Repo root: web/src/padelnomics/admin/ → up 4 levels
|
||||
_REPO_ROOT = Path(__file__).resolve().parents[5]
|
||||
_REPO_ROOT = Path(__file__).resolve().parents[4]
|
||||
_WORKFLOWS_TOML = _REPO_ROOT / "infra" / "supervisor" / "workflows.toml"
|
||||
|
||||
# A "running" row older than this is considered stale/crashed.
|
||||
@@ -626,10 +629,8 @@ async def pipeline_dashboard():
|
||||
# ── Overview tab ─────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@bp.route("/overview")
|
||||
@role_required("admin")
|
||||
async def pipeline_overview():
|
||||
"""HTMX tab: extraction status per source, serving freshness, landing zone."""
|
||||
async def _render_overview_partial():
|
||||
"""Build and render the pipeline overview partial (shared by GET and POST triggers)."""
|
||||
latest_runs, landing_stats, workflows, serving_meta = await asyncio.gather(
|
||||
asyncio.to_thread(_fetch_latest_per_extractor_sync),
|
||||
asyncio.to_thread(_get_landing_zone_stats_sync),
|
||||
@@ -650,6 +651,13 @@ async def pipeline_overview():
|
||||
"stale": _is_stale(run) if run else False,
|
||||
})
|
||||
|
||||
# Treat pending extraction tasks as "running" (queued or active).
|
||||
from ..core import fetch_all as _fetch_all # noqa: PLC0415
|
||||
pending_extraction = await _fetch_all(
|
||||
"SELECT id FROM tasks WHERE task_name = 'run_extraction' AND status = 'pending' LIMIT 1"
|
||||
)
|
||||
any_running = bool(pending_extraction)
|
||||
|
||||
# Compute landing zone totals
|
||||
total_landing_bytes = sum(s["total_bytes"] for s in landing_stats)
|
||||
|
||||
@@ -677,10 +685,18 @@ async def pipeline_overview():
|
||||
total_landing_bytes=total_landing_bytes,
|
||||
serving_tables=serving_tables,
|
||||
last_export=last_export,
|
||||
any_running=any_running,
|
||||
format_bytes=_format_bytes,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/overview")
|
||||
@role_required("admin")
|
||||
async def pipeline_overview():
|
||||
"""HTMX tab: extraction status per source, serving freshness, landing zone."""
|
||||
return await _render_overview_partial()
|
||||
|
||||
|
||||
# ── Extractions tab ────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@@ -745,7 +761,11 @@ async def pipeline_mark_stale(run_id: int):
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def pipeline_trigger_extract():
|
||||
"""Enqueue an extraction run — all extractors, or a single named one."""
|
||||
"""Enqueue an extraction run — all extractors, or a single named one.
|
||||
|
||||
HTMX-aware: if the HX-Request header is present, returns the overview partial
|
||||
directly so the UI can update in-place without a redirect.
|
||||
"""
|
||||
from ..worker import enqueue
|
||||
|
||||
form = await request.form
|
||||
@@ -757,11 +777,15 @@ async def pipeline_trigger_extract():
|
||||
await flash(f"Unknown extractor '{extractor}'.", "warning")
|
||||
return redirect(url_for("pipeline.pipeline_dashboard"))
|
||||
await enqueue("run_extraction", {"extractor": extractor})
|
||||
await flash(f"Extractor '{extractor}' queued. Check the task queue for progress.", "success")
|
||||
else:
|
||||
await enqueue("run_extraction")
|
||||
await flash("Extraction run queued. Check the task queue for progress.", "success")
|
||||
|
||||
is_htmx = request.headers.get("HX-Request") == "true"
|
||||
if is_htmx:
|
||||
return await _render_overview_partial()
|
||||
|
||||
msg = f"Extractor '{extractor}' queued." if extractor else "Extraction run queued."
|
||||
await flash(f"{msg} Check the task queue for progress.", "success")
|
||||
return redirect(url_for("pipeline.pipeline_dashboard"))
|
||||
|
||||
|
||||
@@ -847,6 +871,156 @@ async def pipeline_lineage_schema(model: str):
|
||||
)
|
||||
|
||||
|
||||
# ── Transform tab ─────────────────────────────────────────────────────────────
|
||||
|
||||
_TRANSFORM_TASK_NAMES = ("run_transform", "run_export", "run_pipeline")
|
||||
|
||||
|
||||
async def _fetch_pipeline_tasks() -> dict:
|
||||
"""Fetch the latest task row for each transform task type, plus recent run history.
|
||||
|
||||
Returns:
|
||||
{
|
||||
"latest": {"run_transform": row|None, "run_export": row|None, "run_pipeline": row|None},
|
||||
"history": [row, ...], # last 20 rows across all three task types, newest first
|
||||
}
|
||||
"""
|
||||
from ..core import fetch_all as _fetch_all # noqa: PLC0415
|
||||
|
||||
# Latest row per task type (may be pending, complete, or failed)
|
||||
latest_rows = await _fetch_all(
|
||||
"""
|
||||
SELECT t.*
|
||||
FROM tasks t
|
||||
INNER JOIN (
|
||||
SELECT task_name, MAX(id) AS max_id
|
||||
FROM tasks
|
||||
WHERE task_name IN ('run_transform', 'run_export', 'run_pipeline')
|
||||
GROUP BY task_name
|
||||
) latest ON t.id = latest.max_id
|
||||
"""
|
||||
)
|
||||
latest: dict = {"run_transform": None, "run_export": None, "run_pipeline": None}
|
||||
for row in latest_rows:
|
||||
latest[row["task_name"]] = dict(row)
|
||||
|
||||
history = await _fetch_all(
|
||||
"""
|
||||
SELECT id, task_name, status, created_at, completed_at, error
|
||||
FROM tasks
|
||||
WHERE task_name IN ('run_transform', 'run_export', 'run_pipeline')
|
||||
ORDER BY id DESC
|
||||
LIMIT 20
|
||||
"""
|
||||
)
|
||||
return {"latest": latest, "history": [dict(r) for r in history]}
|
||||
|
||||
|
||||
def _format_duration(created_at: str | None, completed_at: str | None) -> str:
|
||||
"""Human-readable duration between created_at and completed_at, or '' if unavailable."""
|
||||
if not created_at or not completed_at:
|
||||
return ""
|
||||
try:
|
||||
fmt = "%Y-%m-%d %H:%M:%S"
|
||||
start = datetime.strptime(created_at, fmt)
|
||||
end = datetime.strptime(completed_at, fmt)
|
||||
delta = int((end - start).total_seconds())
|
||||
if delta < 0:
|
||||
return ""
|
||||
if delta < 60:
|
||||
return f"{delta}s"
|
||||
return f"{delta // 60}m {delta % 60}s"
|
||||
except ValueError:
|
||||
return ""
|
||||
|
||||
|
||||
async def _render_transform_partial():
|
||||
"""Build and render the transform tab partial."""
|
||||
task_data = await _fetch_pipeline_tasks()
|
||||
latest = task_data["latest"]
|
||||
history = task_data["history"]
|
||||
|
||||
# Enrich history rows with duration
|
||||
for row in history:
|
||||
row["duration"] = _format_duration(row.get("created_at"), row.get("completed_at"))
|
||||
# Truncate error for display
|
||||
if row.get("error"):
|
||||
row["error_short"] = row["error"][:120]
|
||||
else:
|
||||
row["error_short"] = None
|
||||
|
||||
any_running = any(
|
||||
t is not None and t["status"] == "pending" for t in latest.values()
|
||||
)
|
||||
|
||||
serving_meta = await asyncio.to_thread(_load_serving_meta)
|
||||
|
||||
return await render_template(
|
||||
"admin/partials/pipeline_transform.html",
|
||||
latest=latest,
|
||||
history=history,
|
||||
any_running=any_running,
|
||||
serving_meta=serving_meta,
|
||||
format_duration=_format_duration,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/transform")
|
||||
@role_required("admin")
|
||||
async def pipeline_transform():
|
||||
"""HTMX tab: SQLMesh transform + export status, run history."""
|
||||
return await _render_transform_partial()
|
||||
|
||||
|
||||
@bp.route("/transform/trigger", methods=["POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def pipeline_trigger_transform():
|
||||
"""Enqueue a transform, export, or full pipeline task.
|
||||
|
||||
form field `step`: 'transform' | 'export' | 'pipeline'
|
||||
Concurrency guard: rejects if the same task type is already pending.
|
||||
HTMX-aware: returns the transform partial for HTMX requests.
|
||||
"""
|
||||
from ..core import fetch_one as _fetch_one # noqa: PLC0415
|
||||
from ..worker import enqueue
|
||||
|
||||
form = await request.form
|
||||
step = (form.get("step") or "").strip()
|
||||
|
||||
step_to_task = {
|
||||
"transform": "run_transform",
|
||||
"export": "run_export",
|
||||
"pipeline": "run_pipeline",
|
||||
}
|
||||
if step not in step_to_task:
|
||||
await flash(f"Unknown step '{step}'.", "warning")
|
||||
return redirect(url_for("pipeline.pipeline_dashboard"))
|
||||
|
||||
task_name = step_to_task[step]
|
||||
|
||||
# Concurrency guard: reject if same task type is already pending
|
||||
existing = await _fetch_one(
|
||||
"SELECT id FROM tasks WHERE task_name = ? AND status = 'pending' LIMIT 1",
|
||||
(task_name,),
|
||||
)
|
||||
if existing:
|
||||
is_htmx = request.headers.get("HX-Request") == "true"
|
||||
if is_htmx:
|
||||
return await _render_transform_partial()
|
||||
await flash(f"A '{step}' task is already queued (task #{existing['id']}).", "warning")
|
||||
return redirect(url_for("pipeline.pipeline_dashboard"))
|
||||
|
||||
await enqueue(task_name)
|
||||
|
||||
is_htmx = request.headers.get("HX-Request") == "true"
|
||||
if is_htmx:
|
||||
return await _render_transform_partial()
|
||||
|
||||
await flash(f"'{step}' task queued. Check the task queue for progress.", "success")
|
||||
return redirect(url_for("pipeline.pipeline_dashboard"))
|
||||
|
||||
|
||||
# ── Catalog tab ───────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
|
||||
@@ -169,7 +169,6 @@ async def pseo_generate_gaps(slug: str):
|
||||
"template_slug": slug,
|
||||
"start_date": date.today().isoformat(),
|
||||
"articles_per_day": 500,
|
||||
"limit": 500,
|
||||
})
|
||||
await flash(
|
||||
f"Queued generation for {len(gaps)} missing articles in '{config['name']}'.",
|
||||
|
||||
@@ -1865,7 +1865,7 @@ async def template_preview(slug: str, row_key: str):
|
||||
@csrf_protect
|
||||
async def template_generate(slug: str):
|
||||
"""Generate articles from template + DuckDB data."""
|
||||
from ..content import fetch_template_data, load_template
|
||||
from ..content import count_template_data, load_template
|
||||
|
||||
try:
|
||||
config = load_template(slug)
|
||||
@@ -1873,8 +1873,7 @@ async def template_generate(slug: str):
|
||||
await flash("Template not found.", "error")
|
||||
return redirect(url_for("admin.templates"))
|
||||
|
||||
data_rows = await fetch_template_data(config["data_table"], limit=501)
|
||||
row_count = len(data_rows)
|
||||
row_count = await count_template_data(config["data_table"])
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
@@ -1888,7 +1887,6 @@ async def template_generate(slug: str):
|
||||
"template_slug": slug,
|
||||
"start_date": start_date.isoformat(),
|
||||
"articles_per_day": articles_per_day,
|
||||
"limit": 500,
|
||||
})
|
||||
await flash(
|
||||
f"Article generation queued for '{config['name']}'. "
|
||||
@@ -1923,7 +1921,6 @@ async def template_regenerate(slug: str):
|
||||
"template_slug": slug,
|
||||
"start_date": date.today().isoformat(),
|
||||
"articles_per_day": 500,
|
||||
"limit": 500,
|
||||
})
|
||||
await flash("Regeneration queued. The worker will process it in the background.", "success")
|
||||
return redirect(url_for("admin.template_detail", slug=slug))
|
||||
@@ -2499,7 +2496,12 @@ async def article_results():
|
||||
@csrf_protect
|
||||
async def article_new():
|
||||
"""Create a manual article."""
|
||||
from ..content.routes import BUILD_DIR, bake_scenario_cards, is_reserved_path
|
||||
from ..content.routes import (
|
||||
BUILD_DIR,
|
||||
bake_product_cards,
|
||||
bake_scenario_cards,
|
||||
is_reserved_path,
|
||||
)
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
@@ -2523,9 +2525,10 @@ async def article_new():
|
||||
await flash(f"URL path '{url_path}' conflicts with a reserved route.", "error")
|
||||
return await render_template("admin/article_form.html", data=dict(form), editing=False)
|
||||
|
||||
# Render markdown → HTML with scenario cards baked in
|
||||
# Render markdown → HTML with scenario + product cards baked in
|
||||
body_html = mistune.html(body)
|
||||
body_html = await bake_scenario_cards(body_html)
|
||||
body_html = await bake_product_cards(body_html, lang=language)
|
||||
|
||||
build_dir = BUILD_DIR / language
|
||||
build_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -2561,7 +2564,12 @@ async def article_new():
|
||||
@csrf_protect
|
||||
async def article_edit(article_id: int):
|
||||
"""Edit a manual article."""
|
||||
from ..content.routes import BUILD_DIR, bake_scenario_cards, is_reserved_path
|
||||
from ..content.routes import (
|
||||
BUILD_DIR,
|
||||
bake_product_cards,
|
||||
bake_scenario_cards,
|
||||
is_reserved_path,
|
||||
)
|
||||
|
||||
article = await fetch_one("SELECT * FROM articles WHERE id = ?", (article_id,))
|
||||
if not article:
|
||||
@@ -2591,6 +2599,7 @@ async def article_edit(article_id: int):
|
||||
if body:
|
||||
body_html = mistune.html(body)
|
||||
body_html = await bake_scenario_cards(body_html)
|
||||
body_html = await bake_product_cards(body_html, lang=language)
|
||||
build_dir = BUILD_DIR / language
|
||||
build_dir.mkdir(parents=True, exist_ok=True)
|
||||
(build_dir / f"{article['slug']}.html").write_text(body_html)
|
||||
@@ -2717,7 +2726,6 @@ async def rebuild_all():
|
||||
"template_slug": t["slug"],
|
||||
"start_date": date.today().isoformat(),
|
||||
"articles_per_day": 500,
|
||||
"limit": 500,
|
||||
})
|
||||
|
||||
# Manual articles still need inline rebuild
|
||||
@@ -2735,7 +2743,7 @@ async def rebuild_all():
|
||||
|
||||
async def _rebuild_article(article_id: int):
|
||||
"""Re-render a single article from its source."""
|
||||
from ..content.routes import BUILD_DIR, bake_scenario_cards
|
||||
from ..content.routes import BUILD_DIR, bake_product_cards, bake_scenario_cards
|
||||
|
||||
article = await fetch_one("SELECT * FROM articles WHERE id = ?", (article_id,))
|
||||
if not article:
|
||||
@@ -2757,9 +2765,13 @@ async def _rebuild_article(article_id: int):
|
||||
md_path = Path("data/content/articles") / f"{article['slug']}.md"
|
||||
if not md_path.exists():
|
||||
return
|
||||
body_html = mistune.html(md_path.read_text())
|
||||
raw = md_path.read_text()
|
||||
m = _FRONTMATTER_RE.match(raw)
|
||||
body = raw[m.end():] if m else raw
|
||||
body_html = mistune.html(body)
|
||||
lang = article.get("language", "en") if hasattr(article, "get") else "en"
|
||||
body_html = await bake_scenario_cards(body_html, lang=lang)
|
||||
body_html = await bake_product_cards(body_html, lang=lang)
|
||||
BUILD_DIR.mkdir(parents=True, exist_ok=True)
|
||||
(BUILD_DIR / f"{article['slug']}.html").write_text(body_html)
|
||||
|
||||
@@ -3021,6 +3033,7 @@ async def outreach():
|
||||
current_search=search,
|
||||
current_follow_up=follow_up,
|
||||
page=page,
|
||||
outreach_email=EMAIL_ADDRESSES["outreach"],
|
||||
)
|
||||
|
||||
|
||||
@@ -3233,3 +3246,632 @@ async def outreach_import():
|
||||
|
||||
await flash(f"Imported {imported} suppliers. Skipped {skipped} (duplicates or missing data).", "success")
|
||||
return redirect(url_for("admin.outreach"))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Affiliate Product Catalog
|
||||
# =============================================================================
|
||||
|
||||
AFFILIATE_CATEGORIES = ("racket", "ball", "shoe", "bag", "grip", "eyewear", "accessory")
|
||||
AFFILIATE_STATUSES = ("draft", "active", "archived")
|
||||
AFFILIATE_PROGRAM_STATUSES = ("active", "inactive")
|
||||
|
||||
|
||||
# ── Affiliate Programs ────────────────────────────────────────────────────────
|
||||
|
||||
def _form_to_program(form) -> dict:
|
||||
"""Parse affiliate program form values into a data dict."""
|
||||
commission_str = form.get("commission_pct", "").strip()
|
||||
commission_pct = 0.0
|
||||
if commission_str:
|
||||
try:
|
||||
commission_pct = float(commission_str.replace(",", "."))
|
||||
except ValueError:
|
||||
commission_pct = 0.0
|
||||
|
||||
return {
|
||||
"name": form.get("name", "").strip(),
|
||||
"slug": form.get("slug", "").strip(),
|
||||
"url_template": form.get("url_template", "").strip(),
|
||||
"tracking_tag": form.get("tracking_tag", "").strip(),
|
||||
"commission_pct": commission_pct,
|
||||
"homepage_url": form.get("homepage_url", "").strip(),
|
||||
"status": form.get("status", "active").strip(),
|
||||
"notes": form.get("notes", "").strip(),
|
||||
}
|
||||
|
||||
|
||||
@bp.route("/affiliate/programs")
|
||||
@role_required("admin")
|
||||
async def affiliate_programs():
|
||||
"""Affiliate programs list — full page."""
|
||||
from ..affiliate import get_all_programs
|
||||
|
||||
programs = await get_all_programs()
|
||||
return await render_template(
|
||||
"admin/affiliate_programs.html",
|
||||
admin_page="affiliate_programs",
|
||||
programs=programs,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/programs/results")
|
||||
@role_required("admin")
|
||||
async def affiliate_program_results():
|
||||
"""HTMX partial: program rows."""
|
||||
from ..affiliate import get_all_programs
|
||||
|
||||
programs = await get_all_programs()
|
||||
return await render_template(
|
||||
"admin/partials/affiliate_program_results.html",
|
||||
programs=programs,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/programs/new", methods=["GET", "POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_program_new():
|
||||
"""Create an affiliate program."""
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
data = _form_to_program(form)
|
||||
|
||||
if not data["name"] or not data["slug"] or not data["url_template"]:
|
||||
await flash("Name, slug, and URL template are required.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_program_form.html",
|
||||
admin_page="affiliate_programs",
|
||||
data=data,
|
||||
editing=False,
|
||||
program_statuses=AFFILIATE_PROGRAM_STATUSES,
|
||||
)
|
||||
|
||||
existing = await fetch_one(
|
||||
"SELECT id FROM affiliate_programs WHERE slug = ?", (data["slug"],)
|
||||
)
|
||||
if existing:
|
||||
await flash(f"Slug '{data['slug']}' already exists.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_program_form.html",
|
||||
admin_page="affiliate_programs",
|
||||
data=data,
|
||||
editing=False,
|
||||
program_statuses=AFFILIATE_PROGRAM_STATUSES,
|
||||
)
|
||||
|
||||
await execute(
|
||||
"""INSERT INTO affiliate_programs
|
||||
(name, slug, url_template, tracking_tag, commission_pct,
|
||||
homepage_url, status, notes)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
data["name"], data["slug"], data["url_template"],
|
||||
data["tracking_tag"], data["commission_pct"],
|
||||
data["homepage_url"], data["status"], data["notes"],
|
||||
),
|
||||
)
|
||||
await flash(f"Program '{data['name']}' created.", "success")
|
||||
return redirect(url_for("admin.affiliate_programs"))
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_program_form.html",
|
||||
admin_page="affiliate_programs",
|
||||
data={},
|
||||
editing=False,
|
||||
program_statuses=AFFILIATE_PROGRAM_STATUSES,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/programs/<int:program_id>/edit", methods=["GET", "POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_program_edit(program_id: int):
|
||||
"""Edit an affiliate program."""
|
||||
program = await fetch_one(
|
||||
"SELECT * FROM affiliate_programs WHERE id = ?", (program_id,)
|
||||
)
|
||||
if not program:
|
||||
await flash("Program not found.", "error")
|
||||
return redirect(url_for("admin.affiliate_programs"))
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
data = _form_to_program(form)
|
||||
|
||||
if not data["name"] or not data["slug"] or not data["url_template"]:
|
||||
await flash("Name, slug, and URL template are required.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_program_form.html",
|
||||
admin_page="affiliate_programs",
|
||||
data={**dict(program), **data},
|
||||
editing=True,
|
||||
program_id=program_id,
|
||||
program_statuses=AFFILIATE_PROGRAM_STATUSES,
|
||||
)
|
||||
|
||||
if data["slug"] != program["slug"]:
|
||||
collision = await fetch_one(
|
||||
"SELECT id FROM affiliate_programs WHERE slug = ? AND id != ?",
|
||||
(data["slug"], program_id),
|
||||
)
|
||||
if collision:
|
||||
await flash(f"Slug '{data['slug']}' already exists.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_program_form.html",
|
||||
admin_page="affiliate_programs",
|
||||
data={**dict(program), **data},
|
||||
editing=True,
|
||||
program_id=program_id,
|
||||
program_statuses=AFFILIATE_PROGRAM_STATUSES,
|
||||
)
|
||||
|
||||
await execute(
|
||||
"""UPDATE affiliate_programs
|
||||
SET name=?, slug=?, url_template=?, tracking_tag=?, commission_pct=?,
|
||||
homepage_url=?, status=?, notes=?, updated_at=datetime('now')
|
||||
WHERE id=?""",
|
||||
(
|
||||
data["name"], data["slug"], data["url_template"],
|
||||
data["tracking_tag"], data["commission_pct"],
|
||||
data["homepage_url"], data["status"], data["notes"],
|
||||
program_id,
|
||||
),
|
||||
)
|
||||
await flash(f"Program '{data['name']}' updated.", "success")
|
||||
return redirect(url_for("admin.affiliate_programs"))
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_program_form.html",
|
||||
admin_page="affiliate_programs",
|
||||
data=dict(program),
|
||||
editing=True,
|
||||
program_id=program_id,
|
||||
program_statuses=AFFILIATE_PROGRAM_STATUSES,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/programs/<int:program_id>/delete", methods=["POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_program_delete(program_id: int):
|
||||
"""Delete an affiliate program — blocked if products reference it."""
|
||||
program = await fetch_one(
|
||||
"SELECT name FROM affiliate_programs WHERE id = ?", (program_id,)
|
||||
)
|
||||
if not program:
|
||||
return redirect(url_for("admin.affiliate_programs"))
|
||||
|
||||
product_count = await fetch_one(
|
||||
"SELECT COUNT(*) AS cnt FROM affiliate_products WHERE program_id = ?",
|
||||
(program_id,),
|
||||
)
|
||||
count = product_count["cnt"] if product_count else 0
|
||||
if count > 0:
|
||||
await flash(
|
||||
f"Cannot delete '{program['name']}' — {count} product(s) reference it. "
|
||||
"Reassign or remove those products first.",
|
||||
"error",
|
||||
)
|
||||
return redirect(url_for("admin.affiliate_programs"))
|
||||
|
||||
await execute("DELETE FROM affiliate_programs WHERE id = ?", (program_id,))
|
||||
await flash(f"Program '{program['name']}' deleted.", "success")
|
||||
return redirect(url_for("admin.affiliate_programs"))
|
||||
|
||||
|
||||
def _form_to_product(form) -> dict:
|
||||
"""Parse affiliate product form values into a data dict."""
|
||||
price_str = form.get("price_eur", "").strip()
|
||||
price_cents = None
|
||||
if price_str:
|
||||
try:
|
||||
price_cents = round(float(price_str.replace(",", ".")) * 100)
|
||||
except ValueError:
|
||||
price_cents = None
|
||||
|
||||
rating_str = form.get("rating", "").strip()
|
||||
rating = None
|
||||
if rating_str:
|
||||
try:
|
||||
rating = float(rating_str.replace(",", "."))
|
||||
except ValueError:
|
||||
rating = None
|
||||
|
||||
pros_raw = form.get("pros", "").strip()
|
||||
cons_raw = form.get("cons", "").strip()
|
||||
pros = json.dumps([line.strip() for line in pros_raw.splitlines() if line.strip()])
|
||||
cons = json.dumps([line.strip() for line in cons_raw.splitlines() if line.strip()])
|
||||
|
||||
# Program-based URL vs manual URL.
|
||||
# When a program is selected, product_identifier holds the ASIN/path;
|
||||
# affiliate_url is cleared. Manual mode is the reverse.
|
||||
program_id_str = form.get("program_id", "").strip()
|
||||
program_id = int(program_id_str) if program_id_str and program_id_str != "0" else None
|
||||
product_identifier = form.get("product_identifier", "").strip()
|
||||
affiliate_url = form.get("affiliate_url", "").strip()
|
||||
|
||||
# retailer is auto-populated from program name on save (kept for display/filter)
|
||||
retailer = form.get("retailer", "").strip()
|
||||
|
||||
return {
|
||||
"slug": form.get("slug", "").strip(),
|
||||
"name": form.get("name", "").strip(),
|
||||
"brand": form.get("brand", "").strip(),
|
||||
"category": form.get("category", "accessory").strip(),
|
||||
"retailer": retailer,
|
||||
"program_id": program_id,
|
||||
"product_identifier": product_identifier,
|
||||
"affiliate_url": affiliate_url,
|
||||
"image_url": form.get("image_url", "").strip(),
|
||||
"price_cents": price_cents,
|
||||
"currency": "EUR",
|
||||
"rating": rating,
|
||||
"pros": pros,
|
||||
"cons": cons,
|
||||
"description": form.get("description", "").strip(),
|
||||
"cta_label": form.get("cta_label", "").strip(),
|
||||
"status": form.get("status", "draft").strip(),
|
||||
"language": form.get("language", "de").strip() or "de",
|
||||
"sort_order": int(form.get("sort_order", "0") or "0"),
|
||||
}
|
||||
|
||||
|
||||
@bp.route("/affiliate")
|
||||
@role_required("admin")
|
||||
async def affiliate_products():
|
||||
"""Affiliate product list — full page."""
|
||||
from ..affiliate import get_all_products, get_click_counts, get_distinct_retailers
|
||||
|
||||
q = request.args.get("q", "").strip()
|
||||
category = request.args.get("category", "").strip()
|
||||
retailer_filter = request.args.get("retailer", "").strip()
|
||||
status_filter = request.args.get("status", "").strip()
|
||||
|
||||
products = await get_all_products(
|
||||
status=status_filter or None,
|
||||
retailer=retailer_filter or None,
|
||||
)
|
||||
if q:
|
||||
q_lower = q.lower()
|
||||
products = [p for p in products if q_lower in p["name"].lower() or q_lower in p["brand"].lower()]
|
||||
if category:
|
||||
products = [p for p in products if p["category"] == category]
|
||||
|
||||
click_counts = await get_click_counts()
|
||||
for p in products:
|
||||
p["click_count"] = click_counts.get(p["id"], 0)
|
||||
|
||||
retailers = await get_distinct_retailers()
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_products.html",
|
||||
admin_page="affiliate",
|
||||
products=products,
|
||||
click_counts=click_counts,
|
||||
retailers=retailers,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
q=q,
|
||||
category=category,
|
||||
retailer_filter=retailer_filter,
|
||||
status_filter=status_filter,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/results")
|
||||
@role_required("admin")
|
||||
async def affiliate_results():
|
||||
"""HTMX partial: filtered product rows."""
|
||||
from ..affiliate import get_all_products, get_click_counts
|
||||
|
||||
q = request.args.get("q", "").strip()
|
||||
category = request.args.get("category", "").strip()
|
||||
retailer_filter = request.args.get("retailer", "").strip()
|
||||
status_filter = request.args.get("status", "").strip()
|
||||
|
||||
products = await get_all_products(
|
||||
status=status_filter or None,
|
||||
retailer=retailer_filter or None,
|
||||
)
|
||||
if q:
|
||||
q_lower = q.lower()
|
||||
products = [p for p in products if q_lower in p["name"].lower() or q_lower in p["brand"].lower()]
|
||||
if category:
|
||||
products = [p for p in products if p["category"] == category]
|
||||
|
||||
click_counts = await get_click_counts()
|
||||
for p in products:
|
||||
p["click_count"] = click_counts.get(p["id"], 0)
|
||||
|
||||
return await render_template(
|
||||
"admin/partials/affiliate_results.html",
|
||||
products=products,
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/preview", methods=["POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_preview():
|
||||
"""Render a product card fragment from form data — used by live preview HTMX."""
|
||||
from ..content.routes import _bake_env
|
||||
from ..i18n import get_translations
|
||||
|
||||
form = await request.form
|
||||
data = _form_to_product(form)
|
||||
lang = data["language"] or "de"
|
||||
|
||||
# Convert JSON-string pros/cons to lists for the template
|
||||
product = dict(data)
|
||||
product["pros"] = json.loads(product["pros"]) if product["pros"] else []
|
||||
product["cons"] = json.loads(product["cons"]) if product["cons"] else []
|
||||
|
||||
if not product["name"]:
|
||||
return "<p style='color:#94A3B8;font-size:.875rem;padding:.5rem 0'>Fill in the form to see a preview.</p>"
|
||||
|
||||
tmpl = _bake_env.get_template("partials/product_card.html")
|
||||
html = tmpl.render(product=product, t=get_translations(lang), lang=lang)
|
||||
return html
|
||||
|
||||
|
||||
@bp.route("/affiliate/new", methods=["GET", "POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_new():
|
||||
"""Create an affiliate product."""
|
||||
from ..affiliate import get_all_programs, get_distinct_retailers
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
data = _form_to_product(form)
|
||||
|
||||
has_url = bool(data["program_id"] and data["product_identifier"]) or bool(data["affiliate_url"])
|
||||
if not data["slug"] or not data["name"] or not has_url:
|
||||
await flash("Slug, name, and either a program+product ID or manual URL are required.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data=data,
|
||||
editing=False,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
programs=await get_all_programs(status="active"),
|
||||
)
|
||||
|
||||
existing = await fetch_one(
|
||||
"SELECT id FROM affiliate_products WHERE slug = ? AND language = ?",
|
||||
(data["slug"], data["language"]),
|
||||
)
|
||||
if existing:
|
||||
await flash(f"Slug '{data['slug']}' already exists for language '{data['language']}'.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data=data,
|
||||
editing=False,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
programs=await get_all_programs(status="active"),
|
||||
)
|
||||
|
||||
# Auto-populate retailer from program name if not manually set
|
||||
if data["program_id"] and not data["retailer"]:
|
||||
prog = await fetch_one(
|
||||
"SELECT name FROM affiliate_programs WHERE id = ?", (data["program_id"],)
|
||||
)
|
||||
if prog:
|
||||
data["retailer"] = prog["name"]
|
||||
|
||||
await execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, program_id, product_identifier,
|
||||
affiliate_url, image_url, price_cents, currency, rating, pros, cons,
|
||||
description, cta_label, status, language, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
data["slug"], data["name"], data["brand"], data["category"],
|
||||
data["retailer"], data["program_id"], data["product_identifier"],
|
||||
data["affiliate_url"], data["image_url"],
|
||||
data["price_cents"], data["currency"], data["rating"],
|
||||
data["pros"], data["cons"], data["description"], data["cta_label"],
|
||||
data["status"], data["language"], data["sort_order"],
|
||||
),
|
||||
)
|
||||
await flash(f"Product '{data['name']}' created.", "success")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data={},
|
||||
editing=False,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
programs=await get_all_programs(status="active"),
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/<int:product_id>/edit", methods=["GET", "POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_edit(product_id: int):
|
||||
"""Edit an affiliate product."""
|
||||
from ..affiliate import get_all_programs, get_distinct_retailers
|
||||
|
||||
product = await fetch_one("SELECT * FROM affiliate_products WHERE id = ?", (product_id,))
|
||||
if not product:
|
||||
await flash("Product not found.", "error")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
if request.method == "POST":
|
||||
form = await request.form
|
||||
data = _form_to_product(form)
|
||||
|
||||
has_url = bool(data["program_id"] and data["product_identifier"]) or bool(data["affiliate_url"])
|
||||
if not data["slug"] or not data["name"] or not has_url:
|
||||
await flash("Slug, name, and either a program+product ID or manual URL are required.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data={**dict(product), **data},
|
||||
editing=True,
|
||||
product_id=product_id,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
programs=await get_all_programs(status="active"),
|
||||
)
|
||||
|
||||
# Check slug collision only if slug or language changed
|
||||
if data["slug"] != product["slug"] or data["language"] != product["language"]:
|
||||
collision = await fetch_one(
|
||||
"SELECT id FROM affiliate_products WHERE slug = ? AND language = ? AND id != ?",
|
||||
(data["slug"], data["language"], product_id),
|
||||
)
|
||||
if collision:
|
||||
await flash(f"Slug '{data['slug']}' already exists for language '{data['language']}'.", "error")
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data={**dict(product), **data},
|
||||
editing=True,
|
||||
product_id=product_id,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
programs=await get_all_programs(status="active"),
|
||||
)
|
||||
|
||||
# Auto-populate retailer from program name if not manually set
|
||||
if data["program_id"] and not data["retailer"]:
|
||||
prog = await fetch_one(
|
||||
"SELECT name FROM affiliate_programs WHERE id = ?", (data["program_id"],)
|
||||
)
|
||||
if prog:
|
||||
data["retailer"] = prog["name"]
|
||||
|
||||
await execute(
|
||||
"""UPDATE affiliate_products
|
||||
SET slug=?, name=?, brand=?, category=?, retailer=?, program_id=?,
|
||||
product_identifier=?, affiliate_url=?, image_url=?,
|
||||
price_cents=?, currency=?, rating=?, pros=?, cons=?,
|
||||
description=?, cta_label=?, status=?, language=?, sort_order=?,
|
||||
updated_at=datetime('now')
|
||||
WHERE id=?""",
|
||||
(
|
||||
data["slug"], data["name"], data["brand"], data["category"],
|
||||
data["retailer"], data["program_id"], data["product_identifier"],
|
||||
data["affiliate_url"], data["image_url"],
|
||||
data["price_cents"], data["currency"], data["rating"],
|
||||
data["pros"], data["cons"], data["description"], data["cta_label"],
|
||||
data["status"], data["language"], data["sort_order"],
|
||||
product_id,
|
||||
),
|
||||
)
|
||||
await flash(f"Product '{data['name']}' updated.", "success")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
# Render pros/cons JSON arrays as newline-separated text for the form
|
||||
product_dict = dict(product)
|
||||
try:
|
||||
product_dict["pros_text"] = "\n".join(json.loads(product["pros"] or "[]"))
|
||||
product_dict["cons_text"] = "\n".join(json.loads(product["cons"] or "[]"))
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
product_dict["pros_text"] = ""
|
||||
product_dict["cons_text"] = ""
|
||||
if product["price_cents"]:
|
||||
product_dict["price_eur"] = f"{product['price_cents'] / 100:.2f}"
|
||||
else:
|
||||
product_dict["price_eur"] = ""
|
||||
|
||||
return await render_template(
|
||||
"admin/affiliate_form.html",
|
||||
admin_page="affiliate",
|
||||
data=product_dict,
|
||||
editing=True,
|
||||
product_id=product_id,
|
||||
categories=AFFILIATE_CATEGORIES,
|
||||
statuses=AFFILIATE_STATUSES,
|
||||
retailers=await get_distinct_retailers(),
|
||||
programs=await get_all_programs(status="active"),
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/affiliate/<int:product_id>/delete", methods=["POST"])
|
||||
@role_required("admin")
|
||||
@csrf_protect
|
||||
async def affiliate_delete(product_id: int):
|
||||
"""Delete an affiliate product."""
|
||||
product = await fetch_one("SELECT name FROM affiliate_products WHERE id = ?", (product_id,))
|
||||
if product:
|
||||
await execute("DELETE FROM affiliate_products WHERE id = ?", (product_id,))
|
||||
await flash(f"Product '{product['name']}' deleted.", "success")
|
||||
return redirect(url_for("admin.affiliate_products"))
|
||||
|
||||
|
||||
@bp.route("/affiliate/dashboard")
@role_required("admin")
async def affiliate_dashboard():
    """Affiliate click statistics dashboard.

    Query args:
        days: lookback window in days, clamped to 7..365 (default 30).
              Non-numeric values fall back to the default instead of
              raising an unhandled ``ValueError`` (500).
    """
    from ..affiliate import get_click_stats

    try:
        days_count = int(request.args.get("days", "30") or "30")
    except ValueError:
        # Bad query input (e.g. ?days=abc) must not crash the dashboard.
        days_count = 30
    days_count = max(7, min(days_count, 365))
    stats = await get_click_stats(days_count)

    # Estimated revenue: clicks × assumed 3% conversion × €80 avg basket.
    est_revenue = round(stats["total_clicks"] * 0.03 * 80)

    # Number of articles that have generated at least one tracked click.
    article_count = len(stats["top_articles"])

    # Retailer bars: pct = width relative to the busiest retailer,
    # share_pct = share of all clicks. `total` is loop-invariant, so
    # compute it once (guard against division by zero with `or 1`).
    max_ret_clicks = max((r["click_count"] for r in stats["by_retailer"]), default=1)
    total = stats["total_clicks"] or 1
    for r in stats["by_retailer"]:
        r["pct"] = round(r["click_count"] / max_ret_clicks * 100) if max_ret_clicks else 0
        r["share_pct"] = round(r["click_count"] / total * 100)

    return await render_template(
        "admin/affiliate_dashboard.html",
        admin_page="affiliate_dashboard",
        stats=stats,
        est_revenue=est_revenue,
        article_count=article_count,
        days_count=days_count,
    )
|
||||
|
||||
|
||||
@bp.route("/affiliate/<int:product_id>/toggle", methods=["POST"])
@role_required("admin")
@csrf_protect  # state-changing POST: protect like affiliate_delete (form/HTMX caller must send the token)
async def affiliate_toggle(product_id: int):
    """Toggle product status: draft → active → archived → draft.

    Returns the re-rendered table-row partial (HTMX swap target) on
    success, or an empty 404 response for unknown product IDs.
    """
    product = await fetch_one(
        "SELECT id, name, status FROM affiliate_products WHERE id = ?", (product_id,)
    )
    if not product:
        return "", 404

    # Advance to the next status; unknown/legacy statuses reset to draft.
    cycle = {"draft": "active", "active": "archived", "archived": "draft"}
    new_status = cycle.get(product["status"], "draft")
    await execute(
        "UPDATE affiliate_products SET status=?, updated_at=datetime('now') WHERE id=?",
        (new_status, product_id),
    )

    # Re-read the full row so the partial renders the fresh state.
    product_updated = await fetch_one(
        "SELECT * FROM affiliate_products WHERE id = ?", (product_id,)
    )
    from ..affiliate import get_click_counts
    click_counts = await get_click_counts()
    product_dict = dict(product_updated)
    product_dict["click_count"] = click_counts.get(product_id, 0)

    return await render_template(
        "admin/partials/affiliate_row.html",
        product=product_dict,
    )
|
||||
|
||||
@@ -0,0 +1,121 @@
|
||||
{# Affiliate click analytics dashboard.
   Context (from admin.affiliate_dashboard): stats, est_revenue, article_count, days_count. #}
{% extends "admin/base_admin.html" %}
{% set admin_page = "affiliate_dashboard" %}

{% block title %}Affiliate Dashboard - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
  <h1 class="text-2xl">Affiliate Dashboard</h1>
  {# Lookback window selector; days_count marks the active choice. #}
  <div class="flex gap-2">
    {% for d in [7, 30, 90] %}
    <a href="?days={{ d }}" class="btn-outline btn-sm {% if days_count == d %}active{% endif %}">{{ d }}d</a>
    {% endfor %}
  </div>
</header>

{# ── Stats strip ── #}
<div style="display:grid;grid-template-columns:repeat(4,1fr);gap:1rem;margin-bottom:1.5rem;">

  <div class="card" style="padding:1.25rem;">
    <div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Clicks ({{ days_count }}d)</div>
    <div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">{{ stats.total_clicks | int }}</div>
  </div>

  <div class="card" style="padding:1.25rem;">
    <div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Products</div>
    <div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">{{ stats.active_products or 0 }}</div>
    <div class="text-xs text-slate">{{ stats.draft_products or 0 }} draft</div>
  </div>

  <div class="card" style="padding:1.25rem;">
    <div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Articles (clicked)</div>
    <div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">{{ article_count }}</div>
  </div>

  <div class="card" style="padding:1.25rem;">
    <div class="text-xs font-semibold text-slate" style="text-transform:uppercase;letter-spacing:.06em;margin-bottom:.375rem;">Est. Revenue</div>
    <div class="mono" style="font-size:1.75rem;font-weight:700;color:#0F172A;">~€{{ est_revenue }}</div>
    <div class="text-xs text-slate">3% CR × €80 basket</div>
  </div>

</div>

{# ── Daily bar chart ── #}
{# bar.pct is pre-scaled server-side (0–100) relative to the busiest day. #}
{% if stats.daily_bars %}
<div class="card mb-6" style="padding:1.5rem;">
  <div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Clicks · Last {{ days_count }} Days</div>
  <div style="display:flex;align-items:flex-end;gap:2px;height:120px;overflow-x:auto;">
    {% for bar in stats.daily_bars %}
    <div title="{{ bar.day }}: {{ bar.click_count }} clicks"
         style="flex-shrink:0;width:8px;background:#1D4ED8;border-radius:3px 3px 0 0;min-height:2px;height:{{ bar.pct }}%;transition:opacity .15s;"
         onmouseover="this.style.opacity='.7'" onmouseout="this.style.opacity='1'">
    </div>
    {% endfor %}
  </div>
  {# Axis labels: first and last day of the window. #}
  <div style="display:flex;justify-content:space-between;margin-top:.375rem;">
    <span class="text-xs text-slate">{{ stats.daily_bars[0].day if stats.daily_bars else '' }}</span>
    <span class="text-xs text-slate">{{ stats.daily_bars[-1].day if stats.daily_bars else '' }}</span>
  </div>
</div>
{% endif %}

<div style="display:grid;grid-template-columns:1fr 1fr;gap:1.5rem;margin-bottom:1.5rem;">

  {# ── Top products ── #}
  <div class="card" style="padding:1.5rem;">
    <div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Top Products</div>
    {% if stats.top_products %}
    {% for p in stats.top_products %}
    <div style="display:flex;align-items:center;gap:.75rem;padding:.5rem 0;{% if not loop.last %}border-bottom:1px solid #F1F5F9;{% endif %}">
      <span class="mono text-xs text-slate" style="width:1.5rem;text-align:right;">{{ loop.index }}</span>
      <span style="flex:1;font-size:.8125rem;color:#0F172A;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;">
        <a href="{{ url_for('admin.affiliate_edit', product_id=p.id) }}" style="color:inherit;text-decoration:none;">{{ p.name }}</a>
      </span>
      <span class="mono" style="font-weight:600;font-size:.875rem;color:#0F172A;">{{ p.click_count }}</span>
    </div>
    {% endfor %}
    {% else %}
    <p class="text-slate text-sm">No clicks yet.</p>
    {% endif %}
  </div>

  {# ── Top articles ── #}
  <div class="card" style="padding:1.5rem;">
    <div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Top Articles</div>
    {% if stats.top_articles %}
    {% for a in stats.top_articles %}
    <div style="display:flex;align-items:center;gap:.75rem;padding:.5rem 0;{% if not loop.last %}border-bottom:1px solid #F1F5F9;{% endif %}">
      <span class="mono text-xs text-slate" style="width:1.5rem;text-align:right;">{{ loop.index }}</span>
      <span style="flex:1;font-size:.8125rem;color:#0F172A;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;"
            title="{{ a.article_slug }}">{{ a.article_slug }}</span>
      <span class="mono" style="font-weight:600;font-size:.875rem;color:#0F172A;">{{ a.click_count }}</span>
    </div>
    {% endfor %}
    {% else %}
    <p class="text-slate text-sm">No clicks with article source yet.</p>
    {% endif %}
  </div>

</div>

{# ── Clicks by retailer ── #}
{# r.pct = bar width vs busiest retailer; r.share_pct = share of all clicks (both precomputed). #}
{% if stats.by_retailer %}
<div class="card" style="padding:1.5rem;">
  <div class="text-xs font-semibold text-slate mb-4" style="text-transform:uppercase;letter-spacing:.06em;">Clicks by Retailer</div>
  {% for r in stats.by_retailer %}
  <div style="display:flex;align-items:center;gap:1rem;margin-bottom:.75rem;">
    <span style="width:140px;font-size:.8125rem;color:#0F172A;flex-shrink:0;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;">
      {{ r.retailer or 'Unknown' }}
    </span>
    <div style="flex:1;background:#F1F5F9;border-radius:4px;height:24px;overflow:hidden;">
      <div style="width:{{ r.pct }}%;background:#1D4ED8;height:100%;border-radius:4px;min-width:2px;"></div>
    </div>
    <span class="mono" style="font-size:.8125rem;font-weight:600;width:60px;text-align:right;flex-shrink:0;">
      {{ r.click_count }} <span class="text-slate" style="font-weight:400;">({{ r.share_pct }}%)</span>
    </span>
  </div>
  {% endfor %}
</div>
{% endif %}

{% endblock %}
|
||||
255
web/src/padelnomics/admin/templates/admin/affiliate_form.html
Normal file
255
web/src/padelnomics/admin/templates/admin/affiliate_form.html
Normal file
@@ -0,0 +1,255 @@
|
||||
{# Create/edit form for an affiliate product, with a live HTMX preview.
   Context: data, editing, product_id, categories, statuses, retailers, programs.
   Fix: the Delete control was a <form> nested inside #affiliate-form — HTML
   forbids nested forms, so browsers drop the inner tag and the Delete button
   ended up SUBMITTING the edit form. It is now a formaction button on the
   outer form (which already carries the CSRF token). #}
{% extends "admin/base_admin.html" %}
{% set admin_page = "affiliate" %}

{% block title %}{% if editing %}Edit Product{% else %}New Product{% endif %} - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_head %}
<script>
/* Transliterate German umlauts, then collapse non-alphanumerics to hyphens. */
function slugify(text) {
  return text.toLowerCase()
    .replace(/[äöü]/g, c => ({'ä':'ae','ö':'oe','ü':'ue'}[c]))
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
}
document.addEventListener('DOMContentLoaded', function() {
  // Auto-derive the slug from the name until the user edits the slug manually.
  var nameInput = document.getElementById('f-name');
  var slugInput = document.getElementById('f-slug');
  if (nameInput && slugInput && !slugInput.value) {
    nameInput.addEventListener('input', function() {
      if (!slugInput.dataset.manual) {
        slugInput.value = slugify(nameInput.value);
      }
    });
    slugInput.addEventListener('input', function() {
      slugInput.dataset.manual = '1';
    });
  }

  // Toggle program-based vs manual URL fields
  function toggleProgramFields() {
    var sel = document.getElementById('f-program');
    if (!sel) return;
    var isManual = sel.value === '0' || sel.value === '';
    document.getElementById('f-product-id-row').style.display = isManual ? 'none' : '';
    document.getElementById('f-manual-url-row').style.display = isManual ? '' : 'none';
  }
  var programSel = document.getElementById('f-program');
  if (programSel) {
    programSel.addEventListener('change', toggleProgramFields);
    toggleProgramFields();
  }
});
</script>
{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
  <div>
    <a href="{{ url_for('admin.affiliate_products') }}" class="text-slate text-sm" style="text-decoration:none">← Products</a>
    <h1 class="text-2xl mt-1">{% if editing %}Edit Product{% else %}New Product{% endif %}</h1>
  </div>
</header>

{# HTMX preview trigger — outside the grid so it takes no layout space #}
<div style="display:none"
     hx-post="{{ url_for('admin.affiliate_preview') }}"
     hx-target="#product-preview"
     hx-trigger="load, input from:#affiliate-form delay:600ms"
     hx-include="#affiliate-form"
     hx-push-url="false">
</div>

<div style="display:grid;grid-template-columns:1fr 380px;gap:2rem;align-items:start" class="affiliate-form-grid">

  {# ── Left: form ── #}
  <form method="post" id="affiliate-form"
        action="{% if editing %}{{ url_for('admin.affiliate_edit', product_id=product_id) }}{% else %}{{ url_for('admin.affiliate_new') }}{% endif %}">
    <input type="hidden" name="csrf_token" value="{{ csrf_token() }}">

    <div class="card" style="padding:1.5rem;display:flex;flex-direction:column;gap:1.25rem;">

      {# Name #}
      <div>
        <label class="form-label" for="f-name">Name *</label>
        <input id="f-name" type="text" name="name" value="{{ data.get('name','') }}"
               class="form-input" placeholder="e.g. Bullpadel Vertex 04" required>
      </div>

      {# Slug #}
      <div>
        <label class="form-label" for="f-slug">Slug *</label>
        <input id="f-slug" type="text" name="slug" value="{{ data.get('slug','') }}"
               class="form-input" placeholder="e.g. bullpadel-vertex-04-amazon" required
               pattern="[a-z0-9][a-z0-9\-]*">
        <p class="form-hint">Lowercase letters, numbers, hyphens only. Include retailer to disambiguate (e.g. <code>-amazon</code>, <code>-padelnuestro</code>).</p>
      </div>

      {# Brand + Category row #}
      <div style="display:grid;grid-template-columns:1fr 1fr;gap:1rem;">
        <div>
          <label class="form-label" for="f-brand">Brand</label>
          <input id="f-brand" type="text" name="brand" value="{{ data.get('brand','') }}"
                 class="form-input" placeholder="e.g. Bullpadel">
        </div>
        <div>
          <label class="form-label" for="f-category">Category</label>
          <select id="f-category" name="category" class="form-input">
            {% for cat in categories %}
            <option value="{{ cat }}" {% if data.get('category','accessory') == cat %}selected{% endif %}>{{ cat | capitalize }}</option>
            {% endfor %}
          </select>
        </div>
      </div>

      {# Program dropdown #}
      <div>
        <label class="form-label" for="f-program">Affiliate Program</label>
        <select id="f-program" name="program_id" class="form-input">
          <option value="0" {% if not data.get('program_id') %}selected{% endif %}>Manual (custom URL)</option>
          {% for prog in programs %}
          <option value="{{ prog.id }}" {% if data.get('program_id') == prog.id %}selected{% endif %}>{{ prog.name }}</option>
          {% endfor %}
        </select>
        <p class="form-hint">Select a program to auto-build the URL, or choose Manual for a custom link.</p>
      </div>

      {# Product Identifier (shown when program selected) #}
      <div id="f-product-id-row">
        <label class="form-label" for="f-product-id">Product ID *</label>
        <input id="f-product-id" type="text" name="product_identifier"
               value="{{ data.get('product_identifier','') }}"
               class="form-input" placeholder="e.g. B0XXXXXXXXX (ASIN for Amazon)">
        <p class="form-hint">ASIN, product path, or other program-specific identifier. URL is assembled at redirect time.</p>
      </div>

      {# Manual URL (shown when Manual selected) #}
      <div id="f-manual-url-row">
        <label class="form-label" for="f-url">Affiliate URL</label>
        <input id="f-url" type="url" name="affiliate_url" value="{{ data.get('affiliate_url','') }}"
               class="form-input" placeholder="https://www.amazon.de/dp/B0XXXXX?tag=padelnomics-21">
        <p class="form-hint">Full URL with tracking params already baked in. Used as fallback if no program is set.</p>
      </div>

      {# Retailer (auto-populated from program; editable for manual products) #}
      <div>
        <label class="form-label" for="f-retailer">Retailer <span class="form-hint" style="font-weight:normal">(auto-filled from program)</span></label>
        <input id="f-retailer" type="text" name="retailer" value="{{ data.get('retailer','') }}"
               class="form-input" placeholder="e.g. Amazon, Padel Nuestro"
               list="retailers-list">
        <datalist id="retailers-list">
          {% for r in retailers %}
          <option value="{{ r }}">
          {% endfor %}
        </datalist>
      </div>

      {# Image URL #}
      <div>
        <label class="form-label" for="f-image">Image URL</label>
        <input id="f-image" type="text" name="image_url" value="{{ data.get('image_url','') }}"
               class="form-input" placeholder="/static/images/affiliate/bullpadel-vertex-04.webp">
        <p class="form-hint">Local path (recommended) or external URL.</p>
      </div>

      {# Price + Rating row #}
      <div style="display:grid;grid-template-columns:1fr 1fr;gap:1rem;">
        <div>
          <label class="form-label" for="f-price">Price (EUR)</label>
          <input id="f-price" type="number" name="price_eur" value="{{ data.get('price_eur','') }}"
                 class="form-input" placeholder="149.99" step="0.01" min="0">
        </div>
        <div>
          <label class="form-label" for="f-rating">Rating (0–5)</label>
          <input id="f-rating" type="number" name="rating" value="{{ data.get('rating','') }}"
                 class="form-input" placeholder="4.3" step="0.1" min="0" max="5">
        </div>
      </div>

      {# Description #}
      <div>
        <label class="form-label" for="f-desc">Short Description</label>
        <textarea id="f-desc" name="description" rows="3"
                  class="form-input" placeholder="One to two sentences describing the product...">{{ data.get('description','') }}</textarea>
      </div>

      {# Pros — placeholder uses &#10; so the "one per line" example actually shows two lines #}
      <div>
        <label class="form-label" for="f-pros">Pros <span class="form-hint" style="font-weight:normal">(one per line)</span></label>
        <textarea id="f-pros" name="pros" rows="4"
                  class="form-input" placeholder="Carbon frame for maximum power&#10;Diamond shape for aggressive players">{{ data.get('pros_text', data.get('pros','')) }}</textarea>
      </div>

      {# Cons #}
      <div>
        <label class="form-label" for="f-cons">Cons <span class="form-hint" style="font-weight:normal">(one per line)</span></label>
        <textarea id="f-cons" name="cons" rows="3"
                  class="form-input" placeholder="Only for advanced players">{{ data.get('cons_text', data.get('cons','')) }}</textarea>
      </div>

      {# CTA Label #}
      <div>
        <label class="form-label" for="f-cta">CTA Label</label>
        <input id="f-cta" type="text" name="cta_label" value="{{ data.get('cta_label','') }}"
               class="form-input" placeholder='Leave empty for default "Zum Angebot"'>
      </div>

      {# Status + Language + Sort #}
      <div style="display:grid;grid-template-columns:1fr 1fr 1fr;gap:1rem;">
        <div>
          <label class="form-label" for="f-status">Status</label>
          <select id="f-status" name="status" class="form-input">
            {% for s in statuses %}
            <option value="{{ s }}" {% if data.get('status','draft') == s %}selected{% endif %}>{{ s | capitalize }}</option>
            {% endfor %}
          </select>
        </div>
        <div>
          <label class="form-label" for="f-lang">Language</label>
          <select id="f-lang" name="language" class="form-input">
            <option value="de" {% if data.get('language','de') == 'de' %}selected{% endif %}>DE</option>
            <option value="en" {% if data.get('language','de') == 'en' %}selected{% endif %}>EN</option>
          </select>
        </div>
        <div>
          <label class="form-label" for="f-sort">Sort Order</label>
          <input id="f-sort" type="number" name="sort_order" value="{{ data.get('sort_order', 0) }}"
                 class="form-input" min="0">
        </div>
      </div>

      {# Actions #}
      <div class="flex gap-3 justify-between" style="margin-top:.5rem">
        <div class="flex gap-2">
          <button type="submit" class="btn" formaction="{% if editing %}{{ url_for('admin.affiliate_edit', product_id=product_id) }}{% else %}{{ url_for('admin.affiliate_new') }}{% endif %}">
            {% if editing %}Save Changes{% else %}Create Product{% endif %}
          </button>
          <a href="{{ url_for('admin.affiliate_products') }}" class="btn-outline">Cancel</a>
        </div>
        {% if editing %}
        {# formaction routes this submit (with the form's csrf_token) to the delete
           endpoint; formnovalidate so empty required fields can't block deletion. #}
        <button type="submit" class="btn-outline"
                formaction="{{ url_for('admin.affiliate_delete', product_id=product_id) }}"
                formnovalidate
                onclick="return confirm('Delete this product? This cannot be undone.')">Delete</button>
        {% endif %}
      </div>

    </div>
  </form>

  {# ── Right: live preview ── #}
  <div style="position:sticky;top:1.5rem;">
    <div class="text-xs font-semibold text-slate mb-2" style="text-transform:uppercase;letter-spacing:.06em;">Preview</div>
    <div id="product-preview" style="border:1px solid #E2E8F0;border-radius:12px;padding:1rem;background:#F8FAFC;min-height:180px;">
      <p style="color:#94A3B8;font-size:.875rem;text-align:center;margin-top:2rem;">Loading preview…</p>
    </div>
  </div>

</div>

<style>
@media (max-width: 900px) {
  .affiliate-form-grid { grid-template-columns: 1fr !important; }
}
</style>
{% endblock %}
|
||||
@@ -0,0 +1,83 @@
|
||||
{# Admin list of affiliate products with live HTMX filtering.
   Context: q, categories/category, retailers/retailer_filter, statuses/status_filter. #}
{% extends "admin/base_admin.html" %}
{% set admin_page = "affiliate" %}

{% block title %}Affiliate Products - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
  <h1 class="text-2xl">Affiliate Products</h1>
  <a href="{{ url_for('admin.affiliate_new') }}" class="btn btn-sm">+ New Product</a>
</header>

{# Filters #}
<div class="card mb-6" style="padding:1rem 1.25rem">
  {# Any change (or debounced typing) re-fetches filtered rows into #aff-results. #}
  <form class="flex flex-wrap gap-3 items-end"
        hx-get="{{ url_for('admin.affiliate_results') }}"
        hx-target="#aff-results"
        hx-trigger="change, input delay:300ms"
        hx-indicator="#aff-loading">

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Search</label>
      <input type="text" name="q" value="{{ q }}" placeholder="Name or brand..."
             class="form-input" style="min-width:200px">
    </div>

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Category</label>
      <select name="category" class="form-input" style="min-width:120px">
        <option value="">All</option>
        {% for cat in categories %}
        <option value="{{ cat }}" {% if cat == category %}selected{% endif %}>{{ cat | capitalize }}</option>
        {% endfor %}
      </select>
    </div>

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Retailer</label>
      <select name="retailer" class="form-input" style="min-width:140px">
        <option value="">All</option>
        {% for r in retailers %}
        <option value="{{ r }}" {% if r == retailer_filter %}selected{% endif %}>{{ r }}</option>
        {% endfor %}
      </select>
    </div>

    <div>
      <label class="text-xs font-semibold text-slate block mb-1">Status</label>
      <select name="status" class="form-input" style="min-width:110px">
        <option value="">All</option>
        {% for s in statuses %}
        <option value="{{ s }}" {% if s == status_filter %}selected{% endif %}>{{ s | capitalize }}</option>
        {% endfor %}
      </select>
    </div>

    {# Spinner shown by HTMX while a filter request is in flight. #}
    <svg id="aff-loading" class="htmx-indicator search-spinner" width="14" height="14" viewBox="0 0 24 24" fill="none" aria-hidden="true">
      <circle cx="12" cy="12" r="10" stroke="#CBD5E1" stroke-width="3"/>
      <path d="M12 2a10 10 0 0 1 10 10" stroke="#0EA5E9" stroke-width="3" stroke-linecap="round"/>
    </svg>
  </form>
</div>

{# Results #}
<div id="aff-results">
  <table class="table">
    <thead>
      <tr>
        <th>Name</th>
        <th>Brand</th>
        <th>Retailer</th>
        <th>Category</th>
        <th>Price</th>
        <th>Status</th>
        <th class="text-right">Clicks</th>
        <th class="text-right">Actions</th>
      </tr>
    </thead>
    <tbody>
      {% include "admin/partials/affiliate_results.html" %}
    </tbody>
  </table>
</div>
{% endblock %}
|
||||
@@ -0,0 +1,134 @@
|
||||
{# Create/edit form for an affiliate program (URL template + tracking tag).
   Context: data, editing, program_id, program_statuses.
   Fix: the Delete control was a <form> nested inside #program-form — HTML
   forbids nested forms, so browsers drop the inner tag and the Delete button
   ended up SUBMITTING the edit form. It is now a formaction button on the
   outer form (which already carries the CSRF token). #}
{% extends "admin/base_admin.html" %}
{% set admin_page = "affiliate_programs" %}

{% block title %}{% if editing %}Edit Program{% else %}New Program{% endif %} - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_head %}
<script>
/* Transliterate German umlauts, then collapse non-alphanumerics to hyphens. */
function slugify(text) {
  return text.toLowerCase()
    .replace(/[äöü]/g, c => ({'ä':'ae','ö':'oe','ü':'ue'}[c]))
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
}
document.addEventListener('DOMContentLoaded', function() {
  // Auto-derive the slug from the name until the user edits the slug manually.
  var nameInput = document.getElementById('f-name');
  var slugInput = document.getElementById('f-slug');
  if (nameInput && slugInput && !slugInput.value) {
    nameInput.addEventListener('input', function() {
      if (!slugInput.dataset.manual) {
        slugInput.value = slugify(nameInput.value);
      }
    });
    slugInput.addEventListener('input', function() {
      slugInput.dataset.manual = '1';
    });
  }
});
</script>
{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
  <div>
    <a href="{{ url_for('admin.affiliate_programs') }}" class="text-slate text-sm" style="text-decoration:none">← Programs</a>
    <h1 class="text-2xl mt-1">{% if editing %}Edit Program{% else %}New Program{% endif %}</h1>
  </div>
</header>

<div style="max-width:600px">
  <form method="post" id="program-form"
        action="{% if editing %}{{ url_for('admin.affiliate_program_edit', program_id=program_id) }}{% else %}{{ url_for('admin.affiliate_program_new') }}{% endif %}">
    <input type="hidden" name="csrf_token" value="{{ csrf_token() }}">

    <div class="card" style="padding:1.5rem;display:flex;flex-direction:column;gap:1.25rem;">

      {# Name #}
      <div>
        <label class="form-label" for="f-name">Name *</label>
        <input id="f-name" type="text" name="name" value="{{ data.get('name','') }}"
               class="form-input" placeholder="e.g. Amazon, Padel Nuestro" required>
      </div>

      {# Slug #}
      <div>
        <label class="form-label" for="f-slug">Slug *</label>
        <input id="f-slug" type="text" name="slug" value="{{ data.get('slug','') }}"
               class="form-input" placeholder="e.g. amazon, padel-nuestro" required
               pattern="[a-z0-9][a-z0-9\-]*">
        <p class="form-hint">Lowercase letters, numbers, hyphens only.</p>
      </div>

      {# URL Template #}
      <div>
        <label class="form-label" for="f-template">URL Template *</label>
        <input id="f-template" type="text" name="url_template" value="{{ data.get('url_template','') }}"
               class="form-input" placeholder="https://www.amazon.de/dp/{product_id}?tag={tag}" required>
        <p class="form-hint">
          Use <code>{product_id}</code> for the ASIN/product path and <code>{tag}</code> for the tracking tag.<br>
          Example: <code>https://www.amazon.de/dp/{product_id}?tag={tag}</code>
        </p>
      </div>

      {# Tracking Tag + Commission row #}
      <div style="display:grid;grid-template-columns:1fr 1fr;gap:1rem;">
        <div>
          <label class="form-label" for="f-tag">Tracking Tag</label>
          <input id="f-tag" type="text" name="tracking_tag" value="{{ data.get('tracking_tag','') }}"
                 class="form-input" placeholder="e.g. padelnomics-21">
        </div>
        <div>
          <label class="form-label" for="f-commission">Commission %</label>
          <input id="f-commission" type="number" name="commission_pct" value="{{ data.get('commission_pct', 0) }}"
                 class="form-input" placeholder="3" step="0.1" min="0" max="100">
          <p class="form-hint">Used for revenue estimates (e.g. 3 = 3%).</p>
        </div>
      </div>

      {# Homepage URL #}
      <div>
        <label class="form-label" for="f-homepage">Homepage URL</label>
        <input id="f-homepage" type="url" name="homepage_url" value="{{ data.get('homepage_url','') }}"
               class="form-input" placeholder="https://www.amazon.de">
        <p class="form-hint">Shown as a link in the programs list.</p>
      </div>

      {# Status #}
      <div>
        <label class="form-label" for="f-status">Status</label>
        <select id="f-status" name="status" class="form-input">
          {% for s in program_statuses %}
          <option value="{{ s }}" {% if data.get('status','active') == s %}selected{% endif %}>{{ s | capitalize }}</option>
          {% endfor %}
        </select>
        <p class="form-hint">Inactive programs are hidden from the product form dropdown.</p>
      </div>

      {# Notes #}
      <div>
        <label class="form-label" for="f-notes">Notes <span class="form-hint" style="font-weight:normal">(internal)</span></label>
        <textarea id="f-notes" name="notes" rows="3"
                  class="form-input" placeholder="Login URL, account ID, affiliate dashboard link...">{{ data.get('notes','') }}</textarea>
      </div>

      {# Actions #}
      <div class="flex gap-3 justify-between" style="margin-top:.5rem">
        <div class="flex gap-2">
          <button type="submit" class="btn">
            {% if editing %}Save Changes{% else %}Create Program{% endif %}
          </button>
          <a href="{{ url_for('admin.affiliate_programs') }}" class="btn-outline">Cancel</a>
        </div>
        {% if editing %}
        {# formaction routes this submit (with the form's csrf_token) to the delete
           endpoint; formnovalidate so empty required fields can't block deletion. #}
        <button type="submit" class="btn-outline"
                formaction="{{ url_for('admin.affiliate_program_delete', program_id=program_id) }}"
                formnovalidate
                onclick="return confirm('Delete this program? Blocked if products reference it.')">Delete</button>
        {% endif %}
      </div>

    </div>
  </form>
</div>
{% endblock %}
|
||||
@@ -0,0 +1,30 @@
|
||||
{# Admin list of affiliate programs; rows come from the included partial. #}
{% extends "admin/base_admin.html" %}
{% set admin_page = "affiliate_programs" %}

{% block title %}Affiliate Programs - Admin - {{ config.APP_NAME }}{% endblock %}

{% block admin_content %}
<header class="flex justify-between items-center mb-6">
  <h1 class="text-2xl">Affiliate Programs</h1>
  <a href="{{ url_for('admin.affiliate_program_new') }}" class="btn btn-sm">+ New Program</a>
</header>

<div id="prog-results">
  <table class="table">
    <thead>
      <tr>
        <th>Name</th>
        <th>Slug</th>
        <th>Tracking Tag</th>
        <th class="text-right">Commission</th>
        <th class="text-right">Products</th>
        <th>Status</th>
        <th class="text-right">Actions</th>
      </tr>
    </thead>
    <tbody>
      {% include "admin/partials/affiliate_program_results.html" %}
    </tbody>
  </table>
</div>
{% endblock %}
|
||||
@@ -40,8 +40,10 @@
|
||||
.admin-subnav {
|
||||
display: flex; align-items: stretch; padding: 0 2rem;
|
||||
background: #fff; border-bottom: 1px solid #E2E8F0;
|
||||
flex-shrink: 0; overflow-x: auto; gap: 0;
|
||||
flex-shrink: 0; overflow-x: auto; overflow-y: hidden; gap: 0;
|
||||
scrollbar-width: none;
|
||||
}
|
||||
.admin-subnav::-webkit-scrollbar { display: none; }
|
||||
.admin-subnav a {
|
||||
display: flex; align-items: center; gap: 5px;
|
||||
padding: 0 1px; margin: 0 13px 0 0; height: 42px;
|
||||
@@ -99,6 +101,7 @@
|
||||
'suppliers': 'suppliers',
|
||||
'articles': 'content', 'scenarios': 'content', 'templates': 'content', 'pseo': 'content',
|
||||
'emails': 'email', 'inbox': 'email', 'compose': 'email', 'gallery': 'email', 'audiences': 'email', 'outreach': 'email',
|
||||
'affiliate': 'affiliate', 'affiliate_dashboard': 'affiliate', 'affiliate_programs': 'affiliate',
|
||||
'billing': 'billing',
|
||||
'seo': 'analytics',
|
||||
'pipeline': 'pipeline',
|
||||
@@ -149,6 +152,11 @@
|
||||
Billing
|
||||
</a>
|
||||
|
||||
<a href="{{ url_for('admin.affiliate_dashboard') }}" class="{% if active_section == 'affiliate' %}active{% endif %}">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" d="M13.5 21v-7.5a.75.75 0 0 1 .75-.75h3a.75.75 0 0 1 .75.75V21m-4.5 0H2.36m11.14 0H18m0 0h3.64m-1.39 0V9.349M3.75 21V9.349m0 0a3.001 3.001 0 0 0 3.75-.615A2.993 2.993 0 0 0 9.75 9.75c.896 0 1.7-.393 2.25-1.016a2.993 2.993 0 0 0 2.25 1.016 2.993 2.993 0 0 0 2.25-1.016 3.001 3.001 0 0 0 3.75.614m-16.5 0a3.004 3.004 0 0 1-.621-4.72l1.189-1.19A1.5 1.5 0 0 1 5.378 3h13.243a1.5 1.5 0 0 1 1.06.44l1.19 1.189a3 3 0 0 1-.621 4.72M6.75 18h3.75a.75.75 0 0 0 .75-.75V13.5a.75.75 0 0 0-.75-.75H6.75a.75.75 0 0 0-.75.75v3.75c0 .414.336.75.75.75Z"/></svg>
|
||||
Affiliate
|
||||
</a>
|
||||
|
||||
<a href="{{ url_for('admin.seo') }}" class="{% if active_section == 'analytics' %}active{% endif %}">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" d="M2.25 18 9 11.25l4.306 4.306a11.95 11.95 0 0 1 5.814-5.518l2.74-1.22m0 0-5.94-2.281m5.94 2.28-2.28 5.941"/></svg>
|
||||
Analytics
|
||||
@@ -196,6 +204,12 @@
|
||||
<a href="{{ url_for('admin.audiences') }}" class="{% if admin_page == 'audiences' %}active{% endif %}">Audiences</a>
|
||||
<a href="{{ url_for('admin.outreach') }}" class="{% if admin_page == 'outreach' %}active{% endif %}">Outreach</a>
|
||||
</nav>
|
||||
{% elif active_section == 'affiliate' %}
|
||||
<nav class="admin-subnav">
|
||||
<a href="{{ url_for('admin.affiliate_dashboard') }}" class="{% if admin_page == 'affiliate_dashboard' %}active{% endif %}">Dashboard</a>
|
||||
<a href="{{ url_for('admin.affiliate_products') }}" class="{% if admin_page == 'affiliate' %}active{% endif %}">Products</a>
|
||||
<a href="{{ url_for('admin.affiliate_programs') }}" class="{% if admin_page == 'affiliate_programs' %}active{% endif %}">Programs</a>
|
||||
</nav>
|
||||
{% elif active_section == 'system' %}
|
||||
<nav class="admin-subnav">
|
||||
<a href="{{ url_for('admin.users') }}" class="{% if admin_page == 'users' %}active{% endif %}">Users</a>
|
||||
|
||||
@@ -3,6 +3,19 @@
|
||||
|
||||
{% block title %}Admin Dashboard - {{ config.APP_NAME }}{% endblock %}
|
||||
|
||||
{% block admin_head %}
|
||||
<style>
|
||||
.funnel-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
gap: 0.75rem;
|
||||
}
|
||||
@media (min-width: 768px) {
|
||||
.funnel-grid { grid-template-columns: repeat(5, 1fr); }
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block admin_content %}
|
||||
<header class="flex justify-between items-center mb-8">
|
||||
<div>
|
||||
@@ -47,7 +60,7 @@
|
||||
|
||||
<!-- Lead Funnel -->
|
||||
<p class="text-xs font-semibold text-slate uppercase tracking-wider mb-2">Lead Funnel</p>
|
||||
<div style="display:grid;grid-template-columns:repeat(5,1fr);gap:0.75rem" class="mb-8">
|
||||
<div class="funnel-grid mb-8">
|
||||
<div class="card text-center border-l-4 border-l-electric" style="padding:0.75rem">
|
||||
<p class="text-xs text-slate">Planner Users</p>
|
||||
<p class="text-xl font-bold text-navy">{{ stats.planner_users }}</p>
|
||||
@@ -72,7 +85,7 @@
|
||||
|
||||
<!-- Supplier Stats -->
|
||||
<p class="text-xs font-semibold text-slate uppercase tracking-wider mb-2">Supplier Funnel</p>
|
||||
<div style="display:grid;grid-template-columns:repeat(5,1fr);gap:0.75rem" class="mb-8">
|
||||
<div class="funnel-grid mb-8">
|
||||
<div class="card text-center border-l-4 border-l-accent" style="padding:0.75rem">
|
||||
<p class="text-xs text-slate">Claimed Suppliers</p>
|
||||
<p class="text-xl font-bold text-navy">{{ stats.suppliers_claimed }}</p>
|
||||
|
||||
@@ -2,13 +2,30 @@
|
||||
{% set admin_page = "outreach" %}
|
||||
{% block title %}Outreach Pipeline - Admin - {{ config.APP_NAME }}{% endblock %}
|
||||
|
||||
{% block admin_head %}
|
||||
<style>
|
||||
.pipeline-status-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
gap: 0.75rem;
|
||||
margin-bottom: 1.5rem;
|
||||
}
|
||||
@media (min-width: 640px) {
|
||||
.pipeline-status-grid { grid-template-columns: repeat(3, 1fr); }
|
||||
}
|
||||
@media (min-width: 1024px) {
|
||||
.pipeline-status-grid { grid-template-columns: repeat(6, 1fr); }
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block admin_content %}
|
||||
<header class="flex justify-between items-center mb-6">
|
||||
<div>
|
||||
<h1 class="text-2xl">Outreach</h1>
|
||||
<p class="text-sm text-slate mt-1">
|
||||
{{ pipeline.total }} supplier{{ 's' if pipeline.total != 1 else '' }} in pipeline
|
||||
· Sending domain: <span class="mono text-xs">hello.padelnomics.io</span>
|
||||
· Sending from: <span class="mono text-xs">{{ outreach_email }}</span>
|
||||
</p>
|
||||
</div>
|
||||
<div class="flex gap-2">
|
||||
@@ -18,7 +35,7 @@
|
||||
</header>
|
||||
|
||||
<!-- Pipeline cards -->
|
||||
<div style="display:grid;grid-template-columns:repeat(6,1fr);gap:0.75rem;margin-bottom:1.5rem">
|
||||
<div class="pipeline-status-grid">
|
||||
{% set status_colors = {
|
||||
'prospect': '#E2E8F0',
|
||||
'contacted': '#DBEAFE',
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
{% if programs %}
|
||||
{% for prog in programs %}
|
||||
<tr id="prog-{{ prog.id }}">
|
||||
<td style="font-weight:500">
|
||||
{% if prog.homepage_url %}
|
||||
<a href="{{ prog.homepage_url }}" target="_blank" rel="noopener" style="color:#0F172A;text-decoration:none">{{ prog.name }}</a>
|
||||
{% else %}
|
||||
{{ prog.name }}
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="mono text-slate">{{ prog.slug }}</td>
|
||||
<td class="mono text-slate">{{ prog.tracking_tag or '—' }}</td>
|
||||
<td class="mono text-right">
|
||||
{% if prog.commission_pct %}{{ "%.0f" | format(prog.commission_pct) }}%{% else %}—{% endif %}
|
||||
</td>
|
||||
<td class="mono text-right">{{ prog.product_count }}</td>
|
||||
<td>
|
||||
<span class="badge {% if prog.status == 'active' %}badge-success{% else %}badge{% endif %}">
|
||||
{{ prog.status }}
|
||||
</span>
|
||||
</td>
|
||||
<td class="text-right" style="white-space:nowrap">
|
||||
<a href="{{ url_for('admin.affiliate_program_edit', program_id=prog.id) }}" class="btn-outline btn-sm">Edit</a>
|
||||
<form method="post" action="{{ url_for('admin.affiliate_program_delete', program_id=prog.id) }}" style="display:inline">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button type="submit" class="btn-outline btn-sm"
|
||||
onclick="return confirm('Delete {{ prog.name }}? This is blocked if products reference it.')">Delete</button>
|
||||
</form>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<tr>
|
||||
<td colspan="7" class="text-slate" style="text-align:center;padding:2rem;">No programs found.</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
@@ -0,0 +1,9 @@
|
||||
{% if products %}
|
||||
{% for product in products %}
|
||||
{% include "admin/partials/affiliate_row.html" %}
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<tr>
|
||||
<td colspan="8" class="text-slate" style="text-align:center;padding:2rem;">No products found.</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
@@ -0,0 +1,29 @@
|
||||
<tr id="aff-{{ product.id }}">
|
||||
<td style="max-width:240px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap" title="{{ product.name }}">
|
||||
<a href="{{ url_for('admin.affiliate_edit', product_id=product.id) }}" style="color:#0F172A;text-decoration:none;font-weight:500;">{{ product.name }}</a>
|
||||
</td>
|
||||
<td class="text-slate">{{ product.brand or '—' }}</td>
|
||||
<td class="text-slate">{{ product.retailer or '—' }}</td>
|
||||
<td class="text-slate">{{ product.category }}</td>
|
||||
<td class="mono">
|
||||
{% if product.price_cents %}{{ "%.0f" | format(product.price_cents / 100) }}€{% else %}—{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
<button hx-post="{{ url_for('admin.affiliate_toggle', product_id=product.id) }}"
|
||||
hx-target="#aff-{{ product.id }}" hx-swap="outerHTML"
|
||||
hx-headers='{"X-CSRF-Token": "{{ csrf_token() }}"}'
|
||||
class="badge {% if product.status == 'active' %}badge-success{% elif product.status == 'draft' %}badge-warning{% else %}badge{% endif %}"
|
||||
style="cursor:pointer;border:none;">
|
||||
{{ product.status }}
|
||||
</button>
|
||||
</td>
|
||||
<td class="mono text-right">{{ product.click_count or 0 }}</td>
|
||||
<td class="text-right" style="white-space:nowrap">
|
||||
<a href="{{ url_for('admin.affiliate_edit', product_id=product.id) }}" class="btn-outline btn-sm">Edit</a>
|
||||
<form method="post" action="{{ url_for('admin.affiliate_delete', product_id=product.id) }}" style="display:inline">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button type="submit" class="btn-outline btn-sm"
|
||||
onclick="return confirm('Delete {{ product.name }}?')">Delete</button>
|
||||
</form>
|
||||
</td>
|
||||
</tr>
|
||||
@@ -6,15 +6,19 @@
|
||||
<td>
|
||||
{% for v in g.variants %}
|
||||
<div class="variant-row">
|
||||
<a href="{{ url_for('admin.article_edit', article_id=v.id) }}"
|
||||
{% if v.display_status == 'live' %}
|
||||
<a href="/{{ v.language or 'en' }}{{ v.url_path }}" target="_blank"
|
||||
class="lang-chip lang-chip-{{ v.display_status }}"
|
||||
title="Edit {{ v.language|upper }} variant">
|
||||
title="View live {{ v.language|upper }} article">
|
||||
<span class="dot"></span>{{ v.language | upper }}
|
||||
{% if v.noindex %}<span class="noindex-tag">noindex</span>{% endif %}
|
||||
</a>
|
||||
{% if v.display_status == 'live' %}
|
||||
<a href="/{{ v.language or 'en' }}{{ v.url_path }}" target="_blank"
|
||||
class="btn-outline btn-sm view-lang-btn" title="View live article">View ↗</a>
|
||||
{% else %}
|
||||
<span class="lang-chip lang-chip-{{ v.display_status }}"
|
||||
title="{{ v.display_status | capitalize }}">
|
||||
<span class="dot"></span>{{ v.language | upper }}
|
||||
{% if v.noindex %}<span class="noindex-tag">noindex</span>{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
<a href="{{ url_for('admin.article_edit', article_id=v.id) }}"
|
||||
class="btn-outline btn-sm view-lang-btn">Edit</a>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{% if emails %}
|
||||
<div class="card">
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -38,6 +39,7 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="card text-center" style="padding:2rem">
|
||||
|
||||
@@ -25,6 +25,7 @@
|
||||
|
||||
{% if leads %}
|
||||
<div class="card">
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -58,6 +59,7 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Pagination -->
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{% if suppliers %}
|
||||
<div class="card">
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -19,6 +20,7 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="card text-center" style="padding:2rem">
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
<!-- Pipeline Overview Tab: extraction status, serving freshness, landing zone -->
|
||||
<!-- Pipeline Overview Tab: extraction status, serving freshness, landing zone
|
||||
Self-polls every 5s while any extraction task is pending, stops when quiet. -->
|
||||
|
||||
<div id="pipeline-overview-content"
|
||||
hx-get="{{ url_for('pipeline.pipeline_overview') }}"
|
||||
hx-target="this"
|
||||
hx-swap="outerHTML"
|
||||
{% if any_running %}hx-trigger="every 5s"{% endif %}>
|
||||
|
||||
<!-- Extraction Status Grid -->
|
||||
<div class="card mb-4">
|
||||
@@ -26,12 +33,14 @@
|
||||
{% if stale %}
|
||||
<span class="badge-warning" style="font-size:10px;padding:1px 6px;margin-left:auto">stale</span>
|
||||
{% endif %}
|
||||
<form method="post" action="{{ url_for('pipeline.pipeline_trigger_extract') }}" class="m-0 ml-auto">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<input type="hidden" name="extractor" value="{{ wf.name }}">
|
||||
<button type="button" class="btn btn-sm" style="padding:2px 8px;font-size:11px"
|
||||
onclick="confirmAction('Run {{ wf.name }} extractor?', this.closest('form'))">Run</button>
|
||||
</form>
|
||||
<button type="button"
|
||||
class="btn btn-sm ml-auto"
|
||||
style="padding:2px 8px;font-size:11px"
|
||||
hx-post="{{ url_for('pipeline.pipeline_trigger_extract') }}"
|
||||
hx-target="#pipeline-overview-content"
|
||||
hx-swap="outerHTML"
|
||||
hx-vals='{"extractor": "{{ wf.name }}", "csrf_token": "{{ csrf_token() }}"}'
|
||||
onclick="if (!confirm('Run {{ wf.name }} extractor?')) return false;">Run</button>
|
||||
</div>
|
||||
<p class="text-xs text-slate">{{ wf.schedule_label }}</p>
|
||||
{% if run %}
|
||||
@@ -57,7 +66,7 @@
|
||||
</div>
|
||||
|
||||
<!-- Two-column row: Serving Freshness + Landing Zone -->
|
||||
<div style="display:grid;grid-template-columns:1fr 1fr;gap:1rem">
|
||||
<div class="pipeline-two-col">
|
||||
|
||||
<!-- Serving Freshness -->
|
||||
<div class="card">
|
||||
@@ -68,6 +77,7 @@
|
||||
</p>
|
||||
{% endif %}
|
||||
{% if serving_tables %}
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table" style="font-size:0.8125rem">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -86,6 +96,7 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-slate">No serving tables found — run the pipeline first.</p>
|
||||
{% endif %}
|
||||
@@ -99,6 +110,7 @@
|
||||
</span>
|
||||
</p>
|
||||
{% if landing_stats %}
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table" style="font-size:0.8125rem">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -119,6 +131,7 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-slate">
|
||||
Landing zone empty or not found at <code>data/landing</code>.
|
||||
@@ -127,3 +140,5 @@
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
</div>{# end #pipeline-overview-content #}
|
||||
|
||||
@@ -0,0 +1,197 @@
|
||||
<!-- Pipeline Transform Tab: SQLMesh + export status, run history
|
||||
Self-polls every 5s while any transform/export task is pending. -->
|
||||
|
||||
<div id="pipeline-transform-content"
|
||||
hx-get="{{ url_for('pipeline.pipeline_transform') }}"
|
||||
hx-target="this"
|
||||
hx-swap="outerHTML"
|
||||
{% if any_running %}hx-trigger="every 5s"{% endif %}>
|
||||
|
||||
<!-- Status Cards: Transform + Export -->
|
||||
<div class="pipeline-two-col mb-4">
|
||||
|
||||
<!-- SQLMesh Transform -->
|
||||
{% set tx = latest['run_transform'] %}
|
||||
<div class="card">
|
||||
<p class="card-header">SQLMesh Transform</p>
|
||||
<div class="flex items-center gap-2 mb-3">
|
||||
{% if tx is none %}
|
||||
<span class="status-dot pending"></span>
|
||||
<span class="text-sm text-slate">Never run</span>
|
||||
{% elif tx.status == 'pending' %}
|
||||
<span class="status-dot running"></span>
|
||||
<span class="text-sm text-slate">Running…</span>
|
||||
{% elif tx.status == 'complete' %}
|
||||
<span class="status-dot ok"></span>
|
||||
<span class="text-sm text-slate">Complete</span>
|
||||
{% else %}
|
||||
<span class="status-dot failed"></span>
|
||||
<span class="text-sm text-danger">Failed</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if tx %}
|
||||
<p class="text-xs text-slate mono">
|
||||
Started: {{ (tx.created_at or '')[:19] or '—' }}
|
||||
</p>
|
||||
{% if tx.completed_at %}
|
||||
<p class="text-xs text-slate mono">
|
||||
Finished: {{ tx.completed_at[:19] }}
|
||||
</p>
|
||||
{% endif %}
|
||||
{% if tx.status == 'failed' and tx.error %}
|
||||
<details class="mt-2">
|
||||
<summary class="text-xs text-danger cursor-pointer">Error</summary>
|
||||
<pre class="text-xs mt-1 p-2 bg-gray-50 rounded overflow-auto" style="max-height:8rem;white-space:pre-wrap">{{ tx.error[:400] }}</pre>
|
||||
</details>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
<div class="mt-3">
|
||||
<button type="button"
|
||||
class="btn btn-sm"
|
||||
{% if any_running %}disabled{% endif %}
|
||||
hx-post="{{ url_for('pipeline.pipeline_trigger_transform') }}"
|
||||
hx-target="#pipeline-transform-content"
|
||||
hx-swap="outerHTML"
|
||||
hx-vals='{"step": "transform", "csrf_token": "{{ csrf_token() }}"}'
|
||||
onclick="if (!confirm('Run SQLMesh transform (prod --auto-apply)?')) return false;">
|
||||
Run Transform
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Export Serving -->
|
||||
{% set ex = latest['run_export'] %}
|
||||
<div class="card">
|
||||
<p class="card-header">Export Serving</p>
|
||||
<div class="flex items-center gap-2 mb-3">
|
||||
{% if ex is none %}
|
||||
<span class="status-dot pending"></span>
|
||||
<span class="text-sm text-slate">Never run</span>
|
||||
{% elif ex.status == 'pending' %}
|
||||
<span class="status-dot running"></span>
|
||||
<span class="text-sm text-slate">Running…</span>
|
||||
{% elif ex.status == 'complete' %}
|
||||
<span class="status-dot ok"></span>
|
||||
<span class="text-sm text-slate">Complete</span>
|
||||
{% else %}
|
||||
<span class="status-dot failed"></span>
|
||||
<span class="text-sm text-danger">Failed</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if ex %}
|
||||
<p class="text-xs text-slate mono">
|
||||
Started: {{ (ex.created_at or '')[:19] or '—' }}
|
||||
</p>
|
||||
{% if ex.completed_at %}
|
||||
<p class="text-xs text-slate mono">
|
||||
Finished: {{ ex.completed_at[:19] }}
|
||||
</p>
|
||||
{% endif %}
|
||||
{% if serving_meta %}
|
||||
<p class="text-xs text-slate mt-1">
|
||||
Last export: <span class="font-semibold mono">{{ (serving_meta.exported_at_utc or '')[:19].replace('T', ' ') or '—' }}</span>
|
||||
</p>
|
||||
{% endif %}
|
||||
{% if ex.status == 'failed' and ex.error %}
|
||||
<details class="mt-2">
|
||||
<summary class="text-xs text-danger cursor-pointer">Error</summary>
|
||||
<pre class="text-xs mt-1 p-2 bg-gray-50 rounded overflow-auto" style="max-height:8rem;white-space:pre-wrap">{{ ex.error[:400] }}</pre>
|
||||
</details>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
<div class="mt-3">
|
||||
<button type="button"
|
||||
class="btn btn-sm"
|
||||
{% if any_running %}disabled{% endif %}
|
||||
hx-post="{{ url_for('pipeline.pipeline_trigger_transform') }}"
|
||||
hx-target="#pipeline-transform-content"
|
||||
hx-swap="outerHTML"
|
||||
hx-vals='{"step": "export", "csrf_token": "{{ csrf_token() }}"}'
|
||||
onclick="if (!confirm('Export serving tables (lakehouse → analytics.duckdb)?')) return false;">
|
||||
Run Export
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<!-- Run Full Pipeline -->
|
||||
{% set pl = latest['run_pipeline'] %}
|
||||
<div class="card mb-4">
|
||||
<div class="flex items-center justify-between flex-wrap gap-3">
|
||||
<div>
|
||||
<p class="font-semibold text-navy text-sm">Full Pipeline</p>
|
||||
<p class="text-xs text-slate mt-1">Runs extract → transform → export sequentially</p>
|
||||
{% if pl %}
|
||||
<p class="text-xs text-slate mono mt-1">
|
||||
Last: {{ (pl.created_at or '')[:19] or '—' }}
|
||||
{% if pl.status == 'complete' %}<span class="badge-success ml-2">Complete</span>{% endif %}
|
||||
{% if pl.status == 'pending' %}<span class="badge-warning ml-2">Running…</span>{% endif %}
|
||||
{% if pl.status == 'failed' %}<span class="badge-danger ml-2">Failed</span>{% endif %}
|
||||
</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
<button type="button"
|
||||
class="btn btn-sm"
|
||||
{% if any_running %}disabled{% endif %}
|
||||
hx-post="{{ url_for('pipeline.pipeline_trigger_transform') }}"
|
||||
hx-target="#pipeline-transform-content"
|
||||
hx-swap="outerHTML"
|
||||
hx-vals='{"step": "pipeline", "csrf_token": "{{ csrf_token() }}"}'
|
||||
onclick="if (!confirm('Run full ELT pipeline (extract → transform → export)?')) return false;">
|
||||
Run Full Pipeline
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Recent Runs -->
|
||||
<div class="card">
|
||||
<p class="card-header">Recent Runs</p>
|
||||
{% if history %}
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table" style="font-size:0.8125rem">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>#</th>
|
||||
<th>Step</th>
|
||||
<th>Started</th>
|
||||
<th>Duration</th>
|
||||
<th>Status</th>
|
||||
<th>Error</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in history %}
|
||||
<tr>
|
||||
<td class="text-xs text-slate">{{ row.id }}</td>
|
||||
<td class="mono text-xs">{{ row.task_name | replace('run_', '') }}</td>
|
||||
<td class="mono text-xs text-slate">{{ (row.created_at or '')[:19] or '—' }}</td>
|
||||
<td class="mono text-xs text-slate">{{ row.duration or '—' }}</td>
|
||||
<td>
|
||||
{% if row.status == 'complete' %}
|
||||
<span class="badge-success">Complete</span>
|
||||
{% elif row.status == 'failed' %}
|
||||
<span class="badge-danger">Failed</span>
|
||||
{% else %}
|
||||
<span class="badge-warning">Running…</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
{% if row.error_short %}
|
||||
<details>
|
||||
<summary class="text-xs text-danger cursor-pointer">Error</summary>
|
||||
<pre class="text-xs mt-1 p-2 bg-gray-50 rounded overflow-auto" style="max-width:24rem;white-space:pre-wrap">{{ row.error_short }}</pre>
|
||||
</details>
|
||||
{% else %}—{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-slate">No transform runs yet.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
</div>{# end #pipeline-transform-content #}
|
||||
@@ -1,5 +1,6 @@
|
||||
{% if suppliers %}
|
||||
<div class="card">
|
||||
<div style="overflow-x:auto">
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -47,6 +48,7 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="card text-center" style="padding:2rem">
|
||||
|
||||
@@ -4,8 +4,18 @@
|
||||
|
||||
{% block admin_head %}
|
||||
<style>
|
||||
.pipeline-stat-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
gap: 0.75rem;
|
||||
}
|
||||
@media (min-width: 768px) {
|
||||
.pipeline-stat-grid { grid-template-columns: repeat(4, 1fr); }
|
||||
}
|
||||
|
||||
.pipeline-tabs {
|
||||
display: flex; gap: 0; border-bottom: 2px solid #E2E8F0; margin-bottom: 1.5rem;
|
||||
overflow-x: auto; -webkit-overflow-scrolling: touch;
|
||||
}
|
||||
.pipeline-tabs button {
|
||||
padding: 0.625rem 1.25rem; font-size: 0.8125rem; font-weight: 600;
|
||||
@@ -23,7 +33,19 @@
|
||||
.status-dot.failed { background: #EF4444; }
|
||||
.status-dot.stale { background: #D97706; }
|
||||
.status-dot.running { background: #3B82F6; }
|
||||
|
||||
@keyframes pulse-dot { 0%,100%{opacity:1} 50%{opacity:0.4} }
|
||||
.status-dot.running { animation: pulse-dot 1.5s ease-in-out infinite; }
|
||||
.status-dot.pending { background: #CBD5E1; }
|
||||
|
||||
.pipeline-two-col {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr;
|
||||
gap: 1rem;
|
||||
}
|
||||
@media (min-width: 640px) {
|
||||
.pipeline-two-col { grid-template-columns: 1fr 1fr; }
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
@@ -34,10 +56,11 @@
|
||||
<p class="text-sm text-slate mt-1">Extraction status, data catalog, and ad-hoc query editor</p>
|
||||
</div>
|
||||
<div class="flex gap-2">
|
||||
<form method="post" action="{{ url_for('pipeline.pipeline_trigger_extract') }}" class="m-0">
|
||||
<form method="post" action="{{ url_for('pipeline.pipeline_trigger_transform') }}" class="m-0">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<input type="hidden" name="step" value="pipeline">
|
||||
<button type="button" class="btn btn-sm"
|
||||
onclick="confirmAction('Enqueue a full extraction run? This will run all extractors in the background.', this.closest('form'))">
|
||||
onclick="confirmAction('Run full ELT pipeline (extract → transform → export)? This runs in the background.', this.closest('form'))">
|
||||
Run Pipeline
|
||||
</button>
|
||||
</form>
|
||||
@@ -46,7 +69,7 @@
|
||||
</header>
|
||||
|
||||
<!-- Health stat cards -->
|
||||
<div style="display:grid;grid-template-columns:repeat(4,1fr);gap:0.75rem" class="mb-6">
|
||||
<div class="pipeline-stat-grid mb-6">
|
||||
<div class="card text-center" style="padding:0.875rem">
|
||||
<p class="text-xs text-slate">Total Runs</p>
|
||||
<p class="text-2xl font-bold text-navy metric">{{ summary.total | default(0) }}</p>
|
||||
@@ -97,6 +120,10 @@
|
||||
hx-get="{{ url_for('pipeline.pipeline_lineage') }}"
|
||||
hx-target="#pipeline-tab-content" hx-swap="innerHTML"
|
||||
hx-trigger="click">Lineage</button>
|
||||
<button data-tab="transform"
|
||||
hx-get="{{ url_for('pipeline.pipeline_transform') }}"
|
||||
hx-target="#pipeline-tab-content" hx-swap="innerHTML"
|
||||
hx-trigger="click">Transform</button>
|
||||
</div>
|
||||
|
||||
<!-- Tab content (Overview loads on page load) -->
|
||||
|
||||
307
web/src/padelnomics/affiliate.py
Normal file
307
web/src/padelnomics/affiliate.py
Normal file
@@ -0,0 +1,307 @@
|
||||
"""
|
||||
Affiliate product catalog: product lookup, click logging, and stats queries.
|
||||
|
||||
All functions are plain async procedures — no classes, no state.
|
||||
|
||||
Design decisions:
|
||||
- IP hashing uses a daily salt (date + SECRET_KEY[:16]) for GDPR compliance.
|
||||
Rotating salt prevents re-identification across days without storing PII.
|
||||
- Products are fetched by (slug, language) with a graceful fallback to any
|
||||
language, so DE cards appear in EN articles rather than nothing.
|
||||
- Stats are computed entirely in SQL — no Python aggregation.
|
||||
"""
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
from datetime import date
|
||||
|
||||
from .core import config, execute, fetch_all, fetch_one
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
VALID_CATEGORIES = ("racket", "ball", "shoe", "bag", "grip", "eyewear", "accessory")
|
||||
VALID_STATUSES = ("draft", "active", "archived")
|
||||
VALID_PROGRAM_STATUSES = ("active", "inactive")
|
||||
|
||||
|
||||
def hash_ip(ip_address: str) -> str:
|
||||
"""SHA256(ip + YYYY-MM-DD + SECRET_KEY[:16]) with daily salt rotation."""
|
||||
assert ip_address, "ip_address must not be empty"
|
||||
today = date.today().isoformat()
|
||||
salt = config.SECRET_KEY[:16]
|
||||
raw = f"{ip_address}:{today}:{salt}"
|
||||
return hashlib.sha256(raw.encode()).hexdigest()
|
||||
|
||||
|
||||
async def get_all_programs(status: str | None = None) -> list[dict]:
|
||||
"""Return all affiliate programs, optionally filtered by status."""
|
||||
if status:
|
||||
assert status in VALID_PROGRAM_STATUSES, f"unknown program status: {status}"
|
||||
rows = await fetch_all(
|
||||
"SELECT ap.*, ("
|
||||
" SELECT COUNT(*) FROM affiliate_products WHERE program_id = ap.id"
|
||||
") AS product_count"
|
||||
" FROM affiliate_programs ap WHERE ap.status = ?"
|
||||
" ORDER BY ap.name ASC",
|
||||
(status,),
|
||||
)
|
||||
else:
|
||||
rows = await fetch_all(
|
||||
"SELECT ap.*, ("
|
||||
" SELECT COUNT(*) FROM affiliate_products WHERE program_id = ap.id"
|
||||
") AS product_count"
|
||||
" FROM affiliate_programs ap ORDER BY ap.name ASC"
|
||||
)
|
||||
return [dict(r) for r in rows]
|
||||
|
||||
|
||||
async def get_program(program_id: int) -> dict | None:
|
||||
"""Return a single affiliate program by id."""
|
||||
assert program_id > 0, "program_id must be positive"
|
||||
row = await fetch_one(
|
||||
"SELECT * FROM affiliate_programs WHERE id = ?", (program_id,)
|
||||
)
|
||||
return dict(row) if row else None
|
||||
|
||||
|
||||
async def get_program_by_slug(slug: str) -> dict | None:
|
||||
"""Return a single affiliate program by slug."""
|
||||
assert slug, "slug must not be empty"
|
||||
row = await fetch_one(
|
||||
"SELECT * FROM affiliate_programs WHERE slug = ?", (slug,)
|
||||
)
|
||||
return dict(row) if row else None
|
||||
|
||||
|
||||
def build_affiliate_url(product: dict, program: dict | None = None) -> str:
|
||||
"""Assemble the final affiliate URL from program template + product identifier.
|
||||
|
||||
Falls back to the baked product["affiliate_url"] when no program is set,
|
||||
preserving backward compatibility with products created before programs existed.
|
||||
"""
|
||||
if not product.get("program_id") or not program:
|
||||
return product["affiliate_url"]
|
||||
return program["url_template"].format(
|
||||
product_id=product["product_identifier"],
|
||||
tag=program["tracking_tag"],
|
||||
)
|
||||
|
||||
|
||||
async def get_product(slug: str, language: str = "de") -> dict | None:
|
||||
"""Return active product by slug+language, falling back to any language.
|
||||
|
||||
JOINs affiliate_programs so the returned dict includes program fields
|
||||
(prefixed with _program_*) for use in build_affiliate_url().
|
||||
"""
|
||||
assert slug, "slug must not be empty"
|
||||
row = await fetch_one(
|
||||
"SELECT p.*, pg.url_template AS _program_url_template,"
|
||||
" pg.tracking_tag AS _program_tracking_tag,"
|
||||
" pg.name AS _program_name"
|
||||
" FROM affiliate_products p"
|
||||
" LEFT JOIN affiliate_programs pg ON pg.id = p.program_id"
|
||||
" WHERE p.slug = ? AND p.language = ? AND p.status = 'active'",
|
||||
(slug, language),
|
||||
)
|
||||
if row:
|
||||
return _parse_product(row)
|
||||
# Graceful fallback: show any language rather than nothing
|
||||
row = await fetch_one(
|
||||
"SELECT p.*, pg.url_template AS _program_url_template,"
|
||||
" pg.tracking_tag AS _program_tracking_tag,"
|
||||
" pg.name AS _program_name"
|
||||
" FROM affiliate_products p"
|
||||
" LEFT JOIN affiliate_programs pg ON pg.id = p.program_id"
|
||||
" WHERE p.slug = ? AND p.status = 'active' LIMIT 1",
|
||||
(slug,),
|
||||
)
|
||||
return _parse_product(row) if row else None
|
||||
|
||||
|
||||
async def get_products_by_category(category: str, language: str = "de") -> list[dict]:
|
||||
"""Return active products in category sorted by sort_order, with fallback."""
|
||||
assert category in VALID_CATEGORIES, f"unknown category: {category}"
|
||||
rows = await fetch_all(
|
||||
"SELECT * FROM affiliate_products"
|
||||
" WHERE category = ? AND language = ? AND status = 'active'"
|
||||
" ORDER BY sort_order ASC, id ASC",
|
||||
(category, language),
|
||||
)
|
||||
if rows:
|
||||
return [_parse_product(r) for r in rows]
|
||||
# Fallback: any language for this category
|
||||
rows = await fetch_all(
|
||||
"SELECT * FROM affiliate_products"
|
||||
" WHERE category = ? AND status = 'active'"
|
||||
" ORDER BY sort_order ASC, id ASC",
|
||||
(category,),
|
||||
)
|
||||
return [_parse_product(r) for r in rows]
|
||||
|
||||
|
||||
async def get_all_products(
|
||||
status: str | None = None,
|
||||
retailer: str | None = None,
|
||||
) -> list[dict]:
|
||||
"""Admin listing — all products, optionally filtered by status and/or retailer."""
|
||||
conditions = []
|
||||
params: list = []
|
||||
if status:
|
||||
assert status in VALID_STATUSES, f"unknown status: {status}"
|
||||
conditions.append("status = ?")
|
||||
params.append(status)
|
||||
if retailer:
|
||||
conditions.append("retailer = ?")
|
||||
params.append(retailer)
|
||||
|
||||
where = ("WHERE " + " AND ".join(conditions)) if conditions else ""
|
||||
rows = await fetch_all(
|
||||
f"SELECT * FROM affiliate_products {where} ORDER BY sort_order ASC, id ASC",
|
||||
tuple(params),
|
||||
)
|
||||
return [_parse_product(r) for r in rows]
|
||||
|
||||
|
||||
async def get_click_counts() -> dict[int, int]:
    """Return {product_id: click_count} for all products (used in admin list)."""
    rows = await fetch_all(
        "SELECT product_id, COUNT(*) AS cnt FROM affiliate_clicks GROUP BY product_id"
    )
    counts: dict[int, int] = {}
    for row in rows:
        counts[row["product_id"]] = row["cnt"]
    return counts
|
||||
|
||||
|
||||
async def log_click(
    product_id: int,
    ip_address: str,
    article_slug: str | None,
    referrer: str | None,
) -> None:
    """Insert one click event; the IP is stored only as a hash (GDPR)."""
    assert product_id > 0, "product_id must be positive"
    assert ip_address, "ip_address must not be empty"

    hashed_ip = hash_ip(ip_address)
    insert_sql = (
        "INSERT INTO affiliate_clicks (product_id, article_slug, referrer, ip_hash)"
        " VALUES (?, ?, ?, ?)"
    )
    await execute(insert_sql, (product_id, article_slug, referrer, hashed_ip))
|
||||
|
||||
|
||||
async def get_click_stats(days_count: int = 30) -> dict:
    """Compute click statistics over the last N days, entirely in SQL.

    Returns a dict with totals, product counts, top products/articles,
    per-retailer counts, and normalized daily bars for the admin chart.
    """
    assert 1 <= days_count <= 365, f"days must be 1-365, got {days_count}"

    # Every query below filters on the same rolling window; bind it once.
    window = (f"-{days_count} days",)

    # Total clicks in window
    total_row = await fetch_one(
        "SELECT COUNT(*) AS cnt FROM affiliate_clicks"
        " WHERE clicked_at >= datetime('now', ?)",
        window,
    )

    # Active vs draft product counts (single scan of affiliate_products)
    product_counts = await fetch_one(
        "SELECT"
        " SUM(CASE WHEN status='active' THEN 1 ELSE 0 END) AS active_count,"
        " SUM(CASE WHEN status='draft' THEN 1 ELSE 0 END) AS draft_count"
        " FROM affiliate_products"
    )

    # Top products by clicks (LEFT JOIN keeps zero-click products visible)
    top_products = await fetch_all(
        "SELECT p.id, p.name, p.slug, p.retailer, COUNT(c.id) AS click_count"
        " FROM affiliate_products p"
        " LEFT JOIN affiliate_clicks c"
        " ON c.product_id = p.id"
        " AND c.clicked_at >= datetime('now', ?)"
        " GROUP BY p.id"
        " ORDER BY click_count DESC"
        " LIMIT 10",
        window,
    )

    # Top articles by clicks
    top_articles = await fetch_all(
        "SELECT article_slug, COUNT(*) AS click_count"
        " FROM affiliate_clicks"
        " WHERE clicked_at >= datetime('now', ?)"
        " AND article_slug IS NOT NULL"
        " GROUP BY article_slug"
        " ORDER BY click_count DESC"
        " LIMIT 10",
        window,
    )

    # Clicks by retailer
    by_retailer = await fetch_all(
        "SELECT p.retailer, COUNT(c.id) AS click_count"
        " FROM affiliate_products p"
        " LEFT JOIN affiliate_clicks c"
        " ON c.product_id = p.id"
        " AND c.clicked_at >= datetime('now', ?)"
        " GROUP BY p.retailer"
        " ORDER BY click_count DESC",
        window,
    )

    # Daily click counts for bar chart
    daily = await fetch_all(
        "SELECT date(clicked_at) AS day, COUNT(*) AS click_count"
        " FROM affiliate_clicks"
        " WHERE clicked_at >= datetime('now', ?)"
        " GROUP BY day"
        " ORDER BY day ASC",
        window,
    )

    # Scale each day to a 0-100 percentage of the busiest day for the CSS chart.
    peak = max((row["click_count"] for row in daily), default=1)
    daily_bars = [
        {
            "day": row["day"],
            "click_count": row["click_count"],
            "pct": round(row["click_count"] / peak * 100),
        }
        for row in daily
    ]

    return {
        "total_clicks": total_row["cnt"] if total_row else 0,
        "active_products": product_counts["active_count"] if product_counts else 0,
        "draft_products": product_counts["draft_count"] if product_counts else 0,
        "top_products": [dict(row) for row in top_products],
        "top_articles": [dict(row) for row in top_articles],
        "by_retailer": [dict(row) for row in by_retailer],
        "daily_bars": daily_bars,
        "days": days_count,
    }
|
||||
|
||||
|
||||
async def get_distinct_retailers() -> list[str]:
    """Return sorted distinct retailer names for the admin form's datalist."""
    sql = (
        "SELECT DISTINCT retailer FROM affiliate_products"
        " WHERE retailer != '' ORDER BY retailer"
    )
    return [row["retailer"] for row in await fetch_all(sql)]
|
||||
|
||||
|
||||
def _parse_product(row) -> dict:
|
||||
"""Convert aiosqlite Row to plain dict, parsing JSON pros/cons arrays.
|
||||
|
||||
If the row includes _program_* columns (from a JOIN), extracts them into
|
||||
a nested "_program" dict so build_affiliate_url() can use them directly.
|
||||
"""
|
||||
d = dict(row)
|
||||
d["pros"] = json.loads(d.get("pros") or "[]")
|
||||
d["cons"] = json.loads(d.get("cons") or "[]")
|
||||
# Extract program fields added by get_product()'s JOIN
|
||||
if "_program_url_template" in d:
|
||||
if d.get("program_id") and d["_program_url_template"]:
|
||||
d["_program"] = {
|
||||
"url_template": d.pop("_program_url_template"),
|
||||
"tracking_tag": d.pop("_program_tracking_tag", ""),
|
||||
"name": d.pop("_program_name", ""),
|
||||
}
|
||||
else:
|
||||
d.pop("_program_url_template", None)
|
||||
d.pop("_program_tracking_tag", None)
|
||||
d.pop("_program_name", None)
|
||||
return d
|
||||
@@ -4,6 +4,10 @@ DuckDB read-only analytics reader.
|
||||
Opens a single long-lived DuckDB connection at startup (read_only=True).
|
||||
All queries run via asyncio.to_thread() to avoid blocking the event loop.
|
||||
|
||||
When export_serving.py atomically renames a new analytics.duckdb into place,
|
||||
_check_and_reopen() detects the inode change and transparently reopens —
|
||||
no app restart required.
|
||||
|
||||
Usage:
|
||||
from .analytics import fetch_analytics, execute_user_query
|
||||
|
||||
@@ -14,6 +18,7 @@ Usage:
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
@@ -21,6 +26,8 @@ from typing import Any
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_conn = None # duckdb.DuckDBPyConnection | None — lazy import
|
||||
_conn_inode: int | None = None
|
||||
_reopen_lock = threading.Lock()
|
||||
_DUCKDB_PATH = os.environ.get("SERVING_DUCKDB_PATH", "data/analytics.duckdb")
|
||||
|
||||
# DuckDB queries run in the asyncio thread pool. Cap them so a slow scan
|
||||
@@ -32,20 +39,67 @@ def open_analytics_db() -> None:
|
||||
"""Open the DuckDB connection. Call once at app startup."""
|
||||
import duckdb
|
||||
|
||||
global _conn
|
||||
global _conn, _conn_inode
|
||||
path = Path(_DUCKDB_PATH)
|
||||
if not path.exists():
|
||||
# Database doesn't exist yet — skip silently. Queries will return empty.
|
||||
return
|
||||
_conn = duckdb.connect(str(path), read_only=True)
|
||||
_conn_inode = path.stat().st_ino
|
||||
|
||||
|
||||
def close_analytics_db() -> None:
|
||||
"""Close the DuckDB connection. Call at app shutdown."""
|
||||
global _conn
|
||||
global _conn, _conn_inode
|
||||
if _conn is not None:
|
||||
_conn.close()
|
||||
_conn = None
|
||||
_conn_inode = None
|
||||
|
||||
|
||||
def _check_and_reopen() -> None:
    """Reopen the connection if analytics.duckdb was atomically replaced (new inode).

    Called at the start of each query. Requires a directory bind mount (not a file
    bind mount) so that os.stat() inside the container sees the new inode after rename.
    """
    global _conn, _conn_inode
    import duckdb

    path = Path(_DUCKDB_PATH)
    try:
        current_inode = path.stat().st_ino
    except OSError:
        # File missing or mid-replace — keep serving from the current connection.
        return

    if current_inode == _conn_inode:
        return  # same file — nothing to do

    with _reopen_lock:
        # Double-check under lock to avoid concurrent reopens.
        try:
            current_inode = path.stat().st_ino
        except OSError:
            return
        if current_inode == _conn_inode:
            return

        old_conn = _conn
        try:
            new_conn = duckdb.connect(str(path), read_only=True)
        except Exception:
            # Keep the old (possibly stale) connection rather than serve nothing.
            logger.exception("Failed to reopen analytics DB after file change")
            return

        # Swap in the new connection before closing the old one, so readers of
        # the module globals always see a usable (never None) connection.
        _conn = new_conn
        _conn_inode = current_inode
        logger.info("Analytics DB reopened (inode changed to %d)", current_inode)

        if old_conn is not None:
            try:
                old_conn.close()
            except Exception:
                # Best-effort close of the stale handle; errors are irrelevant here.
                pass
|
||||
|
||||
|
||||
async def fetch_analytics(sql: str, params: list | None = None) -> list[dict[str, Any]]:
|
||||
@@ -61,7 +115,11 @@ async def fetch_analytics(sql: str, params: list | None = None) -> list[dict[str
|
||||
return []
|
||||
|
||||
def _run() -> list[dict]:
|
||||
cur = _conn.cursor()
|
||||
_check_and_reopen()
|
||||
conn = _conn
|
||||
if conn is None:
|
||||
return []
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
rel = cur.execute(sql, params or [])
|
||||
cols = [d[0] for d in rel.description]
|
||||
@@ -104,8 +162,12 @@ async def execute_user_query(
|
||||
return [], [], "Analytics database is not available.", 0.0
|
||||
|
||||
def _run() -> tuple[list[str], list[tuple], str | None, float]:
|
||||
_check_and_reopen()
|
||||
conn = _conn
|
||||
if conn is None:
|
||||
return [], [], "Analytics database is not available.", 0.0
|
||||
t0 = time.monotonic()
|
||||
cur = _conn.cursor()
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
rel = cur.execute(sql)
|
||||
cols = [d[0] for d in rel.description]
|
||||
|
||||
@@ -280,6 +280,52 @@ def create_app() -> Quart:
|
||||
except Exception as e:
|
||||
return {"status": "unhealthy", "db": str(e)}, 500
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Affiliate click redirect — language-agnostic, no blueprint prefix
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
@app.route("/go/<slug>")
async def affiliate_redirect(slug: str):
    """302 redirect to affiliate URL, logging the click.

    Uses 302 (not 301) so every hit is tracked — browsers don't cache 302s.
    Extracts article_slug and lang from Referer header best-effort.
    Responds 404 when no active product matches the slug.
    """
    # Local imports keep app startup light and avoid circular imports.
    from .affiliate import build_affiliate_url, get_product, log_click
    from .core import check_rate_limit

    # Extract lang from Referer path (e.g. /de/blog/... → "de"), default de
    referer = request.headers.get("Referer", "")
    lang = "de"
    article_slug = None
    if referer:
        try:
            from urllib.parse import urlparse
            ref_path = urlparse(referer).path
            parts = ref_path.strip("/").split("/")
            # A two-letter first path segment is treated as the language prefix.
            if parts and len(parts[0]) == 2:
                lang = parts[0]
            if len(parts) > 1:
                article_slug = parts[-1] or None
        except Exception:
            # Referer parsing is purely best-effort; never block the redirect.
            pass

    product = await get_product(slug, lang)
    if not product:
        abort(404)

    # Assemble URL from program template; falls back to baked affiliate_url
    url = build_affiliate_url(product, product.get("_program"))

    ip = request.remote_addr or "unknown"
    allowed, _info = await check_rate_limit(f"aff:{ip}", limit=60, window=60)
    if not allowed:
        # Still redirect even if rate-limited; just don't log the click
        return redirect(url, 302)

    await log_click(product["id"], ip, article_slug, referer or None)
    return redirect(url, 302)
|
||||
|
||||
# Legacy 301 redirects — bookmarked/cached URLs before lang prefixes existed
|
||||
@app.route("/terms")
|
||||
async def legacy_terms():
|
||||
|
||||
@@ -123,17 +123,19 @@ async def get_table_columns(data_table: str) -> list[dict]:
|
||||
async def fetch_template_data(
|
||||
data_table: str,
|
||||
order_by: str | None = None,
|
||||
limit: int = 500,
|
||||
limit: int = 0,
|
||||
) -> list[dict]:
|
||||
"""Fetch all rows from a DuckDB serving table."""
|
||||
"""Fetch rows from a DuckDB serving table. limit=0 means all rows."""
|
||||
assert "." in data_table, "data_table must be schema-qualified"
|
||||
_validate_table_name(data_table)
|
||||
|
||||
order_clause = f"ORDER BY {order_by} DESC" if order_by else ""
|
||||
return await fetch_analytics(
|
||||
f"SELECT * FROM {data_table} {order_clause} LIMIT ?",
|
||||
[limit],
|
||||
)
|
||||
if limit:
|
||||
return await fetch_analytics(
|
||||
f"SELECT * FROM {data_table} {order_clause} LIMIT ?",
|
||||
[limit],
|
||||
)
|
||||
return await fetch_analytics(f"SELECT * FROM {data_table} {order_clause}")
|
||||
|
||||
|
||||
async def count_template_data(data_table: str) -> int:
|
||||
@@ -290,7 +292,7 @@ async def generate_articles(
|
||||
start_date: date,
|
||||
articles_per_day: int,
|
||||
*,
|
||||
limit: int = 500,
|
||||
limit: int = 0,
|
||||
base_url: str = "https://padelnomics.io",
|
||||
task_id: int | None = None,
|
||||
) -> int:
|
||||
@@ -315,7 +317,7 @@ async def generate_articles(
|
||||
"""
|
||||
from ..core import execute as db_execute
|
||||
from ..planner.calculator import DEFAULTS, calc, validate_state
|
||||
from .routes import bake_scenario_cards, is_reserved_path
|
||||
from .routes import bake_product_cards, bake_scenario_cards, is_reserved_path
|
||||
|
||||
assert articles_per_day > 0, "articles_per_day must be positive"
|
||||
|
||||
@@ -443,6 +445,7 @@ async def generate_articles(
|
||||
body_html = await bake_scenario_cards(
|
||||
body_html, lang=lang, scenario_overrides=scenario_overrides
|
||||
)
|
||||
body_html = await bake_product_cards(body_html, lang=lang)
|
||||
t_bake += time.perf_counter() - t0
|
||||
|
||||
# Extract FAQ pairs for structured data
|
||||
@@ -584,7 +587,7 @@ async def preview_article(
|
||||
No disk write, no DB insert. Returns {title, url_path, html, meta_description}.
|
||||
"""
|
||||
from ..planner.calculator import DEFAULTS, calc, validate_state
|
||||
from .routes import bake_scenario_cards
|
||||
from .routes import bake_product_cards, bake_scenario_cards
|
||||
|
||||
config = load_template(slug)
|
||||
|
||||
@@ -641,6 +644,7 @@ async def preview_article(
|
||||
body_html = await bake_scenario_cards(
|
||||
body_html, lang=lang, scenario_overrides=scenario_overrides,
|
||||
)
|
||||
body_html = await bake_product_cards(body_html, lang=lang)
|
||||
|
||||
return {
|
||||
"title": title,
|
||||
|
||||
@@ -27,6 +27,8 @@ RESERVED_PREFIXES = (
|
||||
)
|
||||
|
||||
SCENARIO_RE = re.compile(r'\[scenario:([a-z0-9_-]+)(?::([a-z]+))?\]')
|
||||
PRODUCT_RE = re.compile(r'\[product:([a-z0-9_-]+)\]')
|
||||
PRODUCT_GROUP_RE = re.compile(r'\[product-group:([a-z0-9_-]+)\]')
|
||||
|
||||
SECTION_TEMPLATES = {
|
||||
None: "partials/scenario_summary.html",
|
||||
@@ -112,6 +114,53 @@ async def bake_scenario_cards(
|
||||
return html
|
||||
|
||||
|
||||
async def bake_product_cards(html: str, lang: str = "de") -> str:
    """Replace [product:slug] and [product-group:category] markers with rendered HTML.

    Runs two independent passes — single product cards first, then comparison
    grids. Matches are spliced back-to-front so earlier offsets stay valid.
    """
    from ..affiliate import get_product, get_products_by_category

    t = get_translations(lang)

    # ── Pass 1: [product:slug] ────────────────────────────────────────────────
    singles = list(PRODUCT_RE.finditer(html))
    if singles:
        # Fetch each distinct slug once, even if the marker repeats.
        fetched: dict[str, dict | None] = {}
        for slug in {m.group(1) for m in singles}:
            fetched[slug] = await get_product(slug, lang)

        card_tmpl = _bake_env.get_template("partials/product_card.html")
        for m in reversed(singles):
            product = fetched.get(m.group(1))
            if not product:
                # Unknown/inactive slug — leave the marker untouched.
                continue
            rendered = card_tmpl.render(product=product, lang=lang, t=t)
            html = html[:m.start()] + rendered + html[m.end():]

    # ── Pass 2: [product-group:category] ─────────────────────────────────────
    grouped = list(PRODUCT_GROUP_RE.finditer(html))
    if grouped:
        by_category: dict[str, list] = {}
        for cat in {m.group(1) for m in grouped}:
            by_category[cat] = await get_products_by_category(cat, lang)

        grid_tmpl = _bake_env.get_template("partials/product_group.html")
        for m in reversed(grouped):
            items = by_category.get(m.group(1), [])
            if not items:
                continue
            rendered = grid_tmpl.render(
                products=items, category=m.group(1), lang=lang, t=t
            )
            html = html[:m.start()] + rendered + html[m.end():]

    return html
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Markets Hub
|
||||
# =============================================================================
|
||||
|
||||
@@ -181,7 +181,7 @@ Der **Market Score ({{ market_score | round(1) }}/100)** misst die *Marktreife*:
|
||||
|
||||
{{ city_name }} has a **<a href="/{{ language }}/market-score" style="text-decoration:none"><span style="font-family:'Bricolage Grotesque',sans-serif;font-weight:800;color:#0F172A;letter-spacing:-0.02em">padelnomics</span> Market Score</a> of {{ market_score | round(1) }}/100** — placing it{% if market_score >= 55 %} among the strongest padel markets in {{ country_name_en }}{% elif market_score >= 35 %} in the mid-tier of {{ country_name_en }}'s padel markets{% else %} in an early-stage padel market with room for growth{% endif %}. The city currently has **{{ padel_venue_count }} padel venues** serving a population of {% if population >= 1000000 %}{{ (population / 1000000) | round(1) }}M{% else %}{{ (population / 1000) | round(0) | int }}K{% endif %} residents — a density of {{ venues_per_100k | round(1) }} venues per 100,000 people.{% if opportunity_score %} The **<a href="/{{ language }}/market-score" style="text-decoration:none"><span style="font-family:'Bricolage Grotesque',sans-serif;font-weight:800;color:#0F172A;letter-spacing:-0.02em">padelnomics</span> Opportunity Score</a> of {{ opportunity_score | round(1) }}/100** scores investment potential — supply gaps, catchment reach, and sports culture as a demand proxy:{% if opportunity_score >= 65 and market_score < 40 %} limited competition meets strong location fundamentals{% elif opportunity_score >= 65 %} strong potential despite an already active market{% elif opportunity_score >= 40 %} solid potential as the market starts to fill in{% else %} the area is comparatively well-served; differentiation is the key lever{% endif %}.{% endif %}
|
||||
|
||||
The question investors actually need answered is: given current pricing, occupancy, and build costs, what does the return look like? The financial model below uses real {{ city_name }} market data to give you that answer.
|
||||
The question that matters: given current pricing, occupancy, and build costs, what does a padel investment in {{ city_name }} actually return? The financial model below works with real local market data.
|
||||
|
||||
## What Does a Padel Investment Cost in {{ city_name }}?
|
||||
|
||||
|
||||
@@ -178,7 +178,7 @@ Der **Market Score (Ø {{ avg_market_score }}/100)** bewertet die Marktreife: Be
|
||||
|
||||
Padel is growing rapidly across {{ country_name_en }}. Our data tracks {{ total_venues }} venues — a figure that likely understates the true count given independent clubs not listed on booking platforms. The average <span style="font-family:'Bricolage Grotesque',sans-serif;font-weight:800;color:#0F172A;letter-spacing:-0.02em">padelnomics</span> Market Score of {{ avg_market_score }}/100 across {{ city_count }} cities reflects both market maturity and data availability.
|
||||
|
||||
{% if avg_market_score >= 55 %}Markets scoring above 55 typically show an established player base, reliable pricing data, and predictable demand patterns — all critical for sound financial planning. Yet even in mature markets, venue density per 100,000 residents varies significantly between cities, pointing to pockets of underserved demand.{% elif avg_market_score >= 35 %}A mid-range score signals a growth phase: demand is proven, venue infrastructure is building, and pricing hasn't fully settled to competitive levels. This creates opportunities for well-positioned new entrants who can secure good locations before the market matures.{% else %}Emerging markets offer first-mover advantages — less direct competition, potentially better lease terms, and the opportunity to build a loyal player base before the market fills out. The trade-off is less pricing data and more uncertainty in demand projections.{% endif %}
|
||||
{% if avg_market_score >= 55 %}Markets scoring above 55 typically show an established player base, reliable pricing data, and predictable demand patterns — all critical for sound financial planning. Yet even in mature markets, venue density per 100,000 residents varies significantly between cities, leaving genuine supply gaps even in established markets.{% elif avg_market_score >= 35 %}A mid-range score signals a growth phase: demand is proven, venue infrastructure is building, and pricing hasn't fully settled to competitive levels. This creates opportunities for well-positioned new entrants who can secure good locations before the market matures.{% else %}Emerging markets offer first-mover advantages — less direct competition, potentially better lease terms, and the opportunity to build a loyal player base before the market fills out. The trade-off is less pricing data and more uncertainty in demand projections.{% endif %}
|
||||
|
||||
{% if avg_opportunity_score %}The average **<a href="/{{ language }}/market-score" style="text-decoration:none"><span style="font-family:'Bricolage Grotesque',sans-serif;font-weight:800;color:#0F172A;letter-spacing:-0.02em">padelnomics</span> Opportunity Score</a> of {{ avg_opportunity_score }}/100** shows how much investment potential remains untapped in {{ country_name_en }}. {% if avg_opportunity_score >= 60 and avg_market_score < 40 %}The combination of a high Opportunity Score and a moderate Market Score makes {{ country_name_en }} particularly attractive for new entrants: demand potential and sports culture are there, infrastructure is still building — first-mover conditions for well-chosen locations.{% elif avg_opportunity_score >= 60 %}Despite an already active market, locations with significant potential remain — particularly in mid-size cities and at the periphery of major metro areas.{% else %}Many locations in {{ country_name_en }} are already well-served. New projects need careful site selection and a clear differentiation strategy to compete.{% endif %}{% endif %}
|
||||
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
{# Affiliate product card — editorial recommendation style.
|
||||
Variables: product (dict with parsed pros/cons lists), t (translations), lang.
|
||||
Rendered bake-time by bake_product_cards(); no request context available. #}
|
||||
{%- set price_eur = (product.price_cents / 100) if product.price_cents else none -%}
|
||||
{%- set cta = product.cta_label if product.cta_label else t.affiliate_cta_buy -%}
|
||||
<div class="aff-card" style="background:#fff;border:1px solid #E2E8F0;border-radius:16px;padding:1.5rem;margin:1.5rem 0;box-shadow:0 1px 3px rgba(0,0,0,.05);transition:transform .2s,box-shadow .2s;" onmouseover="this.style.transform='translateY(-2px)';this.style.boxShadow='0 8px 24px rgba(0,0,0,.08)'" onmouseout="this.style.transform='';this.style.boxShadow='0 1px 3px rgba(0,0,0,.05)'">
|
||||
<div style="display:flex;gap:1.25rem;align-items:flex-start;flex-wrap:wrap;">
|
||||
|
||||
{# ── Image ── #}
|
||||
<div style="width:160px;flex-shrink:0;aspect-ratio:1;border-radius:12px;background:#F8FAFC;border:1px solid #E2E8F0;overflow:hidden;display:flex;align-items:center;justify-content:center;">
|
||||
{% if product.image_url %}
|
||||
<img src="{{ product.image_url }}" alt="{{ product.name }}" style="width:100%;height:100%;object-fit:contain;" loading="lazy">
|
||||
{% else %}
|
||||
<svg width="48" height="48" fill="none" stroke="#CBD5E1" stroke-width="1.5" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" d="m2.25 15.75 5.159-5.159a2.25 2.25 0 0 1 3.182 0l5.159 5.159m-1.5-1.5 1.409-1.409a2.25 2.25 0 0 1 3.182 0l2.909 2.909m-18 3.75h16.5a1.5 1.5 0 0 0 1.5-1.5V6a1.5 1.5 0 0 0-1.5-1.5H3.75A1.5 1.5 0 0 0 2.25 6v12a1.5 1.5 0 0 0 1.5 1.5Z"/></svg>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# ── Content ── #}
|
||||
<div style="flex:1;min-width:0;">
|
||||
|
||||
{# Brand + retailer #}
|
||||
<div style="display:flex;align-items:center;gap:.5rem;margin-bottom:.375rem;">
|
||||
{% if product.brand %}
|
||||
<span style="text-transform:uppercase;font-size:.6875rem;font-weight:600;letter-spacing:.06em;color:#64748B;">{{ product.brand }}</span>
|
||||
{% endif %}
|
||||
{% if product.retailer %}
|
||||
<span style="background:#F1F5F9;border-radius:999px;padding:2px 8px;font-size:.625rem;font-weight:600;color:#64748B;letter-spacing:.04em;text-transform:uppercase;">{{ t.affiliate_at_retailer | tformat(retailer=product.retailer) }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# Name #}
|
||||
<h3 style="font-family:'Bricolage Grotesque',sans-serif;font-size:1.125rem;font-weight:700;color:#0F172A;letter-spacing:-.01em;margin:0 0 .375rem;">{{ product.name }}</h3>
|
||||
|
||||
{# Rating #}
|
||||
{% if product.rating %}
|
||||
{%- set stars_full = product.rating | int -%}
|
||||
{%- set has_half = (product.rating - stars_full) >= 0.5 -%}
|
||||
<div style="display:flex;align-items:center;gap:.25rem;margin-bottom:.375rem;">
|
||||
<span style="color:#D97706;font-size:.9375rem;">
|
||||
{%- for i in range(stars_full) %}★{% endfor -%}
|
||||
{%- if has_half %}★{% endif -%}
|
||||
{%- for i in range(5 - stars_full - (1 if has_half else 0)) %}<span style="color:#E2E8F0;">★</span>{% endfor -%}
|
||||
</span>
|
||||
<span style="font-size:.8125rem;color:#64748B;">{{ "%.1f" | format(product.rating) }}</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{# Price #}
|
||||
{% if price_eur %}
|
||||
<div style="font-family:'Commit Mono',monospace;font-size:1.25rem;font-weight:700;color:#0F172A;margin-bottom:.5rem;">{{ "%.2f" | format(price_eur) | replace('.', ',') }} €</div>
|
||||
{% endif %}
|
||||
|
||||
{# Description #}
|
||||
{% if product.description %}
|
||||
<p style="font-size:.875rem;color:#475569;line-height:1.55;margin:.625rem 0;overflow:hidden;display:-webkit-box;-webkit-line-clamp:2;-webkit-box-orient:vertical;">{{ product.description }}</p>
|
||||
{% endif %}
|
||||
|
||||
{# Pros #}
|
||||
{% if product.pros %}
|
||||
<ul style="list-style:none;padding:0;margin:.625rem 0 .25rem;">
|
||||
{% for pro in product.pros %}
|
||||
<li style="font-size:.8125rem;color:#475569;line-height:1.7;"><span style="color:#16A34A;margin-right:.25rem;">✓</span>{{ pro }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{# Cons #}
|
||||
{% if product.cons %}
|
||||
<ul style="list-style:none;padding:0;margin:.25rem 0 .75rem;">
|
||||
{% for con in product.cons %}
|
||||
<li style="font-size:.8125rem;color:#475569;line-height:1.7;"><span style="color:#EF4444;margin-right:.25rem;">✗</span>{{ con }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{# CTA #}
|
||||
<a href="/go/{{ product.slug }}" rel="sponsored nofollow noopener" target="_blank"
|
||||
style="display:block;width:100%;background:#1D4ED8;color:#fff;border-radius:12px;padding:.625rem 1.25rem;font-weight:600;font-size:.875rem;text-align:center;text-decoration:none;box-shadow:0 2px 10px rgba(29,78,216,.25);transition:background .2s,transform .2s;margin-top:.5rem;"
|
||||
onmouseover="this.style.background='#1E40AF';this.style.transform='translateY(-1px)'"
|
||||
onmouseout="this.style.background='#1D4ED8';this.style.transform=''">
|
||||
{{ cta }} →
|
||||
</a>
|
||||
|
||||
{# Disclosure #}
|
||||
<p style="font-size:.6875rem;color:#94A3B8;font-style:italic;margin:.5rem 0 0;text-align:center;">{{ t.affiliate_disclosure }}</p>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -0,0 +1,68 @@
|
||||
{# Affiliate product comparison grid — editorial picks layout.
|
||||
Variables: products (list of dicts), category (str), t (translations), lang.
|
||||
Rendered bake-time by bake_product_cards(). #}
|
||||
{% if products %}
|
||||
<div style="margin:2rem 0;">
|
||||
|
||||
{# Section header #}
|
||||
<div style="text-transform:uppercase;font-size:.75rem;font-weight:600;color:#64748B;letter-spacing:.06em;margin-bottom:1rem;padding-bottom:.75rem;border-bottom:2px solid #E2E8F0;">
|
||||
{{ t.affiliate_our_picks }} · {{ category | capitalize }}
|
||||
</div>
|
||||
|
||||
{# Responsive grid of compact cards #}
|
||||
<div style="display:grid;grid-template-columns:repeat(auto-fill,minmax(220px,1fr));gap:1rem;">
|
||||
{% for product in products %}
|
||||
{%- set price_eur = (product.price_cents / 100) if product.price_cents else none -%}
|
||||
{%- set cta = product.cta_label if product.cta_label else t.affiliate_cta_buy -%}
|
||||
<div class="aff-card-compact" style="background:#fff;border:1px solid #E2E8F0;border-radius:16px;padding:1rem;display:flex;flex-direction:column;gap:.5rem;transition:transform .2s,box-shadow .2s;" onmouseover="this.style.transform='translateY(-2px)';this.style.boxShadow='0 8px 24px rgba(0,0,0,.08)'" onmouseout="this.style.transform='';this.style.boxShadow=''">
|
||||
|
||||
{# Image #}
|
||||
<div style="aspect-ratio:1;border-radius:10px;background:#F8FAFC;border:1px solid #E2E8F0;overflow:hidden;display:flex;align-items:center;justify-content:center;">
|
||||
{% if product.image_url %}
|
||||
<img src="{{ product.image_url }}" alt="{{ product.name }}" style="width:100%;height:100%;object-fit:contain;" loading="lazy">
|
||||
{% else %}
|
||||
<svg width="36" height="36" fill="none" stroke="#CBD5E1" stroke-width="1.5" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" d="m2.25 15.75 5.159-5.159a2.25 2.25 0 0 1 3.182 0l5.159 5.159m-1.5-1.5 1.409-1.409a2.25 2.25 0 0 1 3.182 0l2.909 2.909m-18 3.75h16.5a1.5 1.5 0 0 0 1.5-1.5V6a1.5 1.5 0 0 0-1.5-1.5H3.75A1.5 1.5 0 0 0 2.25 6v12a1.5 1.5 0 0 0 1.5 1.5Z"/></svg>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# Brand #}
|
||||
{% if product.brand %}
|
||||
<span style="text-transform:uppercase;font-size:.625rem;font-weight:600;letter-spacing:.06em;color:#94A3B8;">{{ product.brand }}</span>
|
||||
{% endif %}
|
||||
|
||||
{# Name #}
|
||||
<h4 style="font-family:'Bricolage Grotesque',sans-serif;font-size:1rem;font-weight:700;color:#0F172A;letter-spacing:-.01em;margin:0;line-height:1.3;">{{ product.name }}</h4>
|
||||
|
||||
{# Rating + pros/cons counts #}
|
||||
<div style="display:flex;align-items:center;gap:.5rem;flex-wrap:wrap;">
|
||||
{% if product.rating %}
|
||||
<span style="color:#D97706;font-size:.8125rem;">★</span>
|
||||
<span style="font-size:.75rem;color:#64748B;">{{ "%.1f" | format(product.rating) }}</span>
|
||||
{% endif %}
|
||||
{% if product.pros %}
|
||||
<span style="font-size:.6875rem;color:#16A34A;background:#F0FDF4;border-radius:999px;padding:1px 6px;">{{ product.pros | length }} {{ t.affiliate_pros_label }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# Price #}
|
||||
{% if price_eur %}
|
||||
<div style="font-family:'Commit Mono',monospace;font-size:1.0625rem;font-weight:700;color:#0F172A;">{{ "%.2f" | format(price_eur) | replace('.', ',') }} €</div>
|
||||
{% endif %}
|
||||
|
||||
{# CTA — pushed to bottom via margin-top:auto #}
|
||||
<a href="/go/{{ product.slug }}" rel="sponsored nofollow noopener" target="_blank"
|
||||
style="display:block;background:#1D4ED8;color:#fff;border-radius:10px;padding:.5rem 1rem;font-weight:600;font-size:.8125rem;text-align:center;text-decoration:none;margin-top:auto;transition:background .2s;"
|
||||
onmouseover="this.style.background='#1E40AF'"
|
||||
onmouseout="this.style.background='#1D4ED8'">
|
||||
{{ cta }} →
|
||||
</a>
|
||||
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
{# Shared disclosure #}
|
||||
<p style="font-size:.6875rem;color:#94A3B8;font-style:italic;margin:.75rem 0 0;text-align:center;">{{ t.affiliate_disclosure }}</p>
|
||||
|
||||
</div>
|
||||
{% endif %}
|
||||
@@ -1777,5 +1777,12 @@
|
||||
"report_q1_confirmed_title": "Download bereit",
|
||||
"report_q1_confirmed_body": "Unten auf den Button klicken, um das vollständige Bericht-PDF zu öffnen.",
|
||||
"report_q1_download_btn": "PDF herunterladen",
|
||||
"report_q1_download_note": "PDF öffnet im Browser. Rechtsklick zum Speichern."
|
||||
"report_q1_download_note": "PDF öffnet im Browser. Rechtsklick zum Speichern.",
|
||||
|
||||
"affiliate_cta_buy": "Zum Angebot",
|
||||
"affiliate_disclosure": "Affiliate-Link — wir erhalten eine Provision ohne Mehrkosten für dich.",
|
||||
"affiliate_pros_label": "Vorteile",
|
||||
"affiliate_cons_label": "Nachteile",
|
||||
"affiliate_at_retailer": "bei {retailer}",
|
||||
"affiliate_our_picks": "Unsere Empfehlungen"
|
||||
}
|
||||
@@ -1780,5 +1780,12 @@
|
||||
"report_q1_confirmed_title": "Your download is ready",
|
||||
"report_q1_confirmed_body": "Click below to open the full report PDF.",
|
||||
"report_q1_download_btn": "Download PDF",
|
||||
"report_q1_download_note": "PDF opens in your browser. Right-click to save."
|
||||
"report_q1_download_note": "PDF opens in your browser. Right-click to save.",
|
||||
|
||||
"affiliate_cta_buy": "View offer",
|
||||
"affiliate_disclosure": "Affiliate link — we may earn a commission at no extra cost to you.",
|
||||
"affiliate_pros_label": "Pros",
|
||||
"affiliate_cons_label": "Cons",
|
||||
"affiliate_at_retailer": "at {retailer}",
|
||||
"affiliate_our_picks": "Our picks"
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
"""Migration 0026: Affiliate product catalog + click tracking tables.
|
||||
|
||||
affiliate_products: admin-managed product catalog for editorial affiliate cards.
|
||||
- slug+language uniqueness mirrors articles (same slug can exist in DE + EN
|
||||
with different affiliate URLs, copy, and pros/cons).
|
||||
- retailer: display name (Amazon, Padel Nuestro, etc.) — stored in full URL
|
||||
with tracking params already baked into affiliate_url.
|
||||
- cta_label: per-product override; empty → use i18n default "Zum Angebot".
|
||||
- status: draft/active/archived — only active products are baked into articles.
|
||||
|
||||
affiliate_clicks: one row per /go/<slug> redirect hit.
|
||||
- ip_hash: SHA256(ip + YYYY-MM-DD + SECRET_KEY[:16]), daily rotation for GDPR.
|
||||
- article_slug: best-effort extraction from Referer header.
|
||||
"""
|
||||
|
||||
|
||||
def up(conn) -> None:
    """Create the affiliate catalog and click-tracking tables plus indexes.

    ``conn`` is an open DB-API connection speaking the sqlite3 dialect
    (AUTOINCREMENT, ``datetime('now')`` defaults); committing is left to
    the migration runner.
    """
    # Product catalog. UNIQUE(slug, language) lets the same slug exist once
    # per language (DE + EN variants, see module docstring). pros/cons are
    # JSON arrays stored as TEXT (default '[]').
    conn.execute("""
        CREATE TABLE affiliate_products (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            slug TEXT NOT NULL,
            name TEXT NOT NULL,
            brand TEXT NOT NULL DEFAULT '',
            category TEXT NOT NULL DEFAULT 'accessory',
            retailer TEXT NOT NULL DEFAULT '',
            affiliate_url TEXT NOT NULL,
            image_url TEXT NOT NULL DEFAULT '',
            price_cents INTEGER,
            currency TEXT NOT NULL DEFAULT 'EUR',
            rating REAL,
            pros TEXT NOT NULL DEFAULT '[]',
            cons TEXT NOT NULL DEFAULT '[]',
            description TEXT NOT NULL DEFAULT '',
            cta_label TEXT NOT NULL DEFAULT '',
            status TEXT NOT NULL DEFAULT 'draft',
            language TEXT NOT NULL DEFAULT 'de',
            sort_order INTEGER NOT NULL DEFAULT 0,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT,
            UNIQUE(slug, language)
        )
    """)
    # One row per /go/<slug> redirect hit. ip_hash rotates daily (see module
    # docstring), so no raw IPs are ever stored.
    conn.execute("""
        CREATE TABLE affiliate_clicks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            product_id INTEGER NOT NULL REFERENCES affiliate_products(id),
            article_slug TEXT,
            referrer TEXT,
            ip_hash TEXT NOT NULL,
            clicked_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
    """)
    # Queries: products by category+status, clicks by product and time
    conn.execute(
        "CREATE INDEX idx_affiliate_products_category_status"
        " ON affiliate_products(category, status)"
    )
    conn.execute(
        "CREATE INDEX idx_affiliate_clicks_product_id"
        " ON affiliate_clicks(product_id)"
    )
    conn.execute(
        "CREATE INDEX idx_affiliate_clicks_clicked_at"
        " ON affiliate_clicks(clicked_at)"
    )
|
||||
@@ -0,0 +1,79 @@
|
||||
"""Migration 0027: Affiliate programs table + program FK on products.
|
||||
|
||||
affiliate_programs: centralises retailer configs (URL template + tag + commission).
|
||||
- url_template uses {product_id} and {tag} placeholders, assembled at redirect time.
|
||||
- tracking_tag: e.g. "padelnomics-21" — changing it propagates to all products instantly.
|
||||
- commission_pct: stored as a decimal (0.03 = 3%) for revenue estimates.
|
||||
- status: active/inactive — only active programs appear in the product form dropdown.
|
||||
- notes: internal field for login URLs, account IDs, etc.
|
||||
|
||||
affiliate_products changes:
|
||||
- program_id (nullable FK): new products use a program; existing products keep their
|
||||
baked affiliate_url (backward compat via build_affiliate_url() fallback).
|
||||
- product_identifier: ASIN, product path, or other program-specific ID (e.g. B0XXXXX).
|
||||
|
||||
Amazon OneLink note: we use a single "Amazon" program pointing to amazon.de.
|
||||
Amazon OneLink (configured in the Associates dashboard, no code changes needed)
|
||||
auto-redirects visitors to their local marketplace (UK→amazon.co.uk, ES→amazon.es)
|
||||
with the correct regional tag. One program covers all Amazon marketplaces.
|
||||
"""
|
||||
import re
|
||||
|
||||
|
||||
def up(conn) -> None:
    """Create affiliate_programs, seed Amazon, and extend affiliate_products.

    Steps:
      1. Create the ``affiliate_programs`` table.
      2. Seed the default ``amazon`` program (OneLink handles geo-redirects,
         so one program covers all marketplaces — see module docstring).
      3. Add ``program_id`` / ``product_identifier`` columns to
         ``affiliate_products`` (nullable FK: old rows keep their baked URL).
      4. Backfill: extract the ASIN from existing Amazon affiliate URLs and
         attach those products to the Amazon program.

    ``conn`` is an open DB-API connection (sqlite3 dialect); committing is
    left to the migration runner.
    """
    conn.execute("""
        CREATE TABLE affiliate_programs (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            slug TEXT NOT NULL UNIQUE,
            url_template TEXT NOT NULL,
            tracking_tag TEXT NOT NULL DEFAULT '',
            commission_pct REAL NOT NULL DEFAULT 0,
            homepage_url TEXT NOT NULL DEFAULT '',
            status TEXT NOT NULL DEFAULT 'active',
            notes TEXT NOT NULL DEFAULT '',
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT
        )
    """)

    # Seed the default Amazon program.
    # OneLink handles geo-redirect to local marketplaces — no per-country programs needed.
    conn.execute("""
        INSERT INTO affiliate_programs (name, slug, url_template, tracking_tag, commission_pct, homepage_url)
        VALUES ('Amazon', 'amazon', 'https://www.amazon.de/dp/{product_id}?tag={tag}', 'padelnomics-21', 3.0, 'https://www.amazon.de')
    """)

    # Add program FK + product identifier to products table.
    # program_id is nullable — existing rows keep their baked affiliate_url.
    conn.execute("""
        ALTER TABLE affiliate_products
        ADD COLUMN program_id INTEGER REFERENCES affiliate_programs(id)
    """)
    conn.execute("""
        ALTER TABLE affiliate_products
        ADD COLUMN product_identifier TEXT NOT NULL DEFAULT ''
    """)

    # Backfill: extract ASIN from existing Amazon affiliate URLs.
    # Pattern: /dp/<ASIN> where ASIN is 10 uppercase alphanumeric chars.
    amazon_program = conn.execute(
        "SELECT id FROM affiliate_programs WHERE slug = 'amazon'"
    ).fetchone()
    # Was an `assert`; asserts are stripped under `python -O`, which would let
    # a broken seed silently leave every product unlinked. Fail loudly instead.
    if amazon_program is None:
        raise RuntimeError("Amazon program must exist after seed")
    amazon_id = amazon_program[0]

    rows = conn.execute(
        "SELECT id, affiliate_url FROM affiliate_products"
    ).fetchall()
    asin_re = re.compile(r"/dp/([A-Z0-9]{10})")
    for product_id, url in rows:
        if not url:
            # Products without a baked URL have nothing to backfill from.
            continue
        m = asin_re.search(url)
        if m:
            conn.execute(
                "UPDATE affiliate_products SET program_id=?, product_identifier=? WHERE id=?",
                (amazon_id, m.group(1), product_id),
            )
|
||||
@@ -284,6 +284,184 @@ LEADS = [
|
||||
]
|
||||
|
||||
|
||||
# Dev/demo seed catalog. Field semantics mirror the affiliate_products table:
#   - pros/cons: JSON-encoded string lists, stored as TEXT.
#   - price_cents: integer cents (currency defaults to EUR on insert).
#   - status: one "draft" row is included deliberately so tests can verify
#     that draft products never appear on the public site.
AFFILIATE_PRODUCTS = [
    # Rackets
    {
        "slug": "bullpadel-vertex-04-amazon",
        "name": "Bullpadel Vertex 04",
        "brand": "Bullpadel",
        "category": "racket",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST01?tag=padelnomics-21",
        "price_cents": 17999,
        "rating": 4.7,
        "pros": '["Carbon-Rahmen für maximale Power", "Diamant-Form für aggressive Spieler", "Sehr gute Balance"]',
        "cons": '["Nur für fortgeschrittene Spieler", "Höherer Preis"]',
        "description": "Der Vertex 04 ist der Flaggschiff-Schläger von Bullpadel für Power-Spieler.",
        "status": "active",
        "language": "de",
        "sort_order": 1,
    },
    {
        "slug": "head-delta-pro-amazon",
        "name": "HEAD Delta Pro",
        "brand": "HEAD",
        "category": "racket",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST02?tag=padelnomics-21",
        "price_cents": 14999,
        "rating": 4.5,
        "pros": '["Sehr kontrollorientiert", "Ideal für Defensivspieler", "Leicht"]',
        "cons": '["Weniger Power als Diamant-Formen"]',
        "description": "Runde Form mit perfekter Kontrolle — ideal für Einsteiger und Defensivspieler.",
        "status": "active",
        "language": "de",
        "sort_order": 2,
    },
    {
        "slug": "adidas-metalbone-30-amazon",
        "name": "Adidas Metalbone 3.0",
        "brand": "Adidas",
        "category": "racket",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST03?tag=padelnomics-21",
        "price_cents": 18999,
        "rating": 4.8,
        "pros": '["Brutale Power", "Hochwertige Verarbeitung", "Sehr beliebt auf Pro-Tour"]',
        "cons": '["Teuer", "Gewöhnungsbedürftig"]',
        "description": "Das Flaggschiff von Adidas Padel — getragen von den besten Profis der Welt.",
        "status": "active",
        "language": "de",
        "sort_order": 3,
    },
    {
        "slug": "wilson-bela-pro-v2-amazon",
        "name": "Wilson Bela Pro v2",
        "brand": "Wilson",
        "category": "racket",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST04?tag=padelnomics-21",
        "price_cents": 16999,
        "rating": 4.6,
        "pros": '["Bekannter Signature-Schläger", "Gute Mischung aus Power und Kontrolle"]',
        "cons": '["Fortgeschrittene bevorzugt"]',
        "description": "Der Schläger von Fernando Belasteguín — einer der meistgekauften Schläger weltweit.",
        "status": "active",
        "language": "de",
        "sort_order": 4,
    },
    # Beginner racket — draft (tests that draft products are excluded from public)
    {
        "slug": "dunlop-aero-star-amazon",
        "name": "Dunlop Aero Star",
        "brand": "Dunlop",
        "category": "racket",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST05?tag=padelnomics-21",
        "price_cents": 8999,
        "rating": 4.2,
        "pros": '["Günstig", "Für Einsteiger ideal"]',
        "cons": '["Wenig Power für Fortgeschrittene"]',
        "description": "Solider Einsteigerschläger für unter 90 Euro.",
        "status": "draft",
        "language": "de",
        "sort_order": 5,
    },
    # Shoes
    {
        "slug": "adidas-adipower-ctrl-amazon",
        "name": "Adidas Adipower Ctrl",
        "brand": "Adidas",
        "category": "shoe",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST10?tag=padelnomics-21",
        "price_cents": 9999,
        "rating": 4.4,
        "pros": '["Hervorragender Halt auf Sand", "Leicht und atmungsaktiv"]',
        "cons": '["Größenfehler möglich — eine Größe größer bestellen"]',
        "description": "Professioneller Padelschuh mit optimierter Sohle für Sand- und Kunstrasencourts.",
        "status": "active",
        "language": "de",
        "sort_order": 1,
    },
    {
        "slug": "babolat-jet-premura-amazon",
        "name": "Babolat Jet Premura",
        "brand": "Babolat",
        "category": "shoe",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST11?tag=padelnomics-21",
        "price_cents": 11999,
        "rating": 4.6,
        "pros": '["Sehr leicht", "Gute Dämpfung", "Stylisches Design"]',
        "cons": '["Teurer als Mitbewerber"]',
        "description": "Ultraleichter Padelschuh von Babolat — ideal für schnelle Spieler.",
        "status": "active",
        "language": "de",
        "sort_order": 2,
    },
    # Balls
    {
        "slug": "head-padel-pro-balls-amazon",
        "name": "HEAD Padel Pro Bälle (3er-Dose)",
        "brand": "HEAD",
        "category": "ball",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST20?tag=padelnomics-21",
        "price_cents": 799,
        "rating": 4.5,
        "pros": '["Offizieller Turnierball", "Guter Druckerhalt", "Günstig"]',
        "cons": '["Bei intensivem Spiel nach 4–5 Sessions platter"]',
        "description": "Offizieller Turnierball von HEAD — der am häufigsten gespielte Padelball in Europa.",
        "status": "active",
        "language": "de",
        "sort_order": 1,
    },
    # Grips/Accessories
    {
        "slug": "bullpadel-overgrip-3er-amazon",
        "name": "Bullpadel Overgrip (3er-Pack)",
        "brand": "Bullpadel",
        "category": "grip",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST30?tag=padelnomics-21",
        "price_cents": 499,
        "rating": 4.3,
        "pros": '["Günstig", "Guter Halt auch bei Schweiß", "Einfach zu wechseln"]',
        "cons": '["Hält weniger lang als Originalgriff"]',
        "description": "Günstiges Overgrip-Set — jeder Padelspieler sollte regelmäßig wechseln.",
        "status": "active",
        "language": "de",
        "sort_order": 1,
    },
    {
        "slug": "nox-padel-bag-amazon",
        "name": "NOX ML10 Schläger-Tasche",
        "brand": "NOX",
        "category": "accessory",
        "retailer": "Amazon",
        "affiliate_url": "https://www.amazon.de/dp/B0CXTEST40?tag=padelnomics-21",
        "price_cents": 5999,
        "rating": 4.4,
        "pros": '["Platz für 2 Schläger", "Gepolstertes Schlägerfach", "Robustes Material"]',
        "cons": '["Kein Schuhfach"]',
        "description": "Praktische Padelschläger-Tasche mit Platz für 2 Schläger und Zubehör.",
        "status": "active",
        "language": "de",
        "sort_order": 1,
    },
]
|
||||
|
||||
# Article slugs for realistic click referrers
# (used to fabricate Referer URLs when seeding affiliate_clicks; presumably
# these match articles seeded elsewhere — TODO confirm they exist in dev data).
_ARTICLE_SLUGS = [
    "beste-padelschlaeger-2026",
    "padelschlaeger-anfaenger",
    "padelschuhe-test",
    "padelbaelle-vergleich",
    "padel-zubehoer",
]
|
||||
|
||||
|
||||
def main():
|
||||
db_path = DATABASE_PATH
|
||||
if not Path(db_path).exists():
|
||||
@@ -481,6 +659,72 @@ def main():
|
||||
)
|
||||
logger.info(" PadelTech unlocked lead #%s", lead_id)
|
||||
|
||||
# 7. Seed affiliate products
|
||||
logger.info("Seeding %s affiliate products...", len(AFFILIATE_PRODUCTS))
|
||||
product_ids: dict[str, int] = {}
|
||||
for p in AFFILIATE_PRODUCTS:
|
||||
existing = conn.execute(
|
||||
"SELECT id FROM affiliate_products WHERE slug = ? AND language = ?",
|
||||
(p["slug"], p["language"]),
|
||||
).fetchone()
|
||||
if existing:
|
||||
product_ids[p["slug"]] = existing["id"]
|
||||
logger.info(" %s already exists (id=%s)", p["name"], existing["id"])
|
||||
continue
|
||||
cursor = conn.execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, currency, rating, pros, cons, description,
|
||||
status, language, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
p["slug"], p["name"], p["brand"], p["category"], p["retailer"],
|
||||
p["affiliate_url"], p["price_cents"], p["rating"],
|
||||
p["pros"], p["cons"], p["description"],
|
||||
p["status"], p["language"], p["sort_order"],
|
||||
),
|
||||
)
|
||||
product_ids[p["slug"]] = cursor.lastrowid
|
||||
logger.info(" %s -> id=%s (%s)", p["name"], cursor.lastrowid, p["status"])
|
||||
|
||||
# 8. Seed affiliate clicks (realistic 30-day spread for dashboard charts)
|
||||
logger.info("Seeding affiliate clicks...")
|
||||
import random
|
||||
rng = random.Random(42)
|
||||
# click distribution: more on popular rackets, fewer on accessories
|
||||
click_weights = [
|
||||
("bullpadel-vertex-04-amazon", "beste-padelschlaeger-2026", 52),
|
||||
("adidas-metalbone-30-amazon", "beste-padelschlaeger-2026", 41),
|
||||
("head-delta-pro-amazon", "padelschlaeger-anfaenger", 38),
|
||||
("wilson-bela-pro-v2-amazon", "padelschlaeger-anfaenger", 29),
|
||||
("adidas-adipower-ctrl-amazon", "padelschuhe-test", 24),
|
||||
("babolat-jet-premura-amazon", "padelschuhe-test", 18),
|
||||
("head-padel-pro-balls-amazon", "padelbaelle-vergleich", 15),
|
||||
("bullpadel-overgrip-3er-amazon", "padel-zubehoer", 11),
|
||||
("nox-padel-bag-amazon", "padel-zubehoer", 8),
|
||||
]
|
||||
existing_click_count = conn.execute("SELECT COUNT(*) FROM affiliate_clicks").fetchone()[0]
|
||||
if existing_click_count == 0:
|
||||
for slug, article_slug, count in click_weights:
|
||||
pid = product_ids.get(slug)
|
||||
if not pid:
|
||||
continue
|
||||
for _ in range(count):
|
||||
days_ago = rng.randint(0, 29)
|
||||
hours_ago = rng.randint(0, 23)
|
||||
clicked_at = (now - timedelta(days=days_ago, hours=hours_ago)).strftime("%Y-%m-%d %H:%M:%S")
|
||||
ip_hash = f"dev_{slug}_{_:04d}" # stable fake hash (not real SHA256)
|
||||
conn.execute(
|
||||
"""INSERT INTO affiliate_clicks
|
||||
(product_id, article_slug, referrer, ip_hash, clicked_at)
|
||||
VALUES (?, ?, ?, ?, ?)""",
|
||||
(pid, article_slug, f"https://padelnomics.io/de/blog/{article_slug}", ip_hash, clicked_at),
|
||||
)
|
||||
total_clicks = sum(c for _, _, c in click_weights)
|
||||
logger.info(" Inserted %s click events across 9 products", total_clicks)
|
||||
else:
|
||||
logger.info(" Clicks already seeded (%s rows), skipping", existing_click_count)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
|
||||
@@ -218,9 +218,7 @@
|
||||
.nav-bar[data-navopen="true"] .nav-mobile {
|
||||
display: flex;
|
||||
}
|
||||
.nav-mobile a,
|
||||
.nav-mobile button.nav-auth-btn,
|
||||
.nav-mobile a.nav-auth-btn {
|
||||
.nav-mobile a:not(.nav-auth-btn) {
|
||||
display: block;
|
||||
padding: 0.625rem 0;
|
||||
border-bottom: 1px solid #F1F5F9;
|
||||
@@ -230,15 +228,18 @@
|
||||
text-decoration: none;
|
||||
transition: color 0.15s;
|
||||
}
|
||||
.nav-mobile a:last-child { border-bottom: none; }
|
||||
.nav-mobile a:hover { color: #1D4ED8; }
|
||||
.nav-mobile a:not(.nav-auth-btn):last-child { border-bottom: none; }
|
||||
.nav-mobile a:not(.nav-auth-btn):hover { color: #1D4ED8; }
|
||||
/* nav-auth-btn in mobile menu: override block style, keep button colours */
|
||||
.nav-mobile a.nav-auth-btn,
|
||||
.nav-mobile button.nav-auth-btn {
|
||||
display: inline-flex;
|
||||
margin-top: 0.5rem;
|
||||
padding: 6px 16px;
|
||||
border-bottom: none;
|
||||
width: auto;
|
||||
align-self: flex-start;
|
||||
color: #fff;
|
||||
}
|
||||
.nav-mobile .nav-mobile-section {
|
||||
font-size: 0.6875rem;
|
||||
@@ -569,6 +570,270 @@
|
||||
@apply px-4 pb-4 text-slate-dark;
|
||||
}
|
||||
|
||||
/* ── Article Timeline (phase/process diagrams) ── */
|
||||
.article-timeline {
|
||||
display: flex;
|
||||
gap: 0;
|
||||
margin: 1.5rem 0 2rem;
|
||||
position: relative;
|
||||
overflow-x: auto;
|
||||
padding-bottom: 0.5rem;
|
||||
}
|
||||
.article-timeline__phase {
|
||||
flex: 1;
|
||||
min-width: 130px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
}
|
||||
/* Connecting line between phases */
|
||||
.article-timeline__phase + .article-timeline__phase::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 22px;
|
||||
left: calc(-50% + 22px);
|
||||
right: calc(50% + 22px);
|
||||
height: 2px;
|
||||
background: #CBD5E1;
|
||||
z-index: 0;
|
||||
}
|
||||
.article-timeline__phase + .article-timeline__phase::after {
|
||||
content: '›';
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
left: calc(-50% + 18px);
|
||||
font-size: 1rem;
|
||||
line-height: 1;
|
||||
color: #94A3B8;
|
||||
z-index: 1;
|
||||
}
|
||||
.article-timeline__num {
|
||||
width: 44px;
|
||||
height: 44px;
|
||||
border-radius: 50%;
|
||||
background: #0F172A;
|
||||
color: #fff;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 700;
|
||||
font-family: var(--font-display);
|
||||
flex-shrink: 0;
|
||||
position: relative;
|
||||
z-index: 2;
|
||||
}
|
||||
.article-timeline__card {
|
||||
margin-top: 0.75rem;
|
||||
background: #F8FAFC;
|
||||
border: 1px solid #E2E8F0;
|
||||
border-radius: 12px;
|
||||
padding: 0.75rem 0.875rem;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
.article-timeline__title {
|
||||
font-weight: 700;
|
||||
font-size: 0.8125rem;
|
||||
color: #0F172A;
|
||||
line-height: 1.3;
|
||||
margin-bottom: 0.25rem;
|
||||
font-family: var(--font-display);
|
||||
}
|
||||
.article-timeline__subtitle {
|
||||
font-size: 0.75rem;
|
||||
color: #64748B;
|
||||
margin-bottom: 0.375rem;
|
||||
line-height: 1.3;
|
||||
}
|
||||
.article-timeline__meta {
|
||||
font-size: 0.6875rem;
|
||||
color: #94A3B8;
|
||||
line-height: 1.4;
|
||||
}
|
||||
/* Mobile: vertical timeline */
|
||||
@media (max-width: 600px) {
|
||||
.article-timeline {
|
||||
flex-direction: column;
|
||||
gap: 0.75rem;
|
||||
overflow-x: visible;
|
||||
}
|
||||
.article-timeline__phase {
|
||||
flex-direction: row;
|
||||
align-items: flex-start;
|
||||
min-width: auto;
|
||||
gap: 0.75rem;
|
||||
}
|
||||
.article-timeline__phase + .article-timeline__phase::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: calc(-0.375rem);
|
||||
left: 21px;
|
||||
right: auto;
|
||||
width: 2px;
|
||||
height: 0.75rem;
|
||||
background: #CBD5E1;
|
||||
}
|
||||
.article-timeline__phase + .article-timeline__phase::after {
|
||||
content: '›';
|
||||
position: absolute;
|
||||
top: calc(-0.3rem);
|
||||
left: 15px;
|
||||
font-size: 0.9rem;
|
||||
transform: rotate(90deg);
|
||||
}
|
||||
.article-timeline__card {
|
||||
margin-top: 0;
|
||||
text-align: left;
|
||||
flex: 1;
|
||||
}
|
||||
.article-timeline__num {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
}
|
||||
|
||||
/* ── Article Callout Boxes ── */
|
||||
.article-callout {
|
||||
display: flex;
|
||||
gap: 0.875rem;
|
||||
padding: 1rem 1.25rem;
|
||||
border-radius: 12px;
|
||||
border-left: 4px solid;
|
||||
margin: 1.5rem 0;
|
||||
}
|
||||
.article-callout::before {
|
||||
font-size: 1.1rem;
|
||||
flex-shrink: 0;
|
||||
line-height: 1.5;
|
||||
}
|
||||
.article-callout__body {
|
||||
flex: 1;
|
||||
}
|
||||
.article-callout__title {
|
||||
font-weight: 700;
|
||||
font-size: 0.875rem;
|
||||
margin-bottom: 0.375rem;
|
||||
display: block;
|
||||
}
|
||||
.article-callout p {
|
||||
font-size: 0.875rem;
|
||||
line-height: 1.6;
|
||||
margin: 0;
|
||||
color: inherit;
|
||||
}
|
||||
.article-callout--warning {
|
||||
background: #FFFBEB;
|
||||
border-color: #D97706;
|
||||
color: #78350F;
|
||||
}
|
||||
.article-callout--warning::before {
|
||||
content: '⚠';
|
||||
color: #D97706;
|
||||
}
|
||||
.article-callout--warning .article-callout__title {
|
||||
color: #92400E;
|
||||
}
|
||||
.article-callout--tip {
|
||||
background: #F0FDF4;
|
||||
border-color: #16A34A;
|
||||
color: #14532D;
|
||||
}
|
||||
.article-callout--tip::before {
|
||||
content: '💡';
|
||||
}
|
||||
.article-callout--tip .article-callout__title {
|
||||
color: #166534;
|
||||
}
|
||||
.article-callout--info {
|
||||
background: #EFF6FF;
|
||||
border-color: #1D4ED8;
|
||||
color: #1E3A5F;
|
||||
}
|
||||
.article-callout--info::before {
|
||||
content: 'ℹ';
|
||||
color: #1D4ED8;
|
||||
}
|
||||
.article-callout--info .article-callout__title {
|
||||
color: #1E40AF;
|
||||
}
|
||||
|
||||
/* ── Article Cards (2-col comparison grid) ── */
|
||||
.article-cards {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
gap: 1rem;
|
||||
margin: 1.5rem 0;
|
||||
}
|
||||
@media (max-width: 580px) {
|
||||
.article-cards {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
.article-card {
|
||||
border-radius: 12px;
|
||||
border: 1px solid #E2E8F0;
|
||||
overflow: hidden;
|
||||
background: #fff;
|
||||
}
|
||||
.article-card__accent {
|
||||
height: 4px;
|
||||
}
|
||||
.article-card--success .article-card__accent { background: #16A34A; }
|
||||
.article-card--failure .article-card__accent { background: #EF4444; }
|
||||
.article-card--neutral .article-card__accent { background: #1D4ED8; }
|
||||
.article-card--established .article-card__accent { background: #0F172A; }
|
||||
.article-card--growth .article-card__accent { background: #1D4ED8; }
|
||||
.article-card--emerging .article-card__accent { background: #16A34A; }
|
||||
.article-card__inner {
|
||||
padding: 1rem 1.125rem;
|
||||
}
|
||||
.article-card__title {
|
||||
font-weight: 700;
|
||||
font-size: 0.875rem;
|
||||
color: #0F172A;
|
||||
margin-bottom: 0.5rem;
|
||||
font-family: var(--font-display);
|
||||
display: block;
|
||||
}
|
||||
.article-card__body {
|
||||
font-size: 0.8125rem;
|
||||
color: #475569;
|
||||
line-height: 1.6;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* ── Severity Pills (risk table badges) ── */
|
||||
.severity {
|
||||
display: inline-block;
|
||||
padding: 0.125rem 0.5rem;
|
||||
border-radius: 9999px;
|
||||
font-size: 0.6875rem;
|
||||
font-weight: 700;
|
||||
letter-spacing: 0.03em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.severity--high {
|
||||
background: #FEE2E2;
|
||||
color: #991B1B;
|
||||
}
|
||||
.severity--medium-high {
|
||||
background: #FEF3C7;
|
||||
color: #92400E;
|
||||
}
|
||||
.severity--medium {
|
||||
background: #FEF9C3;
|
||||
color: #713F12;
|
||||
}
|
||||
.severity--low-medium {
|
||||
background: #ECFDF5;
|
||||
color: #065F46;
|
||||
}
|
||||
.severity--low {
|
||||
background: #F0FDF4;
|
||||
color: #166534;
|
||||
}
|
||||
|
||||
/* Inline HTMX loading indicator for search forms.
|
||||
Opacity is handled by HTMX's built-in .htmx-indicator CSS.
|
||||
This class only adds positioning and the spin animation. */
|
||||
|
||||
@@ -735,6 +735,107 @@ async def handle_run_extraction(payload: dict) -> None:
|
||||
logger.info("Extraction completed: %s", result.stdout[-300:] if result.stdout else "(no output)")
|
||||
|
||||
|
||||
@task("run_transform")
|
||||
async def handle_run_transform(payload: dict) -> None:
|
||||
"""Run SQLMesh transform (prod plan --auto-apply) in the background.
|
||||
|
||||
Shells out to `uv run sqlmesh -p transform/sqlmesh_padelnomics plan prod --auto-apply`.
|
||||
2-hour absolute timeout — same as extraction.
|
||||
"""
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
repo_root = Path(__file__).resolve().parents[4]
|
||||
result = await asyncio.to_thread(
|
||||
subprocess.run,
|
||||
["uv", "run", "sqlmesh", "-p", "transform/sqlmesh_padelnomics", "plan", "prod", "--auto-apply"],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=7200,
|
||||
cwd=str(repo_root),
|
||||
)
|
||||
if result.returncode != 0:
|
||||
raise RuntimeError(
|
||||
f"SQLMesh transform failed (exit {result.returncode}): {result.stderr[:500]}"
|
||||
)
|
||||
logger.info("SQLMesh transform completed: %s", result.stdout[-300:] if result.stdout else "(no output)")
|
||||
|
||||
|
||||
@task("run_export")
|
||||
async def handle_run_export(payload: dict) -> None:
|
||||
"""Export serving tables from lakehouse.duckdb → analytics.duckdb.
|
||||
|
||||
Shells out to `uv run python src/padelnomics/export_serving.py`.
|
||||
10-minute absolute timeout.
|
||||
"""
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
repo_root = Path(__file__).resolve().parents[4]
|
||||
result = await asyncio.to_thread(
|
||||
subprocess.run,
|
||||
["uv", "run", "python", "src/padelnomics/export_serving.py"],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=600,
|
||||
cwd=str(repo_root),
|
||||
)
|
||||
if result.returncode != 0:
|
||||
raise RuntimeError(
|
||||
f"Export failed (exit {result.returncode}): {result.stderr[:500]}"
|
||||
)
|
||||
logger.info("Export completed: %s", result.stdout[-300:] if result.stdout else "(no output)")
|
||||
|
||||
|
||||
@task("run_pipeline")
|
||||
async def handle_run_pipeline(payload: dict) -> None:
|
||||
"""Run full ELT pipeline: extract → transform → export, stopping on first failure."""
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
repo_root = Path(__file__).resolve().parents[4]
|
||||
|
||||
steps = [
|
||||
(
|
||||
"extraction",
|
||||
["uv", "run", "--package", "padelnomics_extract", "extract"],
|
||||
7200,
|
||||
),
|
||||
(
|
||||
"transform",
|
||||
["uv", "run", "sqlmesh", "-p", "transform/sqlmesh_padelnomics", "plan", "prod", "--auto-apply"],
|
||||
7200,
|
||||
),
|
||||
(
|
||||
"export",
|
||||
["uv", "run", "python", "src/padelnomics/export_serving.py"],
|
||||
600,
|
||||
),
|
||||
]
|
||||
|
||||
for step_name, cmd, timeout_seconds in steps:
|
||||
logger.info("Pipeline step starting: %s", step_name)
|
||||
result = await asyncio.to_thread(
|
||||
subprocess.run,
|
||||
cmd,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=timeout_seconds,
|
||||
cwd=str(repo_root),
|
||||
)
|
||||
if result.returncode != 0:
|
||||
raise RuntimeError(
|
||||
f"Pipeline failed at {step_name} (exit {result.returncode}): {result.stderr[:500]}"
|
||||
)
|
||||
logger.info(
|
||||
"Pipeline step complete: %s — %s",
|
||||
step_name,
|
||||
result.stdout[-200:] if result.stdout else "(no output)",
|
||||
)
|
||||
|
||||
logger.info("Full pipeline complete (extract → transform → export)")
|
||||
|
||||
|
||||
@task("generate_articles")
|
||||
async def handle_generate_articles(payload: dict) -> None:
|
||||
"""Generate articles from a template in the background."""
|
||||
@@ -745,7 +846,7 @@ async def handle_generate_articles(payload: dict) -> None:
|
||||
slug = payload["template_slug"]
|
||||
start_date = date_cls.fromisoformat(payload["start_date"])
|
||||
articles_per_day = payload.get("articles_per_day", 3)
|
||||
limit = payload.get("limit", 500)
|
||||
limit = payload.get("limit", 0)
|
||||
task_id = payload.get("_task_id")
|
||||
|
||||
count = await generate_articles(
|
||||
|
||||
616
web/tests/test_affiliate.py
Normal file
616
web/tests/test_affiliate.py
Normal file
@@ -0,0 +1,616 @@
|
||||
"""
|
||||
Tests for the affiliate product system.
|
||||
|
||||
Covers: hash_ip determinism, product CRUD, bake_product_cards marker replacement,
|
||||
click redirect (302 + logged), rate limiting, inactive product 404, multi-retailer,
|
||||
program CRUD, build_affiliate_url(), program-based redirect.
|
||||
"""
|
||||
import json
|
||||
from datetime import date
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from padelnomics.affiliate import (
|
||||
build_affiliate_url,
|
||||
get_all_products,
|
||||
get_all_programs,
|
||||
get_click_counts,
|
||||
get_click_stats,
|
||||
get_product,
|
||||
get_products_by_category,
|
||||
get_program,
|
||||
get_program_by_slug,
|
||||
hash_ip,
|
||||
log_click,
|
||||
)
|
||||
from padelnomics.content.routes import PRODUCT_GROUP_RE, PRODUCT_RE, bake_product_cards
|
||||
from padelnomics.core import execute, fetch_all
|
||||
|
||||
# ── Helpers ────────────────────────────────────────────────────────────────────
|
||||
|
||||
async def _insert_product(
|
||||
slug="test-racket-amazon",
|
||||
name="Test Racket",
|
||||
brand="TestBrand",
|
||||
category="racket",
|
||||
retailer="Amazon",
|
||||
affiliate_url="https://amazon.de/dp/TEST?tag=test-21",
|
||||
status="active",
|
||||
language="de",
|
||||
price_cents=14999,
|
||||
pros=None,
|
||||
cons=None,
|
||||
sort_order=0,
|
||||
) -> int:
|
||||
"""Insert an affiliate product, return its id."""
|
||||
return await execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, currency, status, language, pros, cons, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, status, language,
|
||||
json.dumps(pros or ["Gut"]),
|
||||
json.dumps(cons or ["Teuer"]),
|
||||
sort_order,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
# ── hash_ip ────────────────────────────────────────────────────────────────────
|
||||
|
||||
def test_hash_ip_deterministic():
|
||||
"""Same IP + same day → same hash."""
|
||||
h1 = hash_ip("1.2.3.4")
|
||||
h2 = hash_ip("1.2.3.4")
|
||||
assert h1 == h2
|
||||
assert len(h1) == 64 # SHA256 hex digest
|
||||
|
||||
|
||||
def test_hash_ip_different_ips_differ():
|
||||
"""Different IPs → different hashes."""
|
||||
assert hash_ip("1.2.3.4") != hash_ip("5.6.7.8")
|
||||
|
||||
|
||||
def test_hash_ip_rotates_daily():
|
||||
"""Different days → different hashes for same IP (GDPR daily rotation)."""
|
||||
with patch("padelnomics.affiliate.date") as mock_date:
|
||||
mock_date.today.return_value = date(2026, 2, 1)
|
||||
h1 = hash_ip("1.2.3.4")
|
||||
mock_date.today.return_value = date(2026, 2, 2)
|
||||
h2 = hash_ip("1.2.3.4")
|
||||
assert h1 != h2
|
||||
|
||||
|
||||
# ── get_product ────────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_active_by_lang(db):
|
||||
"""get_product returns active product for correct language."""
|
||||
await _insert_product(slug="vertex-amazon", language="de", status="active")
|
||||
product = await get_product("vertex-amazon", "de")
|
||||
assert product is not None
|
||||
assert product["slug"] == "vertex-amazon"
|
||||
assert isinstance(product["pros"], list)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_draft_returns_none(db):
|
||||
"""Draft products are not returned."""
|
||||
await _insert_product(slug="vertex-draft", status="draft")
|
||||
product = await get_product("vertex-draft", "de")
|
||||
assert product is None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_lang_fallback(db):
|
||||
"""Falls back to any language when no match for requested lang."""
|
||||
await _insert_product(slug="vertex-de-only", language="de", status="active")
|
||||
# Request EN but only DE exists — should fall back
|
||||
product = await get_product("vertex-de-only", "en")
|
||||
assert product is not None
|
||||
assert product["language"] == "de"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_product_not_found(db):
|
||||
"""Returns None for unknown slug."""
|
||||
product = await get_product("nonexistent-slug", "de")
|
||||
assert product is None
|
||||
|
||||
|
||||
# ── get_products_by_category ───────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_products_by_category_sorted(db):
|
||||
"""Returns products sorted by sort_order."""
|
||||
await _insert_product(slug="racket-b", name="Racket B", sort_order=2)
|
||||
await _insert_product(slug="racket-a", name="Racket A", sort_order=1)
|
||||
products = await get_products_by_category("racket", "de")
|
||||
assert len(products) == 2
|
||||
assert products[0]["sort_order"] == 1
|
||||
assert products[1]["sort_order"] == 2
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_products_by_category_inactive_excluded(db):
|
||||
"""Draft and archived products are excluded."""
|
||||
await _insert_product(slug="racket-draft", status="draft")
|
||||
await _insert_product(slug="racket-archived", status="archived")
|
||||
products = await get_products_by_category("racket", "de")
|
||||
assert products == []
|
||||
|
||||
|
||||
# ── get_all_products ───────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_products_no_filter(db):
|
||||
"""Returns all products regardless of status."""
|
||||
await _insert_product(slug="p1", status="active")
|
||||
await _insert_product(slug="p2", status="draft")
|
||||
products = await get_all_products()
|
||||
assert len(products) == 2
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_products_status_filter(db):
|
||||
"""Status filter returns only matching rows."""
|
||||
await _insert_product(slug="p-active", status="active")
|
||||
await _insert_product(slug="p-draft", status="draft")
|
||||
active = await get_all_products(status="active")
|
||||
assert len(active) == 1
|
||||
assert active[0]["slug"] == "p-active"
|
||||
|
||||
|
||||
# ── log_click + get_click_counts ──────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_log_click_inserts_row(db):
|
||||
"""log_click inserts a row into affiliate_clicks."""
|
||||
product_id = await _insert_product(slug="clickable")
|
||||
await log_click(product_id, "1.2.3.4", "beste-padelschlaeger", "https://example.com/de/blog/test")
|
||||
rows = await fetch_all("SELECT * FROM affiliate_clicks WHERE product_id = ?", (product_id,))
|
||||
assert len(rows) == 1
|
||||
assert rows[0]["article_slug"] == "beste-padelschlaeger"
|
||||
# IP hash must not be the raw IP
|
||||
assert rows[0]["ip_hash"] != "1.2.3.4"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_click_counts(db):
|
||||
"""get_click_counts returns dict of product_id → count."""
|
||||
pid = await _insert_product(slug="tracked-product")
|
||||
await log_click(pid, "1.2.3.4", None, None)
|
||||
await log_click(pid, "5.6.7.8", None, None)
|
||||
counts = await get_click_counts()
|
||||
assert counts.get(pid) == 2
|
||||
|
||||
|
||||
# ── get_click_stats ────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_click_stats_structure(db):
|
||||
"""get_click_stats returns expected keys."""
|
||||
stats = await get_click_stats(days_count=30)
|
||||
assert "total_clicks" in stats
|
||||
assert "top_products" in stats
|
||||
assert "daily_bars" in stats
|
||||
assert "by_retailer" in stats
|
||||
|
||||
|
||||
# ── bake_product_cards ────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_replaces_marker(db):
|
||||
"""[product:slug] marker is replaced with rendered HTML."""
|
||||
await _insert_product(slug="vertex-04-amazon", name="Bullpadel Vertex 04", status="active")
|
||||
html = "<p>Intro</p>\n[product:vertex-04-amazon]\n<p>Outro</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert "[product:vertex-04-amazon]" not in result
|
||||
assert "Bullpadel Vertex 04" in result
|
||||
assert "/go/vertex-04-amazon" in result
|
||||
assert "sponsored" in result
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_missing_slug_passthrough(db):
|
||||
"""Unknown slugs pass through unchanged — no product card rendered."""
|
||||
html = "<p>Text</p>\n[product:nonexistent-slug]\n<p>End</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
# Surrounding content is intact; no product HTML injected
|
||||
assert "<p>Text</p>" in result
|
||||
assert "<p>End</p>" in result
|
||||
assert "<article" not in result # no product card rendered
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_group_marker(db):
|
||||
"""[product-group:category] renders a grid of products."""
|
||||
await _insert_product(slug="shoe-1-amazon", name="Test Shoe", category="shoe", status="active")
|
||||
html = "<h2>Shoes</h2>\n[product-group:shoe]\n<p>End</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert "[product-group:shoe]" not in result
|
||||
assert "Test Shoe" in result
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_no_markers(db):
|
||||
"""HTML without markers is returned unchanged."""
|
||||
html = "<p>No markers here.</p>"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert result == html
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_bake_product_cards_draft_not_shown(db):
|
||||
"""Draft products are not baked into articles."""
|
||||
await _insert_product(slug="draft-product", name="Draft Product", status="draft")
|
||||
html = "[product:draft-product]"
|
||||
result = await bake_product_cards(html, lang="de")
|
||||
assert "Draft Product" not in result
|
||||
|
||||
|
||||
# ── regex patterns ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_product_re_matches():
|
||||
"""PRODUCT_RE matches valid [product:slug] markers."""
|
||||
assert PRODUCT_RE.match("[product:bullpadel-vertex-04-amazon]")
|
||||
assert PRODUCT_RE.match("[product:test-123]")
|
||||
|
||||
|
||||
def test_product_group_re_matches():
|
||||
"""PRODUCT_GROUP_RE matches valid [product-group:category] markers."""
|
||||
assert PRODUCT_GROUP_RE.match("[product-group:racket]")
|
||||
assert PRODUCT_GROUP_RE.match("[product-group:shoe]")
|
||||
|
||||
|
||||
# ── multi-retailer ────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_multi_retailer_same_slug_different_lang(db):
|
||||
"""Same slug can exist in DE and EN with different affiliate URLs."""
|
||||
await _insert_product(
|
||||
slug="vertex-04", language="de",
|
||||
affiliate_url="https://amazon.de/dp/TEST?tag=de-21",
|
||||
)
|
||||
await execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, currency, status, language, pros, cons, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
"vertex-04", "Test Racket EN", "TestBrand", "racket", "Amazon UK",
|
||||
"https://amazon.co.uk/dp/TEST?tag=en-21",
|
||||
14999, "active", "en", "[]", "[]", 0,
|
||||
),
|
||||
)
|
||||
de_product = await get_product("vertex-04", "de")
|
||||
en_product = await get_product("vertex-04", "en")
|
||||
assert de_product is not None
|
||||
assert en_product is not None
|
||||
assert de_product["affiliate_url"] != en_product["affiliate_url"]
|
||||
assert "amazon.de" in de_product["affiliate_url"]
|
||||
assert "amazon.co.uk" in en_product["affiliate_url"]
|
||||
|
||||
|
||||
# ── click redirect (e2e via Quart test client) ────────────────────────────────
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_302(app, db):
|
||||
"""GET /go/<slug> redirects to affiliate_url with 302."""
|
||||
await _insert_product(slug="redirect-test", affiliate_url="https://amazon.de/dp/XYZ?tag=test-21")
|
||||
async with app.test_client() as client:
|
||||
response = await client.get("/go/redirect-test")
|
||||
assert response.status_code == 302
|
||||
assert "amazon.de" in response.headers.get("Location", "")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_logs_click(app, db):
|
||||
"""Successful redirect logs a click in affiliate_clicks."""
|
||||
pid = await _insert_product(slug="logged-test", affiliate_url="https://amazon.de/dp/LOG?tag=test-21")
|
||||
async with app.test_client() as client:
|
||||
await client.get(
|
||||
"/go/logged-test",
|
||||
headers={"Referer": "https://padelnomics.io/de/beste-padelschlaeger-2026"},
|
||||
)
|
||||
rows = await fetch_all("SELECT * FROM affiliate_clicks WHERE product_id = ?", (pid,))
|
||||
assert len(rows) == 1
|
||||
assert rows[0]["article_slug"] == "beste-padelschlaeger-2026"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_inactive_404(app, db):
|
||||
"""Draft products return 404 on /go/<slug>."""
|
||||
await _insert_product(slug="inactive-test", status="draft")
|
||||
async with app.test_client() as client:
|
||||
response = await client.get("/go/inactive-test")
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_unknown_404(app, db):
|
||||
"""Unknown slug returns 404."""
|
||||
async with app.test_client() as client:
|
||||
response = await client.get("/go/totally-unknown-xyz")
|
||||
assert response.status_code == 404
|
||||
|
||||
|
||||
# ── affiliate_programs ────────────────────────────────────────────────────────
|
||||
|
||||
async def _insert_program(
|
||||
name="Test Shop",
|
||||
slug="test-shop",
|
||||
url_template="https://testshop.example.com/p/{product_id}?ref={tag}",
|
||||
tracking_tag="testref",
|
||||
commission_pct=5.0,
|
||||
homepage_url="https://testshop.example.com",
|
||||
status="active",
|
||||
) -> int:
|
||||
"""Insert an affiliate program, return its id."""
|
||||
return await execute(
|
||||
"""INSERT INTO affiliate_programs
|
||||
(name, slug, url_template, tracking_tag, commission_pct, homepage_url, status)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)""",
|
||||
(name, slug, url_template, tracking_tag, commission_pct, homepage_url, status),
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_programs_returns_all(db):
|
||||
"""get_all_programs returns inserted programs sorted by name."""
|
||||
await _insert_program(slug="zebra-shop", name="Zebra Shop")
|
||||
await _insert_program(slug="alpha-shop", name="Alpha Shop")
|
||||
programs = await get_all_programs()
|
||||
names = [p["name"] for p in programs]
|
||||
assert "Alpha Shop" in names
|
||||
assert "Zebra Shop" in names
|
||||
# Sorted by name ascending
|
||||
assert names.index("Alpha Shop") < names.index("Zebra Shop")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_programs_status_filter(db):
|
||||
"""get_all_programs(status='active') excludes inactive programs."""
|
||||
await _insert_program(slug="inactive-prog", status="inactive")
|
||||
await _insert_program(slug="active-prog", name="Active Shop")
|
||||
active = await get_all_programs(status="active")
|
||||
statuses = [p["status"] for p in active]
|
||||
assert all(s == "active" for s in statuses)
|
||||
slugs = [p["slug"] for p in active]
|
||||
assert "inactive-prog" not in slugs
|
||||
assert "active-prog" in slugs
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_program_by_id(db):
|
||||
"""get_program returns a program by id."""
|
||||
prog_id = await _insert_program()
|
||||
prog = await get_program(prog_id)
|
||||
assert prog is not None
|
||||
assert prog["slug"] == "test-shop"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_program_not_found(db):
|
||||
"""get_program returns None for unknown id."""
|
||||
prog = await get_program(99999)
|
||||
assert prog is None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_program_by_slug(db):
|
||||
"""get_program_by_slug returns the program for a known slug."""
|
||||
await _insert_program(slug="find-by-slug")
|
||||
prog = await get_program_by_slug("find-by-slug")
|
||||
assert prog is not None
|
||||
assert prog["name"] == "Test Shop"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_program_by_slug_not_found(db):
|
||||
"""get_program_by_slug returns None for unknown slug."""
|
||||
prog = await get_program_by_slug("nonexistent-slug-xyz")
|
||||
assert prog is None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_get_all_programs_product_count(db):
|
||||
"""get_all_programs includes product_count for each program."""
|
||||
prog_id = await _insert_program(slug="counted-prog")
|
||||
await _insert_product(slug="p-for-count", program_id=prog_id)
|
||||
programs = await get_all_programs()
|
||||
prog = next(p for p in programs if p["slug"] == "counted-prog")
|
||||
assert prog["product_count"] == 1
|
||||
|
||||
|
||||
# ── build_affiliate_url ───────────────────────────────────────────────────────
|
||||
|
||||
def test_build_affiliate_url_with_program():
|
||||
"""build_affiliate_url assembles URL from program template."""
|
||||
product = {"program_id": 1, "product_identifier": "B0TESTTEST", "affiliate_url": ""}
|
||||
program = {"url_template": "https://amazon.de/dp/{product_id}?tag={tag}", "tracking_tag": "mysite-21"}
|
||||
url = build_affiliate_url(product, program)
|
||||
assert url == "https://amazon.de/dp/B0TESTTEST?tag=mysite-21"
|
||||
|
||||
|
||||
def test_build_affiliate_url_legacy_fallback():
|
||||
"""build_affiliate_url falls back to baked affiliate_url when no program."""
|
||||
product = {"program_id": None, "product_identifier": "", "affiliate_url": "https://baked.example.com/p?tag=x"}
|
||||
url = build_affiliate_url(product, None)
|
||||
assert url == "https://baked.example.com/p?tag=x"
|
||||
|
||||
|
||||
def test_build_affiliate_url_no_program_id():
|
||||
"""build_affiliate_url uses fallback when program_id is 0/falsy."""
|
||||
product = {"program_id": 0, "product_identifier": "B0IGNORED", "affiliate_url": "https://fallback.example.com"}
|
||||
program = {"url_template": "https://shop.example.com/{product_id}?ref={tag}", "tracking_tag": "tag123"}
|
||||
url = build_affiliate_url(product, program)
|
||||
# program_id is falsy → fallback
|
||||
assert url == "https://fallback.example.com"
|
||||
|
||||
|
||||
def test_build_affiliate_url_no_program_dict():
|
||||
"""build_affiliate_url uses fallback when program dict is None."""
|
||||
product = {"program_id": 5, "product_identifier": "ASIN123", "affiliate_url": "https://fallback.example.com"}
|
||||
url = build_affiliate_url(product, None)
|
||||
assert url == "https://fallback.example.com"
|
||||
|
||||
|
||||
# ── program-based redirect ────────────────────────────────────────────────────
|
||||
|
||||
async def _insert_product( # noqa: F811 — redefined to add program_id support
|
||||
slug="test-racket-amazon",
|
||||
name="Test Racket",
|
||||
brand="TestBrand",
|
||||
category="racket",
|
||||
retailer="Amazon",
|
||||
affiliate_url="https://amazon.de/dp/TEST?tag=test-21",
|
||||
status="active",
|
||||
language="de",
|
||||
price_cents=14999,
|
||||
pros=None,
|
||||
cons=None,
|
||||
sort_order=0,
|
||||
program_id=None,
|
||||
product_identifier="",
|
||||
) -> int:
|
||||
"""Insert an affiliate product with optional program_id, return its id."""
|
||||
return await execute(
|
||||
"""INSERT INTO affiliate_products
|
||||
(slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, currency, status, language, pros, cons, sort_order,
|
||||
program_id, product_identifier)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, 'EUR', ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
slug, name, brand, category, retailer, affiliate_url,
|
||||
price_cents, status, language,
|
||||
json.dumps(pros or ["Gut"]),
|
||||
json.dumps(cons or ["Teuer"]),
|
||||
sort_order,
|
||||
program_id,
|
||||
product_identifier,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_uses_program_url(app, db):
|
||||
"""Redirect assembles URL from program template when product has program_id."""
|
||||
prog_id = await _insert_program(
|
||||
slug="amzn-test",
|
||||
url_template="https://www.amazon.de/dp/{product_id}?tag={tag}",
|
||||
tracking_tag="testsite-21",
|
||||
)
|
||||
await _insert_product(
|
||||
slug="program-redirect-test",
|
||||
affiliate_url="",
|
||||
program_id=prog_id,
|
||||
product_identifier="B0PROGRAM01",
|
||||
)
|
||||
async with app.test_client() as client:
|
||||
response = await client.get("/go/program-redirect-test")
|
||||
assert response.status_code == 302
|
||||
location = response.headers.get("Location", "")
|
||||
assert "B0PROGRAM01" in location
|
||||
assert "testsite-21" in location
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("db")
|
||||
async def test_affiliate_redirect_legacy_url_still_works(app, db):
|
||||
"""Legacy products with baked affiliate_url still redirect correctly."""
|
||||
await _insert_product(
|
||||
slug="legacy-redirect-test",
|
||||
affiliate_url="https://amazon.de/dp/LEGACY?tag=old-21",
|
||||
program_id=None,
|
||||
product_identifier="",
|
||||
)
|
||||
async with app.test_client() as client:
|
||||
response = await client.get("/go/legacy-redirect-test")
|
||||
assert response.status_code == 302
|
||||
assert "LEGACY" in response.headers.get("Location", "")
|
||||
|
||||
|
||||
# ── migration backfill ────────────────────────────────────────────────────────
|
||||
|
||||
def _load_migration_0027():
|
||||
"""Import migration 0027 via importlib (filename starts with a digit)."""
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
|
||||
versions_dir = Path(__file__).parent.parent / "src/padelnomics/migrations/versions"
|
||||
spec = importlib.util.spec_from_file_location(
|
||||
"migration_0027", versions_dir / "0027_affiliate_programs.py"
|
||||
)
|
||||
mod = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(mod)
|
||||
return mod
|
||||
|
||||
|
||||
def _make_pre_migration_db():
|
||||
"""Create a minimal sqlite3 DB simulating state just before migration 0027.
|
||||
|
||||
Provides the affiliate_products table (migration ALTERs it), but not
|
||||
affiliate_programs (migration CREATEs it).
|
||||
"""
|
||||
import sqlite3
|
||||
|
||||
conn = sqlite3.connect(":memory:")
|
||||
conn.execute("""
|
||||
CREATE TABLE affiliate_products (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
slug TEXT NOT NULL,
|
||||
name TEXT NOT NULL DEFAULT '',
|
||||
affiliate_url TEXT NOT NULL DEFAULT '',
|
||||
UNIQUE(slug)
|
||||
)
|
||||
""")
|
||||
conn.commit()
|
||||
return conn
|
||||
|
||||
|
||||
def test_migration_seeds_amazon_program():
|
||||
"""Migration 0027 up() seeds the Amazon program with expected fields.
|
||||
|
||||
Tests the migration function directly against a real sqlite3 DB
|
||||
(the conftest only replays CREATE TABLE DDL, not INSERT seeds).
|
||||
"""
|
||||
migration = _load_migration_0027()
|
||||
conn = _make_pre_migration_db()
|
||||
migration.up(conn)
|
||||
conn.commit()
|
||||
|
||||
row = conn.execute(
|
||||
"SELECT * FROM affiliate_programs WHERE slug = 'amazon'"
|
||||
).fetchone()
|
||||
assert row is not None
|
||||
cols = [d[0] for d in conn.execute("SELECT * FROM affiliate_programs WHERE slug = 'amazon'").description]
|
||||
prog = dict(zip(cols, row))
|
||||
assert prog["name"] == "Amazon"
|
||||
assert "padelnomics-21" in prog["tracking_tag"]
|
||||
assert "{product_id}" in prog["url_template"]
|
||||
assert "{tag}" in prog["url_template"]
|
||||
assert prog["commission_pct"] == 3.0
|
||||
conn.close()
|
||||
|
||||
|
||||
def test_migration_backfills_asin_from_url():
|
||||
"""Migration 0027 up() extracts ASINs from existing affiliate_url values."""
|
||||
migration = _load_migration_0027()
|
||||
conn = _make_pre_migration_db()
|
||||
conn.execute(
|
||||
"INSERT INTO affiliate_products (slug, affiliate_url) VALUES (?, ?)",
|
||||
("test-racket", "https://www.amazon.de/dp/B0ASIN1234?tag=test-21"),
|
||||
)
|
||||
conn.commit()
|
||||
migration.up(conn)
|
||||
conn.commit()
|
||||
|
||||
row = conn.execute(
|
||||
"SELECT program_id, product_identifier FROM affiliate_products WHERE slug = 'test-racket'"
|
||||
).fetchone()
|
||||
assert row is not None
|
||||
assert row[0] is not None # program_id set
|
||||
assert row[1] == "B0ASIN1234" # ASIN extracted correctly
|
||||
conn.close()
|
||||
@@ -24,9 +24,11 @@ sup = _ilu.module_from_spec(_spec)
|
||||
_spec.loader.exec_module(sup)
|
||||
|
||||
from padelnomics_extract.proxy import ( # noqa: E402
|
||||
load_proxy_urls,
|
||||
fetch_webshare_proxies,
|
||||
load_proxy_tiers,
|
||||
make_round_robin_cycler,
|
||||
make_sticky_selector,
|
||||
make_tiered_cycler,
|
||||
)
|
||||
|
||||
# ── load_workflows ────────────────────────────────────────────────
|
||||
@@ -198,28 +200,112 @@ class TestTopologicalWaves:
|
||||
# ── proxy.py ─────────────────────────────────────────────────────
|
||||
|
||||
|
||||
class TestLoadProxyUrls:
|
||||
def test_returns_empty_when_unset(self, monkeypatch):
|
||||
monkeypatch.delenv("PROXY_URLS", raising=False)
|
||||
assert load_proxy_urls() == []
|
||||
class TestFetchWebshareProxies:
|
||||
def test_parses_ip_port_user_pass_format(self):
|
||||
raw = "1.2.3.4:1080:user1:pass1\n5.6.7.8:1080:user2:pass2\n"
|
||||
with patch("urllib.request.urlopen") as mock_open:
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.read.return_value = raw.encode("utf-8")
|
||||
mock_resp.__enter__ = lambda s: s
|
||||
mock_resp.__exit__ = MagicMock(return_value=False)
|
||||
mock_open.return_value = mock_resp
|
||||
urls = fetch_webshare_proxies("http://example.com/proxy-list")
|
||||
assert urls == [
|
||||
"http://user1:pass1@1.2.3.4:1080",
|
||||
"http://user2:pass2@5.6.7.8:1080",
|
||||
]
|
||||
|
||||
def test_parses_comma_separated_urls(self, monkeypatch):
|
||||
monkeypatch.setenv(
|
||||
"PROXY_URLS",
|
||||
"http://p1:8080,http://p2:8080,http://p3:8080",
|
||||
)
|
||||
urls = load_proxy_urls()
|
||||
assert urls == ["http://p1:8080", "http://p2:8080", "http://p3:8080"]
|
||||
def test_network_error_returns_empty(self):
|
||||
import urllib.error
|
||||
with patch("urllib.request.urlopen", side_effect=urllib.error.URLError("timeout")):
|
||||
result = fetch_webshare_proxies("http://example.com/proxy-list")
|
||||
assert result == []
|
||||
|
||||
def test_strips_whitespace(self, monkeypatch):
|
||||
monkeypatch.setenv("PROXY_URLS", " http://p1:8080 , http://p2:8080 ")
|
||||
urls = load_proxy_urls()
|
||||
assert urls == ["http://p1:8080", "http://p2:8080"]
|
||||
def test_malformed_lines_are_skipped(self):
|
||||
raw = "bad_line\n1.2.3.4:1080:user:pass\nonly:three:parts\n"
|
||||
with patch("urllib.request.urlopen") as mock_open:
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.read.return_value = raw.encode("utf-8")
|
||||
mock_resp.__enter__ = lambda s: s
|
||||
mock_resp.__exit__ = MagicMock(return_value=False)
|
||||
mock_open.return_value = mock_resp
|
||||
urls = fetch_webshare_proxies("http://example.com/proxy-list")
|
||||
assert urls == ["http://user:pass@1.2.3.4:1080"]
|
||||
|
||||
def test_ignores_empty_segments(self, monkeypatch):
|
||||
monkeypatch.setenv("PROXY_URLS", "http://p1:8080,,http://p2:8080,")
|
||||
urls = load_proxy_urls()
|
||||
assert urls == ["http://p1:8080", "http://p2:8080"]
|
||||
def test_max_proxies_respected(self):
|
||||
lines = "\n".join(f"10.0.0.{i}:1080:u{i}:p{i}" for i in range(10))
|
||||
with patch("urllib.request.urlopen") as mock_open:
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.read.return_value = lines.encode("utf-8")
|
||||
mock_resp.__enter__ = lambda s: s
|
||||
mock_resp.__exit__ = MagicMock(return_value=False)
|
||||
mock_open.return_value = mock_resp
|
||||
urls = fetch_webshare_proxies("http://example.com/proxy-list", max_proxies=3)
|
||||
assert len(urls) == 3
|
||||
|
||||
def test_empty_lines_skipped(self):
|
||||
raw = "\n\n1.2.3.4:1080:user:pass\n\n"
|
||||
with patch("urllib.request.urlopen") as mock_open:
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.read.return_value = raw.encode("utf-8")
|
||||
mock_resp.__enter__ = lambda s: s
|
||||
mock_resp.__exit__ = MagicMock(return_value=False)
|
||||
mock_open.return_value = mock_resp
|
||||
urls = fetch_webshare_proxies("http://example.com/proxy-list")
|
||||
assert urls == ["http://user:pass@1.2.3.4:1080"]
|
||||
|
||||
|
||||
class TestLoadProxyTiers:
|
||||
def _clear_proxy_env(self, monkeypatch):
|
||||
for var in ("WEBSHARE_DOWNLOAD_URL", "PROXY_URLS_DATACENTER", "PROXY_URLS_RESIDENTIAL"):
|
||||
monkeypatch.delenv(var, raising=False)
|
||||
|
||||
def test_returns_empty_when_all_unset(self, monkeypatch):
|
||||
self._clear_proxy_env(monkeypatch)
|
||||
assert load_proxy_tiers() == []
|
||||
|
||||
def test_single_datacenter_tier(self, monkeypatch):
|
||||
self._clear_proxy_env(monkeypatch)
|
||||
monkeypatch.setenv("PROXY_URLS_DATACENTER", "http://dc1:8080,http://dc2:8080")
|
||||
tiers = load_proxy_tiers()
|
||||
assert len(tiers) == 1
|
||||
assert tiers[0] == ["http://dc1:8080", "http://dc2:8080"]
|
||||
|
||||
def test_residential_only(self, monkeypatch):
|
||||
self._clear_proxy_env(monkeypatch)
|
||||
monkeypatch.setenv("PROXY_URLS_RESIDENTIAL", "http://res1:8080")
|
||||
tiers = load_proxy_tiers()
|
||||
assert len(tiers) == 1
|
||||
assert tiers[0] == ["http://res1:8080"]
|
||||
|
||||
def test_empty_tiers_skipped(self, monkeypatch):
|
||||
self._clear_proxy_env(monkeypatch)
|
||||
monkeypatch.setenv("PROXY_URLS_DATACENTER", "")
|
||||
monkeypatch.setenv("PROXY_URLS_RESIDENTIAL", "http://res1:8080")
|
||||
tiers = load_proxy_tiers()
|
||||
assert len(tiers) == 1
|
||||
assert tiers[0] == ["http://res1:8080"]
|
||||
|
||||
def test_three_tiers_correct_order(self, monkeypatch):
|
||||
self._clear_proxy_env(monkeypatch)
|
||||
with patch("padelnomics_extract.proxy.fetch_webshare_proxies", return_value=["http://user:pass@1.2.3.4:1080"]):
|
||||
monkeypatch.setenv("WEBSHARE_DOWNLOAD_URL", "http://example.com/list")
|
||||
monkeypatch.setenv("PROXY_URLS_DATACENTER", "http://dc1:8080")
|
||||
monkeypatch.setenv("PROXY_URLS_RESIDENTIAL", "http://res1:8080")
|
||||
tiers = load_proxy_tiers()
|
||||
assert len(tiers) == 3
|
||||
assert tiers[0] == ["http://user:pass@1.2.3.4:1080"] # free
|
||||
assert tiers[1] == ["http://dc1:8080"] # datacenter
|
||||
assert tiers[2] == ["http://res1:8080"] # residential
|
||||
|
||||
def test_webshare_fetch_failure_skips_tier(self, monkeypatch):
|
||||
self._clear_proxy_env(monkeypatch)
|
||||
with patch("padelnomics_extract.proxy.fetch_webshare_proxies", return_value=[]):
|
||||
monkeypatch.setenv("WEBSHARE_DOWNLOAD_URL", "http://example.com/list")
|
||||
monkeypatch.setenv("PROXY_URLS_DATACENTER", "http://dc1:8080")
|
||||
tiers = load_proxy_tiers()
|
||||
assert len(tiers) == 1
|
||||
assert tiers[0] == ["http://dc1:8080"]
|
||||
|
||||
|
||||
class TestRoundRobinCycler:
|
||||
@@ -279,3 +365,266 @@ class TestStickySelectorProxy:
|
||||
fn = make_sticky_selector(urls)
|
||||
for i in range(20):
|
||||
assert fn(f"key_{i}") in urls
|
||||
|
||||
|
||||
class TestTieredCyclerNTier:
    """Escalation behaviour of the N-tier proxy cycler.

    `make_tiered_cycler(tiers, threshold=N)` returns a dict of closures:
    after N consecutive failures the cycler escalates to the next tier,
    a success resets the failure counter, and once every tier has been
    exhausted `next_proxy()` returns None.
    """

    def test_starts_on_first_tier(self):
        tiers = [["http://t0a", "http://t0b"], ["http://t1a"]]
        cycler = make_tiered_cycler(tiers, threshold=3)
        assert cycler["active_tier_index"]() == 0
        assert not cycler["is_exhausted"]()
        assert cycler["next_proxy"]() in tiers[0]

    def test_escalates_after_threshold(self):
        tiers = [["http://t0"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=3)
        # Two failures — stays on tier 0
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 0
        # Third failure — escalates
        escalated = cycler["record_failure"]()
        assert escalated is True
        assert cycler["active_tier_index"]() == 1
        assert cycler["next_proxy"]() == "http://t1"

    def test_escalates_through_all_tiers(self):
        tiers = [["http://t0"], ["http://t1"], ["http://t2"]]
        cycler = make_tiered_cycler(tiers, threshold=2)
        # Exhaust tier 0
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 1
        # Exhaust tier 1
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 2
        # Exhaust tier 2
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["is_exhausted"]()
        assert cycler["next_proxy"]() is None

    def test_success_resets_counter(self):
        tiers = [["http://t0"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=3)
        cycler["record_failure"]()
        cycler["record_failure"]()
        cycler["record_success"]()
        # Counter reset — need threshold more failures to escalate
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 0  # still on tier 0
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 1  # now escalated

    def test_counter_resets_on_escalation(self):
        """After escalating, failure counter resets so new tier gets a fresh start."""
        tiers = [["http://t0"], ["http://t1"], ["http://t2"]]
        cycler = make_tiered_cycler(tiers, threshold=2)
        # Exhaust tier 0
        cycler["record_failure"]()
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 1
        # One failure on tier 1 — should NOT escalate yet (counter reset)
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 1
        # Second failure on tier 1 — escalates to tier 2
        cycler["record_failure"]()
        assert cycler["active_tier_index"]() == 2

    def test_is_exhausted_false_when_tiers_remain(self):
        tiers = [["http://t0"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=1)
        assert not cycler["is_exhausted"]()
        cycler["record_failure"]()  # escalates to tier 1
        assert not cycler["is_exhausted"]()

    def test_is_exhausted_true_after_all_tiers_fail(self):
        tiers = [["http://t0"]]
        cycler = make_tiered_cycler(tiers, threshold=1)
        assert not cycler["is_exhausted"]()
        cycler["record_failure"]()
        assert cycler["is_exhausted"]()
        assert cycler["next_proxy"]() is None

    def test_empty_tiers_immediately_exhausted(self):
        cycler = make_tiered_cycler([], threshold=3)
        assert cycler["is_exhausted"]()
        assert cycler["next_proxy"]() is None
        assert cycler["tier_count"]() == 0

    def test_single_tier_cycles_within_tier(self):
        tiers = [["http://p1", "http://p2", "http://p3"]]
        cycler = make_tiered_cycler(tiers, threshold=10)
        results = [cycler["next_proxy"]() for _ in range(6)]
        assert results == ["http://p1", "http://p2", "http://p3"] * 2

    def test_tier_count_reflects_input(self):
        assert make_tiered_cycler([], threshold=1)["tier_count"]() == 0
        assert make_tiered_cycler([["a"]], threshold=1)["tier_count"]() == 1
        assert make_tiered_cycler([["a"], ["b"], ["c"]], threshold=1)["tier_count"]() == 3

    def test_record_failure_noop_when_exhausted(self):
        tiers = [["http://t0"]]
        cycler = make_tiered_cycler(tiers, threshold=1)
        cycler["record_failure"]()  # exhausts
        assert cycler["is_exhausted"]()
        # Further failures are no-ops, not exceptions
        result = cycler["record_failure"]()
        assert result is False
        assert cycler["is_exhausted"]()

    def test_thread_safety(self):
        """Concurrent next_proxy and record calls do not raise or corrupt state."""
        import threading

        tiers = [["http://t0a", "http://t0b"], ["http://t1a", "http://t1b"]]
        cycler = make_tiered_cycler(tiers, threshold=5)
        errors = []
        lock = threading.Lock()

        def worker():
            try:
                for _ in range(20):
                    cycler["next_proxy"]()
                    cycler["record_failure"]()
                    cycler["record_success"]()
            except Exception as e:
                with lock:
                    errors.append(e)

        threads = [threading.Thread(target=worker) for _ in range(8)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        assert errors == [], f"Thread safety errors: {errors}"
|
||||
|
||||
|
||||
class TestTieredCyclerDeadProxyTracking:
    """Per-proxy dead tracking: individual proxies marked dead are skipped.

    When `make_tiered_cycler` is given `proxy_failure_limit=K`, a proxy that
    accumulates K failures (via `record_failure(proxy_url)`) is marked dead
    and excluded from rotation; a tier whose proxies are all dead triggers
    transparent auto-escalation. `proxy_failure_limit=0` disables tracking.
    """

    def test_dead_proxy_skipped_in_next_proxy(self):
        """After a proxy hits the failure limit it is never returned again."""
        tiers = [["http://dead", "http://live"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=1)
        # Mark http://dead as dead
        cycler["record_failure"]("http://dead")
        # next_proxy must always return the live one
        for _ in range(6):
            assert cycler["next_proxy"]() == "http://live"

    def test_dead_proxy_count_increments(self):
        tiers = [["http://a", "http://b", "http://c"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=2)
        assert cycler["dead_proxy_count"]() == 0
        cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 0  # only 1 failure, limit is 2
        cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 1
        cycler["record_failure"]("http://b")
        cycler["record_failure"]("http://b")
        assert cycler["dead_proxy_count"]() == 2

    def test_auto_escalates_when_all_proxies_in_tier_dead(self):
        """If all proxies in the active tier are dead, next_proxy auto-escalates."""
        tiers = [["http://t0a", "http://t0b"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=1)
        # Kill all proxies in tier 0
        cycler["record_failure"]("http://t0a")
        cycler["record_failure"]("http://t0b")
        # next_proxy should transparently escalate and return tier 1 proxy
        assert cycler["next_proxy"]() == "http://t1"

    def test_auto_escalates_updates_active_tier_index(self):
        """Auto-escalation via dead proxies bumps active_tier_index."""
        tiers = [["http://t0a", "http://t0b"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=1)
        cycler["record_failure"]("http://t0a")
        cycler["record_failure"]("http://t0b")
        cycler["next_proxy"]()  # triggers auto-escalation
        assert cycler["active_tier_index"]() == 1

    def test_returns_none_when_all_tiers_exhausted_by_dead_proxies(self):
        tiers = [["http://t0"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=1)
        cycler["record_failure"]("http://t0")
        cycler["record_failure"]("http://t1")
        assert cycler["next_proxy"]() is None

    def test_record_success_resets_per_proxy_counter(self):
        """Success resets the failure count so proxy is not marked dead."""
        tiers = [["http://a", "http://b"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=3)
        # Two failures — not dead yet
        cycler["record_failure"]("http://a")
        cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 0
        # Success resets the counter
        cycler["record_success"]("http://a")
        # Two more failures — still not dead (counter was reset)
        cycler["record_failure"]("http://a")
        cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 0
        # Third failure after reset — now dead
        cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 1

    def test_dead_proxy_stays_dead_after_success(self):
        """Once marked dead, a proxy is not revived by record_success."""
        tiers = [["http://a", "http://b"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=1)
        cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 1
        cycler["record_success"]("http://a")
        assert cycler["dead_proxy_count"]() == 1
        # http://a is still skipped
        for _ in range(6):
            assert cycler["next_proxy"]() == "http://b"

    def test_backward_compat_no_proxy_url(self):
        """Calling record_failure/record_success without proxy_url still works."""
        tiers = [["http://t0"], ["http://t1"]]
        cycler = make_tiered_cycler(tiers, threshold=2)
        cycler["record_failure"]()
        cycler["record_failure"]()  # escalates
        assert cycler["active_tier_index"]() == 1
        cycler["record_success"]()
        assert cycler["dead_proxy_count"]() == 0  # no per-proxy tracking happened

    def test_proxy_failure_limit_zero_disables_per_proxy_tracking(self):
        """proxy_failure_limit=0 disables per-proxy dead tracking entirely."""
        tiers = [["http://a", "http://b"]]
        cycler = make_tiered_cycler(tiers, threshold=10, proxy_failure_limit=0)
        for _ in range(100):
            cycler["record_failure"]("http://a")
        assert cycler["dead_proxy_count"]() == 0

    def test_thread_safety_with_per_proxy_tracking(self):
        """Concurrent record_failure(proxy_url) calls don't corrupt state."""
        import threading as _threading

        tiers = [["http://t0a", "http://t0b", "http://t0c"], ["http://t1a"]]
        cycler = make_tiered_cycler(tiers, threshold=50, proxy_failure_limit=5)
        errors = []
        lock = _threading.Lock()

        def worker():
            try:
                for _ in range(30):
                    p = cycler["next_proxy"]()
                    if p is not None:
                        cycler["record_failure"](p)
                        cycler["record_success"](p)
            except Exception as e:
                with lock:
                    errors.append(e)

        threads = [_threading.Thread(target=worker) for _ in range(10)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        assert errors == [], f"Thread safety errors: {errors}"
|
||||
|
||||
Reference in New Issue
Block a user