Compare commits
15 Commits
| SHA1 |
|---|
| f52bd22ff9 |
| f4d116592c |
| c45f5d747a |
| b5fae9d528 |
| d6bd5d927c |
| ba62214bbd |
| a79c1cec7b |
| 890fb0e693 |
| 260c9058b3 |
| 9201a4dca9 |
| 79ce3f2913 |
| d14990bb01 |
| 9ea4f09600 |
| c8b86569ff |
| 42c1309b20 |
@@ -6,8 +6,6 @@ DEBUG=ENC[AES256_GCM,data:ntBp/hw=,iv:906FN6bz3SHoEclG7MquCNUhHa9wDD2PyhxTDCVFUG
 ADMIN_EMAILS=ENC[AES256_GCM,data:W7kmtrgck47tGpiHy4bIoF7TZouqjNGPHK+zQoZvxT9iz1reuHbP6bXUfuMzsh0=,iv:GXkKRbComRXAVLzif8DV14IySjzRkAg/U9DUj4ytEjE=,tag:6iKYsgbhDgjDQbwZM6hSNg==,type:str]
 #ENC[AES256_GCM,data:tIhB0x4AbNjs,iv:fkmVB5Cfa11g4YVXGEXPPnGDznhoMm+J108L/ZRkCn8=,tag:y7tqZ7cQ64A3ArM/MmfTlw==,type:comment]
 DATABASE_PATH=ENC[AES256_GCM,data:Rzif9KAhrVn/F3U=,iv:VgXwn8b38/dFkiTYHDiKe660eWtGPdbeMPC4Xc2RPHk=,tag:OSlbuCeQHcVigj0zxnH+5Q==,type:str]
 DUCKDB_PATH=ENC[AES256_GCM,data:UWMI9RTAHBNgb9EOxnmKUZovyGedu/xz5/yoOFpd,iv:oWVAoDtboVAC+SCTf+b/mQ+zzCGSRTrf3fjt1femqng=,tag:B46K6jTM0iVWQvL1FJlbyg==,type:str]
 SERVING_DUCKDB_PATH=ENC[AES256_GCM,data:Y3bouhWcgp3d9v1KGuXuPZIFiIe/WKnVwEVs799T,iv:uTpVqvRYOhUKM2JNiFsX/YK/sfmajWI899vtmuWuozA=,tag:z8ASJTKzG6lSUBLuvzciwQ==,type:str]
 #ENC[AES256_GCM,data:BUei2Df8W9g=,iv:NpOYrAUKS8tRwkGrh9SfyCowHaLSCaB9WrUEleVKp+Q=,tag:W5mebu7kkxC/ZAY+7NDfcA==,type:comment]
 R2_ACCESS_KEY_ID=ENC[AES256_GCM,data:pd3cnES/P1/m7ydbes4QLD6lgLY1GVIVYman+oGGv4Y=,iv:UTlG2vehbaaI9Hs72Pgr84963EILdyZNDiV14n7HPtM=,tag:fXKpaVzlZLeiQeQaYS7uzg==,type:str]
 R2_SECRET_ACCESS_KEY=ENC[AES256_GCM,data:KAY/y2sWeEudSOoljyDCNVaxqTJf31qtOOlZCPdgg3jJRYhRzXDfVJQ1LhNc0lRu4aGuXW295NNTtfASNCYg0A==,iv:O3DkVlD1r25Nb/jYeg4/lyzDnzux2XyYBPgd6+OmeW0=,tag:E4Onwxo7Vbocd30o8gKwmA==,type:str]
@@ -35,14 +33,13 @@ UMAMI_WEBSITE_ID=ENC[AES256_GCM,data:ZM3JMBncWZ0qYyGwxSbePkDNs9/Tw4+LfAWssB/nazL
 UMAMI_API_URL=ENC[AES256_GCM,data:ErcfZNoB9ESrRqSG/laQMvRJjx2ieeSRlFQ=,iv:WAvEGFsVS2y07dnGZI2KcSpJQ93WjUqW93en1bS03kk=,tag:F1a/yZihEfH/PpUfZHGKCw==,type:str]
 UMAMI_API_TOKEN=ENC[AES256_GCM,data:N7xqH1we7SvCkPLxI7olyV3/bKaiDhUoVYfj/OgVja39+NyeZKeWwkpKy5Mg8Hs/KXBwDJgN8NHA5T4gJgvoZ4MC+n1FMMaAOcmZkTGH412Uu8SEiTlfj4ZLUC5v1+izzAPC6BgBngDEz5SOsH+sdh2Tb/eImIgGw/CxLjyRBk2LsCrKsG8KtYAJ71peH8FF8s+najkhYaZ7zUH2Kl7xcPtyBnMfF+0PfnUSaoEatucAXX0LEf+pCEPp80wbKb1sWafXmfAEXgRX+Lv+rjxSJxmr1xoJ5eG5loNs3gHUoj9WOwqwmws61pnhRtoIKi8sc0wu4D09m/RWuDm3dVqtWQ8AxuS/dmqx5VICVaBoDM6t9ePIpirVct/Hxge+68XKKKI0VaW6uWXECJn8T1vLyChNc0x5Z1mqi1E6tmcdvm6O/EBL/Qgw/MaHQk4UwgmEvRIBnqxxUcjYllS4CGADdcxmbEfNknMWFrCS/x0EIoI0dLzjst9xT/peUQk=,iv:OdJqyCreI9i8cYi+58cU8ZboCPWOPTCqo/iFO9zkh4M=,tag:8giR+gN/1pjWj+tbZOlilA==,type:str]
 #ENC[AES256_GCM,data:zx6ieYt6brZX6IrIgGkfGCqDlf0FOw==,iv:3dBgRYc9eI/Dhx109NUMh2yW2Fmqegg0n3rsjcbzJEw=,tag:4lbfJT/n1T53D0peeI4IhQ==,type:comment]
 LANDING_DIR=ENC[AES256_GCM,data:3YAGFB10q6g6ZLIHdDuvzMaD59+E,iv:S9NVxU/w+cwU1OPWjOEjnG8ocMdWrqR9VG4rFa4h4uA=,tag:0vq5Cn0Di1cUmbLrv1C1Uw==,type:str]
 ALERT_WEBHOOK_URL=ENC[AES256_GCM,data:ArkJg/hBzLN8P/Q+jmbmWOM2iQVLybBCaoCGMJgaYQM=,iv:zroifzAQ4rGn+QLF/SZUPeWmIOFkLWq8QVtVWUeiYOk=,tag:8oVeHuXStKxCLaP77TMxDA==,type:str]
 NTFY_TOKEN=ENC[AES256_GCM,data:63Y734rhyTCHt4hdw4S/LPOZ/eEktk6X7SMFsFidwps=,iv:3OJEXCN5sqGyOJwE6fOniBXXslT/rOMASOotE1s+quk=,tag:NTc3YJp6dtNRpLGxIDLO8Q==,type:str]
 sops_age__list_0__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBrUWNTNnlLMWgzU21RY3p4\nUjRaVnJzK3lBS2hUTVZCcXI0Y0Q1K2QrOERJCi9qeVc2UVlleWZldUpUckF3WWVM\nMVhWanpGdGlGdXhGV2FnQytQYnZCSncKLS0tIER6RlNqMDhXMlFObkhOVmtVOXZw\nSVRHZTVzYkl5aytSWmNEblhTOVJCOGcKjWhIRS+pjFCMNp52Nt5GyLMhG9Xich7O\n8AlIkVaNN96Q7bVa52norLUQNQOprIGwEu5JXdUFU5Y3ULnoCTQimQ==\n-----END AGE ENCRYPTED FILE-----\n
 sops_age__list_0__map_recipient=age1f5002gj4s78jju45jd28kuejtcfhn5cdujz885fl7z2p9ym68pnsgky87a
 sops_age__list_1__map_enc=-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBQTllrSWJnU2I4WitWYVY0\nL1RRdFZudzdoSU1Ycmorb3ZRS0p0YnNBR2gwCmQ1T0M0YlpuNFo4SzVMUC9iTlM0\ndHhpd0NOZldJcVBsU0d6M3BwVVJaWjQKLS0tICtVVUVTRm1QSFZCNncvb0RqdC8r\nTmpMM3NmMFBKMDN6QjlIdko3NmFubEkK94oIMrcOYcBy69NjWn6NyWqhvKcP/0Az\nbOuqR0tgSs5xb8s9UUFHRpegZ3uJhQz4VMvOBN8fYaQjO5+4X22D9A==\n-----END AGE ENCRYPTED FILE-----\n
 sops_age__list_1__map_recipient=age1frqzxxwumn0zfkl3mp647t3zgx7uudksevsqrnxvycfhkl84yvmqskdzq5
-sops_lastmodified=2026-02-27T12:28:08Z
-sops_mac=ENC[AES256_GCM,data:5olw6kL/IWhL3MtQZev0s58EuKaaWpiXpsEdPJEhGHvrjEVfqrI0gCaPdzn5fuCu2hXu0iP13zP3MbInhaWlpCUc8A0LZFjzvkvPPHYKgFrAgzImDa78OJlZDeesYu4co8JoX3FjyRBL4YxFKq7UNPRzmlNJBPTtWFPIe8EtUfQ=,iv:rApUbbZNDhII9pwtfRlPHGLWljVbarusv5PVz9B9AYs=,tag:r8FBVGbubqxUdrH2sAJRgg==,type:str]
+sops_lastmodified=2026-02-28T22:44:47Z
+sops_mac=ENC[AES256_GCM,data:V2w9VF12xKi8gcOHGSOT7WTaJdLboNhnOubQnKUIf8EhKr6TO/4YgBwNughTsyZlVdd8rxo/+jdisADixUvM7H4TpEq/1OW1Cx0ihGLajmw0JftS63TcWaZdEkI9soF7Z91yE84Msrm3gwgQxY/c5+5jyg9KWAhbrFqki0L+dpc=,iv:GcaNiVdbuEdib3BvK8NjnbQPmx8GoHCysgz5YqIp5Ik=,tag:TQKQlhO+Mm1UbQB7GVN7ZQ==,type:str]
 sops_unencrypted_suffix=_unencrypted
 sops_version=3.12.1
@@ -45,7 +45,7 @@ test:web:
   script:
     - uv sync --all-packages
    - cd web && uv run pytest tests/ -x -q
-    - cd web && uv run ruff check src/ tests/
+    - uv run ruff check .

# ── Tag (pull-based deploy) ───────────────────────────────────────────────────
# Creates v<N> tag after all tests pass. The on-server supervisor polls for new
@@ -9,7 +9,7 @@ RUN tailwindcss -i ./web/src/beanflows/static/css/input.css \
 
 
 # Build stage
-FROM python:3.12-slim AS build
+FROM python:3.13-slim AS build
 COPY --from=ghcr.io/astral-sh/uv:0.8 /uv /uvx /bin/
 WORKDIR /app
 ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy
@@ -20,7 +20,7 @@ RUN --mount=type=cache,target=/root/.cache/uv \
     uv sync --no-dev --frozen --package beanflows
 
 # Runtime stage
-FROM python:3.12-slim AS runtime
+FROM python:3.13-slim AS runtime
 ENV PATH="/app/.venv/bin:$PATH"
 RUN useradd -m -u 1000 appuser
 WORKDIR /app
@@ -31,4 +31,4 @@ USER appuser
 ENV PYTHONUNBUFFERED=1
 ENV DATABASE_PATH=/app/data/app.db
 EXPOSE 5000
-CMD ["hypercorn", "beanflows.app:app", "--bind", "0.0.0.0:5000", "--workers", "1"]
+CMD ["granian", "--interface", "asgi", "--host", "0.0.0.0", "--port", "5000", "--workers", "1", "beanflows.app:app"]
Makefile (8 changed lines)
@@ -2,7 +2,7 @@ TAILWIND_VERSION := v4.1.18
 TAILWIND := web/bin/tailwindcss
 SOPS_DOTENV := sops --input-type dotenv --output-type dotenv
 
-.PHONY: help dev css-build css-watch \
+.PHONY: help dev css-build css-watch install-hooks \
     secrets-decrypt-dev secrets-decrypt-prod \
     secrets-edit-dev secrets-edit-prod \
     secrets-encrypt-dev secrets-encrypt-prod \
@@ -13,6 +13,7 @@ help:
     @echo "  dev                   Start full dev environment (migrate, seed, app + worker + CSS watcher)"
     @echo "  css-build             Build + minify Tailwind CSS"
     @echo "  css-watch             Watch + rebuild Tailwind CSS"
+    @echo "  install-hooks         Install git pre-commit hook (run once after cloning)"
     @echo "  secrets-decrypt-dev   Decrypt .env.dev.sops → .env"
     @echo "  secrets-decrypt-prod  Decrypt .env.prod.sops → .env"
     @echo "  secrets-edit-dev      Edit .env.dev.sops in \$$EDITOR"
@@ -23,6 +24,11 @@ help:
 
 # ── Dev environment ───────────────────────────────────────────────────────────
 
+install-hooks:
+    cp scripts/hooks/pre-commit .git/hooks/pre-commit
+    chmod +x .git/hooks/pre-commit
+    @echo "✓ pre-commit hook installed"
+
 dev:
     @./web/scripts/dev_run.sh
@@ -33,10 +33,10 @@ services:
     env_file: ./.env
     environment:
       - DATABASE_PATH=/app/data/app.db
-      - SERVING_DUCKDB_PATH=/data/materia/analytics.duckdb
+      - SERVING_DUCKDB_PATH=/data/beanflows/analytics.duckdb
     volumes:
       - app-data:/app/data
-      - /data/materia/analytics.duckdb:/data/materia/analytics.duckdb:ro
+      - /data/beanflows:/data/beanflows:ro
     networks:
       - net
     healthcheck:
@@ -84,10 +84,10 @@ services:
     env_file: ./.env
     environment:
       - DATABASE_PATH=/app/data/app.db
-      - SERVING_DUCKDB_PATH=/data/materia/analytics.duckdb
+      - SERVING_DUCKDB_PATH=/data/beanflows/analytics.duckdb
     volumes:
       - app-data:/app/data
-      - /data/materia/analytics.duckdb:/data/materia/analytics.duckdb:ro
+      - /data/beanflows:/data/beanflows:ro
     networks:
       - net
     healthcheck:
@@ -15,8 +15,14 @@ import sys
 from pathlib import Path
 
 import yfinance as yf
-from extract_core import content_hash, end_run, landing_path, open_state_db, start_run
-from extract_core import write_bytes_atomic
+from extract_core import (
+    content_hash,
+    end_run,
+    landing_path,
+    open_state_db,
+    start_run,
+    write_bytes_atomic,
+)
 
 logging.basicConfig(
     level=logging.INFO,
@@ -1,4 +1,3 @@
-from .normalize import normalize_zipped_csv
 import logging
 import os
 import sys
@@ -7,8 +6,16 @@ from io import BytesIO
 from pathlib import Path
 
 import niquests
-from extract_core import end_run, landing_path, normalize_etag, open_state_db, start_run
-from extract_core import write_bytes_atomic
+from extract_core import (
+    end_run,
+    landing_path,
+    normalize_etag,
+    open_state_db,
+    start_run,
+    write_bytes_atomic,
+)
+
+from .normalize import normalize_zipped_csv
 
 logging.basicConfig(
     level=logging.INFO,
@@ -1,8 +1,7 @@
-import zipfile
-import gzip
-from io import BytesIO
-import pathlib
+import zipfile
+from io import BytesIO
 
 
 def normalize_zipped_csv(buffer: BytesIO)->BytesIO:
infra/backup/beanflows-backup.service (new file, 9 lines)
@@ -0,0 +1,9 @@
+[Unit]
+Description=Beanflows Landing Data Backup to R2
+After=network-online.target
+Wants=network-online.target
+
+[Service]
+Type=oneshot
+ExecStart=/usr/bin/rclone sync /data/beanflows/landing/ r2:backup/beanflows/landing/ --log-level INFO
+TimeoutStartSec=1800
@@ -1,5 +1,5 @@
 [Unit]
-Description=Materia Landing Data Backup Timer
+Description=Beanflows Landing Data Backup Timer
 
 [Timer]
 OnCalendar=*-*-* 00/6:00:00
@@ -1,9 +0,0 @@
-[Unit]
-Description=Materia Landing Data Backup to R2
-After=network-online.target
-Wants=network-online.target
-
-[Service]
-Type=oneshot
-ExecStart=/usr/bin/rclone sync /data/materia/landing/ r2:backup/materia/landing/ --log-level INFO
-TimeoutStartSec=1800
@@ -14,7 +14,7 @@
 set -euo pipefail
 
 SERVICE_USER="beanflows_service"
-REPO_DIR="/opt/materia"
+REPO_DIR="/opt/beanflows"
 GITEA_REPO="ssh://git@git.padelnomics.io:2222/deemanone/beanflows.git"
 UV="/home/${SERVICE_USER}/.local/bin/uv"
 
@@ -57,9 +57,9 @@ sudo -u "${SERVICE_USER}" bash -c "cd ${REPO_DIR} && ${UV} sync --all-packages"
 
 # ── Systemd supervisor service ────────────────────────────────────────────────
 
-cp "${REPO_DIR}/infra/supervisor/materia-supervisor.service" /etc/systemd/system/
+cp "${REPO_DIR}/infra/supervisor/beanflows-supervisor.service" /etc/systemd/system/
 systemctl daemon-reload
-systemctl enable --now materia-supervisor
+systemctl enable --now beanflows-supervisor
 
 # ── R2 backup timer (optional) ────────────────────────────────────────────────
 # Enabled only when R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY, and R2_ENDPOINT
@@ -96,7 +96,7 @@ EOF
 chmod 600 "${RCLONE_CONF}"
 
 UNITS_CHANGED=0
-for unit in materia-backup.service materia-backup.timer; do
+for unit in beanflows-backup.service beanflows-backup.timer; do
   if ! diff -q "${REPO_DIR}/infra/backup/${unit}" "/etc/systemd/system/${unit}" >/dev/null 2>&1; then
     cp "${REPO_DIR}/infra/backup/${unit}" /etc/systemd/system/
     UNITS_CHANGED=1
@@ -104,7 +104,7 @@ EOF
 done
 [ "${UNITS_CHANGED}" = "1" ] && systemctl daemon-reload
 
-systemctl enable --now materia-backup.timer
+systemctl enable --now beanflows-backup.timer
 echo "$(date '+%H:%M:%S') ==> R2 backup timer enabled."
 else
 echo "$(date '+%H:%M:%S') ==> R2_ACCESS_KEY_ID / R2_SECRET_ACCESS_KEY / R2_ENDPOINT not set — skipping backup timer."
@@ -113,8 +113,8 @@ fi
 echo ""
 echo "=== Bootstrap complete! ==="
 echo ""
-echo "Check status:     systemctl status materia-supervisor"
-echo "View logs:        journalctl -u materia-supervisor -f"
-echo "Workflow status:  sudo -u ${SERVICE_USER} ${UV} run -p ${REPO_DIR} python src/materia/supervisor.py status"
-echo "Backup timer:     systemctl list-timers materia-backup.timer"
+echo "Check status:     systemctl status beanflows-supervisor"
+echo "View logs:        journalctl -u beanflows-supervisor -f"
+echo "Workflow status:  sudo -u ${SERVICE_USER} ${UV} run -p ${REPO_DIR} python src/beanflows_pipeline/supervisor.py status"
+echo "Backup timer:     systemctl list-timers beanflows-backup.timer"
 echo "Tag:              $(sudo -u "${SERVICE_USER}" git -C "${REPO_DIR}" describe --tags --always)"
@@ -8,7 +8,7 @@
 #
 # What it does:
 #   1. Creates beanflows_service user (nologin) + adds to docker group
-#   2. Creates /opt/materia + /data/materia/landing with correct ownership
+#   2. Creates /opt/beanflows + /data/beanflows/landing with correct ownership
 #   3. Installs git, curl, age, sops, rclone, uv
 #   4. Generates ed25519 SSH deploy key for GitLab read access
 #   5. Generates age keypair at ~/.config/sops/age/keys.txt (as service user)
@@ -17,10 +17,10 @@
 set -euo pipefail
 
 SERVICE_USER="beanflows_service"
-APP_DIR="/opt/materia"
-DATA_DIR="/data/materia"
+APP_DIR="/opt/beanflows"
+DATA_DIR="/data/beanflows"
 SSH_DIR="/home/${SERVICE_USER}/.ssh"
-DEPLOY_KEY="${SSH_DIR}/materia_deploy"
+DEPLOY_KEY="${SSH_DIR}/beanflows_deploy"
 SOPS_AGE_DIR="/home/${SERVICE_USER}/.config/sops/age"
 
 ROTATE_KEYS="${ROTATE_KEYS:-}"
@@ -63,7 +63,7 @@ fi
 
 if [ ! -f "${DEPLOY_KEY}" ]; then
   sudo -u "${SERVICE_USER}" ssh-keygen -t ed25519 \
-    -f "${DEPLOY_KEY}" -N "" -C "materia-deploy"
+    -f "${DEPLOY_KEY}" -N "" -C "beanflows-deploy"
 fi
 
 if [ ! -f "${SSH_DIR}/config" ]; then
infra/supervisor/beanflows-supervisor.service (new file, 29 lines)
@@ -0,0 +1,29 @@
+[Unit]
+Description=Beanflows Supervisor - Pipeline Orchestration
+After=network-online.target
+Wants=network-online.target
+
+[Service]
+Type=simple
+User=beanflows_service
+WorkingDirectory=/opt/beanflows
+ExecStart=/bin/sh -c 'exec uv run python src/beanflows_pipeline/supervisor.py'
+Restart=always
+RestartSec=10
+EnvironmentFile=/opt/beanflows/.env
+Environment=PATH=/home/beanflows_service/.local/bin:/usr/local/bin:/usr/bin:/bin
+Environment=LANDING_DIR=/data/beanflows/landing
+Environment=DUCKDB_PATH=/data/beanflows/lakehouse.duckdb
+Environment=SERVING_DUCKDB_PATH=/data/beanflows/analytics.duckdb
+# Environment=SUPERVISOR_GIT_PULL=1  # Uncomment once deploy.sh + blue/green is set up
+
+# Resource limits
+LimitNOFILE=65536
+
+# Logging
+StandardOutput=journal
+StandardError=journal
+SyslogIdentifier=beanflows-supervisor
+
+[Install]
+WantedBy=multi-user.target
@@ -1,29 +0,0 @@
-[Unit]
-Description=Materia Supervisor - Pipeline Orchestration
-After=network-online.target
-Wants=network-online.target
-
-[Service]
-Type=simple
-User=beanflows_service
-WorkingDirectory=/opt/materia
-ExecStart=/bin/sh -c 'exec uv run python src/materia/supervisor.py'
-Restart=always
-RestartSec=10
-EnvironmentFile=/opt/materia/.env
-Environment=PATH=/home/beanflows_service/.local/bin:/usr/local/bin:/usr/bin:/bin
-Environment=LANDING_DIR=/data/materia/landing
-Environment=DUCKDB_PATH=/data/materia/lakehouse.duckdb
-Environment=SERVING_DUCKDB_PATH=/data/materia/analytics.duckdb
-Environment=SUPERVISOR_GIT_PULL=1
-
-# Resource limits
-LimitNOFILE=65536
-
-# Logging
-StandardOutput=journal
-StandardError=journal
-SyslogIdentifier=materia-supervisor
-
-[Install]
-WantedBy=multi-user.target
@@ -18,6 +18,11 @@ module = "cftc_cot.execute"
 entry = "extract_cot_dataset"
 schedule = "weekly"
 
+[extract_cot_combined]
+module = "cftc_cot.execute"
+entry = "extract_cot_combined"
+schedule = "weekly"
+
 [extract_prices]
 module = "coffee_prices.execute"
 entry = "extract_coffee_prices"
@@ -1,5 +1,5 @@
 [project]
-name = "materia"
+name = "beanflows-pipeline"
 version = "0.1.0"
 description = "Add your description here"
 readme = "readme.md"
@@ -20,7 +20,7 @@ dependencies = [
 ]
 
 [project.scripts]
-materia = "materia.cli:app"
+beanflows = "beanflows_pipeline.cli:app"
 
 
 [dependency-groups]
@@ -43,7 +43,7 @@ dev = [
 [tool.uv.sources]
 extract_core = {workspace = true }
 psdonline = {workspace = true }
-sqlmesh_materia = {workspace = true }
+sqlmesh_beanflows = {workspace = true }
 cftc_cot = {workspace = true }
 coffee_prices = {workspace = true }
 ice_stocks = {workspace = true }
@@ -90,11 +90,13 @@ exclude = [
     "notebooks",
 ]
 
+line-length = 100
+indent-width = 4
 
 target-version = "py313"
 
 [tool.ruff.lint]
 select = ["E", "F", "I", "UP"]
 
 ignore = [
     "E501",  # line too long (handled by formatter)
scripts/hooks/pre-commit (new file, 18 lines)
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# Pre-commit hook: ruff lint + auto-fix.
+# Install: make install-hooks
+set -euo pipefail
+
+REPO_ROOT="$(git rev-parse --show-toplevel)"
+RUFF="$REPO_ROOT/.venv/bin/ruff"
+
+if [[ ! -x "$RUFF" ]]; then
+  echo "pre-commit: ruff not found at $RUFF — run 'uv sync' first" >&2
+  exit 1
+fi
+
+echo "→ ruff check"
+"$RUFF" check --fix "$REPO_ROOT"
+
+# Re-stage any files ruff fixed so they land in the commit.
+git diff --name-only | xargs -r git add
src/beanflows_pipeline/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
+def main() -> None:
+    print("Hello from beanflows!")
@@ -5,7 +5,7 @@ from typing import Annotated
 import typer
 
 app = typer.Typer(
-    name="materia",
+    name="beanflows",
     help="BeanFlows.coffee data platform management CLI",
     no_args_is_help=True,
 )
@@ -26,7 +26,7 @@ def worker_list(
     provider: Annotated[str, typer.Option("--provider", "-p")] = "hetzner",
 ):
     """List all active worker instances."""
-    from materia.workers import list_workers
+    from beanflows_pipeline.workers import list_workers
 
     workers = list_workers(provider)
     if not workers:
@@ -47,7 +47,7 @@ def worker_create(
     location: Annotated[str | None, typer.Option("--location", "-l")] = None,
 ):
     """Create a new worker instance."""
-    from materia.workers import create_worker
+    from beanflows_pipeline.workers import create_worker
 
     typer.echo(f"Creating worker '{name}' ({server_type}) on {provider}...")
     worker = create_worker(name, server_type, provider, location)
@@ -61,7 +61,7 @@ def worker_destroy(
     force: Annotated[bool, typer.Option("--force", "-f")] = False,
 ):
     """Destroy a worker instance."""
-    from materia.workers import destroy_worker
+    from beanflows_pipeline.workers import destroy_worker
 
     if not force:
         confirm = typer.confirm(f"Destroy worker '{name}'?")
@@ -82,7 +82,7 @@ def pipeline_run(
     name: Annotated[str, typer.Argument(help="Pipeline name (extract, transform)")],
 ):
     """Run a pipeline locally."""
-    from materia.pipelines import run_pipeline
+    from beanflows_pipeline.pipelines import run_pipeline
 
     typer.echo(f"Running pipeline '{name}'...")
     result = run_pipeline(name)
@@ -98,7 +98,7 @@ def pipeline_run(
 @pipeline_app.command("list")
 def pipeline_list():
     """List available pipelines."""
-    from materia.pipelines import PIPELINES
+    from beanflows_pipeline.pipelines import PIPELINES
 
     typer.echo("Available pipelines:")
     for name, config in PIPELINES.items():
@@ -113,7 +113,7 @@ app.add_typer(secrets_app, name="secrets")
 @secrets_app.command("list")
 def secrets_list():
     """List available secrets (keys only)."""
-    from materia.secrets import list_secrets
+    from beanflows_pipeline.vault import list_secrets
 
     secrets = list_secrets()
     if not secrets:
@@ -130,7 +130,7 @@ def secrets_get(
     key: Annotated[str, typer.Argument(help="Secret key")],
 ):
     """Get a secret value."""
-    from materia.secrets import get_secret
+    from beanflows_pipeline.vault import get_secret
 
     value = get_secret(key)
     if value is None:
@@ -143,7 +143,7 @@ def secrets_get(
 @secrets_app.command("test")
 def secrets_test():
     """Test sops decryption (verifies sops is installed and age key is present)."""
-    from materia.secrets import test_connection
+    from beanflows_pipeline.vault import test_connection
 
     typer.echo("Testing SOPS decryption...")
     if test_connection():
src/beanflows_pipeline/export_serving.py (new file, 91 lines)
@@ -0,0 +1,91 @@
+"""
+Export serving tables from lakehouse.duckdb to serving.duckdb (atomic swap).
+
+Called by the supervisor after each SQLMesh transform run. Reads all tables in
+the 'serving' schema from the pipeline DB (DUCKDB_PATH / lakehouse.duckdb),
+writes them to a temp file, then atomically renames it to the serving DB path
+(SERVING_DUCKDB_PATH / serving.duckdb).
+
+The web app's _get_conn() detects the inode change on the next query and
+reopens the connection automatically — no restart or signal required.
+
+Why two files?
+    SQLMesh holds an exclusive write lock on DUCKDB_PATH during plan/run.
+    The web app needs read-only access at all times. Two separate files allow
+    both to operate concurrently: SQLMesh writes to the pipeline DB, the web
+    app reads from the serving DB, and this script swaps them atomically.
+
+The temp file is named _export.duckdb (not serving.duckdb.tmp) because DuckDB
+names its catalog after the filename stem. A file named serving.* would create
+a catalog named 'serving', which conflicts with the schema named 'serving'
+inside the file, making all queries ambiguous.
+
+Usage:
+    DUCKDB_PATH=lakehouse.duckdb SERVING_DUCKDB_PATH=analytics.duckdb \\
+        uv run beanflows pipeline run export_serving
+"""
+import logging
+import os
+import re
+
+import duckdb
+
+logger = logging.getLogger(__name__)
+
+
+def export_serving() -> None:
+    """Copy all serving.* tables from the pipeline DB to the serving DB atomically."""
+    pipeline_path = os.getenv("DUCKDB_PATH", "")
+    serving_path = os.getenv("SERVING_DUCKDB_PATH", "")
+    assert pipeline_path, "DUCKDB_PATH must be set"
+    assert serving_path, "SERVING_DUCKDB_PATH must be set"
+    assert os.path.exists(pipeline_path), f"Pipeline DB not found: {pipeline_path}"
+
+    # Temp path in the same directory as the serving DB so rename() is atomic
+    # (rename across filesystems is not atomic on Linux).
+    tmp_path = os.path.join(os.path.dirname(os.path.abspath(serving_path)), "_export.duckdb")
+
+    src = duckdb.connect(pipeline_path, read_only=True)
+    try:
+        # SQLMesh creates serving views that reference "local".sqlmesh__serving.*
+        # which fails when connecting directly. Resolve the physical table each
+        # view points to by parsing the view definition.
+        view_rows = src.execute(
+            "SELECT view_name, sql FROM duckdb_views()"
+            " WHERE schema_name = 'serving' ORDER BY view_name"
+        ).fetchall()
+        assert view_rows, f"No views found in serving schema of {pipeline_path}"
+
+        # Extract physical table reference from: CREATE VIEW ... AS SELECT * FROM "local".schema.table;
+        # Strip the "local". prefix to get schema.table
+        physical_tables: list[tuple[str, str]] = []  # (logical_name, physical_ref)
+        for view_name, view_sql in view_rows:
+            match = re.search(r'FROM\s+"local"\.(sqlmesh__serving\.\S+)', view_sql)
+            assert match, f"Cannot parse view definition for {view_name}: {view_sql[:200]}"
+            physical_tables.append((view_name, match.group(1)))
+
+        logger.info(
+            "Exporting %d serving tables: %s",
+            len(physical_tables),
+            [name for name, _ in physical_tables],
+        )
+
+        dst = duckdb.connect(tmp_path)
+        try:
+            dst.execute("CREATE SCHEMA IF NOT EXISTS serving")
+            for logical_name, physical_ref in physical_tables:
+                # Read via Arrow to avoid cross-connection catalog ambiguity.
+                arrow_data = src.sql(f"SELECT * FROM {physical_ref}").arrow()
+                dst.register("_src", arrow_data)
+                dst.execute(f"CREATE OR REPLACE TABLE serving.{logical_name} AS SELECT * FROM _src")
+                dst.unregister("_src")
+                row_count = dst.sql(f"SELECT count(*) FROM serving.{logical_name}").fetchone()[0]
+                logger.info(f"  serving.{logical_name}: {row_count:,} rows")
+        finally:
+            dst.close()
+    finally:
+        src.close()
+
+    # Atomic rename — on Linux, rename() is atomic when src and dst are on the same filesystem.
+    os.rename(tmp_path, serving_path)
+    logger.info(f"Serving DB atomically updated: {serving_path}")
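The docstring above refers to the web app's `_get_conn()`, which is not part of this changeset. A minimal sketch of what such inode-based reopen logic could look like; the function name, module-level cache, and structure here are assumptions, not code from this repo:

```python
# Hypothetical sketch of inode-change detection; _get_conn() itself is not in this diff.
import os

import duckdb

_conn: duckdb.DuckDBPyConnection | None = None
_inode: int | None = None


def _get_conn(path: str) -> duckdb.DuckDBPyConnection:
    """Return a read-only connection, reopening it when the file was swapped."""
    global _conn, _inode
    inode = os.stat(path).st_ino  # os.rename() replaces the file, so the inode changes
    if _conn is None or inode != _inode:
        if _conn is not None:
            _conn.close()
        _conn = duckdb.connect(path, read_only=True)
        _inode = inode
    return _conn
```

Checking `st_ino` once per query is cheap (a single `stat` call) and makes the atomic rename in export_serving fully transparent to readers.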
@@ -57,7 +57,7 @@ PIPELINES = {
         "timeout_seconds": 6600,
     },
     "transform": {
-        "command": ["uv", "run", "--package", "sqlmesh_materia", "sqlmesh", "-p", "transform/sqlmesh_materia", "plan", "prod", "--no-prompts", "--auto-apply"],
+        "command": ["uv", "run", "--package", "sqlmesh_beanflows", "sqlmesh", "-p", "transform/sqlmesh_beanflows", "plan", "prod", "--no-prompts", "--auto-apply"],
         "timeout_seconds": 3600,
     },
     # Copies serving.* tables from lakehouse.duckdb → serving.duckdb (atomic swap).
@@ -65,7 +65,7 @@ PIPELINES = {
     "export_serving": {
         "command": ["uv", "run", "python", "-c",
             "import logging; logging.basicConfig(level=logging.INFO); "
-            "from materia.export_serving import export_serving; export_serving()"],
+            "from beanflows_pipeline.export_serving import export_serving; export_serving()"],
         "timeout_seconds": 300,
     },
 }
@@ -15,7 +15,7 @@ class Instance:
 
 def get_provider(provider_name: str):
     if provider_name == "hetzner":
-        from materia.providers import hetzner
+        from beanflows_pipeline.providers import hetzner
 
         return hetzner
     else:
@@ -7,8 +7,8 @@ from hcloud import Client
 from hcloud.images import Image
 from hcloud.server_types import ServerType
 
-from materia.providers import Instance
-from materia.secrets import get_secret
+from beanflows_pipeline.providers import Instance
+from beanflows_pipeline.vault import get_secret
 
 
 def _get_client() -> Client:
@@ -11,10 +11,10 @@ the supervisor is effectively unkillable.
 
 Usage:
     # Run the supervisor loop (production)
-    LANDING_DIR=data/landing uv run python src/materia/supervisor.py
+    LANDING_DIR=data/landing uv run python src/beanflows/supervisor.py
 
     # Show workflow status
-    LANDING_DIR=data/landing uv run python src/materia/supervisor.py status
+    LANDING_DIR=data/landing uv run python src/beanflows/supervisor.py status
 """
 
 import importlib
@@ -38,7 +38,7 @@ from croniter import croniter
 TICK_INTERVAL_SECONDS = 60
 BACKOFF_SECONDS = 600  # 10 min on tick failure
 SUBPROCESS_TIMEOUT_SECONDS = 14400  # 4 hours max per subprocess
-REPO_DIR = Path(os.getenv("REPO_DIR", "/opt/materia"))
+REPO_DIR = Path(os.getenv("REPO_DIR", "/opt/beanflows"))
 LANDING_DIR = Path(os.getenv("LANDING_DIR", "data/landing"))
 DUCKDB_PATH = os.getenv("DUCKDB_PATH", "data/lakehouse.duckdb")
 SERVING_DUCKDB_PATH = os.getenv("SERVING_DUCKDB_PATH", "analytics.duckdb")
@@ -58,7 +58,7 @@ logging.basicConfig(
     datefmt="%Y-%m-%d %H:%M:%S",
     handlers=[logging.StreamHandler(sys.stdout)],
 )
-logger = logging.getLogger("materia.supervisor")
+logger = logging.getLogger("beanflows_pipeline.supervisor")
 
 
 # ---------------------------------------------------------------------------
@@ -181,9 +181,9 @@ def run_workflow(conn, workflow: dict) -> None:
         entry_fn = getattr(module, entry_name)
         entry_fn()
         logger.info("Workflow %s completed successfully", workflow["name"])
-    except Exception:
+    except Exception as exc:
         logger.exception("Workflow %s failed", workflow["name"])
-        send_alert(f"Workflow '{workflow['name']}' failed")
+        send_alert(f"[extract] {type(exc).__name__}: {str(exc)[:100]}")
         raise
 
 
@@ -222,8 +222,8 @@ def run_due_workflows(conn, workflows: list[dict]) -> bool:
 # Transform + Export + Deploy
 # ---------------------------------------------------------------------------
 
-def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> bool:
-    """Run a shell command. Returns True on success."""
+def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> tuple[bool, str]:
+    """Run a shell command. Returns (success, error_snippet)."""
     logger.info("Shell: %s", cmd)
     result = subprocess.run(
         cmd, shell=True, capture_output=True, text=True, timeout=timeout_seconds
@@ -233,45 +233,82 @@ def run_shell(cmd: str, timeout_seconds: int = SUBPROCESS_TIMEOUT_SECONDS) -> bo
             "Shell failed (rc=%d): %s\nstdout: %s\nstderr: %s",
             result.returncode, cmd, result.stdout[-500:], result.stderr[-500:],
         )
-        return False
-    return True
+        raw = (result.stderr or result.stdout).strip()
+        snippet = next((ln.strip() for ln in raw.splitlines() if ln.strip()), raw)[:120]
+        return False, snippet
+    return True, ""
 
 
 def run_transform() -> None:
     """Run SQLMesh — evaluates model staleness internally."""
     logger.info("Running SQLMesh transform")
-    ok = run_shell("uv run sqlmesh -p transform/sqlmesh_materia plan prod --auto-apply")
+    ok, err = run_shell("uv run sqlmesh -p transform/sqlmesh_beanflows plan prod --auto-apply")
     if not ok:
-        send_alert("SQLMesh transform failed")
+        send_alert(f"[transform] {err}")
 
 
 def run_export() -> None:
     """Export serving tables to analytics.duckdb."""
     logger.info("Exporting serving tables")
-    ok = run_shell(
+    ok, err = run_shell(
         f"DUCKDB_PATH={DUCKDB_PATH} SERVING_DUCKDB_PATH={SERVING_DUCKDB_PATH} "
-        f"uv run materia pipeline run export_serving"
+        f"uv run beanflows pipeline run export_serving"
     )
     if not ok:
-        send_alert("Serving export failed")
+        send_alert(f"[export] {err}")
 
 
+_last_seen_head: str | None = None
+
+
 def web_code_changed() -> bool:
-    """Check if web app code changed since last deploy."""
+    """True on the first tick after a commit that changed web app code.
+
+    Compares the current HEAD to the HEAD from the previous tick. On first call
+    after process start, falls back to HEAD~1 so the just-deployed commit is
+    evaluated exactly once. Records HEAD before returning so the same commit
+    never triggers twice.
+    """
+    global _last_seen_head
     result = subprocess.run(
-        ["git", "diff", "--name-only", "HEAD~1", "HEAD", "--", "web/", "Dockerfile"],
+        ["git", "rev-parse", "HEAD"], capture_output=True, text=True, timeout=10,
     )
     if result.returncode != 0:
         return False
+    current_head = result.stdout.strip()
+
+    if _last_seen_head is None:
+        base_result = subprocess.run(
+            ["git", "rev-parse", "HEAD~1"], capture_output=True, text=True, timeout=10,
+        )
+        base = base_result.stdout.strip() if base_result.returncode == 0 else current_head
+    else:
+        base = _last_seen_head
+
+    _last_seen_head = current_head  # advance now — won't fire again for this HEAD
+
+    if base == current_head:
+        return False
+
+    diff = subprocess.run(
+        ["git", "diff", "--name-only", base, current_head, "--", "web/", "Dockerfile"],
+        capture_output=True, text=True, timeout=30,
+    )
-    return bool(result.stdout.strip())
+    return bool(diff.stdout.strip())
 
 
 def current_deployed_tag() -> str | None:
-    """Return the tag currently checked out, or None if not on a tag."""
+    """Return the highest-version tag pointing at HEAD, or None.
+
+    Uses --points-at HEAD so multiple tags on the same commit (e.g. a CI
+    integer tag and a date-based tag) are handled correctly.
+    """
     result = subprocess.run(
-        ["git", "describe", "--tags", "--exact-match", "HEAD"],
+        ["git", "tag", "--list", "--sort=-version:refname", "--points-at", "HEAD", "v*"],
         capture_output=True, text=True, timeout=10,
    )
-    return result.stdout.strip() or None
+    tags = result.stdout.strip().splitlines()
+    return tags[0] if tags else None
 
 
 def latest_remote_tag() -> str | None:
@@ -353,11 +390,11 @@ def tick() -> None:
         # Deploy web app if code changed
         if os.getenv("SUPERVISOR_GIT_PULL") and web_code_changed():
             logger.info("Web code changed — deploying")
-            ok = run_shell("./deploy.sh")
+            ok, err = run_shell("./deploy.sh")
             if ok:
-                send_alert("Deploy succeeded")
+                send_alert("[deploy] ok")
             else:
-                send_alert("Deploy FAILED — check journalctl -u materia-supervisor")
+                send_alert(f"[deploy] failed: {err}")
     finally:
         conn.close()
 
@@ -374,9 +411,9 @@ def supervisor_loop() -> None:
         except KeyboardInterrupt:
             logger.info("Supervisor stopped (KeyboardInterrupt)")
             break
-        except Exception:
+        except Exception as exc:
             logger.exception("Supervisor tick failed — backing off %ds", BACKOFF_SECONDS)
-            send_alert("Supervisor tick failed")
+            send_alert(f"[supervisor] {type(exc).__name__}: {str(exc)[:100]}")
             time.sleep(BACKOFF_SECONDS)
         else:
             time.sleep(TICK_INTERVAL_SECONDS)
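`send_alert()` is called throughout the supervisor but is not shown in this diff. Given the ALERT_WEBHOOK_URL and NTFY_TOKEN entries in the encrypted .env, a helper along these lines would fit the call sites above; this is a sketch under those assumptions, not the repo's implementation:

```python
# Hypothetical sketch; send_alert() is not part of this changeset.
import os

import niquests  # requests-compatible HTTP client, already a workspace dependency


def send_alert(message: str) -> None:
    """Post a short alert to the configured webhook; never raise."""
    url = os.getenv("ALERT_WEBHOOK_URL")
    if not url:
        return
    try:
        # ntfy-style delivery: raw message body, optional bearer token.
        niquests.post(
            url,
            data=message.encode(),
            headers={"Authorization": f"Bearer {os.getenv('NTFY_TOKEN', '')}"},
            timeout=10,
        )
    except Exception:
        pass  # alerting failures must never crash the supervisor loop
```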
@@ -1,7 +1,7 @@
 """Worker instance management."""
 
-from materia.providers import Instance, get_provider
-from materia.secrets import get_secret
+from beanflows_pipeline.providers import Instance, get_provider
+from beanflows_pipeline.vault import get_secret
 
 DEFAULT_PROVIDER = "hetzner"
@@ -1,2 +0,0 @@
-def main() -> None:
-    print("Hello from materia!")
@@ -1,64 +0,0 @@
-"""
-Export serving tables from lakehouse.duckdb to serving.duckdb (atomic swap).
-
-Called by the supervisor after each SQLMesh transform run. Reads all tables in
-the 'serving' schema from the pipeline DB (DUCKDB_PATH / lakehouse.duckdb),
-writes them to a temp file, then atomically renames it to the serving DB path
-(SERVING_DUCKDB_PATH / serving.duckdb).
-
-The web app's _get_conn() detects the inode change on the next query and
-reopens the connection automatically — no restart or signal required.
-
-Usage:
-    DUCKDB_PATH=lakehouse.duckdb SERVING_DUCKDB_PATH=serving.duckdb \
-        uv run materia pipeline run export_serving
-"""
-import logging
-import os
-
-import duckdb
-
-logger = logging.getLogger(__name__)
-
-
-def export_serving() -> None:
-    """Copy all serving.* tables from the pipeline DB to the serving DB atomically."""
-    pipeline_path = os.getenv("DUCKDB_PATH", "")
-    serving_path = os.getenv("SERVING_DUCKDB_PATH", "")
-    assert pipeline_path, "DUCKDB_PATH must be set"
-    assert serving_path, "SERVING_DUCKDB_PATH must be set"
-    assert os.path.exists(pipeline_path), f"Pipeline DB not found: {pipeline_path}"
-
-    # Temp path must not start with "serving" — DuckDB names the catalog after
-    # the filename stem, so "serving.duckdb.tmp" → catalog "serving", which
-    # clashes with the schema we create inside it.
-    tmp_path = os.path.join(os.path.dirname(os.path.abspath(serving_path)), "_export.duckdb")
-
-    src = duckdb.connect(pipeline_path, read_only=True)
-    try:
-        tables = src.sql(
-            "SELECT table_name FROM information_schema.tables"
-            " WHERE table_schema = 'serving' ORDER BY table_name"
-        ).fetchall()
-        assert tables, f"No tables found in serving schema of {pipeline_path}"
-        logger.info(f"Exporting {len(tables)} serving tables: {[t[0] for t in tables]}")
-
-        dst = duckdb.connect(tmp_path)
-        try:
-            dst.execute("CREATE SCHEMA IF NOT EXISTS serving")
-            for (table,) in tables:
-                # Read via Arrow so there is no cross-connection catalog ambiguity.
-                arrow_data = src.sql(f"SELECT * FROM serving.{table}").arrow()
-                dst.register("_src", arrow_data)
-                dst.execute(f"CREATE OR REPLACE TABLE serving.{table} AS SELECT * FROM _src")
-                dst.unregister("_src")
-                row_count = dst.sql(f"SELECT count(*) FROM serving.{table}").fetchone()[0]
-                logger.info(f"  serving.{table}: {row_count:,} rows")
-        finally:
-            dst.close()
-    finally:
-        src.close()
-
-    # Atomic rename — on Linux, rename() is atomic when src and dst are on the same filesystem.
-    os.rename(tmp_path, serving_path)
-    logger.info(f"Serving DB atomically updated: {serving_path}")
@@ -7,7 +7,6 @@ from unittest.mock import MagicMock
 
 from cftc_cot.normalize import find_csv_inner_filename, normalize_zipped_csv
 
-
 # =============================================================================
 # normalize.py
 # =============================================================================
@@ -6,7 +6,6 @@ from unittest.mock import MagicMock
 
 import pytest
 import xlwt  # noqa: F401 — needed to create XLS fixtures; skip tests if missing
 
 from ice_stocks.ice_api import fetch_report_listings, find_latest_report
 from ice_stocks.xls_parse import OLE2_MAGIC, detect_file_format, xls_to_rows
-
 
@@ -1,5 +1,5 @@
 [project]
-name = "sqlmesh_materia"
+name = "sqlmesh_beanflows"
 version = "0.1.0"
 description = "Add your description here"
 authors = [
@@ -16,4 +16,4 @@ requires = ["hatchling"]
 build-backend = "hatchling.build"
 
 [tool.hatch.build.targets.wheel]
-packages = ["sqlmesh_materia"]
+packages = ["sqlmesh_beanflows"]
uv.lock (generated, 177 changed lines)
@@ -9,14 +9,14 @@ resolution-markers = [
|
||||
[manifest]
|
||||
members = [
|
||||
"beanflows",
|
||||
"beanflows-pipeline",
|
||||
"cftc-cot",
|
||||
"coffee-prices",
|
||||
"extract-core",
|
||||
"ice-stocks",
|
||||
"materia",
|
||||
"openmeteo",
|
||||
"psdonline",
|
||||
"sqlmesh-materia",
|
||||
"sqlmesh-beanflows",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -204,8 +204,8 @@ source = { editable = "web" }
|
||||
dependencies = [
|
||||
{ name = "aiosqlite" },
|
||||
{ name = "duckdb" },
|
||||
{ name = "granian" },
|
||||
{ name = "httpx" },
|
||||
{ name = "hypercorn" },
|
||||
{ name = "itsdangerous" },
|
||||
{ name = "jinja2" },
|
||||
{ name = "paddle-python-sdk" },
|
||||
@@ -227,8 +227,8 @@ dev = [
|
||||
requires-dist = [
|
||||
{ name = "aiosqlite", specifier = ">=0.19.0" },
|
||||
{ name = "duckdb", specifier = ">=1.0.0" },
|
||||
{ name = "granian", specifier = ">=2.7.2" },
|
||||
{ name = "httpx", specifier = ">=0.27.0" },
|
||||
{ name = "hypercorn", specifier = ">=0.17.0" },
|
||||
{ name = "itsdangerous", specifier = ">=2.1.0" },
|
||||
{ name = "jinja2", specifier = ">=3.1.0" },
|
||||
{ name = "paddle-python-sdk", specifier = ">=1.13.0" },
|
||||
@@ -246,6 +246,63 @@ dev = [
|
||||
{ name = "ruff", specifier = ">=0.3.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "beanflows-pipeline"
|
||||
version = "0.1.0"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "croniter" },
|
||||
{ name = "hcloud" },
|
||||
{ name = "msgspec" },
|
||||
{ name = "niquests" },
|
||||
{ name = "prefect" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "typer" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "pre-commit" },
|
||||
{ name = "pulumi" },
|
||||
{ name = "pulumi-cloudflare" },
|
||||
{ name = "pulumi-hcloud" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-cov" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "ruff" },
|
||||
{ name = "xlwt" },
|
||||
]
|
||||
exploration = [
|
||||
{ name = "ipykernel" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "croniter", specifier = ">=6.0.0" },
|
||||
{ name = "hcloud", specifier = ">=2.8.0" },
|
||||
{ name = "msgspec", specifier = ">=0.19" },
|
||||
{ name = "niquests", specifier = ">=3.15.2" },
|
||||
{ name = "prefect", specifier = ">=3.6.15" },
|
||||
{ name = "python-dotenv", specifier = ">=1.1.0" },
|
||||
{ name = "pyyaml", specifier = ">=6.0.2" },
|
||||
{ name = "typer", specifier = ">=0.15.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "pre-commit", specifier = ">=4.1.0" },
|
||||
{ name = "pulumi", specifier = ">=3.202.0" },
|
||||
{ name = "pulumi-cloudflare", specifier = ">=6.10.0" },
|
||||
{ name = "pulumi-hcloud", specifier = ">=1.25.0" },
|
||||
{ name = "pytest", specifier = ">=8.4.2" },
|
||||
{ name = "pytest-cov", specifier = ">=7.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=6.0.2" },
|
||||
{ name = "ruff", specifier = ">=0.9.9" },
|
||||
{ name = "xlwt", specifier = ">=1.3.0" },
|
||||
]
|
||||
exploration = [{ name = "ipykernel", specifier = ">=6.29.5" }]
|
||||
|
||||
[[package]]
|
||||
name = "beartype"
|
||||
version = "0.22.9"
|
||||
@@ -838,6 +895,57 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "granian"
|
||||
version = "2.7.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/19/d4ea523715ba8dd2ed295932cc3dda6bb197060f78aada6e886ff08587b2/granian-2.7.2.tar.gz", hash = "sha256:cdae2f3a26fa998d41fefad58f1d1c84a0b035a6cc9377addd81b51ba82f927f", size = 128969, upload-time = "2026-02-24T23:04:23.314Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/bc/cf0bc29f583096a842cf0f26ae2fe40c72ed5286d4548be99ecfcdbb17e2/granian-2.7.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:76b840ff13dde8838fd33cd096f2e7cadf2c21a499a67f695f53de57deab6ff8", size = 6440868, upload-time = "2026-02-24T23:02:53.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/0d/bae1dcd2182ba5d9a5df33eb50b56dc5bbe67e31033d822e079aa8c1ff30/granian-2.7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:00ccc8d7284bc7360f310179d0b4d17e5ca3077bbe24427e9e9310df397e3831", size = 6097336, upload-time = "2026-02-24T23:02:55.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/7d/3e0a7f32b0ad5faa1d847c51191391552fa239821c95fc7c022688985df2/granian-2.7.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:675987c1b321dc8af593db8639e00c25277449b32e8c1b2ddd46b35f28d9fac4", size = 7098742, upload-time = "2026-02-24T23:02:57.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/41/3b44386d636ac6467f0f13f45474c71fc3b90a4f0ba8b536de91b2845a09/granian-2.7.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:681c6fbe3354aaa6251e6191ec89f5174ac3b9fbc4b4db606fea456d01969fcb", size = 6430667, upload-time = "2026-02-24T23:02:59.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/70/7b24e187aed3fb7ac2b29d2480a045559a509ef9fec54cffb8694a2d94af/granian-2.7.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5c9ae65af5e572dca27d8ca0da4c5180b08473ac47e6f5329699e9455a5cc3", size = 6948424, upload-time = "2026-02-24T23:03:01.406Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/4c/cb74c367f9efb874f2c8433fe9bf3e824f05cf719f2251d40e29e07f08c0/granian-2.7.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e37fab2be919ceb195db00d7f49ec220444b1ecaa07c03f7c1c874cacff9de83", size = 7000407, upload-time = "2026-02-24T23:03:03.214Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/98/dfed3966ed7fbd3aae56e123598f90dc206484092b8373d0a71e2d8b82a8/granian-2.7.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:8ec167ab30f5396b5caaff16820a39f4e91986d2fe5bdc02992a03c2b2b2b313", size = 7121626, upload-time = "2026-02-24T23:03:05.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/82/acec732a345cd03b2f6e48ac04b66b7b8b61f5c50eb08d7421fc8c56591a/granian-2.7.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:63f426d793f2116d23be265dd826bec1e623680baf94cc270fe08923113a86ba", size = 7253447, upload-time = "2026-02-24T23:03:06.986Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/2b/64779e69b08c1ff1bfc09a4ede904ab761ff63f936c275710886057c52f7/granian-2.7.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1617cbb4efe3112f07fb6762cf81d2d9fe4bdb78971d1fd0a310f8b132f6a51e", size = 7053005, upload-time = "2026-02-24T23:03:09.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/c9/83e546d5f6b0447a4b9ee48ce15c29e43bb3f6b5e1040d33ac61fc9e3b6f/granian-2.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:7a4bd347694ace7a48cd784b911f2d519c2a22154e0d1ed59f5b4864914a8cfe", size = 4145886, upload-time = "2026-02-24T23:03:10.829Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/49/9eb88875d709db7e7844e1c681546448dab5ff5651cd1c1d80ac4b1de4e3/granian-2.7.2-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:016c5857c8baedeab7eb065f98417f5ea26bb72b0f7e0544fe76071efc5ab255", size = 6401748, upload-time = "2026-02-24T23:03:12.802Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/80/85726ad9999ed89cb6a32f7f57eb50ce7261459d9c30c3b194ae4c5aa2c5/granian-2.7.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dcbe01fa141adf3f90964e86a959e250754aa7c6dad8fa7a855e6fd382de4c13", size = 6101265, upload-time = "2026-02-24T23:03:14.435Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/82/0df56a42b9f4c327d0e0b052f43369127e1b565b9e66bf2c9488f1c8d759/granian-2.7.2-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:283ba23817a685784b66f45423d2f25715fdc076c8ffb43c49a807ee56a0ffc0", size = 6249488, upload-time = "2026-02-24T23:03:16.387Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/cc/d83a351560a3d6377672636129c52f06f8393f5831c5ee0f06f274883ea6/granian-2.7.2-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3258419c741897273ce155568b5a9cbacb7700a00516e87119a90f7d520d6783", size = 7104734, upload-time = "2026-02-24T23:03:17.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/d1/539907ee96d0ee2bcceabb4a6a9643b75378d6dfea09b7a9e4fd22cdf977/granian-2.7.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a196125c4837491c139c9cc83541b48c408c92b9cfbbf004fd28717f9c02ad21", size = 6785504, upload-time = "2026-02-24T23:03:19.763Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/bf/4b6f45882f8341e7c6cb824d693deb94c306be6525b483c76fb373d1e749/granian-2.7.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:746555ac8a2dcd9257bfe7ad58f1d7a60892bc4613df6a7d8f736692b3bb3b88", size = 6902790, upload-time = "2026-02-24T23:03:22.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/b8/832970d2d4b144b87be39f5b9dfd31fdb17f298dc238a0b2100c95002cf8/granian-2.7.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:5ac1843c6084933a54a07d9dcae643365f1d83aaff3fd4f2676ea301185e4e8b", size = 7082682, upload-time = "2026-02-24T23:03:23.875Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/bc/1521dbf026d1c9d2465cd54e016efd8ff6e1e72eff521071dab20dd61c44/granian-2.7.2-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:3612eb6a3f4351dd2c4df246ed0d21056c0556a6b1ed772dd865310aa55a9ba9", size = 7264742, upload-time = "2026-02-24T23:03:25.562Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/ae/00884ab77045a2f54db90932f9d1ca522201e2a6b2cf2a9b38840db0fd54/granian-2.7.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:34708b145e31b4538e0556704a07454a76d6776c55c5bc3a1335e80ef6b3bae3", size = 7062571, upload-time = "2026-02-24T23:03:27.278Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/0e/4321e361bccb9681e1045c75e783476de5be7aa47cf05066907530772eba/granian-2.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:841c48608e55daa2fa434392397cc24175abd48bc5bcefa1e4f74b7243e36c72", size = 4098734, upload-time = "2026-02-24T23:03:28.973Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/4a/8ce622f4f7d58e035d121b9957dd5a8929028dc99cfc5d2bf7f2aa28912c/granian-2.7.2-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:592806c28c491f9c1d1501bac706ecf5e72b73969f20f912678d53308786d658", size = 6442041, upload-time = "2026-02-24T23:03:30.986Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/62/7d36ed38a40a68c2856b6d2a6fedd40833e7f82eb90ba0d03f2d69ffadf5/granian-2.7.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9dcde3968b921654bde999468e97d03031f28668bc1fc145c81d8bedb0fb2a4", size = 6100793, upload-time = "2026-02-24T23:03:32.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/c5/17fea68f4cb280c217cbd65534664722c9c9b0138c2754e20c235d70b5f4/granian-2.7.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d4d78408283ec51f0fb00557856b4593947ad5b48287c04e1c22764a0ac28a5", size = 7119810, upload-time = "2026-02-24T23:03:34.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/76/35e240d107e0f158662652fd61191de4fb0c2c080e3786ca8f16c71547b7/granian-2.7.2-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d28b078e8087f794b83822055f95caf93d83b23f47f4efcd5e2f0f7a5d8a81", size = 6450789, upload-time = "2026-02-24T23:03:36.81Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/55/a6d08cfecc808149a910e51c57883ab26fad69d922dc2e76fb2d87469e2d/granian-2.7.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ff7a93123ab339ba6cad51cc7141f8880ec47b152ce2491595bb08edda20106", size = 6902672, upload-time = "2026-02-24T23:03:38.655Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/2e/c86d95f324248fcc5dcaf034c9f688b32f7a488f0b2a4a25e6673776107f/granian-2.7.2-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:a52effb9889f0944f0353afd6ce5a9d9aa83826d44bbf3c8013e978a3d6ef7b7", size = 6964399, upload-time = "2026-02-24T23:03:40.459Z" },
{ url = "https://files.pythonhosted.org/packages/37/4b/44fde33fe10245a3fba76bf843c387fad2d548244345115b9d87e1c40994/granian-2.7.2-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:76c987c3ca78bf7666ab053c3ed7e3af405af91b2e5ce2f1cf92634c1581e238", size = 7034929, upload-time = "2026-02-24T23:03:42.149Z" },
{ url = "https://files.pythonhosted.org/packages/90/76/38d205cb527046241a9ee4f51048bf44101c626ad4d2af16dd9d14dc1db6/granian-2.7.2-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:6590f8092c2bb6614e561ba771f084cbf72ecbc38dbf9849762ac38718085c29", size = 7259609, upload-time = "2026-02-24T23:03:43.852Z" },
{ url = "https://files.pythonhosted.org/packages/00/37/04245c7259e65f1083ce193875c6c44da4c98604d3b00a264a74dd4f042b/granian-2.7.2-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7c1ce9b0c9446b680e9545e7fc95a75f0c53a25dedcf924b1750c3e5ba5bf908", size = 7073161, upload-time = "2026-02-24T23:03:45.655Z" },
{ url = "https://files.pythonhosted.org/packages/23/e4/28097a852d8f93f8e3be2014a81f03aa914b8a2c12ca761fac5ae1344b8b/granian-2.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:a69cafb6518c630c84a9285674d45ea6f7342a6279dc25c6bd933b6fad5c55ab", size = 4121462, upload-time = "2026-02-24T23:03:47.322Z" },
{ url = "https://files.pythonhosted.org/packages/cc/07/0e56fb4f178e14b4c1fa1f6f00586ca81761ccbe2d8803f2c12b6b17a7d6/granian-2.7.2-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:a698d9b662d5648c8ae3dc01ad01688e1a8afc3525e431e7cddb841c53e5e291", size = 6415279, upload-time = "2026-02-24T23:03:48.932Z" },
{ url = "https://files.pythonhosted.org/packages/27/bc/3e69305bf34806cd852f4683deec844a2cb9a4d8888d7f172b507f6080a8/granian-2.7.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:17516095b520b3c039ddbe41a6beb2c59d554b668cc229d36d82c93154a799af", size = 6090528, upload-time = "2026-02-24T23:03:50.52Z" },
{ url = "https://files.pythonhosted.org/packages/ec/10/7d58a922b44417a6207c0a3230b0841cd7385a36fc518ac15fed16ebf6f7/granian-2.7.2-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96b0fd9eac60f939b3cbe44c8f32a42fdb7c1a1a9e07ca89e7795cdc7a606beb", size = 6252291, upload-time = "2026-02-24T23:03:52.248Z" },
{ url = "https://files.pythonhosted.org/packages/54/56/65776c6d759dcef9cce15bc11bdea2c64fe668088faf35d87916bd88f595/granian-2.7.2-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e50fb13e053384b8bd3823d4967606c6fd89f2b0d20e64de3ae212b85ffdfed2", size = 7106748, upload-time = "2026-02-24T23:03:53.994Z" },
{ url = "https://files.pythonhosted.org/packages/81/ee/d9ed836316607401f158ac264a3f770469d1b1edbf119402777a9eff1833/granian-2.7.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bb1ef13125bc05ab2e18869ed311beaeb085a4c4c195d55d0865f5753a4c0b4", size = 6778883, upload-time = "2026-02-24T23:03:55.574Z" },
{ url = "https://files.pythonhosted.org/packages/a1/46/eabab80e07a14527c336dec6d902329399f3ba2b82dc94b6435651021359/granian-2.7.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b1c77189335070c6ba6b8d158518fde4c50f892753620f0b22a7552ad4347143", size = 6903426, upload-time = "2026-02-24T23:03:57.296Z" },
{ url = "https://files.pythonhosted.org/packages/24/8a/8ce186826066f6d453316229383a5be3b0b8a4130146c21f321ee64fe2cb/granian-2.7.2-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:1777166c3c853eed4440adb3cbbf34bba2b77d595bfc143a5826904a80b22f34", size = 7083877, upload-time = "2026-02-24T23:03:59.425Z" },
{ url = "https://files.pythonhosted.org/packages/cf/eb/91ed4646ce1c920ad39db0bcddb6f4755e1823002b14fb026104e3eb8bce/granian-2.7.2-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:0ffac19208ae548f3647c849579b803beaed2b50dfb0f3790ad26daac0033484", size = 7267282, upload-time = "2026-02-24T23:04:01.218Z" },
{ url = "https://files.pythonhosted.org/packages/49/2f/58cba479254530ab09132e150e4ab55362f6e875d9e82b6790477843e0aa/granian-2.7.2-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:82f34e78c1297bf5a1b6a5097e30428db98b59fce60a7387977b794855c0c3bc", size = 7054941, upload-time = "2026-02-24T23:04:03.211Z" },
{ url = "https://files.pythonhosted.org/packages/29/b3/fd13123ac936a4f79f1ba20ad67328a8d09d586262b8f28cc1cfaa555213/granian-2.7.2-cp314-cp314t-win_amd64.whl", hash = "sha256:e8b87d7ada696eec7e9023974665c83cec978cb83c205eae8fe377de20622f25", size = 4101983, upload-time = "2026-02-24T23:04:04.792Z" },
]

[[package]]
name = "graphviz"
version = "0.21"

@@ -1561,63 +1669,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
]

[[package]]
name = "materia"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "croniter" },
{ name = "hcloud" },
{ name = "msgspec" },
{ name = "niquests" },
{ name = "prefect" },
{ name = "python-dotenv" },
{ name = "pyyaml" },
{ name = "typer" },
]

[package.dev-dependencies]
dev = [
{ name = "pre-commit" },
{ name = "pulumi" },
{ name = "pulumi-cloudflare" },
{ name = "pulumi-hcloud" },
{ name = "pytest" },
{ name = "pytest-cov" },
{ name = "pyyaml" },
{ name = "ruff" },
{ name = "xlwt" },
]
exploration = [
{ name = "ipykernel" },
]

[package.metadata]
requires-dist = [
{ name = "croniter", specifier = ">=6.0.0" },
{ name = "hcloud", specifier = ">=2.8.0" },
{ name = "msgspec", specifier = ">=0.19" },
{ name = "niquests", specifier = ">=3.15.2" },
{ name = "prefect", specifier = ">=3.6.15" },
{ name = "python-dotenv", specifier = ">=1.1.0" },
{ name = "pyyaml", specifier = ">=6.0.2" },
{ name = "typer", specifier = ">=0.15.0" },
]

[package.metadata.requires-dev]
dev = [
{ name = "pre-commit", specifier = ">=4.1.0" },
{ name = "pulumi", specifier = ">=3.202.0" },
{ name = "pulumi-cloudflare", specifier = ">=6.10.0" },
{ name = "pulumi-hcloud", specifier = ">=1.25.0" },
{ name = "pytest", specifier = ">=8.4.2" },
{ name = "pytest-cov", specifier = ">=7.0.0" },
{ name = "pyyaml", specifier = ">=6.0.2" },
{ name = "ruff", specifier = ">=0.9.9" },
{ name = "xlwt", specifier = ">=1.3.0" },
]
exploration = [{ name = "ipykernel", specifier = ">=6.29.5" }]

[[package]]
name = "matplotlib-inline"
version = "0.2.1"

@@ -3165,9 +3216,9 @@ lsp = [
]

[[package]]
name = "sqlmesh-materia"
name = "sqlmesh-beanflows"
version = "0.1.0"
source = { editable = "transform/sqlmesh_materia" }
source = { editable = "transform/sqlmesh_beanflows" }
dependencies = [
{ name = "sqlmesh", extra = ["lsp"] },
]

@@ -17,7 +17,7 @@ dependencies = [
"paddle-python-sdk>=1.13.0",
"itsdangerous>=2.1.0",
"jinja2>=3.1.0",
"hypercorn>=0.17.0",
"granian>=2.7.2",
]

[build-system]

@@ -27,8 +27,8 @@ build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/beanflows"]

[tool.uv]
dev-dependencies = [
[dependency-groups]
dev = [
"hypothesis>=6.100.0",
"pytest>=8.0.0",
"pytest-asyncio>=0.23.0",

@@ -36,14 +36,6 @@ dev-dependencies = [
"ruff>=0.3.0",
]

[tool.ruff]
line-length = 100
target-version = "py311"

[tool.ruff.lint]
select = ["E", "F", "I", "UP"]
ignore = ["E501"]

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]

@@ -23,8 +23,7 @@ def admin_required(f):
async def decorated(*args, **kwargs):
    if "admin" not in (g.get("user") or {}).get("roles", []):
        await flash("Admin access required.", "error")
        from quart import redirect as _redirect
        from quart import url_for as _url_for
        from quart import redirect as _redirect, url_for as _url_for
        return _redirect(_url_for("auth.login"))
    return await f(*args, **kwargs)
return decorated

@@ -105,8 +105,7 @@ def create_app() -> Quart:
# Health check
@app.route("/health")
async def health():
    from .analytics import _db_path as serving_db_path
    from .analytics import fetch_analytics
    from .analytics import _db_path as serving_db_path, fetch_analytics
    from .core import fetch_one
    result = {"status": "healthy", "sqlite": "ok", "duckdb": "ok"}
    try:

@@ -13,8 +13,7 @@ import os
import sys

from dotenv import load_dotenv
from paddle_billing import Client as PaddleClient
from paddle_billing import Environment, Options
from paddle_billing import Client as PaddleClient, Environment, Options
from paddle_billing.Entities.Shared import CurrencyCode, Money, TaxCategory
from paddle_billing.Resources.Prices.Operations import CreatePrice
from paddle_billing.Resources.Products.Operations import CreateProduct

@@ -7,7 +7,6 @@ from unittest.mock import AsyncMock, patch

import aiosqlite
import pytest

from beanflows import core
from beanflows.app import create_app

@@ -2,10 +2,6 @@
Unit tests for billing SQL helpers, feature/limit access, and plan determination.
"""
import pytest
from hypothesis import HealthCheck, given
from hypothesis import settings as h_settings
from hypothesis import strategies as st

from beanflows.billing.routes import (
    can_access_feature,
    get_billing_customer,

@@ -18,6 +14,7 @@ from beanflows.billing.routes import (
    upsert_subscription,
)
from beanflows.core import config
from hypothesis import HealthCheck, given, settings as h_settings, strategies as st

# ════════════════════════════════════════════════════════════
# get_subscription

@@ -2,7 +2,6 @@
Tests for the billing event hook system.
"""
import pytest

from beanflows.billing.routes import _billing_hooks, _fire_hooks, on_billing_event
@@ -144,9 +144,8 @@ class TestCancelRoute:
# subscription_required decorator
# ════════════════════════════════════════════════════════════

from quart import Blueprint # noqa: E402

from beanflows.auth.routes import subscription_required # noqa: E402
from quart import Blueprint # noqa: E402

test_bp = Blueprint("test", __name__)

@@ -5,12 +5,9 @@ Covers signature verification, event parsing, subscription lifecycle transitions
import json

import pytest
from conftest import make_webhook_payload, sign_payload
from hypothesis import HealthCheck, given
from hypothesis import settings as h_settings
from hypothesis import strategies as st

from beanflows.billing.routes import get_billing_customer, get_subscription
from conftest import make_webhook_payload, sign_payload
from hypothesis import HealthCheck, given, settings as h_settings, strategies as st

WEBHOOK_PATH = "/billing/webhook/paddle"
SIG_HEADER = "Paddle-Signature"

@@ -3,8 +3,6 @@ Tests for role-based access control: role_required decorator, grant/revoke/ensur
and admin route protection.
"""
import pytest
from quart import Blueprint

from beanflows import core
from beanflows.auth.routes import (
    ensure_admin_role,
@@ -12,6 +10,7 @@ from beanflows.auth.routes import (
    revoke_role,
    role_required,
)
from quart import Blueprint

# ════════════════════════════════════════════════════════════
# grant_role / revoke_role