Compare commits: 2e928de156...master (11 commits)

Commits in this range:
- c8b86569ff
- 42c1309b20
- c5a218490e
- 52bd731fc3
- e85d0eab63
- e872ba0204
- 8d1dbace0f
- cddcd4463e
- efb5a165e7
- d58fa67238
- 66d484955d
@@ -45,7 +45,7 @@ test:web:
   script:
     - uv sync --all-packages
     - cd web && uv run pytest tests/ -x -q
-    - cd web && uv run ruff check src/ tests/
+    - uv run ruff check .

 # ── Tag (pull-based deploy) ───────────────────────────────────────────────────
 # Creates v<N> tag after all tests pass. The on-server supervisor polls for new

@@ -118,11 +118,11 @@ uv add --package new_source extract-core niquests
 - Each tick: git pull (tag-based) → due extractors → SQLMesh → export_serving → web deploy if changed
 - Crash-safe: systemd `Restart=always` + 10-minute backoff on tick failure

-**CI/CD** (`.gitlab/.gitlab-ci.yml`) — pull-based, no SSH:
-- `test` stage: pytest, sqlmesh test, web pytest
-- `tag` stage: creates `v${CI_PIPELINE_IID}` tag after tests pass (master branch only)
+**CI/CD** (`.gitea/workflows/ci.yaml`) — pull-based, no SSH:
+- `test-cli`, `test-sqlmesh`, `test-web` jobs: pytest, sqlmesh test, web pytest
+- `tag` job: creates `v${github.run_number}` tag after all tests pass (master branch only)
 - Supervisor polls for new tags every 60s, checks out latest, runs `uv sync`
-- No SSH keys or deploy credentials in CI — only `CI_JOB_TOKEN` (built-in)
+- No SSH keys or deploy credentials in CI — only `github.token` (built-in Gitea Actions)

 **CLI modules** (`src/materia/`):
 - `cli.py` — Typer app with subcommands: pipeline, secrets, version
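The `cli.py` bullet above names a Typer app with `pipeline`, `secrets`, and `version` subcommands. As a rough, hedged illustration of that layout (sub-app names and help text are assumptions, not taken from this diff), such a CLI is typically wired up like this:

```python
# Illustrative sketch only; module layout and help strings are assumptions,
# not code from this repository.
import typer

app = typer.Typer(help="materia command-line interface")
pipeline_app = typer.Typer(help="Run pipeline steps")
secrets_app = typer.Typer(help="SOPS-backed secrets helpers")

# Register the sub-apps as "pipeline" and "secrets" command groups.
app.add_typer(pipeline_app, name="pipeline")
app.add_typer(secrets_app, name="secrets")


@app.command()
def version() -> None:
    """Print the installed package version."""
    typer.echo("materia 0.0.0")  # placeholder; the real value would come from package metadata


if __name__ == "__main__":
    app()
```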
Makefile (8 lines changed)
@@ -2,7 +2,7 @@ TAILWIND_VERSION := v4.1.18
 TAILWIND := web/bin/tailwindcss
 SOPS_DOTENV := sops --input-type dotenv --output-type dotenv

-.PHONY: help dev css-build css-watch \
+.PHONY: help dev css-build css-watch install-hooks \
    secrets-decrypt-dev secrets-decrypt-prod \
    secrets-edit-dev secrets-edit-prod \
    secrets-encrypt-dev secrets-encrypt-prod \
@@ -13,6 +13,7 @@ help:
    @echo "  dev                   Start full dev environment (migrate, seed, app + worker + CSS watcher)"
    @echo "  css-build             Build + minify Tailwind CSS"
    @echo "  css-watch             Watch + rebuild Tailwind CSS"
+   @echo "  install-hooks         Install git pre-commit hook (run once after cloning)"
    @echo "  secrets-decrypt-dev   Decrypt .env.dev.sops → .env"
    @echo "  secrets-decrypt-prod  Decrypt .env.prod.sops → .env"
    @echo "  secrets-edit-dev      Edit .env.dev.sops in \$$EDITOR"
@@ -23,6 +24,11 @@ help:

 # ── Dev environment ───────────────────────────────────────────────────────────

+install-hooks:
+   cp scripts/hooks/pre-commit .git/hooks/pre-commit
+   chmod +x .git/hooks/pre-commit
+   @echo "✓ pre-commit hook installed"
+
 dev:
    @./web/scripts/dev_run.sh

@@ -15,8 +15,14 @@ import sys
 from pathlib import Path

 import yfinance as yf
-from extract_core import content_hash, end_run, landing_path, open_state_db, start_run
-from extract_core import write_bytes_atomic
+from extract_core import (
+    content_hash,
+    end_run,
+    landing_path,
+    open_state_db,
+    start_run,
+    write_bytes_atomic,
+)

 logging.basicConfig(
     level=logging.INFO,
@@ -1,4 +1,3 @@
-from .normalize import normalize_zipped_csv
 import logging
 import os
 import sys
@@ -7,8 +6,16 @@ from io import BytesIO
 from pathlib import Path

 import niquests
-from extract_core import end_run, landing_path, normalize_etag, open_state_db, start_run
-from extract_core import write_bytes_atomic
+from extract_core import (
+    end_run,
+    landing_path,
+    normalize_etag,
+    open_state_db,
+    start_run,
+    write_bytes_atomic,
+)
+
+from .normalize import normalize_zipped_csv

 logging.basicConfig(
     level=logging.INFO,
@@ -1,8 +1,7 @@
-import zipfile
 import gzip
-from io import BytesIO
 import pathlib
+import zipfile
+from io import BytesIO


 def normalize_zipped_csv(buffer: BytesIO)->BytesIO:
@@ -15,7 +15,7 @@ set -euo pipefail

 SERVICE_USER="beanflows_service"
 REPO_DIR="/opt/materia"
-GITEA_REPO="ssh://git@git.padelnomics.io:2222/deemanone/materia.git"
+GITEA_REPO="ssh://git@git.padelnomics.io:2222/deemanone/beanflows.git"
 UV="/home/${SERVICE_USER}/.local/bin/uv"

 [ "$(id -u)" = "0" ] || { echo "ERROR: Run as root"; exit 1; }
@@ -7,7 +7,7 @@ Single-server local-first setup for BeanFlows.coffee on Hetzner NVMe.
 ```
 Hetzner Server (NVMe)
 ├── beanflows_service (system user, nologin)
-│   ├── ~/.ssh/materia_deploy              # ed25519 deploy key for GitLab read access
+│   ├── ~/.ssh/beanflows_deploy            # ed25519 deploy key for Gitea read access
 │   └── ~/.config/sops/age/keys.txt        # age keypair (auto-discovered by SOPS)
 ├── /opt/materia/                          # Git repo (owned by beanflows_service, latest release tag)
 ├── /opt/materia/.env                      # Decrypted from .env.prod.sops at deploy time
@@ -37,11 +37,11 @@ bash infra/setup_server.sh

 This creates the `beanflows_service` user, data directories, installs all tools (git, curl, age, sops, rclone, uv), generates an ed25519 SSH deploy key and an age keypair (both as the service user). It prints both public keys.

-### 2. Add keys to GitLab and SOPS
+### 2. Add keys to Gitea and SOPS

 ```bash
-# Add the SSH deploy key to GitLab:
-# → Repository Settings → Deploy Keys → Add key (read-only)
+# Add the SSH deploy key to Gitea:
+# → git.padelnomics.io → beanflows repo → Settings → Deploy Keys → Add key (read-only)

 # Add the server age public key to .sops.yaml on your workstation,
 # then re-encrypt prod secrets to include the server key:
@@ -87,8 +87,8 @@ SOPS auto-discovers the service user's age key at `~/.config/sops/age/keys.txt`

 No SSH keys or deploy credentials in CI.

-1. CI runs tests (`test:cli`, `test:sqlmesh`, `test:web`)
-2. On master, CI creates tag `v${CI_PIPELINE_IID}` using built-in `CI_JOB_TOKEN`
+1. CI runs tests (`test-cli`, `test-sqlmesh`, `test-web`)
+2. On master, CI creates tag `v${github.run_number}` using built-in `github.token`
 3. Supervisor polls for new tags every 60s
 4. When a new tag appears: `git checkout --detach <tag>` + `uv sync --all-packages`
 5. If `web/` files changed: `./web/deploy.sh` (Docker blue/green + health check)
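The five steps above are the whole deploy contract. As a hedged sketch of what one supervisor tick might look like under those rules (helper names, paths, and the shell-outs are assumptions, not the repository's actual supervisor code):

```python
# Illustrative sketch of the tag-polling deploy loop described above.
# All helper names and paths here are assumptions, not code from this repo.
import subprocess
import time


def _git(*args: str) -> str:
    return subprocess.run(
        ["git", *args], check=True, capture_output=True, text=True
    ).stdout.strip()


def tick(current_tag: str) -> str:
    _git("fetch", "--tags", "--prune")
    tags = [t for t in _git("tag", "--list", "v*", "--sort=-creatordate").splitlines() if t]
    latest = tags[0] if tags else current_tag
    if latest != current_tag:
        _git("checkout", "--detach", latest)                      # step 4
        subprocess.run(["uv", "sync", "--all-packages"], check=True)
        changed = _git("diff", "--name-only", f"{current_tag}..{latest}")
        if any(p.startswith("web/") for p in changed.splitlines()):
            subprocess.run(["./web/deploy.sh"], check=True)       # step 5: blue/green + health check
    return latest


if __name__ == "__main__":
    tag = _git("describe", "--tags", "--always")
    while True:
        tag = tick(tag)
        time.sleep(60)  # poll every 60s, as the docs describe
```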
@@ -90,11 +90,13 @@ exclude = [
     "notebooks",
 ]

+line-length = 100
 indent-width = 4

 target-version = "py313"

 [tool.ruff.lint]
+select = ["E", "F", "I", "UP"]

 ignore = [
     "E501",  # line too long (handled by formatter)
scripts/hooks/pre-commit (new file, 18 lines)
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# Pre-commit hook: ruff lint + auto-fix.
+# Install: make install-hooks
+set -euo pipefail
+
+REPO_ROOT="$(git rev-parse --show-toplevel)"
+RUFF="$REPO_ROOT/.venv/bin/ruff"
+
+if [[ ! -x "$RUFF" ]]; then
+    echo "pre-commit: ruff not found at $RUFF — run 'uv sync' first" >&2
+    exit 1
+fi
+
+echo "→ ruff check"
+"$RUFF" check --fix "$REPO_ROOT"
+
+# Re-stage any files ruff fixed so they land in the commit.
+git diff --name-only | xargs -r git add
@@ -27,7 +27,7 @@ def test_secrets_test_command(mock_secrets):
     """Test secrets test command."""
     result = runner.invoke(app, ["secrets", "test"])
     assert result.exit_code == 0
-    assert "ESC connection successful" in result.stdout
+    assert "SOPS decryption successful" in result.stdout


 def test_secrets_list_command(mock_secrets):
@@ -7,7 +7,6 @@ from unittest.mock import MagicMock

 from cftc_cot.normalize import find_csv_inner_filename, normalize_zipped_csv

-
 # =============================================================================
 # normalize.py
 # =============================================================================
@@ -121,7 +120,7 @@ def test_extract_cot_year_skips_existing_file(tmp_path, monkeypatch):
     mock_head.headers = {"etag": f'"{etag}"'}
     mock_session.head.return_value = mock_head

-    result = cot_execute.extract_cot_year(2024, mock_session)
+    result = cot_execute.extract_cot_year(2024, mock_session, cot_execute.COT_URL_FUTURES_ONLY, "cot")

     assert result == 0
     mock_session.get.assert_not_called()  # No download should occur
@@ -141,7 +140,7 @@ def test_extract_cot_year_returns_false_on_404(tmp_path, monkeypatch):
     mock_head.status_code = 404
     mock_session.head.return_value = mock_head

-    result = cot_execute.extract_cot_year(2006, mock_session)
+    result = cot_execute.extract_cot_year(2006, mock_session, cot_execute.COT_URL_FUTURES_ONLY, "cot")

     assert result == 0
     mock_session.get.assert_not_called()
@@ -1,14 +1,11 @@
 """Tests for ICE extraction: format detection, XLS parsing, API client."""

 import csv
-import gzip
 import io
-import struct
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock

 import pytest
 import xlwt  # noqa: F401 — needed to create XLS fixtures; skip tests if missing

 from ice_stocks.ice_api import fetch_report_listings, find_latest_report
 from ice_stocks.xls_parse import OLE2_MAGIC, detect_file_format, xls_to_rows

@@ -96,7 +93,7 @@ def test_fetch_report_listings_parses_response():
         _make_api_row("Certified Stock Aging Report", "/dl/aging.xls"),
     ])

-    from ice_stocks.ice_api import ICE_BASE_URL, fetch_report_listings
+    from ice_stocks.ice_api import ICE_BASE_URL
     rows = fetch_report_listings(mock_session, product_id=2)

     assert len(rows) == 2
@@ -113,7 +110,6 @@ def test_fetch_report_listings_prepends_base_url_for_absolute():
         _make_api_row("Test", "https://other.example.com/file.xls"),
     ])

-    from ice_stocks.ice_api import fetch_report_listings
     rows = fetch_report_listings(mock_session, product_id=2)
     assert rows[0]["download_url"] == "https://other.example.com/file.xls"

@@ -36,14 +36,6 @@ dev-dependencies = [
     "ruff>=0.3.0",
 ]

-[tool.ruff]
-line-length = 100
-target-version = "py311"
-
-[tool.ruff.lint]
-select = ["E", "F", "I", "UP"]
-ignore = ["E501"]
-
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
 testpaths = ["tests"]
@@ -169,7 +169,7 @@ def fetch_country_data_from_duckdb() -> list[dict]:
         )
         SELECT * FROM ranked LIMIT 30
     """).fetchall()
-    cols = [d[0] for d in conn.execute("""
+    _ = [d[0] for d in conn.execute("""
         WITH latest AS (SELECT MAX(market_year) AS max_year FROM serving.commodity_metrics
                         WHERE commodity_code = 711100 AND country_code IS NOT NULL)
         SELECT country_name, country_code, market_year, production * 1000,
@@ -8,7 +8,7 @@ from pathlib import Path

 from quart import Blueprint, flash, g, redirect, render_template, request, url_for

-from ..core import execute, fetch_all, fetch_one, csrf_protect
+from ..core import csrf_protect, execute, fetch_all, fetch_one

 bp = Blueprint(
     "cms",
@@ -71,7 +71,7 @@ async def list_template_data(template_id: int) -> list[dict]:

 async def generate_article_from_data(data_row: dict, tmpl: dict) -> int | None:
     """Generate (or regenerate) a single article from a template_data row."""
-    from jinja2 import Environment, BaseLoader
+    from jinja2 import BaseLoader, Environment

     try:
         data = json.loads(data_row["data_json"])
@@ -7,7 +7,7 @@ from pathlib import Path

 from quart import Blueprint, flash, g, redirect, render_template, request, session, url_for

-from ..core import config, csrf_protect, execute, fetch_all, fetch_one
+from ..core import csrf_protect, execute, fetch_all, fetch_one

 # Blueprint with its own template folder
 bp = Blueprint(
@@ -65,6 +65,7 @@ ALLOWED_METRICS = frozenset({

 _local = threading.local()
 _db_path: str = ""
+_conn: duckdb.DuckDBPyConnection | None = None  # test override: set to bypass _db_path / _local


 def open_analytics_db() -> None:
@@ -110,12 +111,15 @@ async def fetch_analytics(sql: str, params: list | None = None) -> list[dict]:
     """Run a read-only DuckDB query off the event loop. Returns list of dicts.

     Returns empty list if analytics DB is not configured (SERVING_DUCKDB_PATH unset
     or file missing at startup) — dashboard routes degrade gracefully.
+
+    If the module-level _conn is set (test override), it is used directly in place
+    of the per-thread _get_conn() path.
     """
-    if not _db_path:
+    if _conn is None and not _db_path:
         return []

     def _query():
-        conn = _get_conn()
+        conn = _conn if _conn is not None else _get_conn()
         cursor = conn.cursor()
         result = cursor.execute(sql, params or [])
         columns = [desc[0] for desc in result.description]
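The `_conn` override introduced in this hunk is what lets tests bypass `SERVING_DUCKDB_PATH` entirely. A minimal sketch of a fixture that could use it (fixture name, schema, and import path are assumptions, not this repo's conftest):

```python
# Hypothetical pytest fixture built on the _conn test override added above.
# The schema and seed table are made up for illustration.
import duckdb
import pytest

from beanflows import analytics


@pytest.fixture
def serving_conn(monkeypatch):
    conn = duckdb.connect(":memory:")
    conn.execute("CREATE SCHEMA serving")
    conn.execute(
        "CREATE TABLE serving.commodity_metrics "
        "(commodity_code INT, market_year INT, production DOUBLE)"
    )
    # fetch_analytics now uses this connection instead of _db_path / _get_conn().
    monkeypatch.setattr(analytics, "_conn", conn)
    yield conn
    conn.close()
```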
@@ -136,7 +136,7 @@ async def list_commodities():
 @api_key_required(scopes=["read"])
 async def commodity_metrics(code: int):
     """Time series metrics for a commodity. Query params: metrics, start_year, end_year."""
-    raw_metrics = request.args.getlist("metrics") or ["Production", "Exports", "Imports", "Ending_Stocks"]
+    raw_metrics = request.args.getlist("metrics") or ["production", "exports", "imports", "ending_stocks"]
     metrics = [m for m in raw_metrics if m in analytics.ALLOWED_METRICS]
     if not metrics:
         return jsonify({"error": f"No valid metrics. Allowed: {sorted(analytics.ALLOWED_METRICS)}"}), 400
@@ -152,7 +152,7 @@ async def commodity_metrics(code: int):
 @api_key_required(scopes=["read"])
 async def commodity_countries(code: int):
     """Country ranking for a commodity. Query params: metric, limit."""
-    metric = request.args.get("metric", "Production")
+    metric = request.args.get("metric", "production")
     if metric not in analytics.ALLOWED_METRICS:
         return jsonify({"error": f"Invalid metric. Allowed: {sorted(analytics.ALLOWED_METRICS)}"}), 400

@@ -368,7 +368,7 @@ async def commodity_metrics_csv(code: int):
         return jsonify({"error": "CSV export requires a Starter or Pro plan"}), 403

     raw_metrics = request.args.getlist("metrics") or [
-        "Production", "Exports", "Imports", "Ending_Stocks", "Total_Distribution",
+        "production", "exports", "imports", "ending_stocks", "total_distribution",
     ]
     metrics = [m for m in raw_metrics if m in analytics.ALLOWED_METRICS]
     if not metrics:
@@ -105,8 +105,7 @@ def create_app() -> Quart:
     # Health check
     @app.route("/health")
     async def health():
-        from .analytics import _db_path as serving_db_path
-        from .analytics import fetch_analytics
+        from .analytics import _db_path as serving_db_path, fetch_analytics
         from .core import fetch_one
         result = {"status": "healthy", "sqlite": "ok", "duckdb": "ok"}
         try:
@@ -2,13 +2,20 @@
 Auth domain: magic link authentication, user management, decorators.
 """
 import secrets
-from functools import wraps
 from datetime import datetime, timedelta
+from functools import wraps
 from pathlib import Path

-from quart import Blueprint, render_template, request, redirect, url_for, session, flash, g
+from quart import Blueprint, flash, g, redirect, render_template, request, session, url_for

-from ..core import config, fetch_one, fetch_all, execute, csrf_protect, waitlist_gate, capture_waitlist_email
+from ..core import (
+    capture_waitlist_email,
+    config,
+    csrf_protect,
+    execute,
+    fetch_one,
+    waitlist_gate,
+)

 # Blueprint with its own template folder
 bp = Blueprint(
@@ -4,21 +4,49 @@ Payment provider: paddle
 """

 import json
+import logging
 from datetime import datetime
 from functools import wraps
 from pathlib import Path

-from quart import Blueprint, render_template, request, redirect, url_for, flash, g, jsonify, session
+from quart import Blueprint, flash, g, jsonify, redirect, render_template, request, session, url_for

-import httpx
-
-from ..core import config, fetch_one, fetch_all, execute
-from ..core import verify_hmac_signature
 from ..auth.routes import login_required
+from ..core import config, execute, fetch_one

+logger = logging.getLogger(__name__)
+
+
+# =============================================================================
+# Billing event hook system
+# =============================================================================
+
+_billing_hooks: dict[str, list] = {}
+
+
+def on_billing_event(*event_types: str):
+    """Decorator: register a handler for one or more billing event types."""
+    def decorator(func):
+        for event_type in event_types:
+            _billing_hooks.setdefault(event_type, []).append(func)
+        return func
+    return decorator
+
+
+async def _fire_hooks(event_type: str, data: dict) -> None:
+    """Fire all registered hooks for an event type, isolating per-hook failures."""
+    for hook in _billing_hooks.get(event_type, []):
+        try:
+            await hook(event_type, data)
+        except Exception as e:
+            logger.error("Hook %s failed for event %s: %s", hook.__name__, event_type, e)
+
+
+def _paddle_client():
+    """Return a configured Paddle SDK client."""
+    from paddle_billing import Client, Environment, Options
+    env = Environment.SANDBOX if config.PADDLE_ENVIRONMENT == "sandbox" else Environment.PRODUCTION
+    return Client(config.PADDLE_API_KEY, options=Options(environment=env))
+
+
 # Blueprint with its own template folder
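The hook registry added above is generic; a short usage sketch, with an event name and handler that are illustrative rather than handlers that exist in this diff:

```python
# Hypothetical handler registration using the on_billing_event decorator defined above.
from beanflows.billing.routes import _fire_hooks, on_billing_event


@on_billing_event("subscription.created", "subscription.updated")
async def log_subscription_change(event_type: str, data: dict) -> None:
    # Illustrative side effect; a real handler would update state or call an external service.
    print(f"{event_type}: user={data.get('custom_data', {}).get('user_id')}")


# Inside the webhook handler, events would then be dispatched roughly like:
#   await _fire_hooks(event["event_type"], event.get("data", {}))
```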
@@ -195,31 +223,21 @@ async def success():
 @bp.route("/checkout/<plan>", methods=["POST"])
 @login_required
 async def checkout(plan: str):
-    """Create Paddle checkout via API."""
+    """Create Paddle checkout via SDK."""
     price_id = config.PADDLE_PRICES.get(plan)
     if not price_id:
         await flash("Invalid plan selected.", "error")
         return redirect(url_for("billing.pricing"))

-    async with httpx.AsyncClient() as client:
-        response = await client.post(
-            "https://api.paddle.com/transactions",
-            headers={
-                "Authorization": f"Bearer {config.PADDLE_API_KEY}",
-                "Content-Type": "application/json",
-            },
-            json={
-                "items": [{"price_id": price_id, "quantity": 1}],
-                "custom_data": {"user_id": str(g.user["id"]), "plan": plan},
-                "checkout": {
-                    "url": f"{config.BASE_URL}/billing/success",
-                },
-            },
-        )
-        response.raise_for_status()
-        checkout_url = response.json()["data"]["checkout"]["url"]
-    return redirect(checkout_url)
+    from paddle_billing.Resources.Transactions.Operations import CreateTransaction
+    txn = _paddle_client().transactions.create(
+        CreateTransaction(
+            items=[{"price_id": price_id, "quantity": 1}],
+            custom_data={"user_id": str(g.user["id"]), "plan": plan},
+            checkout={"url": f"{config.BASE_URL}/billing/success"},
+        )
+    )
+    return redirect(txn.checkout.url)


 @bp.route("/manage", methods=["POST"])
@@ -232,13 +250,8 @@ async def manage():
         return redirect(url_for("dashboard.settings"))

     try:
-        async with httpx.AsyncClient() as client:
-            response = await client.get(
-                f"https://api.paddle.com/subscriptions/{sub['provider_subscription_id']}",
-                headers={"Authorization": f"Bearer {config.PADDLE_API_KEY}"},
-            )
-            response.raise_for_status()
-            portal_url = response.json()["data"]["management_urls"]["update_payment_method"]
+        subscription = _paddle_client().subscriptions.get(sub["provider_subscription_id"])
+        portal_url = subscription.management_urls.update_payment_method
     except Exception:
         await flash("Could not reach the billing portal. Please try again or contact support.", "error")
         return redirect(url_for("dashboard.settings"))
@@ -249,28 +262,27 @@ async def manage():
 @bp.route("/cancel", methods=["POST"])
 @login_required
 async def cancel():
-    """Cancel subscription via Paddle API."""
+    """Cancel subscription via Paddle SDK."""
     sub = await get_subscription(g.user["id"])
     if sub and sub.get("provider_subscription_id"):
-        async with httpx.AsyncClient() as client:
-            await client.post(
-                f"https://api.paddle.com/subscriptions/{sub['provider_subscription_id']}/cancel",
-                headers={
-                    "Authorization": f"Bearer {config.PADDLE_API_KEY}",
-                    "Content-Type": "application/json",
-                },
-                json={"effective_from": "next_billing_period"},
-            )
+        from paddle_billing.Resources.Subscriptions.Operations import CancelSubscription
+        _paddle_client().subscriptions.cancel(
+            sub["provider_subscription_id"],
+            CancelSubscription(effective_from="next_billing_period"),
+        )
     return redirect(url_for("dashboard.settings"))


 @bp.route("/webhook/paddle", methods=["POST"])
 async def webhook():
     """Handle Paddle webhooks."""
+    import paddle_billing
     payload = await request.get_data()
     sig = request.headers.get("Paddle-Signature", "")

-    if not verify_hmac_signature(payload, sig, config.PADDLE_WEBHOOK_SECRET):
+    try:
+        paddle_billing.Notifications.Verifier().verify(payload, config.PADDLE_WEBHOOK_SECRET, sig)
+    except Exception:
         return jsonify({"error": "Invalid signature"}), 400

     event = json.loads(payload)
@@ -31,7 +31,7 @@ async def _serve_article(url_path: str):
     body_html = ""
     if article.get("body_template") and article.get("data_json"):
         try:
-            from jinja2 import Environment, BaseLoader
+            from jinja2 import BaseLoader, Environment
             data = json.loads(article["data_json"])
             env = Environment(loader=BaseLoader())
             body_html = env.from_string(article["body_template"]).render(**data)
@@ -77,22 +77,15 @@ class Config:
     RESEND_AUDIENCE_WAITLIST: str = os.getenv("RESEND_AUDIENCE_WAITLIST", "")

     PLAN_FEATURES: dict = {
-        "free": ["dashboard", "coffee_only", "limited_history"],
-        "starter": ["dashboard", "coffee_only", "full_history", "export", "api"],
-        "pro": [
-            "dashboard",
-            "all_commodities",
-            "full_history",
-            "export",
-            "api",
-            "priority_support",
-        ],
+        "free": ["basic"],
+        "starter": ["basic", "export"],
+        "pro": ["basic", "export", "api", "priority_support"],
     }

     PLAN_LIMITS: dict = {
-        "free": {"commodities": 1, "history_years": 5, "api_calls": 0},
-        "starter": {"commodities": 1, "history_years": -1, "api_calls": 10000},
-        "pro": {"commodities": -1, "history_years": -1, "api_calls": -1},  # -1 = unlimited
+        "free": {"items": 100, "api_calls": 0},
+        "starter": {"items": 1000, "api_calls": 10000},
+        "pro": {"items": -1, "api_calls": -1},  # -1 = unlimited
     }

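These simplified `PLAN_FEATURES` / `PLAN_LIMITS` tables feed plan-gating helpers such as `can_access_feature`, which the billing tests further down import. A hedged stand-in showing the obvious shape of that lookup (not the repository's implementation):

```python
# Sketch of how the simplified plan tables above can drive access checks.
# The helper names mirror can_access_feature from the billing tests, but this
# is an illustrative stand-in, not the repo's billing code.
PLAN_FEATURES = {
    "free": ["basic"],
    "starter": ["basic", "export"],
    "pro": ["basic", "export", "api", "priority_support"],
}
PLAN_LIMITS = {
    "free": {"items": 100, "api_calls": 0},
    "starter": {"items": 1000, "api_calls": 10000},
    "pro": {"items": -1, "api_calls": -1},  # -1 = unlimited
}


def can_access_feature(plan: str, feature: str) -> bool:
    return feature in PLAN_FEATURES.get(plan, PLAN_FEATURES["free"])


def within_limit(plan: str, key: str, used: int) -> bool:
    limit = PLAN_LIMITS.get(plan, PLAN_LIMITS["free"]).get(key, 0)
    return limit == -1 or used < limit


assert can_access_feature("starter", "export")
assert not can_access_feature("free", "api")
assert within_limit("pro", "api_calls", 10**9)
```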
@@ -130,7 +130,7 @@ async def index():
     user = g.user
     plan = (g.get("subscription") or {}).get("plan", "free")

-    if analytics._db_path:
+    if analytics._db_path or analytics._conn is not None:
         results = await asyncio.gather(
             analytics.get_price_latest(analytics.COFFEE_TICKER),
             analytics.get_cot_positioning_latest(analytics.COFFEE_CFTC_CODE),
@@ -208,7 +208,7 @@ async def supply():
     current_year = datetime.date.today().year
     start_year = current_year - rng["years"]

-    if analytics._db_path:
+    if analytics._db_path or analytics._conn is not None:
         results = await asyncio.gather(
             analytics.get_global_time_series(
                 analytics.COFFEE_COMMODITY_CODE,
@@ -267,7 +267,7 @@ async def positioning():
     cot_weeks = rng["weeks"]

     options_delta = None
-    if analytics._db_path:
+    if analytics._db_path or analytics._conn is not None:
         gather_coros = [
             analytics.get_price_latest(analytics.COFFEE_TICKER),
             analytics.get_price_time_series(analytics.COFFEE_TICKER, limit=price_limit),
@@ -322,7 +322,7 @@ async def warehouse():
     stocks_latest = stocks_trend = aging_latest = byport_latest = byport_trend = None
     stocks_trend = aging_latest = byport_trend = []

-    if analytics._db_path:
+    if analytics._db_path or analytics._conn is not None:
         if view == "stocks":
             results = await asyncio.gather(
                 analytics.get_ice_stocks_latest(),
@@ -416,7 +416,7 @@ async def weather():
     rng = RANGE_MAP[range_key]
     days = rng["days"]

-    if analytics._db_path:
+    if analytics._db_path or analytics._conn is not None:
         if location_id:
             results = await asyncio.gather(
                 analytics.get_weather_stress_latest(),
@@ -1,12 +1,12 @@
 """
 Public domain: landing page, marketing pages, legal pages, feedback, sitemap.
 """
-from pathlib import Path
 from datetime import datetime
+from pathlib import Path

-from quart import Blueprint, render_template, request, g, make_response
+from quart import Blueprint, g, make_response, render_template, request

-from ..core import config, execute, fetch_all, check_rate_limit, csrf_protect
+from ..core import check_rate_limit, config, csrf_protect, execute, fetch_all

 # Blueprint with its own template folder
 bp = Blueprint(
@@ -13,8 +13,7 @@ import os
 import sys

 from dotenv import load_dotenv
-from paddle_billing import Client as PaddleClient
-from paddle_billing import Environment, Options
+from paddle_billing import Client as PaddleClient, Environment, Options
 from paddle_billing.Entities.Shared import CurrencyCode, Money, TaxCategory
 from paddle_billing.Resources.Prices.Operations import CreatePrice
 from paddle_billing.Resources.Products.Operations import CreateProduct
@@ -6,8 +6,7 @@ import json
 import traceback
 from datetime import datetime, timedelta

-from .core import config, init_db, fetch_one, fetch_all, execute, send_email
-
+from .core import config, execute, fetch_all, init_db, send_email

 # Task handlers registry
 HANDLERS: dict[str, callable] = {}
@@ -1,20 +1,15 @@
 """
 Shared test fixtures for the BeanFlows test suite.
 """
-import hashlib
-import hmac
 from datetime import datetime
 from pathlib import Path
 from unittest.mock import AsyncMock, patch

 import aiosqlite
 import pytest

-
 from beanflows import core
 from beanflows.app import create_app


 SCHEMA_PATH = Path(__file__).parent.parent / "src" / "beanflows" / "migrations" / "schema.sql"

@@ -287,12 +282,20 @@ def mock_analytics(monkeypatch):
          "market_year": 2025, "production": 30000.0, "production_yoy_pct": -1.2},
     ]

+    _commodities = [
+        {"commodity_code": 711100, "commodity_name": "Coffee, Green"},
+        {"commodity_code": 711200, "commodity_name": "Coffee, Roasted"},
+    ]
+
     async def _ts(*a, **kw): return _time_series
     async def _top(*a, **kw): return _top_producers
     async def _stu(*a, **kw): return _stu_trend
     async def _bal(*a, **kw): return _balance
     async def _yoy(*a, **kw): return _yoy_data
     async def _cmp(*a, **kw): return []
+    async def _com(*a, **kw): return _commodities
+    async def _none(*a, **kw): return None
+    async def _empty(*a, **kw): return []

     monkeypatch.setattr(analytics, "get_global_time_series", _ts)
     monkeypatch.setattr(analytics, "get_top_countries", _top)
@@ -300,5 +303,15 @@ def mock_analytics(monkeypatch):
     monkeypatch.setattr(analytics, "get_supply_demand_balance", _bal)
     monkeypatch.setattr(analytics, "get_production_yoy_by_country", _yoy)
     monkeypatch.setattr(analytics, "get_country_comparison", _cmp)
+    monkeypatch.setattr(analytics, "get_available_commodities", _com)
+    # Pulse-page analytics
+    monkeypatch.setattr(analytics, "get_price_latest", _none)
+    monkeypatch.setattr(analytics, "get_price_time_series", _empty)
+    monkeypatch.setattr(analytics, "get_cot_positioning_latest", _none)
+    monkeypatch.setattr(analytics, "get_cot_index_trend", _empty)
+    monkeypatch.setattr(analytics, "get_ice_stocks_latest", _none)
+    monkeypatch.setattr(analytics, "get_ice_stocks_trend", _empty)
+    monkeypatch.setattr(analytics, "get_weather_stress_latest", _none)
+    monkeypatch.setattr(analytics, "get_weather_stress_trend", _empty)

@@ -14,7 +14,6 @@ import asyncio
 import duckdb
 import pytest

-
 # ── Fixtures ────────────────────────────────────────────────────────────────

@@ -70,13 +70,13 @@ async def test_commodity_metrics(client, db, test_user, mock_analytics):
     """GET /commodities/<code>/metrics returns time series."""
     raw_key = await _create_api_key_for_user(db, test_user["id"])
     response = await client.get(
-        "/api/v1/commodities/711100/metrics?metrics=Production&metrics=Exports",
+        "/api/v1/commodities/711100/metrics?metrics=production&metrics=exports",
         headers={"Authorization": f"Bearer {raw_key}"},
     )
     assert response.status_code == 200
     data = await response.get_json()
     assert data["commodity_code"] == 711100
-    assert "Production" in data["metrics"]
+    assert "production" in data["metrics"]


 @pytest.mark.asyncio
@@ -95,12 +95,12 @@ async def test_commodity_countries(client, db, test_user, mock_analytics):
     """GET /commodities/<code>/countries returns ranking."""
     raw_key = await _create_api_key_for_user(db, test_user["id"])
     response = await client.get(
-        "/api/v1/commodities/711100/countries?metric=Production&limit=5",
+        "/api/v1/commodities/711100/countries?metric=production&limit=5",
         headers={"Authorization": f"Bearer {raw_key}"},
     )
     assert response.status_code == 200
     data = await response.get_json()
-    assert data["metric"] == "Production"
+    assert data["metric"] == "production"


 @pytest.mark.asyncio
@@ -2,12 +2,7 @@
 Unit tests for billing SQL helpers, feature/limit access, and plan determination.
 """
 import pytest
-from hypothesis import HealthCheck, given
-from hypothesis import settings as h_settings
-from hypothesis import strategies as st

 from beanflows.billing.routes import (
     can_access_feature,
     get_billing_customer,
     get_subscription,
@@ -19,6 +14,7 @@ from beanflows.billing.routes import (
     upsert_subscription,
 )
 from beanflows.core import config
+from hypothesis import HealthCheck, given, settings as h_settings, strategies as st

 # ════════════════════════════════════════════════════════════
 # get_subscription
@@ -66,7 +62,7 @@ class TestUpsertSubscription:
             status="active",
             provider_subscription_id="sub_same",
         )
-        returned_id = await upsert_subscription(
+        await upsert_subscription(
             user_id=test_user["id"],
             plan="pro",
             status="active",
@@ -2,7 +2,6 @@
 Tests for the billing event hook system.
 """
 import pytest
-
 from beanflows.billing.routes import _billing_hooks, _fire_hooks, on_billing_event

@@ -11,7 +11,6 @@ from unittest.mock import MagicMock

 import pytest

-
 CHECKOUT_METHOD = "POST"
 CHECKOUT_PLAN = "starter"

@@ -145,9 +144,8 @@ class TestCancelRoute:
 # subscription_required decorator
 # ════════════════════════════════════════════════════════════

-from quart import Blueprint  # noqa: E402
-
 from beanflows.auth.routes import subscription_required  # noqa: E402
+from quart import Blueprint  # noqa: E402

 test_bp = Blueprint("test", __name__)

@@ -5,14 +5,9 @@ Covers signature verification, event parsing, subscription lifecycle transitions
 import json

 import pytest
-from conftest import make_webhook_payload, sign_payload
-
-from hypothesis import HealthCheck, given
-from hypothesis import settings as h_settings
-from hypothesis import strategies as st

 from beanflows.billing.routes import get_billing_customer, get_subscription
+from conftest import make_webhook_payload, sign_payload
+from hypothesis import HealthCheck, given, settings as h_settings, strategies as st

 WEBHOOK_PATH = "/billing/webhook/paddle"
 SIG_HEADER = "Paddle-Signature"
@@ -18,10 +18,9 @@ async def test_dashboard_loads(auth_client, mock_analytics):
     assert response.status_code == 200

     body = (await response.get_data(as_text=True))
-    assert "Coffee Dashboard" in body
-    assert "Global Supply" in body
-    assert "Stock-to-Use" in body
-    assert "Top Producing Countries" in body
+    assert "Pulse" in body
+    assert "Stock-to-Use Ratio" in body
+    assert "KC=F Close" in body


 @pytest.mark.asyncio
@@ -30,8 +29,8 @@ async def test_dashboard_shows_metric_cards(auth_client, mock_analytics):
     response = await auth_client.get("/dashboard/")
     body = (await response.get_data(as_text=True))

-    # Latest production from mock: 172,000
-    assert "172,000" in body
+    assert "MM Net Position" in body
+    assert "Certified Stocks" in body


 @pytest.mark.asyncio
@@ -40,8 +39,7 @@ async def test_dashboard_yoy_table(auth_client, mock_analytics):
     response = await auth_client.get("/dashboard/")
     body = (await response.get_data(as_text=True))

-    assert "Brazil" in body
-    assert "Vietnam" in body
+    assert "Global Supply" in body


 @pytest.mark.asyncio
@@ -59,7 +57,7 @@ async def test_dashboard_free_plan_no_csv_export(auth_client, mock_analytics):
     response = await auth_client.get("/dashboard/")
     body = (await response.get_data(as_text=True))

-    assert "CSV export available on Trader" in body
+    assert "Upgrade" in body


 @pytest.mark.asyncio
@@ -3,16 +3,14 @@ Tests for role-based access control: role_required decorator, grant/revoke/ensur
 and admin route protection.
 """
 import pytest
-from quart import Blueprint
+from beanflows import core

 from beanflows.auth.routes import (
     ensure_admin_role,
     grant_role,
     revoke_role,
     role_required,
 )
-from beanflows import core
+from quart import Blueprint


 # ════════════════════════════════════════════════════════════
 # grant_role / revoke_role