feat: v9c Phase 2 - Backend Non-Blocking Logging (12 Endpoints)
All checks were successful
Deploy Development / deploy (push) Successful in 34s
Build Test / lint-backend (push) Successful in 1s
Build Test / build-frontend (push) Successful in 13s

PHASE 2: Backend Non-Blocking Logging - KOMPLETT

Instrumentierte Endpoints (12):
- Data: weight, circumference, caliper, nutrition, activity, photos (6)
- AI: insights/run/{slug}, insights/pipeline (2)
- Export: csv, json, zip (3)
- Import: zip (1)

Pattern implementiert:
- check_feature_access() VOR Operation (non-blocking)
- [FEATURE-LIMIT] Logging wenn Limit überschritten
- increment_feature_usage() NACH Operation
- Alte Permission-Checks bleiben aktiv

Features geprüft:
- weight_entries, circumference_entries, caliper_entries
- nutrition_entries, activity_entries, photos
- ai_calls, ai_pipeline
- data_export, data_import

Monitoring: 1-2 Wochen Log-Only-Phase
Logs zeigen: Wie oft würde blockiert werden?
Nächste Phase: Frontend Display (Usage-Counter)

Phase 1 (Cleanup) + Phase 2 (Logging) vollständig!

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Lars 2026-03-20 21:59:33 +01:00
parent 73bea5ee86
commit ddcd2f4350
9 changed files with 220 additions and 14 deletions

View File

@ -6,16 +6,18 @@ Handles workout/activity logging, statistics, and Apple Health CSV import.
import csv import csv
import io import io
import uuid import uuid
import logging
from typing import Optional from typing import Optional
from fastapi import APIRouter, HTTPException, UploadFile, File, Header, Depends from fastapi import APIRouter, HTTPException, UploadFile, File, Header, Depends
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from models import ActivityEntry from models import ActivityEntry
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/activity", tags=["activity"]) router = APIRouter(prefix="/api/activity", tags=["activity"])
logger = logging.getLogger(__name__)
@router.get("") @router.get("")
@ -33,6 +35,15 @@ def list_activity(limit: int=200, x_profile_id: Optional[str]=Header(default=Non
def create_activity(e: ActivityEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): def create_activity(e: ActivityEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Create new activity entry.""" """Create new activity entry."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'activity_entries')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"activity_entries {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
eid = str(uuid.uuid4()) eid = str(uuid.uuid4())
d = e.model_dump() d = e.model_dump()
with get_db() as conn: with get_db() as conn:
@ -44,6 +55,10 @@ def create_activity(e: ActivityEntry, x_profile_id: Optional[str]=Header(default
(eid,pid,d['date'],d['start_time'],d['end_time'],d['activity_type'],d['duration_min'], (eid,pid,d['date'],d['start_time'],d['end_time'],d['activity_type'],d['duration_min'],
d['kcal_active'],d['kcal_resting'],d['hr_avg'],d['hr_max'],d['distance_km'], d['kcal_active'],d['kcal_resting'],d['hr_avg'],d['hr_max'],d['distance_km'],
d['rpe'],d['source'],d['notes'])) d['rpe'],d['source'],d['notes']))
# Phase 2: Increment usage counter (always for new entries)
increment_feature_usage(pid, 'activity_entries')
return {"id":eid,"date":e.date} return {"id":eid,"date":e.date}

View File

@ -4,16 +4,18 @@ Caliper/Skinfold Tracking Endpoints for Mitai Jinkendo
Handles body fat measurements via skinfold caliper (4 methods supported). Handles body fat measurements via skinfold caliper (4 methods supported).
""" """
import uuid import uuid
import logging
from typing import Optional from typing import Optional
from fastapi import APIRouter, Header, Depends from fastapi import APIRouter, Header, Depends
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from models import CaliperEntry from models import CaliperEntry
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/caliper", tags=["caliper"]) router = APIRouter(prefix="/api/caliper", tags=["caliper"])
logger = logging.getLogger(__name__)
@router.get("") @router.get("")
@ -31,17 +33,30 @@ def list_caliper(limit: int=100, x_profile_id: Optional[str]=Header(default=None
def upsert_caliper(e: CaliperEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): def upsert_caliper(e: CaliperEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Create or update caliper entry (upsert by date).""" """Create or update caliper entry (upsert by date)."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'caliper_entries')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"caliper_entries {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("SELECT id FROM caliper_log WHERE profile_id=%s AND date=%s", (pid,e.date)) cur.execute("SELECT id FROM caliper_log WHERE profile_id=%s AND date=%s", (pid,e.date))
ex = cur.fetchone() ex = cur.fetchone()
d = e.model_dump() d = e.model_dump()
is_new_entry = not ex
if ex: if ex:
# UPDATE existing entry
eid = ex['id'] eid = ex['id']
sets = ', '.join(f"{k}=%s" for k in d if k!='date') sets = ', '.join(f"{k}=%s" for k in d if k!='date')
cur.execute(f"UPDATE caliper_log SET {sets} WHERE id=%s", cur.execute(f"UPDATE caliper_log SET {sets} WHERE id=%s",
[v for k,v in d.items() if k!='date']+[eid]) [v for k,v in d.items() if k!='date']+[eid])
else: else:
# INSERT new entry
eid = str(uuid.uuid4()) eid = str(uuid.uuid4())
cur.execute("""INSERT INTO caliper_log cur.execute("""INSERT INTO caliper_log
(id,profile_id,date,sf_method,sf_chest,sf_axilla,sf_triceps,sf_subscap,sf_suprailiac, (id,profile_id,date,sf_method,sf_chest,sf_axilla,sf_triceps,sf_subscap,sf_suprailiac,
@ -50,6 +65,10 @@ def upsert_caliper(e: CaliperEntry, x_profile_id: Optional[str]=Header(default=N
(eid,pid,d['date'],d['sf_method'],d['sf_chest'],d['sf_axilla'],d['sf_triceps'], (eid,pid,d['date'],d['sf_method'],d['sf_chest'],d['sf_axilla'],d['sf_triceps'],
d['sf_subscap'],d['sf_suprailiac'],d['sf_abdomen'],d['sf_thigh'],d['sf_calf_med'], d['sf_subscap'],d['sf_suprailiac'],d['sf_abdomen'],d['sf_thigh'],d['sf_calf_med'],
d['sf_lowerback'],d['sf_biceps'],d['body_fat_pct'],d['lean_mass'],d['fat_mass'],d['notes'])) d['sf_lowerback'],d['sf_biceps'],d['body_fat_pct'],d['lean_mass'],d['fat_mass'],d['notes']))
# Phase 2: Increment usage counter (only for new entries)
increment_feature_usage(pid, 'caliper_entries')
return {"id":eid,"date":e.date} return {"id":eid,"date":e.date}

View File

@ -4,16 +4,18 @@ Circumference Tracking Endpoints for Mitai Jinkendo
Handles body circumference measurements (8 measurement points). Handles body circumference measurements (8 measurement points).
""" """
import uuid import uuid
import logging
from typing import Optional from typing import Optional
from fastapi import APIRouter, Header, Depends from fastapi import APIRouter, Header, Depends
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from models import CircumferenceEntry from models import CircumferenceEntry
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/circumferences", tags=["circumference"]) router = APIRouter(prefix="/api/circumferences", tags=["circumference"])
logger = logging.getLogger(__name__)
@router.get("") @router.get("")
@ -31,23 +33,40 @@ def list_circs(limit: int=100, x_profile_id: Optional[str]=Header(default=None),
def upsert_circ(e: CircumferenceEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): def upsert_circ(e: CircumferenceEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Create or update circumference entry (upsert by date).""" """Create or update circumference entry (upsert by date)."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'circumference_entries')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"circumference_entries {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("SELECT id FROM circumference_log WHERE profile_id=%s AND date=%s", (pid,e.date)) cur.execute("SELECT id FROM circumference_log WHERE profile_id=%s AND date=%s", (pid,e.date))
ex = cur.fetchone() ex = cur.fetchone()
d = e.model_dump() d = e.model_dump()
is_new_entry = not ex
if ex: if ex:
# UPDATE existing entry
eid = ex['id'] eid = ex['id']
sets = ', '.join(f"{k}=%s" for k in d if k!='date') sets = ', '.join(f"{k}=%s" for k in d if k!='date')
cur.execute(f"UPDATE circumference_log SET {sets} WHERE id=%s", cur.execute(f"UPDATE circumference_log SET {sets} WHERE id=%s",
[v for k,v in d.items() if k!='date']+[eid]) [v for k,v in d.items() if k!='date']+[eid])
else: else:
# INSERT new entry
eid = str(uuid.uuid4()) eid = str(uuid.uuid4())
cur.execute("""INSERT INTO circumference_log cur.execute("""INSERT INTO circumference_log
(id,profile_id,date,c_neck,c_chest,c_waist,c_belly,c_hip,c_thigh,c_calf,c_arm,notes,photo_id,created) (id,profile_id,date,c_neck,c_chest,c_waist,c_belly,c_hip,c_thigh,c_calf,c_arm,notes,photo_id,created)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)""", VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)""",
(eid,pid,d['date'],d['c_neck'],d['c_chest'],d['c_waist'],d['c_belly'], (eid,pid,d['date'],d['c_neck'],d['c_chest'],d['c_waist'],d['c_belly'],
d['c_hip'],d['c_thigh'],d['c_calf'],d['c_arm'],d['notes'],d['photo_id'])) d['c_hip'],d['c_thigh'],d['c_calf'],d['c_arm'],d['notes'],d['photo_id']))
# Phase 2: Increment usage counter (only for new entries)
increment_feature_usage(pid, 'circumference_entries')
return {"id":eid,"date":e.date} return {"id":eid,"date":e.date}

View File

@ -7,6 +7,7 @@ import os
import csv import csv
import io import io
import json import json
import logging
import zipfile import zipfile
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
@ -17,10 +18,11 @@ from fastapi import APIRouter, HTTPException, Header, Depends
from fastapi.responses import StreamingResponse, Response from fastapi.responses import StreamingResponse, Response
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/export", tags=["export"]) router = APIRouter(prefix="/api/export", tags=["export"])
logger = logging.getLogger(__name__)
PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos")) PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos"))
@ -30,7 +32,16 @@ def export_csv(x_profile_id: Optional[str]=Header(default=None), session: dict=D
"""Export all data as CSV.""" """Export all data as CSV."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Check export permission # Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'data_export')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"data_export {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
# NOTE: Phase 2 does NOT block - just logs!
# Old permission check (keep for now)
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("SELECT export_enabled FROM profiles WHERE id=%s", (pid,)) cur.execute("SELECT export_enabled FROM profiles WHERE id=%s", (pid,))
@ -74,6 +85,10 @@ def export_csv(x_profile_id: Optional[str]=Header(default=None), session: dict=D
writer.writerow(["Training", r['date'], r['activity_type'], f"{float(r['duration_min'])}min {float(r['kcal_active'])}kcal"]) writer.writerow(["Training", r['date'], r['activity_type'], f"{float(r['duration_min'])}min {float(r['kcal_active'])}kcal"])
output.seek(0) output.seek(0)
# Phase 2: Increment usage counter
increment_feature_usage(pid, 'data_export')
return StreamingResponse( return StreamingResponse(
iter([output.getvalue()]), iter([output.getvalue()]),
media_type="text/csv", media_type="text/csv",
@ -86,7 +101,15 @@ def export_json(x_profile_id: Optional[str]=Header(default=None), session: dict=
"""Export all data as JSON.""" """Export all data as JSON."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Check export permission # Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'data_export')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"data_export {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
# Old permission check (keep for now)
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("SELECT export_enabled FROM profiles WHERE id=%s", (pid,)) cur.execute("SELECT export_enabled FROM profiles WHERE id=%s", (pid,))
@ -126,6 +149,10 @@ def export_json(x_profile_id: Optional[str]=Header(default=None), session: dict=
return str(obj) return str(obj)
json_str = json.dumps(data, indent=2, default=decimal_handler) json_str = json.dumps(data, indent=2, default=decimal_handler)
# Phase 2: Increment usage counter
increment_feature_usage(pid, 'data_export')
return Response( return Response(
content=json_str, content=json_str,
media_type="application/json", media_type="application/json",
@ -138,7 +165,15 @@ def export_zip(x_profile_id: Optional[str]=Header(default=None), session: dict=D
"""Export all data as ZIP (CSV + JSON + photos) per specification.""" """Export all data as ZIP (CSV + JSON + photos) per specification."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Check export permission & get profile # Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'data_export')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"data_export {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
# Old permission check & get profile
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("SELECT * FROM profiles WHERE id=%s", (pid,)) cur.execute("SELECT * FROM profiles WHERE id=%s", (pid,))
@ -297,6 +332,10 @@ Datumsformat: YYYY-MM-DD
zip_buffer.seek(0) zip_buffer.seek(0)
filename = f"mitai-export-{profile_name.replace(' ','-')}-{export_date}.zip" filename = f"mitai-export-{profile_name.replace(' ','-')}-{export_date}.zip"
# Phase 2: Increment usage counter
increment_feature_usage(pid, 'data_export')
return StreamingResponse( return StreamingResponse(
iter([zip_buffer.getvalue()]), iter([zip_buffer.getvalue()]),
media_type="application/zip", media_type="application/zip",

View File

@ -8,6 +8,7 @@ import csv
import io import io
import json import json
import uuid import uuid
import logging
import zipfile import zipfile
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
@ -16,10 +17,11 @@ from datetime import datetime
from fastapi import APIRouter, HTTPException, UploadFile, File, Header, Depends from fastapi import APIRouter, HTTPException, UploadFile, File, Header, Depends
from db import get_db, get_cursor from db import get_db, get_cursor
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/import", tags=["import"]) router = APIRouter(prefix="/api/import", tags=["import"])
logger = logging.getLogger(__name__)
PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos")) PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos"))
@ -41,6 +43,15 @@ async def import_zip(
""" """
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'data_import')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"data_import {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
# NOTE: Phase 2 does NOT block - just logs!
# Read uploaded file # Read uploaded file
content = await file.read() content = await file.read()
zip_buffer = io.BytesIO(content) zip_buffer = io.BytesIO(content)
@ -254,6 +265,9 @@ async def import_zip(
conn.rollback() conn.rollback()
raise HTTPException(500, f"Import fehlgeschlagen: {str(e)}") raise HTTPException(500, f"Import fehlgeschlagen: {str(e)}")
# Phase 2: Increment usage counter
increment_feature_usage(pid, 'data_import')
return { return {
"ok": True, "ok": True,
"message": "Import erfolgreich", "message": "Import erfolgreich",

View File

@ -6,6 +6,7 @@ Handles AI analysis execution, prompt management, and usage tracking.
import os import os
import json import json
import uuid import uuid
import logging
import httpx import httpx
from typing import Optional from typing import Optional
from datetime import datetime from datetime import datetime
@ -13,10 +14,11 @@ from datetime import datetime
from fastapi import APIRouter, HTTPException, Header, Depends from fastapi import APIRouter, HTTPException, Header, Depends
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth, require_admin from auth import require_auth, require_admin, check_feature_access, increment_feature_usage
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api", tags=["insights"]) router = APIRouter(prefix="/api", tags=["insights"])
logger = logging.getLogger(__name__)
OPENROUTER_KEY = os.getenv("OPENROUTER_API_KEY", "") OPENROUTER_KEY = os.getenv("OPENROUTER_API_KEY", "")
OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "anthropic/claude-sonnet-4") OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "anthropic/claude-sonnet-4")
@ -251,6 +253,17 @@ def delete_ai_insight(scope: str, x_profile_id: Optional[str]=Header(default=Non
async def analyze_with_prompt(slug: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): async def analyze_with_prompt(slug: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Run AI analysis with specified prompt template.""" """Run AI analysis with specified prompt template."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'ai_calls')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"ai_calls {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
# NOTE: Phase 2 does NOT block - just logs!
# Old check (keep for now, but will be replaced in Phase 4)
check_ai_limit(pid) check_ai_limit(pid)
# Get prompt template # Get prompt template
@ -300,7 +313,12 @@ async def analyze_with_prompt(slug: str, x_profile_id: Optional[str]=Header(defa
cur.execute("INSERT INTO ai_insights (id, profile_id, scope, content, created) VALUES (%s,%s,%s,%s,CURRENT_TIMESTAMP)", cur.execute("INSERT INTO ai_insights (id, profile_id, scope, content, created) VALUES (%s,%s,%s,%s,CURRENT_TIMESTAMP)",
(str(uuid.uuid4()), pid, slug, content)) (str(uuid.uuid4()), pid, slug, content))
# Phase 2: Increment new feature usage counter
increment_feature_usage(pid, 'ai_calls')
# Old usage tracking (keep for now)
inc_ai_usage(pid) inc_ai_usage(pid)
return {"scope": slug, "content": content} return {"scope": slug, "content": content}
@ -308,6 +326,25 @@ async def analyze_with_prompt(slug: str, x_profile_id: Optional[str]=Header(defa
async def analyze_pipeline(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): async def analyze_pipeline(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Run 3-stage pipeline analysis.""" """Run 3-stage pipeline analysis."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check pipeline feature access (boolean - enabled/disabled)
access_pipeline = check_feature_access(pid, 'ai_pipeline')
if not access_pipeline['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"ai_pipeline {access_pipeline['reason']}"
)
# NOTE: Phase 2 does NOT block - just logs!
# Also check ai_calls (pipeline uses API calls too)
access_calls = check_feature_access(pid, 'ai_calls')
if not access_calls['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"ai_calls {access_calls['reason']} (used: {access_calls['used']}, limit: {access_calls['limit']})"
)
# Old check (keep for now)
check_ai_limit(pid) check_ai_limit(pid)
data = _get_profile_data(pid) data = _get_profile_data(pid)
@ -436,7 +473,13 @@ async def analyze_pipeline(x_profile_id: Optional[str]=Header(default=None), ses
cur.execute("INSERT INTO ai_insights (id, profile_id, scope, content, created) VALUES (%s,%s,'pipeline',%s,CURRENT_TIMESTAMP)", cur.execute("INSERT INTO ai_insights (id, profile_id, scope, content, created) VALUES (%s,%s,'pipeline',%s,CURRENT_TIMESTAMP)",
(str(uuid.uuid4()), pid, final_content)) (str(uuid.uuid4()), pid, final_content))
# Phase 2: Increment ai_calls usage (pipeline uses multiple API calls)
# Note: We increment once per pipeline run, not per individual call
increment_feature_usage(pid, 'ai_calls')
# Old usage tracking (keep for now)
inc_ai_usage(pid) inc_ai_usage(pid)
return {"scope": "pipeline", "content": final_content, "stage1": stage1_results} return {"scope": "pipeline", "content": final_content, "stage1": stage1_results}

View File

@ -6,16 +6,18 @@ Handles nutrition data, FDDB CSV import, correlations, and weekly aggregates.
import csv import csv
import io import io
import uuid import uuid
import logging
from typing import Optional from typing import Optional
from datetime import datetime from datetime import datetime
from fastapi import APIRouter, HTTPException, UploadFile, File, Header, Depends from fastapi import APIRouter, HTTPException, UploadFile, File, Header, Depends
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/nutrition", tags=["nutrition"]) router = APIRouter(prefix="/api/nutrition", tags=["nutrition"])
logger = logging.getLogger(__name__)
# ── Helper ──────────────────────────────────────────────────────────────────── # ── Helper ────────────────────────────────────────────────────────────────────
@ -30,6 +32,16 @@ def _pf(s):
async def import_nutrition_csv(file: UploadFile=File(...), x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): async def import_nutrition_csv(file: UploadFile=File(...), x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Import FDDB nutrition CSV.""" """Import FDDB nutrition CSV."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
# Note: CSV import can create many entries - we check once before import
access = check_feature_access(pid, 'nutrition_entries')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"nutrition_entries {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
raw = await file.read() raw = await file.read()
try: text = raw.decode('utf-8') try: text = raw.decode('utf-8')
except: text = raw.decode('latin-1') except: text = raw.decode('latin-1')
@ -52,20 +64,30 @@ async def import_nutrition_csv(file: UploadFile=File(...), x_profile_id: Optiona
days[iso]['protein_g'] += _pf(row.get('protein_g',0)) days[iso]['protein_g'] += _pf(row.get('protein_g',0))
count+=1 count+=1
inserted=0 inserted=0
new_entries=0
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
for iso,vals in days.items(): for iso,vals in days.items():
kcal=round(vals['kcal'],1); fat=round(vals['fat_g'],1) kcal=round(vals['kcal'],1); fat=round(vals['fat_g'],1)
carbs=round(vals['carbs_g'],1); prot=round(vals['protein_g'],1) carbs=round(vals['carbs_g'],1); prot=round(vals['protein_g'],1)
cur.execute("SELECT id FROM nutrition_log WHERE profile_id=%s AND date=%s",(pid,iso)) cur.execute("SELECT id FROM nutrition_log WHERE profile_id=%s AND date=%s",(pid,iso))
if cur.fetchone(): is_new = not cur.fetchone()
if not is_new:
# UPDATE existing
cur.execute("UPDATE nutrition_log SET kcal=%s,protein_g=%s,fat_g=%s,carbs_g=%s WHERE profile_id=%s AND date=%s", cur.execute("UPDATE nutrition_log SET kcal=%s,protein_g=%s,fat_g=%s,carbs_g=%s WHERE profile_id=%s AND date=%s",
(kcal,prot,fat,carbs,pid,iso)) (kcal,prot,fat,carbs,pid,iso))
else: else:
# INSERT new
cur.execute("INSERT INTO nutrition_log (id,profile_id,date,kcal,protein_g,fat_g,carbs_g,source,created) VALUES (%s,%s,%s,%s,%s,%s,%s,'csv',CURRENT_TIMESTAMP)", cur.execute("INSERT INTO nutrition_log (id,profile_id,date,kcal,protein_g,fat_g,carbs_g,source,created) VALUES (%s,%s,%s,%s,%s,%s,%s,'csv',CURRENT_TIMESTAMP)",
(str(uuid.uuid4()),pid,iso,kcal,prot,fat,carbs)) (str(uuid.uuid4()),pid,iso,kcal,prot,fat,carbs))
new_entries += 1
inserted+=1 inserted+=1
return {"rows_parsed":count,"days_imported":inserted,
# Phase 2: Increment usage counter for each new entry created
for _ in range(new_entries):
increment_feature_usage(pid, 'nutrition_entries')
return {"rows_parsed":count,"days_imported":inserted,"new_entries":new_entries,
"date_range":{"from":min(days) if days else None,"to":max(days) if days else None}} "date_range":{"from":min(days) if days else None,"to":max(days) if days else None}}

View File

@ -5,6 +5,7 @@ Handles progress photo uploads and retrieval.
""" """
import os import os
import uuid import uuid
import logging
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
@ -13,10 +14,11 @@ from fastapi.responses import FileResponse
import aiofiles import aiofiles
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth, require_auth_flexible from auth import require_auth, require_auth_flexible, check_feature_access, increment_feature_usage
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/photos", tags=["photos"]) router = APIRouter(prefix="/api/photos", tags=["photos"])
logger = logging.getLogger(__name__)
PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos")) PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos"))
PHOTOS_DIR.mkdir(parents=True, exist_ok=True) PHOTOS_DIR.mkdir(parents=True, exist_ok=True)
@ -27,6 +29,15 @@ async def upload_photo(file: UploadFile=File(...), date: str="",
x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Upload progress photo.""" """Upload progress photo."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'photos')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"photos {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
fid = str(uuid.uuid4()) fid = str(uuid.uuid4())
ext = Path(file.filename).suffix or '.jpg' ext = Path(file.filename).suffix or '.jpg'
path = PHOTOS_DIR / f"{fid}{ext}" path = PHOTOS_DIR / f"{fid}{ext}"
@ -35,6 +46,10 @@ async def upload_photo(file: UploadFile=File(...), date: str="",
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("INSERT INTO photos (id,profile_id,date,path,created) VALUES (%s,%s,%s,%s,CURRENT_TIMESTAMP)", cur.execute("INSERT INTO photos (id,profile_id,date,path,created) VALUES (%s,%s,%s,%s,CURRENT_TIMESTAMP)",
(fid,pid,date,str(path))) (fid,pid,date,str(path)))
# Phase 2: Increment usage counter
increment_feature_usage(pid, 'photos')
return {"id":fid,"date":date} return {"id":fid,"date":date}

View File

@ -4,16 +4,18 @@ Weight Tracking Endpoints for Mitai Jinkendo
Handles weight log CRUD operations and statistics. Handles weight log CRUD operations and statistics.
""" """
import uuid import uuid
import logging
from typing import Optional from typing import Optional
from fastapi import APIRouter, Header, Depends from fastapi import APIRouter, Header, Depends
from db import get_db, get_cursor, r2d from db import get_db, get_cursor, r2d
from auth import require_auth from auth import require_auth, check_feature_access, increment_feature_usage
from models import WeightEntry from models import WeightEntry
from routers.profiles import get_pid from routers.profiles import get_pid
router = APIRouter(prefix="/api/weight", tags=["weight"]) router = APIRouter(prefix="/api/weight", tags=["weight"])
logger = logging.getLogger(__name__)
@router.get("") @router.get("")
@ -31,17 +33,35 @@ def list_weight(limit: int=365, x_profile_id: Optional[str]=Header(default=None)
def upsert_weight(e: WeightEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): def upsert_weight(e: WeightEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
"""Create or update weight entry (upsert by date).""" """Create or update weight entry (upsert by date)."""
pid = get_pid(x_profile_id) pid = get_pid(x_profile_id)
# Phase 2: Check feature access (non-blocking, log only)
access = check_feature_access(pid, 'weight_entries')
if not access['allowed']:
logger.warning(
f"[FEATURE-LIMIT] User {pid} would be blocked: "
f"weight_entries {access['reason']} (used: {access['used']}, limit: {access['limit']})"
)
# NOTE: Phase 2 does NOT block - just logs!
with get_db() as conn: with get_db() as conn:
cur = get_cursor(conn) cur = get_cursor(conn)
cur.execute("SELECT id FROM weight_log WHERE profile_id=%s AND date=%s", (pid,e.date)) cur.execute("SELECT id FROM weight_log WHERE profile_id=%s AND date=%s", (pid,e.date))
ex = cur.fetchone() ex = cur.fetchone()
is_new_entry = not ex
if ex: if ex:
# UPDATE existing entry
cur.execute("UPDATE weight_log SET weight=%s,note=%s WHERE id=%s", (e.weight,e.note,ex['id'])) cur.execute("UPDATE weight_log SET weight=%s,note=%s WHERE id=%s", (e.weight,e.note,ex['id']))
wid = ex['id'] wid = ex['id']
else: else:
# INSERT new entry
wid = str(uuid.uuid4()) wid = str(uuid.uuid4())
cur.execute("INSERT INTO weight_log (id,profile_id,date,weight,note,created) VALUES (%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)", cur.execute("INSERT INTO weight_log (id,profile_id,date,weight,note,created) VALUES (%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)",
(wid,pid,e.date,e.weight,e.note)) (wid,pid,e.date,e.weight,e.note))
# Phase 2: Increment usage counter (only for new entries)
increment_feature_usage(pid, 'weight_entries')
return {"id":wid,"date":e.date,"weight":e.weight} return {"id":wid,"date":e.date,"weight":e.weight}