import os, csv, io, uuid from pathlib import Path from typing import Optional from datetime import datetime from fastapi import FastAPI, HTTPException, UploadFile, File, Header, Query, Depends from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import StreamingResponse, FileResponse from pydantic import BaseModel import sqlite3, aiofiles import bcrypt from slowapi import Limiter, _rate_limit_exceeded_handler from slowapi.util import get_remote_address from slowapi.errors import RateLimitExceeded from starlette.requests import Request DATA_DIR = Path(os.getenv("DATA_DIR", "./data")) PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos")) DB_PATH = DATA_DIR / "bodytrack.db" DATA_DIR.mkdir(parents=True, exist_ok=True) PHOTOS_DIR.mkdir(parents=True, exist_ok=True) OPENROUTER_KEY = os.getenv("OPENROUTER_API_KEY", "") OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "anthropic/claude-sonnet-4") ANTHROPIC_KEY = os.getenv("ANTHROPIC_API_KEY", "") app = FastAPI(title="Mitai Jinkendo API", version="3.0.0") limiter = Limiter(key_func=get_remote_address) app.state.limiter = limiter app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) app.add_middleware( CORSMiddleware, allow_origins=os.getenv("ALLOWED_ORIGINS", "*").split(","), allow_credentials=True, allow_methods=["GET","POST","PUT","DELETE","OPTIONS"], allow_headers=["*"], ) def get_db(): conn = sqlite3.connect(DB_PATH) conn.row_factory = sqlite3.Row return conn def r2d(row): return dict(row) if row else None AVATAR_COLORS = ['#1D9E75','#378ADD','#D85A30','#EF9F27','#7F77DD','#D4537E','#639922','#888780'] def init_db(): with get_db() as conn: conn.executescript(""" -- Profiles (multi-user) CREATE TABLE IF NOT EXISTS profiles ( id TEXT PRIMARY KEY, name TEXT NOT NULL DEFAULT 'Nutzer', avatar_color TEXT DEFAULT '#1D9E75', photo_id TEXT, sex TEXT DEFAULT 'm', dob TEXT, height REAL DEFAULT 178, goal_weight REAL, goal_bf_pct REAL, role TEXT DEFAULT 'user', pin_hash TEXT, auth_type TEXT DEFAULT 'pin', session_days INTEGER DEFAULT 30, ai_enabled INTEGER DEFAULT 1, ai_limit_day INTEGER, export_enabled INTEGER DEFAULT 1, email TEXT, created TEXT DEFAULT (datetime('now')), updated TEXT DEFAULT (datetime('now')) ); CREATE TABLE IF NOT EXISTS sessions ( token TEXT PRIMARY KEY, profile_id TEXT NOT NULL, expires_at TEXT NOT NULL, created TEXT DEFAULT (datetime('now')) ); CREATE TABLE IF NOT EXISTS ai_usage ( id TEXT PRIMARY KEY, profile_id TEXT NOT NULL, date TEXT NOT NULL, call_count INTEGER DEFAULT 0 ); CREATE UNIQUE INDEX IF NOT EXISTS idx_ai_usage ON ai_usage(profile_id, date); -- Weight CREATE TABLE IF NOT EXISTS weight_log ( id TEXT PRIMARY KEY, profile_id TEXT NOT NULL, date TEXT NOT NULL, weight REAL NOT NULL, note TEXT, source TEXT DEFAULT 'manual', created TEXT DEFAULT (datetime('now')) ); -- Circumferences CREATE TABLE IF NOT EXISTS circumference_log ( id TEXT PRIMARY KEY, profile_id TEXT, date TEXT NOT NULL, c_neck REAL, c_chest REAL, c_waist REAL, c_belly REAL, c_hip REAL, c_thigh REAL, c_calf REAL, c_arm REAL, notes TEXT, photo_id TEXT, created TEXT DEFAULT (datetime('now')) ); -- Caliper CREATE TABLE IF NOT EXISTS caliper_log ( id TEXT PRIMARY KEY, profile_id TEXT, date TEXT NOT NULL, sf_method TEXT DEFAULT 'jackson3', sf_chest REAL, sf_axilla REAL, sf_triceps REAL, sf_subscap REAL, sf_suprailiac REAL, sf_abdomen REAL, sf_thigh REAL, sf_calf_med REAL, sf_lowerback REAL, sf_biceps REAL, body_fat_pct REAL, lean_mass REAL, fat_mass REAL, notes TEXT, created TEXT DEFAULT (datetime('now')) ); -- Nutrition CREATE TABLE IF NOT EXISTS nutrition_log ( id TEXT PRIMARY KEY, profile_id TEXT, date TEXT NOT NULL, kcal REAL, protein_g REAL, fat_g REAL, carbs_g REAL, source TEXT DEFAULT 'csv', created TEXT DEFAULT (datetime('now')) ); -- Activity CREATE TABLE IF NOT EXISTS activity_log ( id TEXT PRIMARY KEY, profile_id TEXT, date TEXT NOT NULL, start_time TEXT, end_time TEXT, activity_type TEXT NOT NULL, duration_min REAL, kcal_active REAL, kcal_resting REAL, hr_avg REAL, hr_max REAL, distance_km REAL, rpe INTEGER, source TEXT DEFAULT 'manual', notes TEXT, created TEXT DEFAULT (datetime('now')) ); -- Photos CREATE TABLE IF NOT EXISTS photos ( id TEXT PRIMARY KEY, profile_id TEXT, date TEXT, path TEXT, created TEXT DEFAULT (datetime('now')) ); -- AI insights CREATE TABLE IF NOT EXISTS ai_insights ( id TEXT PRIMARY KEY, profile_id TEXT, scope TEXT, content TEXT, created TEXT DEFAULT (datetime('now')) ); CREATE TABLE IF NOT EXISTS ai_prompts ( id TEXT PRIMARY KEY, name TEXT NOT NULL, slug TEXT NOT NULL UNIQUE, description TEXT, template TEXT NOT NULL, active INTEGER DEFAULT 1, sort_order INTEGER DEFAULT 0, created TEXT DEFAULT (datetime('now')) ); -- Legacy tables (kept for migration) CREATE TABLE IF NOT EXISTS profile ( id INTEGER PRIMARY KEY, name TEXT, sex TEXT, dob TEXT, height REAL, updated TEXT ); CREATE TABLE IF NOT EXISTS measurements ( id TEXT PRIMARY KEY, date TEXT, weight REAL, c_neck REAL, c_chest REAL, c_waist REAL, c_belly REAL, c_hip REAL, c_thigh REAL, c_calf REAL, c_arm REAL, sf_method TEXT, sf_chest REAL, sf_axilla REAL, sf_triceps REAL, sf_subscap REAL, sf_suprailiac REAL, sf_abdomen REAL, sf_thigh REAL, sf_calf_med REAL, sf_lowerback REAL, sf_biceps REAL, body_fat_pct REAL, lean_mass REAL, fat_mass REAL, notes TEXT, photo_id TEXT, created TEXT ); """) conn.commit() _safe_alters(conn) _migrate(conn) _seed_pipeline_prompts(conn) def _safe_alters(conn): """Add missing columns to existing tables safely.""" alters = [ ("weight_log", "profile_id TEXT"), ("weight_log", "source TEXT DEFAULT 'manual'"), ("circumference_log","profile_id TEXT"), ("caliper_log", "profile_id TEXT"), ("nutrition_log", "profile_id TEXT"), ("activity_log", "profile_id TEXT"), ("photos", "profile_id TEXT"), ("photos", "date TEXT"), ("ai_insights", "profile_id TEXT"), ("profiles", "goal_weight REAL"), ("profiles", "goal_bf_pct REAL"), ("profiles", "role TEXT DEFAULT 'user'"), ("profiles", "pin_hash TEXT"), ("profiles", "auth_type TEXT DEFAULT 'pin'"), ("profiles", "session_days INTEGER DEFAULT 30"), ("profiles", "ai_enabled INTEGER DEFAULT 1"), ("profiles", "ai_limit_day INTEGER"), ("profiles", "export_enabled INTEGER DEFAULT 1"), ("profiles", "email TEXT"), ] for table, col_def in alters: try: conn.execute(f"ALTER TABLE {table} ADD COLUMN {col_def}"); conn.commit() except: pass def _migrate(conn): """Migrate old single-user data → first profile.""" # Ensure default profile exists existing = conn.execute("SELECT id FROM profiles LIMIT 1").fetchone() if existing: default_pid = existing['id'] else: # Try to get name from legacy profile table legacy = conn.execute("SELECT * FROM profile WHERE id=1").fetchone() default_pid = str(uuid.uuid4()) name = legacy['name'] if legacy and legacy['name'] else 'Lars' sex = legacy['sex'] if legacy else 'm' dob = legacy['dob'] if legacy else None height = legacy['height'] if legacy else 178 conn.execute("""INSERT INTO profiles (id,name,avatar_color,sex,dob,height,created,updated) VALUES (?,?,?,?,?,?,datetime('now'),datetime('now'))""", (default_pid, name, AVATAR_COLORS[0], sex, dob, height)) conn.commit() print(f"Created default profile: {name} ({default_pid})") # Migrate legacy weight_log (no profile_id) orphans = conn.execute("SELECT * FROM weight_log WHERE profile_id IS NULL").fetchall() for r in orphans: conn.execute("UPDATE weight_log SET profile_id=? WHERE id=?", (default_pid, r['id'])) # Migrate legacy circumference_log orphans = conn.execute("SELECT * FROM circumference_log WHERE profile_id IS NULL").fetchall() for r in orphans: conn.execute("UPDATE circumference_log SET profile_id=? WHERE id=?", (default_pid, r['id'])) # Migrate legacy caliper_log orphans = conn.execute("SELECT * FROM caliper_log WHERE profile_id IS NULL").fetchall() for r in orphans: conn.execute("UPDATE caliper_log SET profile_id=? WHERE id=?", (default_pid, r['id'])) # Migrate legacy nutrition_log orphans = conn.execute("SELECT * FROM nutrition_log WHERE profile_id IS NULL").fetchall() for r in orphans: conn.execute("UPDATE nutrition_log SET profile_id=? WHERE id=?", (default_pid, r['id'])) # Migrate legacy activity_log orphans = conn.execute("SELECT * FROM activity_log WHERE profile_id IS NULL").fetchall() for r in orphans: conn.execute("UPDATE activity_log SET profile_id=? WHERE id=?", (default_pid, r['id'])) # Migrate legacy ai_insights orphans = conn.execute("SELECT * FROM ai_insights WHERE profile_id IS NULL").fetchall() for r in orphans: conn.execute("UPDATE ai_insights SET profile_id=? WHERE id=?", (default_pid, r['id'])) # Migrate legacy measurements table meas = conn.execute("SELECT * FROM measurements").fetchall() for r in meas: d = dict(r) date = d.get('date','') if not date: continue if d.get('weight'): if not conn.execute("SELECT id FROM weight_log WHERE profile_id=? AND date=?", (default_pid,date)).fetchone(): conn.execute("INSERT OR IGNORE INTO weight_log (id,profile_id,date,weight,source,created) VALUES (?,?,?,?,'migrated',datetime('now'))", (str(uuid.uuid4()), default_pid, date, d['weight'])) circ_keys = ['c_neck','c_chest','c_waist','c_belly','c_hip','c_thigh','c_calf','c_arm'] if any(d.get(k) for k in circ_keys): if not conn.execute("SELECT id FROM circumference_log WHERE profile_id=? AND date=?", (default_pid,date)).fetchone(): conn.execute("""INSERT OR IGNORE INTO circumference_log (id,profile_id,date,c_neck,c_chest,c_waist,c_belly,c_hip,c_thigh,c_calf,c_arm,notes,photo_id,created) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,datetime('now'))""", (str(uuid.uuid4()),default_pid,date,d.get('c_neck'),d.get('c_chest'),d.get('c_waist'), d.get('c_belly'),d.get('c_hip'),d.get('c_thigh'),d.get('c_calf'),d.get('c_arm'), d.get('notes'),d.get('photo_id'))) sf_keys = ['sf_chest','sf_axilla','sf_triceps','sf_subscap','sf_suprailiac', 'sf_abdomen','sf_thigh','sf_calf_med','sf_lowerback','sf_biceps'] if any(d.get(k) for k in sf_keys) or d.get('body_fat_pct'): if not conn.execute("SELECT id FROM caliper_log WHERE profile_id=? AND date=?", (default_pid,date)).fetchone(): conn.execute("""INSERT OR IGNORE INTO caliper_log (id,profile_id,date,sf_method,sf_chest,sf_axilla,sf_triceps,sf_subscap,sf_suprailiac, sf_abdomen,sf_thigh,sf_calf_med,sf_lowerback,sf_biceps,body_fat_pct,lean_mass,fat_mass,notes,created) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,datetime('now'))""", (str(uuid.uuid4()),default_pid,date,d.get('sf_method','jackson3'), d.get('sf_chest'),d.get('sf_axilla'),d.get('sf_triceps'),d.get('sf_subscap'), d.get('sf_suprailiac'),d.get('sf_abdomen'),d.get('sf_thigh'),d.get('sf_calf_med'), d.get('sf_lowerback'),d.get('sf_biceps'),d.get('body_fat_pct'),d.get('lean_mass'), d.get('fat_mass'),d.get('notes'))) conn.commit() # Ensure first profile is admin first = conn.execute("SELECT id FROM profiles ORDER BY created LIMIT 1").fetchone() if first: conn.execute("UPDATE profiles SET role='admin', ai_enabled=1, export_enabled=1 WHERE id=?", (first['id'],)) conn.commit() print("Migration complete") _seed_prompts(conn) def _seed_prompts(conn): """Insert default prompts if table is empty.""" count = conn.execute("SELECT COUNT(*) FROM ai_prompts").fetchone()[0] if count > 0: return defaults = [ ("Gesamtanalyse", "gesamt", "Vollständige Analyse aller verfügbaren Daten", """Du bist ein Gesundheits- und Ernährungsanalyst. Erstelle eine strukturierte Analyse auf Deutsch (400-500 Wörter). PROFIL: {{name}} · {{geschlecht}} · {{height}} cm Ziele: Gewicht {{goal_weight}} kg · KF {{goal_bf_pct}}% GEWICHT: {{weight_trend}} CALIPER: {{caliper_summary}} UMFÄNGE: {{circ_summary}} ERNÄHRUNG: {{nutrition_summary}} AKTIVITÄT: {{activity_summary}} Struktur (alle Abschnitte vollständig ausschreiben): ⚖️ **Gewichts- & Körperzusammensetzung** 🍽️ **Ernährungsanalyse** 🏋️ **Aktivität & Energiebilanz** 🎯 **Zielabgleich** 💪 **Empfehlungen** (3 konkrete Punkte) Sachlich, motivierend, Zahlen zitieren, keine Diagnosen.""", 1, 0), ("Körperkomposition", "koerper", "Fokus auf Gewicht, Körperfett und Magermasse", """Analysiere ausschließlich die Körperzusammensetzung auf Deutsch (200-250 Wörter). PROFIL: {{name}} · {{geschlecht}} · {{height}} cm · Ziel-KF: {{goal_bf_pct}}% GEWICHT: {{weight_trend}} CALIPER: {{caliper_summary}} UMFÄNGE: {{circ_summary}} Abschnitte: ⚖️ **Gewichtstrend** – Entwicklung und Bewertung 🫧 **Körperfett** – Kategorie, Trend, Abstand zum Ziel 💪 **Magermasse** – Erhalt oder Aufbau? 📏 **Umfänge** – Relevante Veränderungen Präzise, zahlenbasiert, keine Diagnosen.""", 1, 1), ("Ernährung & Kalorien", "ernaehrung", "Fokus auf Kalorienbilanz und Makronährstoffe", """Analysiere die Ernährungsdaten auf Deutsch (200-250 Wörter). PROFIL: {{name}} · {{geschlecht}} · {{height}} cm · Gewicht: {{weight_aktuell}} kg ERNÄHRUNG: {{nutrition_detail}} Protein-Ziel: {{protein_ziel_low}}–{{protein_ziel_high}}g/Tag AKTIVITÄT (Kalorienverbrauch): {{activity_kcal_summary}} Abschnitte: 🍽️ **Kalorienbilanz** – Aufnahme vs. Verbrauch, Defizit/Überschuss 🥩 **Proteinversorgung** – Ist vs. Soll, Konsequenzen 📊 **Makroverteilung** – Bewertung Fett/KH/Protein 📅 **Muster** – Regelmäßigkeit, Schwankungen Zahlenbasiert, konkret, keine Diagnosen.""", 1, 2), ("Aktivität & Training", "aktivitaet", "Fokus auf Trainingsvolumen und Energieverbrauch", """Analysiere die Aktivitätsdaten auf Deutsch (200-250 Wörter). PROFIL: {{name}} · {{geschlecht}} AKTIVITÄT: {{activity_detail}} GEWICHT: {{weight_trend}} Abschnitte: 🏋️ **Trainingsvolumen** – Häufigkeit, Dauer, Typen 🔥 **Energieverbrauch** – Aktive Kalorien, Durchschnitt ❤️ **Intensität** – Herzfrequenz-Analyse 📈 **Trend** – Trainingsregelmäßigkeit 💡 **Empfehlung** – 1-2 konkrete Punkte Motivierend, zahlenbasiert, keine Diagnosen.""", 1, 3), ("Gesundheitsindikatoren", "gesundheit", "WHR, WHtR, BMI und weitere Kennzahlen", """Berechne und bewerte die Gesundheitsindikatoren auf Deutsch (200-250 Wörter). PROFIL: {{name}} · {{geschlecht}} · {{height}} cm GEWICHT: {{weight_aktuell}} kg UMFÄNGE: {{circ_summary}} CALIPER: {{caliper_summary}} Berechne und bewerte: 📐 **WHR** (Taille/Hüfte) – Ziel: <0,90 M / <0,85 F 📏 **WHtR** (Taille/Größe) – Ziel: <0,50 ⚖️ **BMI** – Einordnung mit Kontext 💪 **FFMI** – Muskelmasse-Index (falls KF-Daten vorhanden) 🎯 **Gesamtbewertung** – Ampel-System (grün/gelb/rot) Sachlich, evidenzbasiert, keine Diagnosen.""", 1, 4), ("Fortschritt zu Zielen", "ziele", "Wie weit bin ich von meinen Zielen entfernt?", """Bewerte den Fortschritt zu den gesetzten Zielen auf Deutsch (200-250 Wörter). PROFIL: {{name}} Ziel-Gewicht: {{goal_weight}} kg · Ziel-KF: {{goal_bf_pct}}% AKTUELL: Gewicht {{weight_aktuell}} kg · KF {{kf_aktuell}}% TREND: {{weight_trend}} Abschnitte: 🎯 **Zielerreichung** – Abstand zu Gewichts- und KF-Ziel 📈 **Tempo** – Hochrechnung: Wann wird das Ziel erreicht? ✅ **Was läuft gut** – Positive Entwicklungen ⚠️ **Was bremst** – Hindernisse 🗺️ **Nächste Schritte** – 2-3 konkrete Maßnahmen Realistisch, motivierend, zahlenbasiert.""", 1, 5), ] for name, slug, desc, template, active, sort in defaults: conn.execute( "INSERT OR IGNORE INTO ai_prompts (id,name,slug,description,template,active,sort_order,created) VALUES (?,?,?,?,?,?,?,datetime('now'))", (str(__import__('uuid').uuid4()), name, slug, desc, template, active, sort) ) conn.commit() print(f"Seeded {len(defaults)} default prompts") def _seed_pipeline_prompts(conn): """Seed pipeline stage prompts if not present.""" pipeline_defaults = [ ("Pipeline: Körper-Analyse (JSON)", "pipeline_body", "⚠️ JSON-Output – Stufe 1 der mehrstufigen Analyse. Format muss erhalten bleiben!", """Analysiere diese Körperdaten und gib NUR ein JSON-Objekt zurück (kein Text drumherum). Profil: {{name}} {{geschlecht}} {{height}}cm {{age}}J Gewicht: {{weight_trend}} Caliper: {{caliper_summary}} Umfänge: {{circ_summary}} Ziele: Gewicht {{goal_weight}}kg KF {{goal_bf_pct}}% Pflichtformat: {"gewicht_trend": "sinkend|steigend|stabil", "gewicht_delta_30d": , "kf_aktuell": , "kf_trend": "sinkend|steigend|stabil|unbekannt", "magermasse_delta": , "whr_status": "gut|grenzwertig|erhoeht|unbekannt", "whtr_status": "optimal|gut|erhoeht|unbekannt", "koerper_bewertung": "<1 Satz>", "koerper_auffaelligkeiten": "<1 Satz oder null>"}""", 1, 10), ("Pipeline: Ernährungs-Analyse (JSON)", "pipeline_nutrition", "⚠️ JSON-Output – Stufe 1 der mehrstufigen Analyse. Format muss erhalten bleiben!", """Analysiere diese Ernährungsdaten und gib NUR ein JSON-Objekt zurück. Ø {{kcal_avg}}kcal Ø {{protein_avg}}g Protein Ø {{fat_avg}}g Fett Ø {{carb_avg}}g KH ({{nutrition_days}} Tage) Protein-Ziel: {{protein_ziel_low}}–{{protein_ziel_high}}g/Tag Körpergewicht: {{weight_aktuell}}kg Pflichtformat: {"kcal_avg": , "protein_avg": , "protein_ziel_erreicht": , "protein_defizit_g": , "kalorienbilanz": "defizit|ausgeglichen|ueberschuss", "makro_bewertung": "gut|ausgewogen|proteinarm|kohlenhydratlastig|fettlastig", "ernaehrung_bewertung": "<1 Satz>", "ernaehrung_empfehlung": "<1 konkreter Tipp>"}""", 1, 11), ("Pipeline: Aktivitäts-Analyse (JSON)", "pipeline_activity", "⚠️ JSON-Output – Stufe 1 der mehrstufigen Analyse. Format muss erhalten bleiben!", """Analysiere diese Aktivitätsdaten und gib NUR ein JSON-Objekt zurück. {{activity_detail}} Pflichtformat: {"trainings_anzahl": , "kcal_gesamt": , "konsistenz": "hoch|mittel|niedrig", "haupttrainingsart": "", "aktivitaet_bewertung": "<1 Satz>", "aktivitaet_empfehlung": "<1 konkreter Tipp>"}""", 1, 12), ("Pipeline: Synthese (Gesamtanalyse)", "pipeline_synthesis", "Stufe 2 – Narrative Gesamtanalyse aus den JSON-Summaries der Stufe 1", """Du bist ein Gesundheits- und Fitnesscoach. Erstelle eine vollständige, personalisierte Analyse für {{name}} auf Deutsch (450–550 Wörter). DATENZUSAMMENFASSUNGEN AUS STUFE 1: Körper: {{stage1_body}} Ernährung: {{stage1_nutrition}} Aktivität: {{stage1_activity}} Protein-Ziel: {{protein_ziel_low}}–{{protein_ziel_high}}g/Tag Schreibe alle Abschnitte vollständig aus: ⚖️ **Gewichts- & Körperzusammensetzung** 🍽️ **Ernährungsanalyse** 🏋️ **Aktivität & Energiebilanz** 🔗 **Zusammenhänge** (Verbindungen zwischen Ernährung, Training, Körper) 💪 **3 Empfehlungen** (nummeriert, konkret, datenbasiert) Sachlich, motivierend, Zahlen zitieren, keine Diagnosen.""", 1, 13), ("Pipeline: Zielabgleich", "pipeline_goals", "Stufe 3 – Fortschrittsbewertung zu gesetzten Zielen (nur wenn Ziele definiert)", """Kurze Ziel-Bewertung für {{name}} (100–150 Wörter, Deutsch): Ziel-Gewicht: {{goal_weight}}kg | Ziel-KF: {{goal_bf_pct}}% Körper-Summary: {{stage1_body}} 🎯 **Zielfortschritt** Abstand zu den Zielen, realistisches Zeitfenster, 1–2 nächste konkrete Schritte.""", 1, 14), ] for name, slug, desc, template, active, sort in pipeline_defaults: conn.execute( "INSERT OR IGNORE INTO ai_prompts (id,name,slug,description,template,active,sort_order,created) VALUES (?,?,?,?,?,?,?,datetime('now'))", (str(__import__('uuid').uuid4()), name, slug, desc, template, active, sort) ) conn.commit() print(f"Seeded {len(pipeline_defaults)} pipeline prompts") init_db() # ── Helper: get profile_id from header ─────────────────────────────────────── def get_pid(x_profile_id: Optional[str] = Header(default=None)) -> str: """Get profile_id - from header for legacy endpoints.""" if x_profile_id: return x_profile_id with get_db() as conn: row = conn.execute("SELECT id FROM profiles ORDER BY created LIMIT 1").fetchone() if row: return row['id'] raise HTTPException(400, "Kein Profil gefunden") # ── Models ──────────────────────────────────────────────────────────────────── class ProfileCreate(BaseModel): name: str avatar_color: Optional[str] = '#1D9E75' sex: Optional[str] = 'm' dob: Optional[str] = None height: Optional[float] = 178 goal_weight: Optional[float] = None goal_bf_pct: Optional[float] = None class ProfileUpdate(BaseModel): name: Optional[str] = None avatar_color: Optional[str] = None sex: Optional[str] = None dob: Optional[str] = None height: Optional[float] = None goal_weight: Optional[float] = None goal_bf_pct: Optional[float] = None class WeightEntry(BaseModel): date: str; weight: float; note: Optional[str]=None class CircumferenceEntry(BaseModel): date: str c_neck: Optional[float]=None; c_chest: Optional[float]=None c_waist: Optional[float]=None; c_belly: Optional[float]=None c_hip: Optional[float]=None; c_thigh: Optional[float]=None c_calf: Optional[float]=None; c_arm: Optional[float]=None notes: Optional[str]=None; photo_id: Optional[str]=None class CaliperEntry(BaseModel): date: str; sf_method: Optional[str]='jackson3' sf_chest: Optional[float]=None; sf_axilla: Optional[float]=None sf_triceps: Optional[float]=None; sf_subscap: Optional[float]=None sf_suprailiac: Optional[float]=None; sf_abdomen: Optional[float]=None sf_thigh: Optional[float]=None; sf_calf_med: Optional[float]=None sf_lowerback: Optional[float]=None; sf_biceps: Optional[float]=None body_fat_pct: Optional[float]=None; lean_mass: Optional[float]=None fat_mass: Optional[float]=None; notes: Optional[str]=None class ActivityEntry(BaseModel): date: str; start_time: Optional[str]=None; end_time: Optional[str]=None activity_type: str; duration_min: Optional[float]=None kcal_active: Optional[float]=None; kcal_resting: Optional[float]=None hr_avg: Optional[float]=None; hr_max: Optional[float]=None distance_km: Optional[float]=None; rpe: Optional[int]=None source: Optional[str]='manual'; notes: Optional[str]=None class NutritionDay(BaseModel): date: str; kcal: Optional[float]=None; protein_g: Optional[float]=None fat_g: Optional[float]=None; carbs_g: Optional[float]=None # ── Profiles ────────────────────────────────────────────────────────────────── import hashlib, secrets from datetime import timedelta def hash_pin(pin: str) -> str: """Hash password with bcrypt. Falls back gracefully from legacy SHA256.""" return bcrypt.hashpw(pin.encode(), bcrypt.gensalt()).decode() def verify_pin(pin: str, stored_hash: str) -> bool: """Verify password - supports both bcrypt and legacy SHA256.""" if not stored_hash: return False # Detect bcrypt hash (starts with $2b$ or $2a$) if stored_hash.startswith('$2'): return bcrypt.checkpw(pin.encode(), stored_hash.encode()) # Legacy SHA256 fallback - auto-upgrade on successful login import hashlib return hashlib.sha256(pin.encode()).hexdigest() == stored_hash def make_token() -> str: return secrets.token_urlsafe(32) def get_session(token: str): if not token: return None with get_db() as conn: row = conn.execute( "SELECT s.*, p.role, p.name, p.ai_enabled, p.ai_limit_day, p.export_enabled " "FROM sessions s JOIN profiles p ON s.profile_id=p.id " "WHERE s.token=? AND s.expires_at > datetime('now')", (token,) ).fetchone() return r2d(row) def require_auth(x_auth_token: Optional[str]=Header(default=None)): session = get_session(x_auth_token) if not session: raise HTTPException(401, "Nicht eingeloggt") return session def require_admin(x_auth_token: Optional[str]=Header(default=None)): session = get_session(x_auth_token) if not session: raise HTTPException(401, "Nicht eingeloggt") if session['role'] != 'admin': raise HTTPException(403, "Nur für Admins") return session @app.get("/api/profiles") def list_profiles(session=Depends(require_auth)): with get_db() as conn: rows = conn.execute("SELECT * FROM profiles ORDER BY created").fetchall() return [r2d(r) for r in rows] @app.post("/api/profiles") def create_profile(p: ProfileCreate, session=Depends(require_auth)): pid = str(uuid.uuid4()) with get_db() as conn: conn.execute("""INSERT INTO profiles (id,name,avatar_color,sex,dob,height,goal_weight,goal_bf_pct,created,updated) VALUES (?,?,?,?,?,?,?,?,datetime('now'),datetime('now'))""", (pid,p.name,p.avatar_color,p.sex,p.dob,p.height,p.goal_weight,p.goal_bf_pct)) conn.commit() with get_db() as conn: return r2d(conn.execute("SELECT * FROM profiles WHERE id=?", (pid,)).fetchone()) @app.get("/api/profiles/{pid}") def get_profile(pid: str, session=Depends(require_auth)): with get_db() as conn: row = conn.execute("SELECT * FROM profiles WHERE id=?", (pid,)).fetchone() if not row: raise HTTPException(404, "Profil nicht gefunden") return r2d(row) @app.put("/api/profiles/{pid}") def update_profile(pid: str, p: ProfileUpdate, session=Depends(require_auth)): with get_db() as conn: data = {k:v for k,v in p.model_dump().items() if v is not None} data['updated'] = datetime.now().isoformat() conn.execute(f"UPDATE profiles SET {', '.join(f'{k}=?' for k in data)} WHERE id=?", list(data.values())+[pid]) conn.commit() return get_profile(pid) @app.delete("/api/profiles/{pid}") def delete_profile(pid: str, session=Depends(require_auth)): with get_db() as conn: count = conn.execute("SELECT COUNT(*) FROM profiles").fetchone()[0] if count <= 1: raise HTTPException(400, "Letztes Profil kann nicht gelöscht werden") for table in ['weight_log','circumference_log','caliper_log','nutrition_log','activity_log','ai_insights']: conn.execute(f"DELETE FROM {table} WHERE profile_id=?", (pid,)) conn.execute("DELETE FROM profiles WHERE id=?", (pid,)) conn.commit() return {"ok": True} @app.get("/api/profile") def get_active_profile(x_profile_id: Optional[str] = Header(default=None), session: dict = Depends(require_auth)): """Legacy endpoint – returns active profile.""" pid = get_pid(x_profile_id) return get_profile(pid) @app.put("/api/profile") def update_active_profile(p: ProfileUpdate, x_profile_id: Optional[str] = Header(default=None), session: dict = Depends(require_auth)): pid = get_pid(x_profile_id) return update_profile(pid, p) # ── Weight ──────────────────────────────────────────────────────────────────── @app.get("/api/weight") def list_weight(limit: int=365, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM weight_log WHERE profile_id=? ORDER BY date DESC LIMIT ?", (pid,limit)).fetchall()] @app.post("/api/weight") def upsert_weight(e: WeightEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: ex = conn.execute("SELECT id FROM weight_log WHERE profile_id=? AND date=?", (pid,e.date)).fetchone() if ex: conn.execute("UPDATE weight_log SET weight=?,note=? WHERE id=?", (e.weight,e.note,ex['id'])) wid = ex['id'] else: wid = str(uuid.uuid4()) conn.execute("INSERT INTO weight_log (id,profile_id,date,weight,note,created) VALUES (?,?,?,?,?,datetime('now'))", (wid,pid,e.date,e.weight,e.note)) conn.commit() return {"id":wid,"date":e.date,"weight":e.weight} @app.put("/api/weight/{wid}") def update_weight(wid: str, e: WeightEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: conn.execute("UPDATE weight_log SET date=?,weight=?,note=? WHERE id=? AND profile_id=?", (e.date,e.weight,e.note,wid,pid)); conn.commit() return {"id":wid} @app.delete("/api/weight/{wid}") def delete_weight(wid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: conn.execute("DELETE FROM weight_log WHERE id=? AND profile_id=?", (wid,pid)); conn.commit() return {"ok":True} @app.get("/api/weight/stats") def weight_stats(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: rows = conn.execute("SELECT date,weight FROM weight_log WHERE profile_id=? ORDER BY date DESC LIMIT 90", (pid,)).fetchall() if not rows: return {"count":0,"latest":None,"prev":None,"min":None,"max":None,"avg_7d":None} w=[r['weight'] for r in rows] return {"count":len(rows),"latest":{"date":rows[0]['date'],"weight":rows[0]['weight']}, "prev":{"date":rows[1]['date'],"weight":rows[1]['weight']} if len(rows)>1 else None, "min":min(w),"max":max(w),"avg_7d":round(sum(w[:7])/min(7,len(w)),2)} # ── Circumferences ──────────────────────────────────────────────────────────── @app.get("/api/circumferences") def list_circs(limit: int=100, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM circumference_log WHERE profile_id=? ORDER BY date DESC LIMIT ?", (pid,limit)).fetchall()] @app.post("/api/circumferences") def upsert_circ(e: CircumferenceEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: ex = conn.execute("SELECT id FROM circumference_log WHERE profile_id=? AND date=?", (pid,e.date)).fetchone() d = e.model_dump() if ex: eid = ex['id'] sets = ', '.join(f"{k}=?" for k in d if k!='date') conn.execute(f"UPDATE circumference_log SET {sets} WHERE id=?", [v for k,v in d.items() if k!='date']+[eid]) else: eid = str(uuid.uuid4()) conn.execute("""INSERT INTO circumference_log (id,profile_id,date,c_neck,c_chest,c_waist,c_belly,c_hip,c_thigh,c_calf,c_arm,notes,photo_id,created) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,datetime('now'))""", (eid,pid,d['date'],d['c_neck'],d['c_chest'],d['c_waist'],d['c_belly'], d['c_hip'],d['c_thigh'],d['c_calf'],d['c_arm'],d['notes'],d['photo_id'])) conn.commit() return {"id":eid,"date":e.date} @app.put("/api/circumferences/{eid}") def update_circ(eid: str, e: CircumferenceEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: d = e.model_dump() conn.execute(f"UPDATE circumference_log SET {', '.join(f'{k}=?' for k in d)} WHERE id=? AND profile_id=?", list(d.values())+[eid,pid]); conn.commit() return {"id":eid} @app.delete("/api/circumferences/{eid}") def delete_circ(eid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: conn.execute("DELETE FROM circumference_log WHERE id=? AND profile_id=?", (eid,pid)); conn.commit() return {"ok":True} # ── Caliper ─────────────────────────────────────────────────────────────────── @app.get("/api/caliper") def list_caliper(limit: int=100, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM caliper_log WHERE profile_id=? ORDER BY date DESC LIMIT ?", (pid,limit)).fetchall()] @app.post("/api/caliper") def upsert_caliper(e: CaliperEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: ex = conn.execute("SELECT id FROM caliper_log WHERE profile_id=? AND date=?", (pid,e.date)).fetchone() d = e.model_dump() if ex: eid = ex['id'] sets = ', '.join(f"{k}=?" for k in d if k!='date') conn.execute(f"UPDATE caliper_log SET {sets} WHERE id=?", [v for k,v in d.items() if k!='date']+[eid]) else: eid = str(uuid.uuid4()) conn.execute("""INSERT INTO caliper_log (id,profile_id,date,sf_method,sf_chest,sf_axilla,sf_triceps,sf_subscap,sf_suprailiac, sf_abdomen,sf_thigh,sf_calf_med,sf_lowerback,sf_biceps,body_fat_pct,lean_mass,fat_mass,notes,created) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,datetime('now'))""", (eid,pid,d['date'],d['sf_method'],d['sf_chest'],d['sf_axilla'],d['sf_triceps'], d['sf_subscap'],d['sf_suprailiac'],d['sf_abdomen'],d['sf_thigh'],d['sf_calf_med'], d['sf_lowerback'],d['sf_biceps'],d['body_fat_pct'],d['lean_mass'],d['fat_mass'],d['notes'])) conn.commit() return {"id":eid,"date":e.date} @app.put("/api/caliper/{eid}") def update_caliper(eid: str, e: CaliperEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: d = e.model_dump() conn.execute(f"UPDATE caliper_log SET {', '.join(f'{k}=?' for k in d)} WHERE id=? AND profile_id=?", list(d.values())+[eid,pid]); conn.commit() return {"id":eid} @app.delete("/api/caliper/{eid}") def delete_caliper(eid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: conn.execute("DELETE FROM caliper_log WHERE id=? AND profile_id=?", (eid,pid)); conn.commit() return {"ok":True} # ── Activity ────────────────────────────────────────────────────────────────── @app.get("/api/activity") def list_activity(limit: int=200, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM activity_log WHERE profile_id=? ORDER BY date DESC, start_time DESC LIMIT ?", (pid,limit)).fetchall()] @app.post("/api/activity") def create_activity(e: ActivityEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) eid = str(uuid.uuid4()) d = e.model_dump() with get_db() as conn: conn.execute("""INSERT INTO activity_log (id,profile_id,date,start_time,end_time,activity_type,duration_min,kcal_active,kcal_resting, hr_avg,hr_max,distance_km,rpe,source,notes,created) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,datetime('now'))""", (eid,pid,d['date'],d['start_time'],d['end_time'],d['activity_type'],d['duration_min'], d['kcal_active'],d['kcal_resting'],d['hr_avg'],d['hr_max'],d['distance_km'], d['rpe'],d['source'],d['notes'])) conn.commit() return {"id":eid,"date":e.date} @app.put("/api/activity/{eid}") def update_activity(eid: str, e: ActivityEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: d = e.model_dump() conn.execute(f"UPDATE activity_log SET {', '.join(f'{k}=?' for k in d)} WHERE id=? AND profile_id=?", list(d.values())+[eid,pid]); conn.commit() return {"id":eid} @app.delete("/api/activity/{eid}") def delete_activity(eid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: conn.execute("DELETE FROM activity_log WHERE id=? AND profile_id=?", (eid,pid)); conn.commit() return {"ok":True} @app.get("/api/activity/stats") def activity_stats(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: rows = [r2d(r) for r in conn.execute( "SELECT * FROM activity_log WHERE profile_id=? ORDER BY date DESC LIMIT 30", (pid,)).fetchall()] if not rows: return {"count":0,"total_kcal":0,"total_min":0,"by_type":{}} total_kcal=sum(r.get('kcal_active') or 0 for r in rows) total_min=sum(r.get('duration_min') or 0 for r in rows) by_type={} for r in rows: t=r['activity_type']; by_type.setdefault(t,{'count':0,'kcal':0,'min':0}) by_type[t]['count']+=1; by_type[t]['kcal']+=r.get('kcal_active') or 0 by_type[t]['min']+=r.get('duration_min') or 0 return {"count":len(rows),"total_kcal":round(total_kcal),"total_min":round(total_min),"by_type":by_type} @app.post("/api/activity/import-csv") async def import_activity_csv(file: UploadFile=File(...), x_profile_id: Optional[str]=Header(default=None)): pid = get_pid(x_profile_id) raw = await file.read() try: text = raw.decode('utf-8') except: text = raw.decode('latin-1') if text.startswith('\ufeff'): text = text[1:] if not text.strip(): raise HTTPException(400,"Leere Datei") reader = csv.DictReader(io.StringIO(text)) inserted = skipped = 0 with get_db() as conn: for row in reader: wtype = row.get('Workout Type','').strip() start = row.get('Start','').strip() if not wtype or not start: continue try: date = start[:10] except: continue dur = row.get('Duration','').strip() duration_min = None if dur: try: p = dur.split(':') duration_min = round(int(p[0])*60+int(p[1])+int(p[2])/60,1) except: pass def kj(v): try: return round(float(v)/4.184) if v else None except: return None def tf(v): try: return round(float(v),1) if v else None except: return None try: conn.execute("""INSERT INTO activity_log (id,profile_id,date,start_time,end_time,activity_type,duration_min,kcal_active,kcal_resting, hr_avg,hr_max,distance_km,source,created) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,'apple_health',datetime('now'))""", (str(uuid.uuid4()),pid,date,start,row.get('End',''),wtype,duration_min, kj(row.get('Aktive Energie (kJ)','')),kj(row.get('Ruheeinträge (kJ)','')), tf(row.get('Durchschn. Herzfrequenz (count/min)','')), tf(row.get('Max. Herzfrequenz (count/min)','')), tf(row.get('Distanz (km)','')))) inserted+=1 except: skipped+=1 conn.commit() return {"inserted":inserted,"skipped":skipped,"message":f"{inserted} Trainings importiert"} # ── Photos ──────────────────────────────────────────────────────────────────── @app.post("/api/photos") async def upload_photo(file: UploadFile=File(...), date: str="", x_profile_id: Optional[str]=Header(default=None)): pid = get_pid(x_profile_id) fid = str(uuid.uuid4()) ext = Path(file.filename).suffix or '.jpg' path = PHOTOS_DIR / f"{fid}{ext}" async with aiofiles.open(path,'wb') as f: await f.write(await file.read()) with get_db() as conn: conn.execute("INSERT INTO photos (id,profile_id,date,path,created) VALUES (?,?,?,?,datetime('now'))", (fid,pid,date,str(path))); conn.commit() return {"id":fid,"date":date} @app.get("/api/photos/{fid}") def get_photo(fid: str): with get_db() as conn: row = conn.execute("SELECT path FROM photos WHERE id=?", (fid,)).fetchone() if not row: raise HTTPException(404) return FileResponse(row['path']) @app.get("/api/photos") def list_photos(x_profile_id: Optional[str]=Header(default=None)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM photos WHERE profile_id=? ORDER BY created DESC LIMIT 100", (pid,)).fetchall()] # ── Nutrition ───────────────────────────────────────────────────────────────── def _pf(s): try: return float(str(s).replace(',','.').strip()) except: return 0.0 @app.post("/api/nutrition/import-csv") async def import_nutrition_csv(file: UploadFile=File(...), x_profile_id: Optional[str]=Header(default=None)): pid = get_pid(x_profile_id) raw = await file.read() try: text = raw.decode('utf-8') except: text = raw.decode('latin-1') if text.startswith('\ufeff'): text = text[1:] if not text.strip(): raise HTTPException(400,"Leere Datei") reader = csv.DictReader(io.StringIO(text), delimiter=';') days: dict = {} count = 0 for row in reader: rd = row.get('datum_tag_monat_jahr_stunde_minute','').strip().strip('"') if not rd: continue try: p = rd.split(' ')[0].split('.') iso = f"{p[2]}-{p[1]}-{p[0]}" except: continue days.setdefault(iso,{'kcal':0,'fat_g':0,'carbs_g':0,'protein_g':0}) days[iso]['kcal'] += _pf(row.get('kj',0))/4.184 days[iso]['fat_g'] += _pf(row.get('fett_g',0)) days[iso]['carbs_g'] += _pf(row.get('kh_g',0)) days[iso]['protein_g'] += _pf(row.get('protein_g',0)) count+=1 inserted=0 with get_db() as conn: for iso,vals in days.items(): kcal=round(vals['kcal'],1); fat=round(vals['fat_g'],1) carbs=round(vals['carbs_g'],1); prot=round(vals['protein_g'],1) if conn.execute("SELECT id FROM nutrition_log WHERE profile_id=? AND date=?",(pid,iso)).fetchone(): conn.execute("UPDATE nutrition_log SET kcal=?,protein_g=?,fat_g=?,carbs_g=? WHERE profile_id=? AND date=?", (kcal,prot,fat,carbs,pid,iso)) else: conn.execute("INSERT INTO nutrition_log (id,profile_id,date,kcal,protein_g,fat_g,carbs_g,source,created) VALUES (?,?,?,?,?,?,?,'csv',datetime('now'))", (str(uuid.uuid4()),pid,iso,kcal,prot,fat,carbs)) inserted+=1 conn.commit() return {"rows_parsed":count,"days_imported":inserted, "date_range":{"from":min(days) if days else None,"to":max(days) if days else None}} @app.get("/api/nutrition") def list_nutrition(limit: int=365, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM nutrition_log WHERE profile_id=? ORDER BY date DESC LIMIT ?", (pid,limit)).fetchall()] @app.get("/api/nutrition/correlations") def nutrition_correlations(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: nutr={r['date']:r2d(r) for r in conn.execute("SELECT * FROM nutrition_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()} wlog={r['date']:r['weight'] for r in conn.execute("SELECT date,weight FROM weight_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()} cals=sorted([r2d(r) for r in conn.execute("SELECT date,lean_mass,body_fat_pct FROM caliper_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()],key=lambda x:x['date']) all_dates=sorted(set(list(nutr)+list(wlog))) mi,last_cal,cal_by_date=0,{},{} for d in all_dates: while mi 1: ca_prev = calipers[1] ca_summary += f" | Vorher: {ca_prev.get('body_fat_pct')}% ({ca_prev.get('date')})" # Build compact circ summary ci_summary = "" if circs: c = circs[0] ci_summary = f"Taille: {c.get('c_waist')} · Hüfte: {c.get('c_hip')} · Bauch: {c.get('c_belly')} · Brust: {c.get('c_chest')} cm ({c.get('date')})" prompt = f"""Du bist ein Gesundheits- und Ernährungsanalyst. Erstelle eine strukturierte Analyse auf Deutsch (400-500 Wörter). PROFIL: {profile.get('name')} · {'männlich' if profile.get('sex')=='m' else 'weiblich'} · {profile.get('height')} cm Ziele: Gewicht {profile.get('goal_weight','–')} kg · KF {profile.get('goal_bf_pct','–')}% GEWICHT: {w_summary} CALIPER: {ca_summary} UMFÄNGE: {ci_summary} ERNÄHRUNG ({nutr_summary}): {nutr_detail} Protein-Ziel: {pt_low}–{pt_high}g/Tag AKTIVITÄT: {act_summary} Struktur (jeden Abschnitt vollständig ausschreiben): ⚖️ **Gewichts- & Körperzusammensetzung** 🍽️ **Ernährungsanalyse** 🏋️ **Aktivität & Energiebilanz** 🎯 **Zielabgleich** 💪 **Empfehlungen** (3 konkrete Punkte) Sachlich, motivierend, Zahlen zitieren, keine Diagnosen. Alle 5 Abschnitte vollständig ausschreiben.""" if OPENROUTER_KEY: import httpx resp=httpx.post("https://openrouter.ai/api/v1/chat/completions", headers={"Authorization":f"Bearer {OPENROUTER_KEY}"}, json={"model":OPENROUTER_MODEL,"messages":[{"role":"user","content":prompt}],"max_tokens":2500}) text=resp.json()['choices'][0]['message']['content'] elif ANTHROPIC_KEY: import anthropic client=anthropic.Anthropic(api_key=ANTHROPIC_KEY) msg=client.messages.create(model="claude-sonnet-4-20250514",max_tokens=2500, messages=[{"role":"user","content":prompt}]) text=msg.content[0].text else: raise HTTPException(400,"Kein API-Key") iid=str(uuid.uuid4()) with get_db() as conn: conn.execute("INSERT INTO ai_insights (id,profile_id,scope,content,created) VALUES (?,?,?,?,datetime('now'))", (iid,pid,'trend',text)); conn.commit() return {"id":iid,"content":text} except HTTPException: raise except Exception as e: raise HTTPException(500,f"AI-Fehler: {e}") @app.delete("/api/insights/{iid}") def delete_insight(iid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: conn.execute("DELETE FROM ai_insights WHERE id=? AND profile_id=?", (iid,pid)); conn.commit() return {"ok": True} @app.get("/api/insights/latest") def latest_insights_by_scope(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): """Return the most recent insight per scope/slug.""" pid = get_pid(x_profile_id) with get_db() as conn: rows = conn.execute( """SELECT * FROM ai_insights WHERE profile_id=? AND id IN ( SELECT id FROM ai_insights i2 WHERE i2.profile_id=ai_insights.profile_id AND i2.scope=ai_insights.scope ORDER BY created DESC LIMIT 1 ) ORDER BY scope""", (pid,) ).fetchall() return [r2d(r) for r in rows] @app.get("/api/insights") def list_insights(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: return [r2d(r) for r in conn.execute( "SELECT * FROM ai_insights WHERE profile_id=? ORDER BY created DESC LIMIT 20",(pid,)).fetchall()] # ── Export ──────────────────────────────────────────────────────────────────── import zipfile, json as json_lib def _get_export_data(pid: str, conn): profile = r2d(conn.execute("SELECT * FROM profiles WHERE id=?", (pid,)).fetchone()) or {} weights = [r2d(r) for r in conn.execute("SELECT date,weight,note,source FROM weight_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()] circs = [r2d(r) for r in conn.execute("SELECT date,c_neck,c_chest,c_waist,c_belly,c_hip,c_thigh,c_calf,c_arm,notes FROM circumference_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()] calipers = [r2d(r) for r in conn.execute("SELECT date,sf_method,body_fat_pct,lean_mass,fat_mass,notes FROM caliper_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()] nutr = [r2d(r) for r in conn.execute("SELECT date,kcal,protein_g,fat_g,carbs_g,source FROM nutrition_log WHERE profile_id=? ORDER BY date",(pid,)).fetchall()] activity = [r2d(r) for r in conn.execute("SELECT date,activity_type,duration_min,kcal_active,hr_avg,hr_max,distance_km,rpe,source,notes FROM activity_log WHERE profile_id=? ORDER BY date DESC",(pid,)).fetchall()] insights = [r2d(r) for r in conn.execute("SELECT created,scope,content FROM ai_insights WHERE profile_id=? ORDER BY created DESC",(pid,)).fetchall()] return profile, weights, circs, calipers, nutr, activity, insights def _make_csv(rows, fields=None): if not rows: return "" out = io.StringIO() f = fields or list(rows[0].keys()) wr = csv.DictWriter(out, fieldnames=f, extrasaction='ignore') wr.writeheader(); wr.writerows(rows) return out.getvalue() @app.get("/api/export/zip") def export_zip(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: profile, weights, circs, calipers, nutr, activity, insights = _get_export_data(pid, conn) name = profile.get('name','profil').lower().replace(' ','_') date = datetime.now().strftime('%Y%m%d') filename = f"bodytrack_{name}_{date}.zip" buf = io.BytesIO() with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as zf: # Profile JSON prof_export = {k:v for k,v in profile.items() if k not in ['id','photo_id']} zf.writestr("profil.json", json_lib.dumps(prof_export, ensure_ascii=False, indent=2)) # CSVs if weights: zf.writestr("gewicht.csv", _make_csv(weights)) if circs: zf.writestr("umfaenge.csv", _make_csv(circs)) if calipers: zf.writestr("caliper.csv", _make_csv(calipers)) if nutr: zf.writestr("ernaehrung.csv", _make_csv(nutr)) if activity: zf.writestr("aktivitaet.csv", _make_csv(activity)) # KI-Auswertungen als Klartext if insights: txt = "" for ins in insights: txt += f"{'='*60}\n" txt += f"Datum: {ins['created'][:16]}\n" txt += f"{'='*60}\n" txt += ins['content'] + "\n\n" zf.writestr("ki_auswertungen.txt", txt) buf.seek(0) return StreamingResponse(iter([buf.read()]), media_type="application/zip", headers={"Content-Disposition": f"attachment; filename={filename}"}) @app.get("/api/export/json") def export_json(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): pid = get_pid(x_profile_id) with get_db() as conn: profile, weights, circs, calipers, nutr, activity, insights = _get_export_data(pid, conn) name = profile.get('name','profil').lower().replace(' ','_') date = datetime.now().strftime('%Y%m%d') filename = f"bodytrack_{name}_{date}.json" data = { "export_version": "1.0", "exported_at": datetime.now().isoformat(), "profile": {k:v for k,v in profile.items() if k not in ['id','photo_id']}, "gewicht": weights, "umfaenge": circs, "caliper": calipers, "ernaehrung": nutr, "aktivitaet": activity, "ki_auswertungen": [{"datum":i['created'],"inhalt":i['content']} for i in insights], } return StreamingResponse( iter([json_lib.dumps(data, ensure_ascii=False, indent=2)]), media_type="application/json", headers={"Content-Disposition": f"attachment; filename={filename}"} ) @app.get("/api/export/csv") def export_csv(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): """Legacy single-file CSV export.""" pid = get_pid(x_profile_id) with get_db() as conn: profile, weights, circs, calipers, nutr, activity, _ = _get_export_data(pid, conn) out = io.StringIO() for label, rows in [("GEWICHT",weights),("UMFAENGE",circs),("CALIPER",calipers),("ERNAEHRUNG",nutr),("AKTIVITAET",activity)]: out.write(f"=== {label} ===\n") if rows: out.write(_make_csv(rows)) out.write("\n") out.seek(0) name = profile.get('name','export').lower().replace(' ','_') return StreamingResponse(iter([out.getvalue()]), media_type="text/csv", headers={"Content-Disposition": f"attachment; filename=bodytrack_{name}.csv"}) # ── Routes: AI Prompts ──────────────────────────────────────────────────────── class PromptUpdate(BaseModel): name: Optional[str] = None description: Optional[str] = None template: Optional[str] = None active: Optional[int] = None sort_order: Optional[int] = None @app.get("/api/prompts") def list_prompts(session=Depends(require_auth)): with get_db() as conn: rows = conn.execute("SELECT * FROM ai_prompts ORDER BY sort_order, name").fetchall() return [r2d(r) for r in rows] @app.put("/api/prompts/{pid}") def update_prompt(pid: str, p: PromptUpdate, x_auth_token: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): require_admin(x_auth_token) with get_db() as conn: data = {k:v for k,v in p.model_dump().items() if v is not None} if not data: return {"ok": True} conn.execute(f"UPDATE ai_prompts SET {', '.join(f'{k}=?' for k in data)} WHERE id=?", list(data.values())+[pid]) conn.commit() return {"ok": True} @app.post("/api/prompts/{pid}/reset") def reset_prompt(pid: str, session=Depends(require_auth)): """Reset prompt to default by re-seeding.""" with get_db() as conn: _seed_prompts(conn) return {"ok": True} @app.post("/api/insights/run/{slug}") def run_insight(slug: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): """Run a specific prompt by slug.""" pid = get_pid(x_profile_id) check_ai_limit(pid) with get_db() as conn: prompt_row = conn.execute("SELECT * FROM ai_prompts WHERE slug=?", (slug,)).fetchone() if not prompt_row: raise HTTPException(404, f"Prompt '{slug}' nicht gefunden") template = prompt_row['template'] profile = r2d(conn.execute("SELECT * FROM profiles WHERE id=?", (pid,)).fetchone()) or {} weights = [r2d(r) for r in conn.execute("SELECT * FROM weight_log WHERE profile_id=? ORDER BY date DESC LIMIT 14",(pid,)).fetchall()] calipers = [r2d(r) for r in conn.execute("SELECT * FROM caliper_log WHERE profile_id=? ORDER BY date DESC LIMIT 5",(pid,)).fetchall()] circs = [r2d(r) for r in conn.execute("SELECT * FROM circumference_log WHERE profile_id=? ORDER BY date DESC LIMIT 5",(pid,)).fetchall()] nutrition = [r2d(r) for r in conn.execute("SELECT * FROM nutrition_log WHERE profile_id=? ORDER BY date DESC LIMIT 14",(pid,)).fetchall()] activities= [r2d(r) for r in conn.execute("SELECT * FROM activity_log WHERE profile_id=? ORDER BY date DESC LIMIT 20",(pid,)).fetchall()] # Build template variables vars = { "name": profile.get('name',''), "geschlecht": 'männlich' if profile.get('sex')=='m' else 'weiblich', "height": str(profile.get('height','')), "goal_weight": str(profile.get('goal_weight','–')), "goal_bf_pct": str(profile.get('goal_bf_pct','–')), "weight_aktuell": str(weights[0]['weight']) if weights else '–', "kf_aktuell": str(calipers[0].get('body_fat_pct','–')) if calipers else '–', } # Weight trend if weights: w_first=weights[-1]; w_last=weights[0] diff=round(w_last['weight']-w_first['weight'],1) vars["weight_trend"] = f"{w_first['date']}: {w_first['weight']}kg → {w_last['date']}: {w_last['weight']}kg (Δ{diff:+.1f}kg über {len(weights)} Einträge)" else: vars["weight_trend"] = "Keine Daten" # Caliper if calipers: ca=calipers[0] vars["caliper_summary"] = f"KF: {ca.get('body_fat_pct')}% · Mager: {ca.get('lean_mass')}kg · Fett: {ca.get('fat_mass')}kg ({ca.get('date')})" if len(calipers)>1: prev=calipers[1]; diff=round((ca.get('body_fat_pct') or 0)-(prev.get('body_fat_pct') or 0),1) vars["caliper_summary"] += f" | Vorher: {prev.get('body_fat_pct')}% (Δ{diff:+.1f}%)" else: vars["caliper_summary"] = "Keine Messungen" # Circumferences if circs: c=circs[0] parts=[f"{k.replace('c_','').capitalize()}: {c[k]}cm" for k in ['c_waist','c_hip','c_belly','c_chest','c_arm'] if c.get(k)] vars["circ_summary"] = f"{' · '.join(parts)} ({c.get('date')})" else: vars["circ_summary"] = "Keine Messungen" # Nutrition if nutrition: avg_kcal=round(sum(n['kcal'] or 0 for n in nutrition)/len(nutrition)) avg_prot=round(sum(n['protein_g'] or 0 for n in nutrition)/len(nutrition),1) avg_fat =round(sum(n['fat_g'] or 0 for n in nutrition)/len(nutrition),1) avg_carb=round(sum(n['carbs_g'] or 0 for n in nutrition)/len(nutrition),1) vars["nutrition_summary"] = f"{len(nutrition)} Tage · Ø {avg_kcal} kcal · Ø {avg_prot}g Protein · Ø {avg_fat}g Fett · Ø {avg_carb}g KH" vars["nutrition_detail"] = str([{"date":n['date'],"kcal":round(n['kcal'] or 0),"protein_g":n['protein_g'],"fat_g":n['fat_g'],"carbs_g":n['carbs_g']} for n in nutrition]) latest_w = weights[0]['weight'] if weights else 80 vars["protein_ziel_low"] = str(round(latest_w*1.6,0)) vars["protein_ziel_high"] = str(round(latest_w*2.2,0)) else: vars["nutrition_summary"] = "Keine Ernährungsdaten" vars["nutrition_detail"] = "[]" vars["protein_ziel_low"] = "–" vars["protein_ziel_high"] = "–" # Activity if activities: total_kcal=round(sum(a.get('kcal_active') or 0 for a in activities)) total_min=round(sum(a.get('duration_min') or 0 for a in activities)) types={} for a in activities: t=a['activity_type']; types.setdefault(t,0); types[t]+=1 vars["activity_summary"] = f"{len(activities)} Trainings · {total_kcal} kcal · {total_min} Min · {types}" vars["activity_kcal_summary"] = f"Ø {round(total_kcal/len(activities))} kcal/Training · {total_kcal} kcal gesamt ({len(activities)} Einheiten)" vars["activity_detail"] = str([{"date":a['date'],"type":a['activity_type'],"min":a.get('duration_min'),"kcal":a.get('kcal_active'),"hr_avg":round(a['hr_avg']) if a.get('hr_avg') else None} for a in activities]) else: vars["activity_summary"] = "Keine Aktivitätsdaten" vars["activity_kcal_summary"] = "Keine Daten" vars["activity_detail"] = "[]" # Fill template prompt = template for key, val in vars.items(): prompt = prompt.replace(f"{{{{{key}}}}}", val) try: if OPENROUTER_KEY: import httpx resp=httpx.post("https://openrouter.ai/api/v1/chat/completions", headers={"Authorization":f"Bearer {OPENROUTER_KEY}"}, json={"model":OPENROUTER_MODEL,"messages":[{"role":"user","content":prompt}],"max_tokens":2500}, timeout=60) text=resp.json()['choices'][0]['message']['content'] elif ANTHROPIC_KEY: import anthropic client=anthropic.Anthropic(api_key=ANTHROPIC_KEY) msg=client.messages.create(model="claude-sonnet-4-20250514",max_tokens=2500, messages=[{"role":"user","content":prompt}]) text=msg.content[0].text else: raise HTTPException(400,"Kein API-Key") iid=str(uuid.uuid4()) with get_db() as conn: conn.execute("INSERT INTO ai_insights (id,profile_id,scope,content,created) VALUES (?,?,?,?,datetime('now'))", (iid,pid,slug,text)); conn.commit() return {"id":iid,"content":text,"scope":slug} except HTTPException: raise except Exception as e: raise HTTPException(500,f"AI-Fehler: {e}") # Keep legacy endpoint working @app.post("/api/insights/trend") def insight_trend_legacy(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): return run_insight("gesamt", x_profile_id) # ── Multi-Stage AI Pipeline ─────────────────────────────────────────────────── import concurrent.futures def _call_ai(prompt: str, max_tokens: int = 600, json_mode: bool = False) -> str: """Single AI call – used by pipeline stages.""" system = "Du bist ein präziser Datenanalyst. " + ( "Antworte NUR mit validem JSON, ohne Kommentare oder Markdown-Backticks." if json_mode else "Antworte auf Deutsch, sachlich und motivierend." ) if OPENROUTER_KEY: import httpx, json as json_lib resp = httpx.post("https://openrouter.ai/api/v1/chat/completions", headers={"Authorization": f"Bearer {OPENROUTER_KEY}"}, json={"model": OPENROUTER_MODEL, "messages": [{"role":"system","content":system},{"role":"user","content":prompt}], "max_tokens": max_tokens}, timeout=60) return resp.json()['choices'][0]['message']['content'] elif ANTHROPIC_KEY: import anthropic client = anthropic.Anthropic(api_key=ANTHROPIC_KEY) msg = client.messages.create( model="claude-sonnet-4-20250514", max_tokens=max_tokens, system=system, messages=[{"role":"user","content":prompt}]) return msg.content[0].text raise HTTPException(400, "Kein API-Key konfiguriert") @app.post("/api/insights/pipeline") def run_pipeline(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)): # Pipeline counts as 3 calls (stage1 x3 + stage2 + stage3 = 5, but we count 3) """ 3-stage parallel AI pipeline: Stage 1 (parallel): body_summary, nutrition_summary, activity_summary → compact JSON Stage 2 (sequential): full narrative synthesis from summaries Stage 3 (sequential): goal progress assessment Final result saved as scope='pipeline' """ pid = get_pid(x_profile_id) check_ai_limit(pid) # counts as 1 (pipeline run) import json as json_lib with get_db() as conn: profile = r2d(conn.execute("SELECT * FROM profiles WHERE id=?",(pid,)).fetchone()) or {} weights = [r2d(r) for r in conn.execute("SELECT date,weight FROM weight_log WHERE profile_id=? ORDER BY date DESC LIMIT 14",(pid,)).fetchall()] calipers = [r2d(r) for r in conn.execute("SELECT date,body_fat_pct,lean_mass,fat_mass FROM caliper_log WHERE profile_id=? ORDER BY date DESC LIMIT 5",(pid,)).fetchall()] circs = [r2d(r) for r in conn.execute("SELECT date,c_waist,c_hip,c_belly FROM circumference_log WHERE profile_id=? ORDER BY date DESC LIMIT 5",(pid,)).fetchall()] nutrition = [r2d(r) for r in conn.execute("SELECT date,kcal,protein_g,fat_g,carbs_g FROM nutrition_log WHERE profile_id=? ORDER BY date DESC LIMIT 14",(pid,)).fetchall()] activities= [r2d(r) for r in conn.execute("SELECT date,activity_type,duration_min,kcal_active,hr_avg FROM activity_log WHERE profile_id=? ORDER BY date DESC LIMIT 20",(pid,)).fetchall()] name = profile.get('name','') sex = profile.get('sex','m') height = profile.get('height',178) age = round((datetime.now()-datetime.strptime(profile['dob'],'%Y-%m-%d')).days/365.25) if profile.get('dob') else 30 g_weight= profile.get('goal_weight','–') g_bf = profile.get('goal_bf_pct','–') # Weight summary w_trend = "" if weights: first=weights[-1]; last=weights[0] diff=round(last['weight']-first['weight'],1) w_trend=f"{first['date']}: {first['weight']}kg → {last['date']}: {last['weight']}kg (Δ{diff:+.1f}kg)" # Caliper summary ca_sum = "" if calipers: c=calipers[0] ca_sum=f"KF {c.get('body_fat_pct')}% Mager {c.get('lean_mass')}kg Fett {c.get('fat_mass')}kg ({c.get('date')})" # Circ summary ci_sum = "" if circs: c=circs[0] ci_sum=f"Taille {c.get('c_waist')}cm Hüfte {c.get('c_hip')}cm Bauch {c.get('c_belly')}cm" # Nutrition summary avg_kcal=avg_prot=avg_fat=avg_carb=None if nutrition: n=len(nutrition) avg_kcal=round(sum(x['kcal'] or 0 for x in nutrition)/n) avg_prot=round(sum(x['protein_g'] or 0 for x in nutrition)/n,1) avg_fat =round(sum(x['fat_g'] or 0 for x in nutrition)/n,1) avg_carb=round(sum(x['carbs_g'] or 0 for x in nutrition)/n,1) pt_low=round((weights[0]['weight'] if weights else 80)*1.6) pt_high=round((weights[0]['weight'] if weights else 80)*2.2) # Activity summary act_sum="" if activities: total_kcal=round(sum(a.get('kcal_active') or 0 for a in activities)) total_min=round(sum(a.get('duration_min') or 0 for a in activities)) types={} for a in activities: t=a['activity_type']; types.setdefault(t,0); types[t]+=1 act_sum=f"{len(activities)} Einheiten {total_kcal}kcal {total_min}min Typen:{types}" # ── Load pipeline prompts from DB ───────────────────────────────────── with get_db() as conn: p_rows = {r['slug']:r['template'] for r in conn.execute( "SELECT slug,template FROM ai_prompts WHERE slug LIKE 'pipeline_%'" ).fetchall()} def fill(template, extra={}): """Fill template variables.""" vars = { 'name': name, 'geschlecht': 'männlich' if sex=='m' else 'weiblich', 'height': str(height), 'age': str(age), 'weight_trend': w_trend or 'Keine Daten', 'caliper_summary':ca_sum or 'Keine Daten', 'circ_summary': ci_sum or 'Keine Daten', 'goal_weight': str(g_weight), 'goal_bf_pct': str(g_bf), 'kcal_avg': str(avg_kcal or '–'), 'protein_avg': str(avg_prot or '–'), 'fat_avg': str(avg_fat or '–'), 'carb_avg': str(avg_carb or '–'), 'nutrition_days': str(len(nutrition)), 'weight_aktuell': str(weights[0]['weight'] if weights else '–'), 'protein_ziel_low': str(pt_low), 'protein_ziel_high': str(pt_high), 'activity_detail': act_sum or 'Keine Daten', } vars.update(extra) result = template for k, v in vars.items(): result = result.replace(f'{{{{{k}}}}}', v) return result # ── Stage 1: Three parallel JSON analysis calls ──────────────────────── default_body = f"""Analysiere diese Körperdaten und gib NUR ein JSON-Objekt zurück. Profil: {sex} {height}cm {age}J Gewicht: {w_trend} Caliper: {ca_sum} Umfänge: {ci_sum} Ziele: {g_weight}kg KF {g_bf}% {{"gewicht_trend":"sinkend|steigend|stabil","gewicht_delta_30d":,"kf_aktuell":,"kf_trend":"sinkend|steigend|stabil","whr_status":"gut|grenzwertig|erhoeht","koerper_bewertung":"<1 Satz>","koerper_auffaelligkeiten":"<1 Satz>"}}""" default_nutr = f"""Analysiere Ernährungsdaten und gib NUR JSON zurück. Ø {avg_kcal}kcal {avg_prot}g P {avg_fat}g F {avg_carb}g KH ({len(nutrition)} Tage) Protein-Ziel {pt_low}–{pt_high}g {{"kcal_avg":{avg_kcal},"protein_avg":{avg_prot},"protein_ziel_erreicht":,"kalorienbilanz":"defizit|ausgeglichen|ueberschuss","ernaehrung_bewertung":"<1 Satz>","ernaehrung_empfehlung":"<1 Tipp>"}}""" if nutrition else '{{"keine_daten":true}}' default_act = f"""Analysiere Aktivitätsdaten und gib NUR JSON zurück. {act_sum} {{"trainings_anzahl":,"kcal_gesamt":,"konsistenz":"hoch|mittel|niedrig","haupttrainingsart":"","aktivitaet_bewertung":"<1 Satz>","aktivitaet_empfehlung":"<1 Tipp>"}}""" if activities else '{{"keine_daten":true}}' prompt_body = fill(p_rows.get('pipeline_body', default_body)) prompt_nutr = fill(p_rows.get('pipeline_nutrition', default_nutr)) prompt_act = fill(p_rows.get('pipeline_activity', default_act)) # Run stage 1 in parallel try: with concurrent.futures.ThreadPoolExecutor(max_workers=3) as ex: f_body = ex.submit(_call_ai, prompt_body, 400, True) f_nutr = ex.submit(_call_ai, prompt_nutr, 300, True) f_act = ex.submit(_call_ai, prompt_act, 250, True) body_json = f_body.result(timeout=45) nutr_json = f_nutr.result(timeout=45) act_json = f_act.result(timeout=45) except Exception as e: raise HTTPException(500, f"Stage-1-Fehler: {e}") # Clean JSON (remove potential markdown fences) def clean_json(s): s = s.strip() if s.startswith("```"): s = s.split("\n",1)[1].rsplit("```",1)[0] return s # ── Stage 2: Narrative synthesis ────────────────────────────────────── default_synthesis = f"""Du bist Gesundheitscoach. Erstelle vollständige Analyse für {name} auf Deutsch (450–550 Wörter). Körper: {clean_json(body_json)} Ernährung: {clean_json(nutr_json)} Aktivität: {clean_json(act_json)} Protein-Ziel: {pt_low}–{pt_high}g/Tag ⚖️ **Gewichts- & Körperzusammensetzung** 🍽️ **Ernährungsanalyse** 🏋️ **Aktivität & Energiebilanz** 🔗 **Zusammenhänge** 💪 **3 Empfehlungen** Sachlich, motivierend, Zahlen zitieren, keine Diagnosen.""" synth_template = p_rows.get('pipeline_synthesis', default_synthesis) prompt_synthesis = fill(synth_template, { 'stage1_body': clean_json(body_json), 'stage1_nutrition': clean_json(nutr_json), 'stage1_activity': clean_json(act_json), }) try: synthesis = _call_ai(prompt_synthesis, 2000, False) except Exception as e: raise HTTPException(500, f"Stage-2-Fehler: {e}") # ── Stage 3: Goal assessment (only if goals defined) ─────────────────── goal_text = "" if g_weight != '–' or g_bf != '–': default_goals = f"""Ziel-Bewertung für {name} (100–150 Wörter): Ziel: {g_weight}kg KF {g_bf}% | Körper: {clean_json(body_json)} 🎯 **Zielfortschritt** Abstand, Zeitfenster, nächste Schritte.""" goals_template = p_rows.get('pipeline_goals', default_goals) prompt_goals = fill(goals_template, { 'stage1_body': clean_json(body_json), }) try: goal_text = "\n\n" + _call_ai(prompt_goals, 400, False) except Exception as e: goal_text = f"\n\n🎯 **Zielfortschritt**\n(Fehler: {e})" final_text = synthesis + goal_text # Save result iid = str(uuid.uuid4()) with get_db() as conn: conn.execute("INSERT INTO ai_insights (id,profile_id,scope,content,created) VALUES (?,?,?,?,datetime('now'))", (iid, pid, 'pipeline', final_text)) conn.commit() return { "id": iid, "content": final_text, "scope": "pipeline", "stage1": { "body": clean_json(body_json), "nutrition": clean_json(nutr_json), "activity": clean_json(act_json), } } # ── Auth ────────────────────────────────────────────────────────────────────── class LoginRequest(BaseModel): email: Optional[str] = None name: Optional[str] = None profile_id: Optional[str] = None pin: Optional[str] = None class SetupRequest(BaseModel): name: str pin: str auth_type: Optional[str] = 'pin' session_days: Optional[int] = 30 avatar_color: Optional[str] = '#1D9E75' sex: Optional[str] = 'm' height: Optional[float] = 178 class ProfilePermissions(BaseModel): role: Optional[str] = None ai_enabled: Optional[int] = None ai_limit_day: Optional[int] = None export_enabled: Optional[int] = None auth_type: Optional[str] = None session_days: Optional[int] = None @app.get("/api/auth/status") def auth_status(): """Check if any profiles exist (for first-run setup detection).""" with get_db() as conn: count = conn.execute("SELECT COUNT(*) FROM profiles").fetchone()[0] has_pin = conn.execute("SELECT COUNT(*) FROM profiles WHERE pin_hash IS NOT NULL").fetchone()[0] return {"needs_setup": count == 0, "has_auth": has_pin > 0, "profile_count": count} @app.post("/api/auth/setup") def first_setup(req: SetupRequest): """First-run: create admin profile.""" with get_db() as conn: count = conn.execute("SELECT COUNT(*) FROM profiles").fetchone()[0] # Allow setup if no profiles OR no profile has a PIN yet has_pin = conn.execute("SELECT COUNT(*) FROM profiles WHERE pin_hash IS NOT NULL").fetchone()[0] if count > 0 and has_pin > 0: raise HTTPException(400, "Setup bereits abgeschlossen") pid = str(uuid.uuid4()) conn.execute("""INSERT INTO profiles (id,name,avatar_color,sex,height,role,pin_hash,auth_type,session_days, ai_enabled,export_enabled,created,updated) VALUES (?,?,?,?,?,'admin',?,?,?,1,1,datetime('now'),datetime('now'))""", (pid, req.name, req.avatar_color, req.sex, req.height, hash_pin(req.pin), req.auth_type, req.session_days)) # Create session token = make_token() expires = (datetime.now()+timedelta(days=req.session_days)).isoformat() conn.execute("INSERT INTO sessions (token,profile_id,expires_at) VALUES (?,?,?)", (token, pid, expires)) conn.commit() return {"token": token, "profile_id": pid, "role": "admin"} @app.post("/api/auth/login") @limiter.limit("5/minute") def login(request: Request, req: LoginRequest): """Login via email or username + password. Auto-upgrades SHA256 to bcrypt.""" with get_db() as conn: # Support login via email OR name profile = None if req.email: profile = r2d(conn.execute( "SELECT * FROM profiles WHERE LOWER(email)=?", (req.email.strip().lower(),)).fetchone()) if not profile and req.name: profile = r2d(conn.execute( "SELECT * FROM profiles WHERE LOWER(name)=?", (req.name.strip().lower(),)).fetchone()) # Legacy: support profile_id for self-hosted if not profile and req.profile_id: profile = r2d(conn.execute( "SELECT * FROM profiles WHERE id=?", (req.profile_id,)).fetchone()) if not profile: raise HTTPException(401, "Ungültige E-Mail oder Passwort") # Verify password if not profile.get('pin_hash'): # No password set - allow for legacy/setup pass elif not verify_pin(req.pin or "", profile['pin_hash']): raise HTTPException(401, "Ungültige E-Mail oder Passwort") else: # Auto-upgrade SHA256 → bcrypt on successful login if profile['pin_hash'] and not profile['pin_hash'].startswith('$2'): new_hash = hash_pin(req.pin) conn.execute("UPDATE profiles SET pin_hash=? WHERE id=?", (new_hash, profile['id'])) conn.commit() # Create session token = make_token() days = profile.get('session_days') or 30 expires = (datetime.now() + timedelta(days=days)).isoformat() conn.execute( "INSERT INTO sessions (token, profile_id, expires_at, created) " "VALUES (?, ?, ?, datetime('now'))", (token, profile['id'], expires)) conn.commit() return { "token": token, "profile_id": profile['id'], "name": profile['name'], "role": profile['role'], "expires_at": expires } @app.post("/api/auth/logout") def logout(x_auth_token: Optional[str]=Header(default=None)): if x_auth_token: with get_db() as conn: conn.execute("DELETE FROM sessions WHERE token=?", (x_auth_token,)); conn.commit() return {"ok": True} @app.get("/api/auth/me") def get_me(session=Depends(require_auth)): with get_db() as conn: profile = r2d(conn.execute("SELECT * FROM profiles WHERE id=?", (session['profile_id'],)).fetchone()) return {**profile, "role": session['role']} @app.put("/api/auth/pin") def change_pin(data: dict, session=Depends(require_auth)): new_pin = data.get('pin','') if len(new_pin) < 4: raise HTTPException(400, "PIN mind. 4 Zeichen") with get_db() as conn: conn.execute("UPDATE profiles SET pin_hash=? WHERE id=?", (hash_pin(new_pin), session['profile_id'])); conn.commit() return {"ok": True} # ── Admin: Profile permissions ──────────────────────────────────────────────── @app.put("/api/admin/profiles/{pid}/permissions") def set_permissions(pid: str, p: ProfilePermissions, session=Depends(require_admin)): with get_db() as conn: data = {k:v for k,v in p.model_dump().items() if v is not None} if not data: return {"ok": True} conn.execute(f"UPDATE profiles SET {', '.join(f'{k}=?' for k in data)} WHERE id=?", list(data.values())+[pid]) conn.commit() return {"ok": True} @app.get("/api/admin/profiles") def admin_list_profiles(session=Depends(require_admin)): with get_db() as conn: rows = conn.execute("SELECT * FROM profiles ORDER BY created").fetchall() # Include AI usage today today = datetime.now().strftime('%Y-%m-%d') usage = {r['profile_id']:r['call_count'] for r in conn.execute( "SELECT profile_id, call_count FROM ai_usage WHERE date=?", (today,)).fetchall()} result = [] for r in rows: d = r2d(r) d['ai_calls_today'] = usage.get(d['id'], 0) result.append(d) return result @app.delete("/api/admin/profiles/{pid}") def admin_delete_profile(pid: str, session=Depends(require_admin)): if pid == session['profile_id']: raise HTTPException(400, "Eigenes Profil kann nicht gelöscht werden") with get_db() as conn: target = r2d(conn.execute("SELECT role FROM profiles WHERE id=?", (pid,)).fetchone()) if target and target['role'] == 'admin': admin_count = conn.execute("SELECT COUNT(*) FROM profiles WHERE role='admin'").fetchone()[0] if admin_count <= 1: raise HTTPException(400, "Letzter Admin kann nicht gelöscht werden. Erst einen anderen Admin ernennen.") with get_db() as conn: for table in ['weight_log','circumference_log','caliper_log', 'nutrition_log','activity_log','ai_insights','sessions']: conn.execute(f"DELETE FROM {table} WHERE profile_id=?", (pid,)) conn.execute("DELETE FROM profiles WHERE id=?", (pid,)) conn.commit() return {"ok": True} @app.post("/api/admin/profiles") def admin_create_profile(p: SetupRequest, session=Depends(require_admin)): pid = str(uuid.uuid4()) with get_db() as conn: conn.execute("""INSERT INTO profiles (id,name,avatar_color,sex,height,role,pin_hash,auth_type,session_days, ai_enabled,export_enabled,created,updated) VALUES (?,?,?,?,?,'user',?,?,?,1,1,datetime('now'),datetime('now'))""", (pid, p.name, p.avatar_color, p.sex, p.height, hash_pin(p.pin), p.auth_type, p.session_days)) conn.commit() with get_db() as conn: return r2d(conn.execute("SELECT * FROM profiles WHERE id=?", (pid,)).fetchone()) # ── AI Usage tracking ───────────────────────────────────────────────────────── def check_ai_limit(pid: str): """Check and increment AI usage. Raises 429 if limit exceeded.""" with get_db() as conn: profile = r2d(conn.execute("SELECT ai_enabled, ai_limit_day, role FROM profiles WHERE id=?", (pid,)).fetchone()) if not profile: raise HTTPException(404) if not profile.get('ai_enabled'): raise HTTPException(403, "KI-Zugang für dieses Profil nicht aktiviert") today = datetime.now().strftime('%Y-%m-%d') limit = profile.get('ai_limit_day') if limit: usage_row = conn.execute("SELECT call_count FROM ai_usage WHERE profile_id=? AND date=?", (pid,today)).fetchone() count = usage_row['call_count'] if usage_row else 0 if count >= limit: raise HTTPException(429, f"Tages-Limit von {limit} KI-Calls erreicht") # Increment conn.execute("""INSERT INTO ai_usage (id,profile_id,date,call_count) VALUES (?,?,?,1) ON CONFLICT(profile_id,date) DO UPDATE SET call_count=call_count+1""", (str(uuid.uuid4()), pid, today)) conn.commit() # Admin email update for profiles @app.put("/api/admin/profiles/{pid}/email") def admin_set_email(pid: str, data: dict, session=Depends(require_admin)): email = data.get('email','').strip() with get_db() as conn: conn.execute("UPDATE profiles SET email=? WHERE id=?", (email or None, pid)) conn.commit() return {"ok": True} # Admin PIN reset for other profiles @app.put("/api/admin/profiles/{pid}/pin") def admin_set_pin(pid: str, data: dict, session=Depends(require_admin)): new_pin = data.get('pin','') if len(new_pin) < 4: raise HTTPException(400, "PIN mind. 4 Zeichen") with get_db() as conn: conn.execute("UPDATE profiles SET pin_hash=? WHERE id=?", (hash_pin(new_pin), pid)) conn.commit() return {"ok": True} # ── E-Mail Infrastructure ───────────────────────────────────────────────────── import smtplib from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart SMTP_HOST = os.getenv('SMTP_HOST', '') SMTP_PORT = int(os.getenv('SMTP_PORT', '587')) SMTP_USER = os.getenv('SMTP_USER', '') SMTP_PASS = os.getenv('SMTP_PASS', '') SMTP_FROM = os.getenv('SMTP_FROM', SMTP_USER) APP_URL = os.getenv('APP_URL', 'http://localhost:3002') def send_email(to: str, subject: str, html: str, text: str = '') -> bool: """Send email via configured SMTP. Returns True on success.""" if not SMTP_HOST or not SMTP_USER: print(f"[EMAIL] SMTP not configured – would send to {to}: {subject}") return False try: msg = MIMEMultipart('alternative') msg['Subject'] = subject msg['From'] = f"Mitai Jinkendo <{SMTP_FROM}>" msg['To'] = to if text: msg.attach(MIMEText(text, 'plain', 'utf-8')) msg.attach(MIMEText(html, 'html', 'utf-8')) with smtplib.SMTP(SMTP_HOST, SMTP_PORT, timeout=15) as s: s.ehlo() s.starttls() s.login(SMTP_USER, SMTP_PASS) s.sendmail(SMTP_FROM, [to], msg.as_string()) print(f"[EMAIL] Sent to {to}: {subject}") return True except Exception as e: print(f"[EMAIL] Error sending to {to}: {e}") return False def email_html_wrapper(content_html: str, title: str) -> str: return f"""
{title}
{content_html}
""" # ── Password Recovery ───────────────────────────────────────────────────────── import random, string def generate_recovery_token() -> str: return ''.join(random.choices(string.ascii_letters + string.digits, k=32)) @app.post("/api/auth/forgot-password") @limiter.limit("3/minute") def forgot_password(request: Request, data: dict): """Send recovery email if profile has email configured.""" email = data.get('email','').strip().lower() if not email: raise HTTPException(400, "E-Mail erforderlich") with get_db() as conn: profile = conn.execute( "SELECT * FROM profiles WHERE LOWER(email)=?", (email,) ).fetchone() if not profile: # Don't reveal if email exists return {"ok": True, "message": "Falls ein Konto existiert, wurde eine E-Mail gesendet."} profile = r2d(profile) # Generate token, valid 1 hour token = generate_recovery_token() expires = (datetime.now()+timedelta(hours=1)).isoformat() conn.execute( "INSERT OR REPLACE INTO sessions (token, profile_id, expires_at, created) " "VALUES (?, ?, ?, datetime('now'))", (f"recovery_{token}", profile['id'], expires) ) conn.commit() reset_url = f"{APP_URL}/reset-password?token={token}" html = email_html_wrapper(f"""

Hallo {profile['name']},

du hast eine Passwort-Zurücksetzung für dein Mitai Jinkendo-Konto angefordert.

Passwort zurücksetzen

Dieser Link ist 1 Stunde gültig.
Falls du das nicht angefordert hast, ignoriere diese E-Mail.


Oder kopiere diesen Link:
{reset_url}

""", "Passwort zurücksetzen") sent = send_email(email, "Mitai Jinkendo – Passwort zurücksetzen", html) return {"ok": True, "message": "Falls ein Konto existiert, wurde eine E-Mail gesendet.", "sent": sent} @app.post("/api/auth/reset-password") @limiter.limit("3/minute") def reset_password(request: Request, data: dict): """Reset password using recovery token.""" token = data.get('token','') new_pin = data.get('pin','') if not token or len(new_pin) < 4: raise HTTPException(400, "Token und neues Passwort erforderlich") with get_db() as conn: session = conn.execute( "SELECT * FROM sessions WHERE token=? AND expires_at > datetime('now')", (f"recovery_{token}",) ).fetchone() if not session: raise HTTPException(400, "Ungültiger oder abgelaufener Token") session = r2d(session) conn.execute("UPDATE profiles SET pin_hash=? WHERE id=?", (hash_pin(new_pin), session['profile_id'])) conn.execute("DELETE FROM sessions WHERE token=?", (f"recovery_{token}",)) conn.commit() return {"ok": True} # ── E-Mail Settings ─────────────────────────────────────────────────────────── @app.get("/api/admin/email/status") def email_status(session=Depends(require_admin)): return { "configured": bool(SMTP_HOST and SMTP_USER), "smtp_host": SMTP_HOST, "smtp_port": SMTP_PORT, "smtp_user": SMTP_USER, "from": SMTP_FROM, "app_url": APP_URL, } @app.post("/api/admin/email/test") def email_test(data: dict, session=Depends(require_admin)): """Send a test email.""" to = data.get('to','') if not to: raise HTTPException(400, "Empfänger-E-Mail fehlt") html = email_html_wrapper("""

Das ist eine Test-E-Mail von Mitai Jinkendo.

✓ E-Mail-Versand funktioniert korrekt!

""", "Test-E-Mail") sent = send_email(to, "Mitai Jinkendo – Test-E-Mail", html) if not sent: raise HTTPException(500, "E-Mail konnte nicht gesendet werden. SMTP-Konfiguration prüfen.") return {"ok": True} @app.post("/api/admin/email/weekly-summary/{pid}") def send_weekly_summary(pid: str, session=Depends(require_admin)): """Send weekly summary to a profile (if email configured).""" with get_db() as conn: profile = r2d(conn.execute("SELECT * FROM profiles WHERE id=?", (pid,)).fetchone()) if not profile or not profile.get('email'): raise HTTPException(400, "Profil hat keine E-Mail-Adresse") # Gather last 7 days data weights = [r2d(r) for r in conn.execute( "SELECT date,weight FROM weight_log WHERE profile_id=? AND date>=date('now','-7 days') ORDER BY date", (pid,)).fetchall()] nutr = [r2d(r) for r in conn.execute( "SELECT kcal,protein_g FROM nutrition_log WHERE profile_id=? AND date>=date('now','-7 days')", (pid,)).fetchall()] acts = conn.execute( "SELECT COUNT(*) FROM activity_log WHERE profile_id=? AND date>=date('now','-7 days')", (pid,)).fetchone()[0] w_text = f"{weights[0]['weight']} kg → {weights[-1]['weight']} kg" if len(weights)>=2 else "Keine Daten" n_text = f"Ø {round(sum(n['kcal'] or 0 for n in nutr)/len(nutr))} kcal" if nutr else "Keine Daten" w_delta = round(weights[-1]['weight']-weights[0]['weight'],1) if len(weights)>=2 else None if w_delta is not None: color = "#1D9E75" if w_delta <= 0 else "#D85A30" sign = "+" if w_delta > 0 else "" delta_html = f"{sign}{w_delta} kg" else: delta_html = "" html = email_html_wrapper(f"""

Hallo {profile['name']}, hier ist deine Wochenzusammenfassung:

⚖️ Gewicht {w_text} {delta_html}
🍽️ Ernährung {n_text}
🏋️ Trainings {acts}× diese Woche
App öffnen """, "Deine Wochenzusammenfassung") sent = send_email(profile['email'], f"Mitai Jinkendo – Woche vom {datetime.now().strftime('%d.%m.%Y')}", html) if not sent: raise HTTPException(500, "Senden fehlgeschlagen") return {"ok": True}