mitai-jinkendo/backend/main.py
Lars b6f8b11685
All checks were successful
Deploy Development / deploy (push) Successful in 58s
Build Test / lint-backend (push) Successful in 0s
Build Test / build-frontend (push) Successful in 12s
fix: handle datetime.date object for birth_year in ZIP export
PostgreSQL returns dob as datetime.date object, not string.
Changed from prof['dob'][:4] to prof['dob'].year

Error was: TypeError: 'datetime.date' object is not subscriptable

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-18 22:41:51 +01:00

1715 lines
76 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import os, csv, io, uuid, json, zipfile
from pathlib import Path
from typing import Optional
from datetime import datetime
from decimal import Decimal
from fastapi import FastAPI, HTTPException, UploadFile, File, Header, Query, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse, FileResponse, Response
from pydantic import BaseModel
import aiofiles
import bcrypt
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.util import get_remote_address
from slowapi.errors import RateLimitExceeded
from starlette.requests import Request
from db import get_db, get_cursor, r2d
# ── Runtime configuration (environment-driven) ───────────────────────────────
DATA_DIR = Path(os.getenv("DATA_DIR", "./data"))        # general data storage
PHOTOS_DIR = Path(os.getenv("PHOTOS_DIR", "./photos"))  # uploaded progress photos
DATA_DIR.mkdir(parents=True, exist_ok=True)
PHOTOS_DIR.mkdir(parents=True, exist_ok=True)
# AI provider credentials / model selection (empty string = not configured)
OPENROUTER_KEY = os.getenv("OPENROUTER_API_KEY", "")
OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "anthropic/claude-sonnet-4")
ANTHROPIC_KEY = os.getenv("ANTHROPIC_API_KEY", "")
app = FastAPI(title="Mitai Jinkendo API", version="3.0.0")
# Rate limiting keyed by client IP address (slowapi)
limiter = Limiter(key_func=get_remote_address)
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
app.add_middleware(
    CORSMiddleware,
    # NOTE(review): the "*" default together with allow_credentials=True is
    # rejected by browsers — set ALLOWED_ORIGINS explicitly in production.
    allow_origins=os.getenv("ALLOWED_ORIGINS", "*").split(","),
    allow_credentials=True,
    allow_methods=["GET","POST","PUT","DELETE","OPTIONS"],
    allow_headers=["*"],
)
# Color palette offered for profile avatars
AVATAR_COLORS = ['#1D9E75','#378ADD','#D85A30','#EF9F27','#7F77DD','#D4537E','#639922','#888780']
def init_db():
    """No-op placeholder: schema creation and migrations run in startup.sh.

    Retained so that older call sites invoking init_db() keep working.
    """
    return None
# ── Helper: get profile_id from header ───────────────────────────────────────
def get_pid(x_profile_id: Optional[str] = Header(default=None)) -> str:
    """Resolve the active profile id for legacy endpoints.

    Prefers the X-Profile-Id header; otherwise falls back to the oldest
    profile in the database. Raises 400 when no profile exists at all.
    """
    if x_profile_id:
        return x_profile_id
    with get_db() as conn:
        lookup = get_cursor(conn)
        lookup.execute("SELECT id FROM profiles ORDER BY created LIMIT 1")
        hit = lookup.fetchone()
        if hit:
            return hit['id']
    raise HTTPException(400, "Kein Profil gefunden")
# ── Models ────────────────────────────────────────────────────────────────────
class ProfileCreate(BaseModel):
    """Payload for POST /api/profiles; only `name` is required."""
    name: str
    avatar_color: Optional[str] = '#1D9E75'
    sex: Optional[str] = 'm'  # presumably 'm'/'w' — TODO confirm accepted values
    dob: Optional[str] = None  # date of birth, ISO "YYYY-MM-DD"
    height: Optional[float] = 178  # cm
    goal_weight: Optional[float] = None  # kg
    goal_bf_pct: Optional[float] = None  # body-fat percentage target
class ProfileUpdate(BaseModel):
    """Partial-update payload: fields left as None are ignored by update_profile."""
    name: Optional[str] = None
    avatar_color: Optional[str] = None
    sex: Optional[str] = None
    dob: Optional[str] = None  # ISO "YYYY-MM-DD"
    height: Optional[float] = None  # cm
    goal_weight: Optional[float] = None  # kg
    goal_bf_pct: Optional[float] = None
class WeightEntry(BaseModel):
    """One body-weight measurement for a given day."""
    date: str
    weight: float
    note: Optional[str] = None
class CircumferenceEntry(BaseModel):
    """Body circumference measurements for one date; every body part optional."""
    date: str
    c_neck: Optional[float] = None
    c_chest: Optional[float] = None
    c_waist: Optional[float] = None
    c_belly: Optional[float] = None
    c_hip: Optional[float] = None
    c_thigh: Optional[float] = None
    c_calf: Optional[float] = None
    c_arm: Optional[float] = None
    notes: Optional[str] = None
    photo_id: Optional[str] = None
class CaliperEntry(BaseModel):
    """Skinfold caliper measurements plus derived body-composition values."""
    date: str
    sf_method: Optional[str] = 'jackson3'
    sf_chest: Optional[float] = None
    sf_axilla: Optional[float] = None
    sf_triceps: Optional[float] = None
    sf_subscap: Optional[float] = None
    sf_suprailiac: Optional[float] = None
    sf_abdomen: Optional[float] = None
    sf_thigh: Optional[float] = None
    sf_calf_med: Optional[float] = None
    sf_lowerback: Optional[float] = None
    sf_biceps: Optional[float] = None
    body_fat_pct: Optional[float] = None
    lean_mass: Optional[float] = None
    fat_mass: Optional[float] = None
    notes: Optional[str] = None
class ActivityEntry(BaseModel):
    """One workout or activity session."""
    date: str
    start_time: Optional[str] = None
    end_time: Optional[str] = None
    activity_type: str
    duration_min: Optional[float] = None
    kcal_active: Optional[float] = None
    kcal_resting: Optional[float] = None
    hr_avg: Optional[float] = None
    hr_max: Optional[float] = None
    distance_km: Optional[float] = None
    rpe: Optional[int] = None
    source: Optional[str] = 'manual'
    notes: Optional[str] = None
class NutritionDay(BaseModel):
    """Aggregated nutrition totals for one day."""
    date: str
    kcal: Optional[float] = None
    protein_g: Optional[float] = None
    fat_g: Optional[float] = None
    carbs_g: Optional[float] = None
# ── Profiles ──────────────────────────────────────────────────────────────────
import hashlib, secrets
from datetime import timedelta
def hash_pin(pin: str) -> str:
    """Return a bcrypt hash of *pin* with a fresh random salt."""
    salt = bcrypt.gensalt()
    digest = bcrypt.hashpw(pin.encode(), salt)
    return digest.decode()
def verify_pin(pin: str, stored_hash: str) -> bool:
    """Verify *pin* against a stored hash.

    Supports bcrypt hashes (prefix "$2a$"/"$2b$") and, for backwards
    compatibility, legacy unsalted SHA256 hex digests. Note: this function
    only verifies — re-hashing a legacy hash to bcrypt must be done by the
    caller after a successful login.
    """
    if not stored_hash:
        return False
    # bcrypt hashes always start with "$2"
    if stored_hash.startswith('$2'):
        return bcrypt.checkpw(pin.encode(), stored_hash.encode())
    # Legacy SHA256 fallback (hashlib is already imported at module level;
    # the previous redundant function-local import was removed).
    return hashlib.sha256(pin.encode()).hexdigest() == stored_hash
def make_token() -> str:
    """Generate a cryptographically random, URL-safe session token (32 bytes)."""
    token = secrets.token_urlsafe(32)
    return token
def get_session(token: str):
    """Look up a non-expired session by its token.

    Returns a dict of session columns joined with the owning profile's
    role/name/AI flags, or None when the token is missing, unknown or expired.
    """
    if not token: return None
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            "SELECT s.*, p.role, p.name, p.ai_enabled, p.ai_limit_day, p.export_enabled "
            "FROM sessions s JOIN profiles p ON s.profile_id=p.id "
            "WHERE s.token=%s AND s.expires_at > CURRENT_TIMESTAMP", (token,)
        )
        row = cur.fetchone()
        return r2d(row)
def require_auth(x_auth_token: Optional[str]=Header(default=None)):
    """FastAPI dependency: return the current session or fail with 401."""
    sess = get_session(x_auth_token)
    if not sess:
        raise HTTPException(401, "Nicht eingeloggt")
    return sess
def require_admin(x_auth_token: Optional[str]=Header(default=None)):
    """FastAPI dependency: like require_auth, but also demands the admin role."""
    sess = get_session(x_auth_token)
    if not sess:
        raise HTTPException(401, "Nicht eingeloggt")
    if sess['role'] != 'admin':
        raise HTTPException(403, "Nur für Admins")
    return sess
@app.get("/api/profiles")
def list_profiles(session=Depends(require_auth)):
    """Return every profile, oldest first."""
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute("SELECT * FROM profiles ORDER BY created")
        return [r2d(row) for row in cursor.fetchall()]
@app.post("/api/profiles")
def create_profile(p: ProfileCreate, session=Depends(require_auth)):
    """Create a new profile and return the freshly inserted row.

    Uses a single connection for the INSERT and the read-back SELECT so the
    read is guaranteed to see the write (previously two separate connections
    were opened back to back).
    """
    pid = str(uuid.uuid4())
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("""INSERT INTO profiles (id,name,avatar_color,sex,dob,height,goal_weight,goal_bf_pct,created,updated)
            VALUES (%s,%s,%s,%s,%s,%s,%s,%s,CURRENT_TIMESTAMP,CURRENT_TIMESTAMP)""",
            (pid, p.name, p.avatar_color, p.sex, p.dob, p.height, p.goal_weight, p.goal_bf_pct))
        cur.execute("SELECT * FROM profiles WHERE id=%s", (pid,))
        return r2d(cur.fetchone())
@app.get("/api/profiles/{pid}")
def get_profile(pid: str, session=Depends(require_auth)):
    """Fetch a single profile by id; 404 when it does not exist."""
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute("SELECT * FROM profiles WHERE id=%s", (pid,))
        hit = cursor.fetchone()
    if not hit:
        raise HTTPException(404, "Profil nicht gefunden")
    return r2d(hit)
@app.put("/api/profiles/{pid}")
def update_profile(pid: str, p: ProfileUpdate, session=Depends(require_auth)):
    """Partially update a profile: only fields present in the payload change.

    Column names come from the Pydantic model, never from raw client strings,
    so the f-string SQL cannot be injected through field names.
    """
    with get_db() as conn:
        # Drop unset fields; always bump the `updated` timestamp.
        data = {k:v for k,v in p.model_dump().items() if v is not None}
        data['updated'] = datetime.now().isoformat()
        cur = get_cursor(conn)
        cur.execute(f"UPDATE profiles SET {', '.join(f'{k}=%s' for k in data)} WHERE id=%s",
                    list(data.values())+[pid])
    return get_profile(pid, session)
@app.delete("/api/profiles/{pid}")
def delete_profile(pid: str, session=Depends(require_auth)):
    """Delete a profile and all of its log rows; the last profile is protected."""
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT COUNT(*) as count FROM profiles")
        if cur.fetchone()['count'] <= 1:
            raise HTTPException(400, "Letztes Profil kann nicht gelöscht werden")
        child_tables = ('weight_log', 'circumference_log', 'caliper_log',
                        'nutrition_log', 'activity_log', 'ai_insights')
        for tbl in child_tables:
            cur.execute(f"DELETE FROM {tbl} WHERE profile_id=%s", (pid,))
        cur.execute("DELETE FROM profiles WHERE id=%s", (pid,))
    return {"ok": True}
@app.get("/api/profile")
def get_active_profile(x_profile_id: Optional[str] = Header(default=None), session: dict = Depends(require_auth)):
    """Legacy single-profile endpoint: return the active profile."""
    return get_profile(get_pid(x_profile_id), session)
@app.put("/api/profile")
def update_active_profile(p: ProfileUpdate, x_profile_id: Optional[str] = Header(default=None), session: dict = Depends(require_auth)):
    """Legacy single-profile endpoint: update the active profile."""
    return update_profile(get_pid(x_profile_id), p, session)
# ── Weight ────────────────────────────────────────────────────────────────────
@app.get("/api/weight")
def list_weight(limit: int=365, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Most recent weight entries for the active profile, newest first."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM weight_log WHERE profile_id=%s ORDER BY date DESC LIMIT %s", (pid, limit))
        return [r2d(row) for row in cursor.fetchall()]
@app.post("/api/weight")
def upsert_weight(e: WeightEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Insert a weight entry, or overwrite the existing entry for that date."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT id FROM weight_log WHERE profile_id=%s AND date=%s", (pid, e.date))
        existing = cur.fetchone()
        if existing is None:
            wid = str(uuid.uuid4())
            cur.execute("INSERT INTO weight_log (id,profile_id,date,weight,note,created) VALUES (%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)",
                        (wid, pid, e.date, e.weight, e.note))
        else:
            wid = existing['id']
            cur.execute("UPDATE weight_log SET weight=%s,note=%s WHERE id=%s", (e.weight, e.note, wid))
    return {"id": wid, "date": e.date, "weight": e.weight}
@app.put("/api/weight/{wid}")
def update_weight(wid: str, e: WeightEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Overwrite an existing weight entry (scoped to the active profile)."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "UPDATE weight_log SET date=%s,weight=%s,note=%s WHERE id=%s AND profile_id=%s",
            (e.date, e.weight, e.note, wid, pid))
    return {"id": wid}
@app.delete("/api/weight/{wid}")
def delete_weight(wid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Remove a weight entry belonging to the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "DELETE FROM weight_log WHERE id=%s AND profile_id=%s", (wid, pid))
    return {"ok": True}
@app.get("/api/weight/stats")
def weight_stats(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Summary statistics over the 90 most recent weight entries."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT date,weight FROM weight_log WHERE profile_id=%s ORDER BY date DESC LIMIT 90", (pid,))
        rows = cur.fetchall()
        if not rows: return {"count":0,"latest":None,"prev":None,"min":None,"max":None,"avg_7d":None}
        # Decimal -> float for JSON serialization
        w=[float(r['weight']) for r in rows]
        # rows[0] is the newest entry; avg_7d averages up to the 7 newest values
        return {"count":len(rows),"latest":{"date":rows[0]['date'],"weight":float(rows[0]['weight'])},
                "prev":{"date":rows[1]['date'],"weight":float(rows[1]['weight'])} if len(rows)>1 else None,
                "min":min(w),"max":max(w),"avg_7d":round(sum(w[:7])/min(7,len(w)),2)}
# ── Circumferences ────────────────────────────────────────────────────────────
@app.get("/api/circumferences")
def list_circs(limit: int=100, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Newest circumference entries for the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM circumference_log WHERE profile_id=%s ORDER BY date DESC LIMIT %s", (pid, limit))
        return [r2d(row) for row in cursor.fetchall()]
@app.post("/api/circumferences")
def upsert_circ(e: CircumferenceEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Insert, or overwrite, the circumference record for a given date."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT id FROM circumference_log WHERE profile_id=%s AND date=%s", (pid,e.date))
        ex = cur.fetchone()
        d = e.model_dump()
        if ex:
            eid = ex['id']
            # Update every field except the date; SET columns and the value
            # list stay aligned because both iterate the same dict.
            sets = ', '.join(f"{k}=%s" for k in d if k!='date')
            cur.execute(f"UPDATE circumference_log SET {sets} WHERE id=%s",
                        [v for k,v in d.items() if k!='date']+[eid])
        else:
            eid = str(uuid.uuid4())
            cur.execute("""INSERT INTO circumference_log
                (id,profile_id,date,c_neck,c_chest,c_waist,c_belly,c_hip,c_thigh,c_calf,c_arm,notes,photo_id,created)
                VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)""",
                (eid,pid,d['date'],d['c_neck'],d['c_chest'],d['c_waist'],d['c_belly'],
                 d['c_hip'],d['c_thigh'],d['c_calf'],d['c_arm'],d['notes'],d['photo_id']))
    return {"id":eid,"date":e.date}
@app.put("/api/circumferences/{eid}")
def update_circ(eid: str, e: CircumferenceEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Full update of a circumference entry; every model field is written."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        d = e.model_dump()
        cur = get_cursor(conn)
        # Column names come from the Pydantic model, not from client strings.
        cur.execute(f"UPDATE circumference_log SET {', '.join(f'{k}=%s' for k in d)} WHERE id=%s AND profile_id=%s",
                    list(d.values())+[eid,pid])
    return {"id":eid}
@app.delete("/api/circumferences/{eid}")
def delete_circ(eid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Remove a circumference entry belonging to the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "DELETE FROM circumference_log WHERE id=%s AND profile_id=%s", (eid, pid))
    return {"ok": True}
# ── Caliper ───────────────────────────────────────────────────────────────────
@app.get("/api/caliper")
def list_caliper(limit: int=100, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Newest caliper entries for the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM caliper_log WHERE profile_id=%s ORDER BY date DESC LIMIT %s", (pid, limit))
        return [r2d(row) for row in cursor.fetchall()]
@app.post("/api/caliper")
def upsert_caliper(e: CaliperEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Insert, or overwrite, the caliper record for a given date."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT id FROM caliper_log WHERE profile_id=%s AND date=%s", (pid,e.date))
        ex = cur.fetchone()
        d = e.model_dump()
        if ex:
            eid = ex['id']
            # Update every field except the date; SET columns and the value
            # list stay aligned because both iterate the same dict.
            sets = ', '.join(f"{k}=%s" for k in d if k!='date')
            cur.execute(f"UPDATE caliper_log SET {sets} WHERE id=%s",
                        [v for k,v in d.items() if k!='date']+[eid])
        else:
            eid = str(uuid.uuid4())
            cur.execute("""INSERT INTO caliper_log
                (id,profile_id,date,sf_method,sf_chest,sf_axilla,sf_triceps,sf_subscap,sf_suprailiac,
                 sf_abdomen,sf_thigh,sf_calf_med,sf_lowerback,sf_biceps,body_fat_pct,lean_mass,fat_mass,notes,created)
                VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)""",
                (eid,pid,d['date'],d['sf_method'],d['sf_chest'],d['sf_axilla'],d['sf_triceps'],
                 d['sf_subscap'],d['sf_suprailiac'],d['sf_abdomen'],d['sf_thigh'],d['sf_calf_med'],
                 d['sf_lowerback'],d['sf_biceps'],d['body_fat_pct'],d['lean_mass'],d['fat_mass'],d['notes']))
    return {"id":eid,"date":e.date}
@app.put("/api/caliper/{eid}")
def update_caliper(eid: str, e: CaliperEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Full update of a caliper entry; every model field is written."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        d = e.model_dump()
        cur = get_cursor(conn)
        # Column names come from the Pydantic model, not from client strings.
        cur.execute(f"UPDATE caliper_log SET {', '.join(f'{k}=%s' for k in d)} WHERE id=%s AND profile_id=%s",
                    list(d.values())+[eid,pid])
    return {"id":eid}
@app.delete("/api/caliper/{eid}")
def delete_caliper(eid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Remove a caliper entry belonging to the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "DELETE FROM caliper_log WHERE id=%s AND profile_id=%s", (eid, pid))
    return {"ok": True}
# ── Activity ──────────────────────────────────────────────────────────────────
@app.get("/api/activity")
def list_activity(limit: int=200, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Newest activity entries for the active profile (date, then start time)."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM activity_log WHERE profile_id=%s ORDER BY date DESC, start_time DESC LIMIT %s", (pid, limit))
        return [r2d(row) for row in cursor.fetchall()]
@app.post("/api/activity")
def create_activity(e: ActivityEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Insert a new activity row (no upsert: multiple sessions per day allowed)."""
    pid = get_pid(x_profile_id)
    eid = str(uuid.uuid4())
    d = e.model_dump()
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("""INSERT INTO activity_log
            (id,profile_id,date,start_time,end_time,activity_type,duration_min,kcal_active,kcal_resting,
             hr_avg,hr_max,distance_km,rpe,source,notes,created)
            VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,CURRENT_TIMESTAMP)""",
            (eid,pid,d['date'],d['start_time'],d['end_time'],d['activity_type'],d['duration_min'],
             d['kcal_active'],d['kcal_resting'],d['hr_avg'],d['hr_max'],d['distance_km'],
             d['rpe'],d['source'],d['notes']))
    return {"id":eid,"date":e.date}
@app.put("/api/activity/{eid}")
def update_activity(eid: str, e: ActivityEntry, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Full update of an activity entry; every model field is written."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        d = e.model_dump()
        cur = get_cursor(conn)
        # Column names come from the Pydantic model, not from client strings.
        cur.execute(f"UPDATE activity_log SET {', '.join(f'{k}=%s' for k in d)} WHERE id=%s AND profile_id=%s",
                    list(d.values())+[eid,pid])
    return {"id":eid}
@app.delete("/api/activity/{eid}")
def delete_activity(eid: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Remove an activity entry belonging to the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "DELETE FROM activity_log WHERE id=%s AND profile_id=%s", (eid, pid))
    return {"ok": True}
@app.get("/api/activity/stats")
def activity_stats(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Totals over the 30 most recent activities, plus a per-type breakdown."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            "SELECT * FROM activity_log WHERE profile_id=%s ORDER BY date DESC LIMIT 30", (pid,))
        rows = [r2d(r) for r in cur.fetchall()]
        if not rows: return {"count":0,"total_kcal":0,"total_min":0,"by_type":{}}
        # NULL columns come back as None -> treated as 0 in all sums
        total_kcal=sum(float(r.get('kcal_active') or 0) for r in rows)
        total_min=sum(float(r.get('duration_min') or 0) for r in rows)
        by_type={}
        for r in rows:
            t=r['activity_type']; by_type.setdefault(t,{'count':0,'kcal':0,'min':0})
            by_type[t]['count']+=1
            by_type[t]['kcal']+=float(r.get('kcal_active') or 0)
            by_type[t]['min']+=float(r.get('duration_min') or 0)
        return {"count":len(rows),"total_kcal":round(total_kcal),"total_min":round(total_min),"by_type":by_type}
@app.post("/api/activity/import-csv")
async def import_activity_csv(file: UploadFile=File(...), x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Import workouts from an Apple-Health-style CSV export.

    Energy columns arrive in kJ and are converted to kcal (/4.184). Rows
    without a workout type or start timestamp are ignored; rows that fail to
    insert are counted as skipped instead of aborting the whole import.
    """
    pid = get_pid(x_profile_id)
    raw = await file.read()
    try:
        text = raw.decode('utf-8')
    except UnicodeDecodeError:
        text = raw.decode('latin-1')  # fallback for non-UTF8 exports
    if text.startswith('\ufeff'):
        text = text[1:]  # strip BOM
    if not text.strip():
        raise HTTPException(400,"Leere Datei")

    # Parsing helpers hoisted out of the row loop (previously re-defined on
    # every iteration).
    def kj(v):
        # kJ string -> whole kcal; None when empty or unparseable
        try:
            return round(float(v)/4.184) if v else None
        except (TypeError, ValueError):
            return None

    def tf(v):
        # numeric string -> float rounded to 1 decimal; None when empty/unparseable
        try:
            return round(float(v),1) if v else None
        except (TypeError, ValueError):
            return None

    reader = csv.DictReader(io.StringIO(text))
    inserted = skipped = 0
    with get_db() as conn:
        cur = get_cursor(conn)
        for row in reader:
            wtype = row.get('Workout Type','').strip()
            start = row.get('Start','').strip()
            if not wtype or not start:
                continue
            date = start[:10]  # ISO timestamp prefix "YYYY-MM-DD"
            duration_min = None
            dur = row.get('Duration','').strip()
            if dur:
                try:
                    h, m, s = dur.split(':')
                    duration_min = round(int(h)*60+int(m)+int(s)/60,1)
                except ValueError:
                    pass  # unexpected duration format -> leave as None
            try:
                cur.execute("""INSERT INTO activity_log
                    (id,profile_id,date,start_time,end_time,activity_type,duration_min,kcal_active,kcal_resting,
                     hr_avg,hr_max,distance_km,source,created)
                    VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,'apple_health',CURRENT_TIMESTAMP)""",
                    (str(uuid.uuid4()),pid,date,start,row.get('End',''),wtype,duration_min,
                     kj(row.get('Aktive Energie (kJ)','')),kj(row.get('Ruheeinträge (kJ)','')),
                     tf(row.get('Durchschn. Herzfrequenz (count/min)','')),
                     tf(row.get('Max. Herzfrequenz (count/min)','')),
                     tf(row.get('Distanz (km)',''))))
                inserted+=1
            except Exception:
                # Count the malformed row and keep going.
                skipped+=1
    return {"inserted":inserted,"skipped":skipped,"message":f"{inserted} Trainings importiert"}
# ── Photos ────────────────────────────────────────────────────────────────────
@app.post("/api/photos")
async def upload_photo(file: UploadFile=File(...), date: str="",
                       x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Store an uploaded photo on disk and register it in the photos table.

    The file is written under PHOTOS_DIR as "<uuid><original-extension>"; the
    original filename itself is not kept.
    """
    pid = get_pid(x_profile_id)
    fid = str(uuid.uuid4())
    ext = Path(file.filename).suffix or '.jpg'  # default when no extension given
    path = PHOTOS_DIR / f"{fid}{ext}"
    # Async write so the event loop is not blocked by disk I/O
    async with aiofiles.open(path,'wb') as f: await f.write(await file.read())
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("INSERT INTO photos (id,profile_id,date,path,created) VALUES (%s,%s,%s,%s,CURRENT_TIMESTAMP)",
                    (fid,pid,date,str(path)))
    return {"id":fid,"date":date}
@app.get("/api/photos/{fid}")
def get_photo(fid: str, session: dict=Depends(require_auth)):
    """Stream a stored photo file by its id; 404 when unknown."""
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute("SELECT path FROM photos WHERE id=%s", (fid,))
        hit = cursor.fetchone()
    if not hit:
        raise HTTPException(404)
    return FileResponse(hit['path'])
@app.get("/api/photos")
def list_photos(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Newest 100 photo records for the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM photos WHERE profile_id=%s ORDER BY created DESC LIMIT 100", (pid,))
        return [r2d(row) for row in cursor.fetchall()]
# ── Nutrition ─────────────────────────────────────────────────────────────────
def _pf(s):
try: return float(str(s).replace(',','.').strip())
except: return 0.0
@app.post("/api/nutrition/import-csv")
async def import_nutrition_csv(file: UploadFile=File(...), x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Import a semicolon-delimited nutrition CSV and aggregate it per day.

    Expects a timestamp column 'datum_tag_monat_jahr_stunde_minute' in
    "DD.MM.YYYY HH:MM" form and energy in kJ (converted to kcal). Existing
    nutrition_log rows for a day are overwritten with the new totals.
    """
    pid = get_pid(x_profile_id)
    raw = await file.read()
    try: text = raw.decode('utf-8')
    except: text = raw.decode('latin-1')  # fallback for non-UTF8 exports
    if text.startswith('\ufeff'): text = text[1:]  # strip BOM
    if not text.strip(): raise HTTPException(400,"Leere Datei")
    reader = csv.DictReader(io.StringIO(text), delimiter=';')
    days: dict = {}  # ISO date -> summed macros for that day
    count = 0
    for row in reader:
        rd = row.get('datum_tag_monat_jahr_stunde_minute','').strip().strip('"')
        if not rd: continue
        try:
            # "DD.MM.YYYY HH:MM" -> "YYYY-MM-DD"
            p = rd.split(' ')[0].split('.')
            iso = f"{p[2]}-{p[1]}-{p[0]}"
        except: continue  # malformed date -> skip the row
        days.setdefault(iso,{'kcal':0,'fat_g':0,'carbs_g':0,'protein_g':0})
        days[iso]['kcal'] += _pf(row.get('kj',0))/4.184  # kJ -> kcal
        days[iso]['fat_g'] += _pf(row.get('fett_g',0))
        days[iso]['carbs_g'] += _pf(row.get('kh_g',0))
        days[iso]['protein_g'] += _pf(row.get('protein_g',0))
        count+=1
    inserted=0
    with get_db() as conn:
        cur = get_cursor(conn)
        for iso,vals in days.items():
            kcal=round(vals['kcal'],1); fat=round(vals['fat_g'],1)
            carbs=round(vals['carbs_g'],1); prot=round(vals['protein_g'],1)
            cur.execute("SELECT id FROM nutrition_log WHERE profile_id=%s AND date=%s",(pid,iso))
            if cur.fetchone():
                cur.execute("UPDATE nutrition_log SET kcal=%s,protein_g=%s,fat_g=%s,carbs_g=%s WHERE profile_id=%s AND date=%s",
                            (kcal,prot,fat,carbs,pid,iso))
            else:
                cur.execute("INSERT INTO nutrition_log (id,profile_id,date,kcal,protein_g,fat_g,carbs_g,source,created) VALUES (%s,%s,%s,%s,%s,%s,%s,'csv',CURRENT_TIMESTAMP)",
                            (str(uuid.uuid4()),pid,iso,kcal,prot,fat,carbs))
            inserted+=1
    return {"rows_parsed":count,"days_imported":inserted,
            "date_range":{"from":min(days) if days else None,"to":max(days) if days else None}}
@app.get("/api/nutrition")
def list_nutrition(limit: int=365, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Newest nutrition day entries for the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM nutrition_log WHERE profile_id=%s ORDER BY date DESC LIMIT %s", (pid, limit))
        return [r2d(row) for row in cursor.fetchall()]
@app.get("/api/nutrition/correlations")
def nutrition_correlations(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Join nutrition, weight and caliper data per date for correlation charts.

    Caliper data is sparse, so each date carries forward the most recent
    caliper measurement at or before that date.
    """
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM nutrition_log WHERE profile_id=%s ORDER BY date",(pid,))
        nutr={r['date']:r2d(r) for r in cur.fetchall()}
        cur.execute("SELECT date,weight FROM weight_log WHERE profile_id=%s ORDER BY date",(pid,))
        wlog={r['date']:r['weight'] for r in cur.fetchall()}
        cur.execute("SELECT date,lean_mass,body_fat_pct FROM caliper_log WHERE profile_id=%s ORDER BY date",(pid,))
        cals=sorted([r2d(r) for r in cur.fetchall()],key=lambda x:x['date'])
        all_dates=sorted(set(list(nutr)+list(wlog)))
        # Forward-fill: for each date remember the latest caliper entry <= date.
        mi,last_cal,cal_by_date=0,{},{}
        for d in all_dates:
            while mi<len(cals) and cals[mi]['date']<=d: last_cal=cals[mi]; mi+=1
            if last_cal: cal_by_date[d]=last_cal
        result=[]
        for d in all_dates:
            if d not in nutr and d not in wlog: continue
            row={'date':d}
            # Decimal -> float (or None) so the response is JSON-friendly.
            if d in nutr: row.update({k:float(nutr[d][k]) if nutr[d][k] is not None else None for k in ['kcal','protein_g','fat_g','carbs_g']})
            if d in wlog: row['weight']=float(wlog[d])
            if d in cal_by_date:
                lm = cal_by_date[d].get('lean_mass')
                bf = cal_by_date[d].get('body_fat_pct')
                row['lean_mass']=float(lm) if lm is not None else None
                row['body_fat_pct']=float(bf) if bf is not None else None
            result.append(row)
        return result
@app.get("/api/nutrition/weekly")
def nutrition_weekly(weeks: int=16, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Average daily macros per ISO week over the most recent `weeks` weeks.

    PostgreSQL drivers can return DATE columns as datetime.date objects rather
    than strings (the same issue fixed for dob in the ZIP export), so the date
    is normalized before the ISO-week key is computed.
    """
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM nutrition_log WHERE profile_id=%s ORDER BY date DESC LIMIT %s",(pid,weeks*7))
        rows=[r2d(r) for r in cur.fetchall()]
    if not rows: return []
    wm={}
    for d in rows:
        raw_date = d['date']
        if isinstance(raw_date, str):
            raw_date = datetime.strptime(raw_date, '%Y-%m-%d').date()
        wk = raw_date.strftime('%Y-W%V')  # ISO week key, e.g. "2026-W12"
        wm.setdefault(wk,[]).append(d)
    result=[]
    for wk in sorted(wm):
        en=wm[wk]; n=len(en)
        # Missing/None macro values count as 0 in the average.
        def avg(k): return round(sum(float(e.get(k) or 0) for e in en)/n,1)
        result.append({'week':wk,'days':n,'kcal':avg('kcal'),'protein_g':avg('protein_g'),'fat_g':avg('fat_g'),'carbs_g':avg('carbs_g')})
    return result
# ── Stats ─────────────────────────────────────────────────────────────────────
@app.get("/api/stats")
def get_stats(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Row counts of every log table for the active profile.

    Replaces five copy-pasted COUNT queries with a data-driven loop; the
    response keys and their order are unchanged.
    """
    pid = get_pid(x_profile_id)
    # (response key, table) pairs — table names are hard-coded, not user input.
    tables = [
        ("weight_count", "weight_log"),
        ("circ_count", "circumference_log"),
        ("caliper_count", "caliper_log"),
        ("nutrition_count", "nutrition_log"),
        ("activity_count", "activity_log"),
    ]
    stats = {}
    with get_db() as conn:
        cur = get_cursor(conn)
        for key, table in tables:
            cur.execute(f"SELECT COUNT(*) as count FROM {table} WHERE profile_id=%s", (pid,))
            stats[key] = cur.fetchone()['count']
    return stats
# ── AI Insights ───────────────────────────────────────────────────────────────
import httpx, json
@app.get("/api/insights")
def get_all_insights(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Return every stored AI insight for the profile, newest first."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute("SELECT * FROM ai_insights WHERE profile_id=%s ORDER BY created DESC", (pid,))
        return [r2d(row) for row in cursor.fetchall()]
@app.get("/api/insights/latest")
def get_latest_insights(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Return the 10 newest AI insights across all scopes."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute("SELECT * FROM ai_insights WHERE profile_id=%s ORDER BY created DESC LIMIT 10", (pid,))
        return [r2d(row) for row in cursor.fetchall()]
@app.get("/api/ai/insights/{scope}")
def get_ai_insight(scope: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Return the newest AI insight for one scope, or None if absent."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute("SELECT * FROM ai_insights WHERE profile_id=%s AND scope=%s ORDER BY created DESC LIMIT 1", (pid, scope))
        hit = cursor.fetchone()
    return r2d(hit) if hit else None
@app.delete("/api/insights/{insight_id}")
def delete_insight_by_id(insight_id: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Delete one AI insight by id (scoped to the active profile)."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "DELETE FROM ai_insights WHERE id=%s AND profile_id=%s", (insight_id, pid))
    return {"ok": True}
@app.delete("/api/ai/insights/{scope}")
def delete_ai_insight(scope: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Delete every AI insight of one scope for the active profile."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        get_cursor(conn).execute(
            "DELETE FROM ai_insights WHERE profile_id=%s AND scope=%s", (pid, scope))
    return {"ok": True}
def check_ai_limit(pid: str):
    """Enforce the profile's daily AI call budget.

    Returns (allowed, limit, used). Raises 403 when AI is disabled for the
    profile, and 429 when today's call count has reached the limit.
    A NULL ai_limit_day means unlimited.
    """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT ai_enabled, ai_limit_day FROM profiles WHERE id=%s", (pid,))
        prof = cur.fetchone()
        if not prof or not prof['ai_enabled']:
            raise HTTPException(403, "KI ist für dieses Profil deaktiviert")
        limit = prof['ai_limit_day']
        if limit is None:
            return (True, None, 0)
        today = datetime.now().date().isoformat()
        cur.execute("SELECT call_count FROM ai_usage WHERE profile_id=%s AND date=%s", (pid, today))
        usage = cur.fetchone()
        used = usage['call_count'] if usage else 0
        if used >= limit:
            raise HTTPException(429, f"Tägliches KI-Limit erreicht ({limit} Calls)")
        return (True, limit, used)
def inc_ai_usage(pid: str):
    """Record one AI call in today's usage row, creating the row on first use."""
    today = datetime.now().date().isoformat()
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT id, call_count FROM ai_usage WHERE profile_id=%s AND date=%s", (pid, today))
        existing = cur.fetchone()
        if existing is None:
            cur.execute("INSERT INTO ai_usage (id, profile_id, date, call_count) VALUES (%s,%s,%s,1)",
                        (str(uuid.uuid4()), pid, today))
        else:
            cur.execute("UPDATE ai_usage SET call_count=%s WHERE id=%s",
                        (existing['call_count'] + 1, existing['id']))
def _get_profile_data(pid: str):
    """Collect the profile plus its recent logs (input for AI analysis).

    Returns a dict with keys: profile, weight, circumference, caliper,
    nutrition, activity — log lists are newest-first with per-table limits.
    """
    # (result key, table, row limit) — same queries as before, data-driven.
    specs = (
        ("weight", "weight_log", 90),
        ("circumference", "circumference_log", 30),
        ("caliper", "caliper_log", 30),
        ("nutrition", "nutrition_log", 90),
        ("activity", "activity_log", 90),
    )
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM profiles WHERE id=%s", (pid,))
        out = {"profile": r2d(cur.fetchone())}
        for key, table, limit in specs:
            cur.execute(f"SELECT * FROM {table} WHERE profile_id=%s ORDER BY date DESC LIMIT %s", (pid, limit))
            out[key] = [r2d(r) for r in cur.fetchall()]
    return out
def _render_template(template: str, data: dict) -> str:
"""Simple template variable replacement."""
result = template
for k, v in data.items():
result = result.replace(f"{{{{{k}}}}}", str(v) if v is not None else "")
return result
def _prepare_template_vars(data: dict) -> dict:
"""Prepare template variables from profile data."""
prof = data['profile']
weight = data['weight']
circ = data['circumference']
caliper = data['caliper']
nutrition = data['nutrition']
activity = data['activity']
vars = {
"name": prof.get('name', 'Nutzer'),
"geschlecht": "männlich" if prof.get('sex') == 'm' else "weiblich",
"height": prof.get('height', 178),
"goal_weight": float(prof.get('goal_weight')) if prof.get('goal_weight') else "nicht gesetzt",
"goal_bf_pct": float(prof.get('goal_bf_pct')) if prof.get('goal_bf_pct') else "nicht gesetzt",
"weight_aktuell": float(weight[0]['weight']) if weight else "keine Daten",
"kf_aktuell": float(caliper[0]['body_fat_pct']) if caliper and caliper[0].get('body_fat_pct') else "unbekannt",
}
# Calculate age from dob
if prof.get('dob'):
try:
from datetime import date
dob = datetime.strptime(prof['dob'], '%Y-%m-%d').date()
today = date.today()
age = today.year - dob.year - ((today.month, today.day) < (dob.month, dob.day))
vars['age'] = age
except:
vars['age'] = "unbekannt"
else:
vars['age'] = "unbekannt"
# Weight trend summary
if len(weight) >= 2:
recent = weight[:30]
delta = float(recent[0]['weight']) - float(recent[-1]['weight'])
vars['weight_trend'] = f"{len(recent)} Einträge, Δ30d: {delta:+.1f}kg"
else:
vars['weight_trend'] = "zu wenig Daten"
# Caliper summary
if caliper:
c = caliper[0]
bf = float(c.get('body_fat_pct')) if c.get('body_fat_pct') else '?'
vars['caliper_summary'] = f"KF: {bf}%, Methode: {c.get('sf_method','?')}"
else:
vars['caliper_summary'] = "keine Daten"
# Circumference summary
if circ:
c = circ[0]
parts = []
for k in ['c_waist', 'c_belly', 'c_hip']:
if c.get(k): parts.append(f"{k.split('_')[1]}: {float(c[k])}cm")
vars['circ_summary'] = ", ".join(parts) if parts else "keine Daten"
else:
vars['circ_summary'] = "keine Daten"
# Nutrition summary
if nutrition:
n = len(nutrition)
avg_kcal = sum(float(d.get('kcal',0) or 0) for d in nutrition) / n
avg_prot = sum(float(d.get('protein_g',0) or 0) for d in nutrition) / n
vars['nutrition_summary'] = f"{n} Tage, Ø {avg_kcal:.0f}kcal, {avg_prot:.0f}g Protein"
vars['nutrition_detail'] = vars['nutrition_summary']
vars['nutrition_days'] = n
vars['kcal_avg'] = round(avg_kcal)
vars['protein_avg'] = round(avg_prot,1)
vars['fat_avg'] = round(sum(float(d.get('fat_g',0) or 0) for d in nutrition) / n,1)
vars['carb_avg'] = round(sum(float(d.get('carbs_g',0) or 0) for d in nutrition) / n,1)
else:
vars['nutrition_summary'] = "keine Daten"
vars['nutrition_detail'] = "keine Daten"
vars['nutrition_days'] = 0
vars['kcal_avg'] = 0
vars['protein_avg'] = 0
vars['fat_avg'] = 0
vars['carb_avg'] = 0
# Protein targets
w = weight[0]['weight'] if weight else prof.get('height',178) - 100
w = float(w) # Convert Decimal to float for math operations
vars['protein_ziel_low'] = round(w * 1.6)
vars['protein_ziel_high'] = round(w * 2.2)
# Activity summary
if activity:
n = len(activity)
total_kcal = sum(float(a.get('kcal_active',0) or 0) for a in activity)
vars['activity_summary'] = f"{n} Trainings, {total_kcal:.0f}kcal gesamt"
vars['activity_detail'] = vars['activity_summary']
vars['activity_kcal_summary'] = f"Ø {total_kcal/n:.0f}kcal/Training"
else:
vars['activity_summary'] = "keine Daten"
vars['activity_detail'] = "keine Daten"
vars['activity_kcal_summary'] = "keine Daten"
return vars
@app.post("/api/insights/run/{slug}")
async def analyze_with_prompt(slug: str, x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Run AI analysis with specified prompt template.

    Resolves the active prompt by *slug*, renders its {{placeholders}} from
    the profile's recent logs, calls the configured AI backend (Anthropic SDK
    when ANTHROPIC_KEY is set, otherwise OpenRouter), replaces the stored
    insight for that scope and counts one AI usage call.

    Raises:
        404 if no active prompt matches *slug*.
        403/429 via check_ai_limit, before any tokens are spent.
        500 if no AI backend is configured or the OpenRouter call fails.
    """
    pid = get_pid(x_profile_id)
    check_ai_limit(pid)
    # Get prompt template
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM ai_prompts WHERE slug=%s AND active=true", (slug,))
        prompt_row = cur.fetchone()
        if not prompt_row:
            raise HTTPException(404, f"Prompt '{slug}' nicht gefunden")
        prompt_tmpl = prompt_row['template']
    data = _get_profile_data(pid)
    vars = _prepare_template_vars(data)
    final_prompt = _render_template(prompt_tmpl, vars)
    # Call AI
    if ANTHROPIC_KEY:
        # Use Anthropic SDK
        # NOTE(review): this is a synchronous SDK call inside an async handler —
        # it blocks the event loop while the model responds; consider
        # anthropic.AsyncAnthropic. Confirm before changing.
        import anthropic
        client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
        response = client.messages.create(
            model="claude-sonnet-4-20250514",
            max_tokens=2000,
            messages=[{"role": "user", "content": final_prompt}]
        )
        content = response.content[0].text
    elif OPENROUTER_KEY:
        # assumes httpx is imported elsewhere in this module — TODO confirm
        async with httpx.AsyncClient() as client:
            resp = await client.post("https://openrouter.ai/api/v1/chat/completions",
                headers={"Authorization": f"Bearer {OPENROUTER_KEY}"},
                json={
                    "model": OPENROUTER_MODEL,
                    "messages": [{"role": "user", "content": final_prompt}],
                    "max_tokens": 2000
                },
                timeout=60.0
            )
            if resp.status_code != 200:
                raise HTTPException(500, f"KI-Fehler: {resp.text}")
            content = resp.json()['choices'][0]['message']['content']
    else:
        raise HTTPException(500, "Keine KI-API konfiguriert")
    # Save insight
    with get_db() as conn:
        cur = get_cursor(conn)
        # One stored insight per (profile, scope): replace any previous run.
        cur.execute("DELETE FROM ai_insights WHERE profile_id=%s AND scope=%s", (pid, slug))
        cur.execute("INSERT INTO ai_insights (id, profile_id, scope, content, created) VALUES (%s,%s,%s,%s,CURRENT_TIMESTAMP)",
                    (str(uuid.uuid4()), pid, slug, content))
    inc_ai_usage(pid)
    return {"scope": slug, "content": content}
@app.post("/api/insights/pipeline")
async def analyze_pipeline(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Run 3-stage pipeline analysis.

    Stage 1 runs every active 'pipeline_*' prompt (except synthesis/goals)
    and collects their outputs, parsed as JSON when possible. Stage 2 feeds
    those results into the 'pipeline_synthesis' prompt. Stage 3 appends a
    'pipeline_goals' section only when the profile has a goal set. The
    combined text replaces the stored insight under scope 'gesamt' and
    counts one AI usage call.
    """
    pid = get_pid(x_profile_id)
    check_ai_limit(pid)
    data = _get_profile_data(pid)
    vars = _prepare_template_vars(data)
    # Stage 1: Parallel JSON analyses
    # NOTE(review): despite the comment, the stage-1 prompts below run
    # sequentially in this loop; "parallel" refers to them being independent.
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT slug, template FROM ai_prompts WHERE slug LIKE 'pipeline_%' AND slug NOT IN ('pipeline_synthesis','pipeline_goals') AND active=true")
        stage1_prompts = [r2d(r) for r in cur.fetchall()]
    stage1_results = {}
    for p in stage1_prompts:
        slug = p['slug']
        final_prompt = _render_template(p['template'], vars)
        if ANTHROPIC_KEY:
            # NOTE(review): blocking SDK call in an async handler — see
            # analyze_with_prompt; same trade-off applies here, per prompt.
            import anthropic
            client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
            response = client.messages.create(
                model="claude-sonnet-4-20250514",
                max_tokens=1000,
                messages=[{"role": "user", "content": final_prompt}]
            )
            content = response.content[0].text.strip()
        elif OPENROUTER_KEY:
            # assumes httpx is imported elsewhere in this module — TODO confirm
            async with httpx.AsyncClient() as client:
                resp = await client.post("https://openrouter.ai/api/v1/chat/completions",
                    headers={"Authorization": f"Bearer {OPENROUTER_KEY}"},
                    json={
                        "model": OPENROUTER_MODEL,
                        "messages": [{"role": "user", "content": final_prompt}],
                        "max_tokens": 1000
                    },
                    timeout=60.0
                )
                content = resp.json()['choices'][0]['message']['content'].strip()
        else:
            raise HTTPException(500, "Keine KI-API konfiguriert")
        # Try to parse JSON, fallback to raw text
        try:
            stage1_results[slug] = json.loads(content)
        except:
            stage1_results[slug] = content
    # Stage 2: Synthesis — stage-1 outputs are handed to the synthesis prompt
    # as JSON strings via these three fixed template variables.
    vars['stage1_body'] = json.dumps(stage1_results.get('pipeline_body', {}), ensure_ascii=False)
    vars['stage1_nutrition'] = json.dumps(stage1_results.get('pipeline_nutrition', {}), ensure_ascii=False)
    vars['stage1_activity'] = json.dumps(stage1_results.get('pipeline_activity', {}), ensure_ascii=False)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT template FROM ai_prompts WHERE slug='pipeline_synthesis' AND active=true")
        synth_row = cur.fetchone()
    if not synth_row:
        raise HTTPException(500, "Pipeline synthesis prompt not found")
    synth_prompt = _render_template(synth_row['template'], vars)
    if ANTHROPIC_KEY:
        import anthropic
        client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
        response = client.messages.create(
            model="claude-sonnet-4-20250514",
            max_tokens=2000,
            messages=[{"role": "user", "content": synth_prompt}]
        )
        synthesis = response.content[0].text
    elif OPENROUTER_KEY:
        async with httpx.AsyncClient() as client:
            resp = await client.post("https://openrouter.ai/api/v1/chat/completions",
                headers={"Authorization": f"Bearer {OPENROUTER_KEY}"},
                json={
                    "model": OPENROUTER_MODEL,
                    "messages": [{"role": "user", "content": synth_prompt}],
                    "max_tokens": 2000
                },
                timeout=60.0
            )
            synthesis = resp.json()['choices'][0]['message']['content']
    else:
        raise HTTPException(500, "Keine KI-API konfiguriert")
    # Stage 3: Goals (only if goals are set)
    goals_text = None
    prof = data['profile']
    if prof.get('goal_weight') or prof.get('goal_bf_pct'):
        with get_db() as conn:
            cur = get_cursor(conn)
            cur.execute("SELECT template FROM ai_prompts WHERE slug='pipeline_goals' AND active=true")
            goals_row = cur.fetchone()
        # A missing goals prompt is silently skipped (no 500 here,
        # unlike the mandatory synthesis stage above).
        if goals_row:
            goals_prompt = _render_template(goals_row['template'], vars)
            if ANTHROPIC_KEY:
                import anthropic
                client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
                response = client.messages.create(
                    model="claude-sonnet-4-20250514",
                    max_tokens=800,
                    messages=[{"role": "user", "content": goals_prompt}]
                )
                goals_text = response.content[0].text
            elif OPENROUTER_KEY:
                async with httpx.AsyncClient() as client:
                    resp = await client.post("https://openrouter.ai/api/v1/chat/completions",
                        headers={"Authorization": f"Bearer {OPENROUTER_KEY}"},
                        json={
                            "model": OPENROUTER_MODEL,
                            "messages": [{"role": "user", "content": goals_prompt}],
                            "max_tokens": 800
                        },
                        timeout=60.0
                    )
                    goals_text = resp.json()['choices'][0]['message']['content']
    # Combine synthesis + goals
    final_content = synthesis
    if goals_text:
        final_content += "\n\n" + goals_text
    # Save as 'gesamt' scope (replacing any previous pipeline result)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("DELETE FROM ai_insights WHERE profile_id=%s AND scope='gesamt'", (pid,))
        cur.execute("INSERT INTO ai_insights (id, profile_id, scope, content, created) VALUES (%s,%s,'gesamt',%s,CURRENT_TIMESTAMP)",
                    (str(uuid.uuid4()), pid, final_content))
    inc_ai_usage(pid)
    return {"scope": "gesamt", "content": final_content, "stage1": stage1_results}
@app.get("/api/prompts")
def list_prompts(session: dict=Depends(require_auth)):
    """Return all active, user-facing AI prompts (pipeline prompts excluded), display-ordered."""
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM ai_prompts WHERE active=true AND slug NOT LIKE 'pipeline_%' ORDER BY sort_order")
        rows = cur.fetchall()
    return [r2d(row) for row in rows]
@app.put("/api/prompts/{prompt_id}")
def update_prompt(prompt_id: str, data: dict, session: dict=Depends(require_admin)):
    """Update AI prompt template (admin only); only whitelisted fields are writable."""
    with get_db() as conn:
        cur = get_cursor(conn)
        assignments = []
        params = []
        # Whitelist keeps the dynamic SET clause safe from arbitrary keys.
        for field in ('name', 'description', 'template', 'active'):
            if field in data:
                assignments.append(f'{field}=%s')
                params.append(data[field])
        if assignments:
            cur.execute(f"UPDATE ai_prompts SET {', '.join(assignments)}, updated=CURRENT_TIMESTAMP WHERE id=%s",
                        params + [prompt_id])
    return {"ok": True}
@app.get("/api/ai/usage")
def get_ai_usage(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Return the profile's daily AI limit, today's consumption, and 30-day history."""
    pid = get_pid(x_profile_id)
    today = datetime.now().date().isoformat()
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT ai_limit_day FROM profiles WHERE id=%s", (pid,))
        profile_row = cur.fetchone()
        daily_limit = profile_row['ai_limit_day'] if profile_row else None
        cur.execute("SELECT call_count FROM ai_usage WHERE profile_id=%s AND date=%s", (pid, today))
        todays_row = cur.fetchone()
        used_today = todays_row['call_count'] if todays_row else 0
        cur.execute("SELECT date, call_count FROM ai_usage WHERE profile_id=%s ORDER BY date DESC LIMIT 30", (pid,))
        history = [r2d(row) for row in cur.fetchall()]
    return {
        "limit": daily_limit,
        "used_today": used_today,
        # NULL limit means unlimited, so no remaining count is reported.
        "remaining": (daily_limit - used_today) if daily_limit else None,
        "history": history,
    }
# ── Auth ──────────────────────────────────────────────────────────────────────
class LoginRequest(BaseModel):
    """Credentials payload for POST /api/auth/login."""
    email: str
    password: str
class PasswordResetRequest(BaseModel):
    """Payload for POST /api/auth/forgot-password (email to send a reset link to)."""
    email: str
class PasswordResetConfirm(BaseModel):
    """Payload for POST /api/auth/reset-password (token from the emailed link + new password)."""
    token: str
    new_password: str
@app.post("/api/auth/login")
@limiter.limit("5/minute")
async def login(req: LoginRequest, request: Request):
    """Login with email + password.

    Rate-limited to 5 attempts/minute per client IP. On success creates a
    session row valid for the profile's session_days (default 30) and
    returns the bearer token. Both unknown email and wrong password raise
    the same generic 401 so the endpoint does not leak which emails exist.
    """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM profiles WHERE email=%s", (req.email.lower().strip(),))
        prof = cur.fetchone()
        if not prof:
            raise HTTPException(401, "Ungültige Zugangsdaten")
        # Verify password
        if not verify_pin(req.password, prof['pin_hash']):
            raise HTTPException(401, "Ungültige Zugangsdaten")
        # Auto-upgrade from SHA256 to bcrypt: legacy hashes lack the '$2'
        # bcrypt prefix; rehash now while the plaintext is available.
        if prof['pin_hash'] and not prof['pin_hash'].startswith('$2'):
            new_hash = hash_pin(req.password)
            cur.execute("UPDATE profiles SET pin_hash=%s WHERE id=%s", (new_hash, prof['id']))
        # Create session
        token = make_token()
        session_days = prof.get('session_days', 30)
        expires = datetime.now() + timedelta(days=session_days)
        cur.execute("INSERT INTO sessions (token, profile_id, expires_at, created) VALUES (%s,%s,%s,CURRENT_TIMESTAMP)",
                    (token, prof['id'], expires.isoformat()))
    return {
        "token": token,
        "profile_id": prof['id'],
        "name": prof['name'],
        "role": prof['role'],
        "expires_at": expires.isoformat()
    }
@app.post("/api/auth/logout")
def logout(x_auth_token: Optional[str]=Header(default=None)):
    """Invalidate the caller's session token, if one was supplied."""
    if x_auth_token:
        with get_db() as conn:
            get_cursor(conn).execute("DELETE FROM sessions WHERE token=%s", (x_auth_token,))
    return {"ok": True}
@app.get("/api/auth/me")
def get_me(session: dict=Depends(require_auth)):
    """Return the profile of the currently authenticated user."""
    return get_profile(session['profile_id'], session)
@app.get("/api/auth/status")
def auth_status():
    """Unauthenticated health/liveness probe."""
    return {
        "status": "ok",
        "service": "mitai-jinkendo",
        "version": "v9b",
    }
@app.post("/api/auth/pin")
def change_pin(req: dict, session: dict=Depends(require_auth)):
    """Change the authenticated user's PIN/password (minimum 4 characters)."""
    candidate = req.get('pin', '')
    if len(candidate) < 4:
        raise HTTPException(400, "PIN/Passwort muss mind. 4 Zeichen haben")
    hashed = hash_pin(candidate)
    with get_db() as conn:
        get_cursor(conn).execute("UPDATE profiles SET pin_hash=%s WHERE id=%s",
                                 (hashed, session['profile_id']))
    return {"ok": True}
@app.post("/api/auth/forgot-password")
@limiter.limit("3/minute")
async def password_reset_request(req: PasswordResetRequest, request: Request):
    """Request password reset email.

    Always answers with the same neutral message so callers cannot probe
    which email addresses exist. The reset token is stored in the sessions
    table under a "reset_" prefix and is valid for one hour. Sending the
    email is best-effort: SMTP failures are logged, not surfaced.
    """
    email = req.email.lower().strip()
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT id, name FROM profiles WHERE email=%s", (email,))
        prof = cur.fetchone()
        if not prof:
            # Don't reveal if email exists
            return {"ok": True, "message": "Falls die E-Mail existiert, wurde ein Reset-Link gesendet."}
        # Generate reset token
        token = secrets.token_urlsafe(32)
        expires = datetime.now() + timedelta(hours=1)
        # Store in sessions table (reuse mechanism); the "reset_" prefix
        # keeps reset tokens distinguishable from login sessions.
        cur.execute("INSERT INTO sessions (token, profile_id, expires_at, created) VALUES (%s,%s,%s,CURRENT_TIMESTAMP)",
                    (f"reset_{token}", prof['id'], expires.isoformat()))
    # Send email
    try:
        import smtplib
        from email.mime.text import MIMEText
        smtp_host = os.getenv("SMTP_HOST")
        smtp_port = int(os.getenv("SMTP_PORT", 587))
        smtp_user = os.getenv("SMTP_USER")
        smtp_pass = os.getenv("SMTP_PASS")
        smtp_from = os.getenv("SMTP_FROM")
        app_url = os.getenv("APP_URL", "https://mitai.jinkendo.de")
        if smtp_host and smtp_user and smtp_pass:
            msg = MIMEText(f"""Hallo {prof['name']},
Du hast einen Passwort-Reset angefordert.
Reset-Link: {app_url}/reset-password?token={token}
Der Link ist 1 Stunde gültig.
Falls du diese Anfrage nicht gestellt hast, ignoriere diese E-Mail.
Dein Mitai Jinkendo Team
""")
            msg['Subject'] = "Passwort zurücksetzen Mitai Jinkendo"
            msg['From'] = smtp_from
            msg['To'] = email
            with smtplib.SMTP(smtp_host, smtp_port) as server:
                server.starttls()
                server.login(smtp_user, smtp_pass)
                server.send_message(msg)
    except Exception as e:
        # Best-effort delivery: never fail the request because of SMTP errors.
        print(f"Email error: {e}")
    return {"ok": True, "message": "Falls die E-Mail existiert, wurde ein Reset-Link gesendet."}
@app.post("/api/auth/reset-password")
def password_reset_confirm(req: PasswordResetConfirm):
    """Confirm password reset with token.

    Validates the one-hour reset token (stored in the sessions table with a
    "reset_" prefix), sets the new bcrypt hash, and consumes the token so
    the link cannot be replayed.

    Raises 400 for an invalid/expired token or a too-short password.
    """
    # CONSISTENCY FIX: enforce the same minimum length as /api/auth/pin and
    # the admin PIN endpoint; previously a reset could set a 1-char password.
    if len(req.new_password) < 4:
        raise HTTPException(400, "PIN/Passwort muss mind. 4 Zeichen haben")
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT profile_id FROM sessions WHERE token=%s AND expires_at > CURRENT_TIMESTAMP",
                    (f"reset_{req.token}",))
        sess = cur.fetchone()
        if not sess:
            raise HTTPException(400, "Ungültiger oder abgelaufener Reset-Link")
        pid = sess['profile_id']
        new_hash = hash_pin(req.new_password)
        cur.execute("UPDATE profiles SET pin_hash=%s WHERE id=%s", (new_hash, pid))
        # One-time use: delete the token immediately after a successful reset.
        cur.execute("DELETE FROM sessions WHERE token=%s", (f"reset_{req.token}",))
    return {"ok": True, "message": "Passwort erfolgreich zurückgesetzt"}
# ── Admin ─────────────────────────────────────────────────────────────────────
class AdminProfileUpdate(BaseModel):
    """Partial-update payload for admin profile edits; None fields are left unchanged."""
    role: Optional[str] = None  # profile role (presumably 'admin' vs. regular — confirm allowed values)
    ai_enabled: Optional[int] = None  # 0/1 flag: AI features allowed for this profile
    ai_limit_day: Optional[int] = None  # max AI calls per day; NULL means unlimited
    export_enabled: Optional[int] = None  # 0/1 flag: data export allowed
@app.get("/api/admin/profiles")
def admin_list_profiles(session: dict=Depends(require_admin)):
    """Admin: list every profile, enriched with log counts and today's AI usage."""
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM profiles ORDER BY created")
        profiles = [r2d(row) for row in cur.fetchall()]
        for entry in profiles:
            pid = entry['id']
            cur.execute("SELECT COUNT(*) as count FROM weight_log WHERE profile_id=%s", (pid,))
            entry['weight_count'] = cur.fetchone()['count']
            cur.execute("SELECT COUNT(*) as count FROM ai_insights WHERE profile_id=%s", (pid,))
            entry['ai_insights_count'] = cur.fetchone()['count']
            today = datetime.now().date().isoformat()
            cur.execute("SELECT call_count FROM ai_usage WHERE profile_id=%s AND date=%s", (pid, today))
            usage_row = cur.fetchone()
            entry['ai_usage_today'] = usage_row['call_count'] if usage_row else 0
    return profiles
@app.put("/api/admin/profiles/{pid}")
def admin_update_profile(pid: str, data: AdminProfileUpdate, session: dict=Depends(require_admin)):
    """Admin: partially update profile settings (fields left as None are ignored)."""
    with get_db() as conn:
        changes = {field: value for field, value in data.model_dump().items() if value is not None}
        if not changes:
            return {"ok": True}
        # Fields come from the AdminProfileUpdate model, so the dynamic SET
        # clause cannot contain arbitrary keys.
        set_clause = ', '.join(f'{field}=%s' for field in changes)
        cur = get_cursor(conn)
        cur.execute(f"UPDATE profiles SET {set_clause} WHERE id=%s", list(changes.values()) + [pid])
    return {"ok": True}
@app.put("/api/admin/profiles/{pid}/permissions")
def admin_set_permissions(pid: str, data: dict, session: dict=Depends(require_admin)):
    """Admin: set AI/export/role permission flags on a profile."""
    with get_db() as conn:
        cur = get_cursor(conn)
        assignments = []
        params = []
        # Whitelist keeps the dynamic SET clause safe from arbitrary keys.
        for field in ('ai_enabled', 'ai_limit_day', 'export_enabled', 'role'):
            if field in data:
                assignments.append(f'{field}=%s')
                params.append(data[field])
        if assignments:
            cur.execute(f"UPDATE profiles SET {', '.join(assignments)} WHERE id=%s", params + [pid])
    return {"ok": True}
@app.put("/api/admin/profiles/{pid}/email")
def admin_set_email(pid: str, data: dict, session: dict=Depends(require_admin)):
    """Admin: set (or clear, when empty) a profile's email address."""
    email = data.get('email', '').strip().lower()
    with get_db() as conn:
        # Empty string is normalized to NULL so "no email" has one representation.
        get_cursor(conn).execute("UPDATE profiles SET email=%s WHERE id=%s",
                                 (email or None, pid))
    return {"ok": True}
@app.put("/api/admin/profiles/{pid}/pin")
def admin_set_pin(pid: str, data: dict, session: dict=Depends(require_admin)):
    """Admin: set a profile's PIN/password (minimum 4 characters)."""
    candidate = data.get('pin', '')
    if len(candidate) < 4:
        raise HTTPException(400, "PIN/Passwort muss mind. 4 Zeichen haben")
    hashed = hash_pin(candidate)
    with get_db() as conn:
        get_cursor(conn).execute("UPDATE profiles SET pin_hash=%s WHERE id=%s", (hashed, pid))
    return {"ok": True}
@app.get("/api/admin/email/status")
def admin_email_status(session: dict=Depends(require_admin)):
    """Admin: report whether outbound SMTP is configured (password never returned)."""
    host = os.getenv("SMTP_HOST")
    user = os.getenv("SMTP_USER")
    password = os.getenv("SMTP_PASS")
    return {
        "configured": bool(host and user and password),
        "smtp_host": host or "",
        "smtp_user": user or "",
        "app_url": os.getenv("APP_URL", "http://localhost:3002"),
    }
@app.post("/api/admin/email/test")
def admin_test_email(data: dict, session: dict=Depends(require_admin)):
    """Admin: Send a test email to verify the SMTP configuration.

    Raises 400 when no recipient is given, 500 with "SMTP nicht konfiguriert"
    when credentials are missing, and 500 with the SMTP error on send failure.
    """
    email = data.get('to', '')
    if not email:
        raise HTTPException(400, "E-Mail-Adresse fehlt")
    import smtplib
    from email.mime.text import MIMEText
    smtp_host = os.getenv("SMTP_HOST")
    smtp_port = int(os.getenv("SMTP_PORT", 587))
    smtp_user = os.getenv("SMTP_USER")
    smtp_pass = os.getenv("SMTP_PASS")
    smtp_from = os.getenv("SMTP_FROM")
    # BUG FIX: this HTTPException used to be raised inside the try block below,
    # where `except Exception` caught it and re-wrapped it as a misleading
    # "Fehler beim Senden: ..." message. Check configuration before sending.
    if not smtp_host or not smtp_user or not smtp_pass:
        raise HTTPException(500, "SMTP nicht konfiguriert")
    msg = MIMEText("Dies ist eine Test-E-Mail von Mitai Jinkendo.")
    msg['Subject'] = "Test-E-Mail"
    msg['From'] = smtp_from
    msg['To'] = email
    try:
        with smtplib.SMTP(smtp_host, smtp_port) as server:
            server.starttls()
            server.login(smtp_user, smtp_pass)
            server.send_message(msg)
    except Exception as e:
        # Surface the underlying SMTP problem to the admin UI.
        raise HTTPException(500, f"Fehler beim Senden: {str(e)}")
    return {"ok": True, "message": f"Test-E-Mail an {email} gesendet"}
# ── Export ────────────────────────────────────────────────────────────────────
@app.get("/api/export/csv")
def export_csv(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Export all data as CSV (one flat Typ/Datum/Wert/Details table).

    Requires the profile's export_enabled flag (403 otherwise) and streams
    the file as an attachment.
    """
    pid = get_pid(x_profile_id)
    # Check export permission
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT export_enabled FROM profiles WHERE id=%s", (pid,))
        prof = cur.fetchone()
        if not prof or not prof['export_enabled']:
            raise HTTPException(403, "Export ist für dieses Profil deaktiviert")

    def num(value):
        # BUG FIX: nullable numeric columns (e.g. caliper body_fat_pct — the
        # AI-summary code explicitly None-checks the same column) crashed on
        # float(None). Render NULL as an empty string instead.
        return '' if value is None else float(value)

    # Build CSV
    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow(["Typ", "Datum", "Wert", "Details"])
    with get_db() as conn:
        cur = get_cursor(conn)
        # Weight
        cur.execute("SELECT date, weight, note FROM weight_log WHERE profile_id=%s ORDER BY date", (pid,))
        for r in cur.fetchall():
            writer.writerow(["Gewicht", r['date'], f"{num(r['weight'])}kg", r['note'] or ""])
        # Circumferences
        cur.execute("SELECT date, c_waist, c_belly, c_hip FROM circumference_log WHERE profile_id=%s ORDER BY date", (pid,))
        for r in cur.fetchall():
            details = f"Taille:{num(r['c_waist'])}cm Bauch:{num(r['c_belly'])}cm Hüfte:{num(r['c_hip'])}cm"
            writer.writerow(["Umfänge", r['date'], "", details])
        # Caliper
        cur.execute("SELECT date, body_fat_pct, lean_mass FROM caliper_log WHERE profile_id=%s ORDER BY date", (pid,))
        for r in cur.fetchall():
            writer.writerow(["Caliper", r['date'], f"{num(r['body_fat_pct'])}%", f"Magermasse:{num(r['lean_mass'])}kg"])
        # Nutrition
        cur.execute("SELECT date, kcal, protein_g FROM nutrition_log WHERE profile_id=%s ORDER BY date", (pid,))
        for r in cur.fetchall():
            writer.writerow(["Ernährung", r['date'], f"{num(r['kcal'])}kcal", f"Protein:{num(r['protein_g'])}g"])
        # Activity
        cur.execute("SELECT date, activity_type, duration_min, kcal_active FROM activity_log WHERE profile_id=%s ORDER BY date", (pid,))
        for r in cur.fetchall():
            writer.writerow(["Training", r['date'], r['activity_type'], f"{num(r['duration_min'])}min {num(r['kcal_active'])}kcal"])
    output.seek(0)
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=mitai-export-{pid}.csv"}
    )
@app.get("/api/export/json")
def export_json(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Export every table for the profile as one downloadable JSON document.

    Requires the profile's export_enabled flag (403 otherwise).
    """
    pid = get_pid(x_profile_id)
    # Permission gate first, before touching any data.
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT export_enabled FROM profiles WHERE id=%s", (pid,))
        prof = cur.fetchone()
        if not prof or not prof['export_enabled']:
            raise HTTPException(403, "Export ist für dieses Profil deaktiviert")
    data = {}
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM profiles WHERE id=%s", (pid,))
        data['profile'] = r2d(cur.fetchone())
        # (output key, table) pairs exported in chronological order.
        log_tables = (
            ('weight', 'weight_log'),
            ('circumferences', 'circumference_log'),
            ('caliper', 'caliper_log'),
            ('nutrition', 'nutrition_log'),
            ('activity', 'activity_log'),
        )
        for key, table in log_tables:
            cur.execute(f"SELECT * FROM {table} WHERE profile_id=%s ORDER BY date", (pid,))
            data[key] = [r2d(row) for row in cur.fetchall()]
        cur.execute("SELECT * FROM ai_insights WHERE profile_id=%s ORDER BY created DESC", (pid,))
        data['insights'] = [r2d(row) for row in cur.fetchall()]

    def serialize(obj):
        # Decimal → float; anything else non-JSON-native (dates, …) → str.
        return float(obj) if isinstance(obj, Decimal) else str(obj)

    return Response(
        content=json.dumps(data, indent=2, default=serialize),
        media_type="application/json",
        headers={"Content-Disposition": f"attachment; filename=mitai-export-{pid}.json"}
    )
@app.get("/api/export/zip")
def export_zip(x_profile_id: Optional[str]=Header(default=None), session: dict=Depends(require_auth)):
    """Export all data as ZIP (CSV + JSON + photos) per specification.

    Archive layout (format version 2):
      README.txt                 human-readable description of the export
      profile.json               profile settings + entry counts (no password hash)
      data/*.csv                 measurement logs, semicolon-delimited, UTF-8 with BOM
      insights/ai_insights.json  AI analyses (written only when any exist)
      photos/                    progress photos copied from PHOTOS_DIR

    Raises:
        HTTPException 403: export is disabled for this profile.
    """
    pid = get_pid(x_profile_id)
    # Check export permission & get profile
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("SELECT * FROM profiles WHERE id=%s", (pid,))
        prof = r2d(cur.fetchone())
        if not prof or not prof.get('export_enabled'):
            raise HTTPException(403, "Export ist für dieses Profil deaktiviert")
    # Helper: CSV writer with UTF-8 BOM + semicolon
    def write_csv(zf, filename, rows, columns):
        # Empty datasets produce no file at all.
        if not rows:
            return
        output = io.StringIO()
        writer = csv.writer(output, delimiter=';')
        writer.writerow(columns)
        for r in rows:
            writer.writerow([
                '' if r.get(col) is None else
                (float(r[col]) if isinstance(r.get(col), Decimal) else r[col])
                for col in columns
            ])
        # UTF-8 with BOM for Excel
        csv_bytes = '\ufeff'.encode('utf-8') + output.getvalue().encode('utf-8')
        # FIX: interpolate the real filename — the entry name previously
        # contained a literal placeholder, so every CSV collided on one entry.
        zf.writestr(f"data/{filename}", csv_bytes)
    # Create ZIP
    zip_buffer = io.BytesIO()
    export_date = datetime.now().strftime('%Y-%m-%d')
    # FIX: `.get('name', 'export')` still yields None when the column is NULL,
    # which would crash `.replace()` when building the download name below.
    profile_name = prof.get('name') or 'export'
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
        with get_db() as conn:
            cur = get_cursor(conn)
            # 1. README.txt
            readme = f"""Mitai Jinkendo Datenexport
Version: 2
Exportiert am: {export_date}
Profil: {profile_name}
Inhalt:
- profile.json: Profildaten und Einstellungen
- data/*.csv: Messdaten (Semikolon-getrennt, UTF-8)
- insights/: KI-Auswertungen (JSON)
- photos/: Progress-Fotos (JPEG)
Import:
Dieser Export kann in Mitai Jinkendo unter
Einstellungen → Import → "Mitai Backup importieren"
wieder eingespielt werden.
Format-Version 2 (ab v9b):
Alle CSV-Dateien sind UTF-8 mit BOM kodiert.
Trennzeichen: Semikolon (;)
Datumsformat: YYYY-MM-DD
"""
            zf.writestr("README.txt", readme.encode('utf-8'))
            # 2. profile.json (ohne Passwort-Hash)
            cur.execute("SELECT COUNT(*) as c FROM weight_log WHERE profile_id=%s", (pid,))
            w_count = cur.fetchone()['c']
            cur.execute("SELECT COUNT(*) as c FROM nutrition_log WHERE profile_id=%s", (pid,))
            n_count = cur.fetchone()['c']
            cur.execute("SELECT COUNT(*) as c FROM activity_log WHERE profile_id=%s", (pid,))
            a_count = cur.fetchone()['c']
            cur.execute("SELECT COUNT(*) as c FROM photos WHERE profile_id=%s", (pid,))
            p_count = cur.fetchone()['c']
            profile_data = {
                "export_version": "2",
                "export_date": export_date,
                "app": "Mitai Jinkendo",
                "profile": {
                    "name": prof.get('name'),
                    "email": prof.get('email'),
                    "sex": prof.get('sex'),
                    "height": float(prof['height']) if prof.get('height') else None,
                    # dob is a datetime.date coming from PostgreSQL, not a string
                    "birth_year": prof['dob'].year if prof.get('dob') else None,
                    "goal_weight": float(prof['goal_weight']) if prof.get('goal_weight') else None,
                    "goal_bf_pct": float(prof['goal_bf_pct']) if prof.get('goal_bf_pct') else None,
                    "avatar_color": prof.get('avatar_color'),
                    "auth_type": prof.get('auth_type'),
                    "session_days": prof.get('session_days'),
                    "ai_enabled": prof.get('ai_enabled'),
                    "tier": prof.get('tier')
                },
                "stats": {
                    "weight_entries": w_count,
                    "nutrition_entries": n_count,
                    "activity_entries": a_count,
                    "photos": p_count
                }
            }
            zf.writestr("profile.json", json.dumps(profile_data, indent=2, ensure_ascii=False).encode('utf-8'))
            # 3. data/weight.csv
            cur.execute("SELECT id, date, weight, note, source, created FROM weight_log WHERE profile_id=%s ORDER BY date", (pid,))
            write_csv(zf, "weight.csv", [r2d(r) for r in cur.fetchall()],
                      ['id','date','weight','note','source','created'])
            # 4. data/circumferences.csv
            cur.execute("SELECT id, date, c_waist, c_hip, c_chest, c_neck, c_arm, c_thigh, c_calf, notes, created FROM circumference_log WHERE profile_id=%s ORDER BY date", (pid,))
            rows = [r2d(r) for r in cur.fetchall()]
            # Rename columns to match spec
            for r in rows:
                r['waist'] = r.pop('c_waist', None)
                r['hip'] = r.pop('c_hip', None)
                r['chest'] = r.pop('c_chest', None)
                r['neck'] = r.pop('c_neck', None)
                r['upper_arm'] = r.pop('c_arm', None)
                r['thigh'] = r.pop('c_thigh', None)
                r['calf'] = r.pop('c_calf', None)
                r['forearm'] = None  # not tracked
                r['note'] = r.pop('notes', None)
            write_csv(zf, "circumferences.csv", rows,
                      ['id','date','waist','hip','chest','neck','upper_arm','thigh','calf','forearm','note','created'])
            # 5. data/caliper.csv
            cur.execute("SELECT id, date, sf_chest, sf_abdomen, sf_thigh, sf_triceps, sf_subscap, sf_suprailiac, sf_axilla, sf_method, body_fat_pct, notes, created FROM caliper_log WHERE profile_id=%s ORDER BY date", (pid,))
            rows = [r2d(r) for r in cur.fetchall()]
            for r in rows:
                r['chest'] = r.pop('sf_chest', None)
                r['abdomen'] = r.pop('sf_abdomen', None)
                r['thigh'] = r.pop('sf_thigh', None)
                r['tricep'] = r.pop('sf_triceps', None)
                r['subscapular'] = r.pop('sf_subscap', None)
                r['suprailiac'] = r.pop('sf_suprailiac', None)
                r['midaxillary'] = r.pop('sf_axilla', None)
                r['method'] = r.pop('sf_method', None)
                r['bf_percent'] = r.pop('body_fat_pct', None)
                r['note'] = r.pop('notes', None)
            write_csv(zf, "caliper.csv", rows,
                      ['id','date','chest','abdomen','thigh','tricep','subscapular','suprailiac','midaxillary','method','bf_percent','note','created'])
            # 6. data/nutrition.csv
            cur.execute("SELECT id, date, kcal, protein_g, fat_g, carbs_g, source, created FROM nutrition_log WHERE profile_id=%s ORDER BY date", (pid,))
            rows = [r2d(r) for r in cur.fetchall()]
            for r in rows:
                r['meal_name'] = ''  # not tracked per meal
                r['protein'] = r.pop('protein_g', None)
                r['fat'] = r.pop('fat_g', None)
                r['carbs'] = r.pop('carbs_g', None)
                r['fiber'] = None  # not tracked
                r['note'] = ''
            write_csv(zf, "nutrition.csv", rows,
                      ['id','date','meal_name','kcal','protein','fat','carbs','fiber','note','source','created'])
            # 7. data/activity.csv
            cur.execute("SELECT id, date, activity_type, duration_min, kcal_active, hr_avg, hr_max, distance_km, notes, source, created FROM activity_log WHERE profile_id=%s ORDER BY date", (pid,))
            rows = [r2d(r) for r in cur.fetchall()]
            for r in rows:
                # 'name' must be read before pop() moves activity_type to 'type'
                r['name'] = r['activity_type']
                r['type'] = r.pop('activity_type', None)
                r['kcal'] = r.pop('kcal_active', None)
                r['heart_rate_avg'] = r.pop('hr_avg', None)
                r['heart_rate_max'] = r.pop('hr_max', None)
                r['note'] = r.pop('notes', None)
            write_csv(zf, "activity.csv", rows,
                      ['id','date','name','type','duration_min','kcal','heart_rate_avg','heart_rate_max','distance_km','note','source','created'])
            # 8. insights/ai_insights.json
            cur.execute("SELECT id, scope, content, created FROM ai_insights WHERE profile_id=%s ORDER BY created DESC", (pid,))
            insights = []
            for r in cur.fetchall():
                rd = r2d(r)
                insights.append({
                    "id": rd['id'],
                    "scope": rd['scope'],
                    "created": rd['created'].isoformat() if hasattr(rd['created'], 'isoformat') else str(rd['created']),
                    "result": rd['content']
                })
            if insights:
                zf.writestr("insights/ai_insights.json", json.dumps(insights, indent=2, ensure_ascii=False).encode('utf-8'))
            # 9. photos/
            cur.execute("SELECT * FROM photos WHERE profile_id=%s ORDER BY date", (pid,))
            photos = [r2d(r) for r in cur.fetchall()]
            for i, photo in enumerate(photos):
                photo_path = Path(PHOTOS_DIR) / photo['path']
                if photo_path.exists():
                    # Date-prefixed, 1-based index keeps names unique and sortable.
                    filename = f"{photo.get('date') or export_date}_{i+1}{photo_path.suffix}"
                    # FIX: use the computed name — previously a literal placeholder
                    # was written, so every photo collided on one entry name.
                    zf.write(photo_path, f"photos/{filename}")
    zip_buffer.seek(0)
    filename = f"mitai-export-{profile_name.replace(' ','-')}-{export_date}.zip"
    return StreamingResponse(
        iter([zip_buffer.getvalue()]),
        media_type="application/zip",
        # FIX: interpolate the download filename into the header (was a literal).
        headers={"Content-Disposition": f"attachment; filename={filename}"}
    )