mitai-jinkendo/backend/routers/vitals_baseline.py
Lars e4a2b63a48
All checks were successful
Deploy Development / deploy (push) Successful in 49s
Build Test / lint-backend (push) Successful in 0s
Build Test / build-frontend (push) Successful in 13s
fix: vitals baseline parameter sync + goal utils transaction rollback
Bug 1 Fix (Ruhepuls):
- Completely rewrote vitals_baseline POST endpoint
- Clear separation: param_values array contains ALL values (pid, date, ...)
- Synchronized insert_cols, insert_placeholders, and param_values
- Added debug logging
- Simplified UPDATE logic (EXCLUDED.col instead of COALESCE)

Bug 2 Fix (Custom Goal Type Transaction Error):
- Added transaction rollback in goal_utils._fetch_by_aggregation_method()
- When SQL query fails (e.g., invalid column name), rollback transaction
- Prevents 'InFailedSqlTransaction' errors on subsequent queries
- Enhanced error logging (shows filter conditions, SQL, params)
- Returns None gracefully so goal creation can continue

User Action Required for Bug 2:
- Edit goal type 'Trainingshäufigkeit Krafttraining'
- Change filter from {"training_type": "strength"}
  to {"training_category": "strength"}
- The activity_log table has a training_category column, NOT a training_type column
2026-03-27 22:09:52 +01:00

461 lines
17 KiB
Python

"""
Vitals Baseline Router - v9d Phase 2d Refactored
Baseline vitals measured once daily (morning, fasted):
- Resting Heart Rate (RHR)
- Heart Rate Variability (HRV)
- VO2 Max
- SpO2 (Blood Oxygen Saturation)
- Respiratory Rate
Endpoints:
- GET /api/vitals/baseline List baseline vitals
- GET /api/vitals/baseline/by-date/{date} Get entry for specific date
- POST /api/vitals/baseline Create/update baseline entry (upsert)
- PUT /api/vitals/baseline/{id} Update baseline entry
- DELETE /api/vitals/baseline/{id} Delete baseline entry
- GET /api/vitals/baseline/stats Statistics and trends
- POST /api/vitals/baseline/import/apple-health Import Apple Health CSV
"""
from fastapi import APIRouter, HTTPException, Depends, Header, UploadFile, File
from pydantic import BaseModel
from typing import Optional
from datetime import datetime, timedelta
import logging
import csv
import io
from db import get_db, get_cursor, r2d
from auth import require_auth
from routers.profiles import get_pid
# Single router for all baseline-vitals endpoints, mounted under /api/vitals/baseline.
router = APIRouter(prefix="/api/vitals/baseline", tags=["vitals_baseline"])
# Module-level logger used by the CSV import endpoint for row-level diagnostics.
logger = logging.getLogger(__name__)
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Pydantic Models
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
class BaselineEntry(BaseModel):
    """Request body for creating or updating a daily baseline vitals entry.

    All vitals are optional; `date` forms the upsert key together with the
    profile id (see the POST endpoint's ON CONFLICT clause).
    """
    date: str  # ISO date string "YYYY-MM-DD"
    resting_hr: Optional[int] = None  # resting heart rate, bpm
    hrv: Optional[int] = None  # heart rate variability, ms
    vo2_max: Optional[float] = None  # ml/(kg·min)
    spo2: Optional[int] = None  # blood oxygen saturation, percent
    respiratory_rate: Optional[float] = None  # breaths per minute
    body_temperature: Optional[float] = None  # presumably °C — TODO confirm units
    resting_metabolic_rate: Optional[int] = None  # presumably kcal/day — TODO confirm units
    note: Optional[str] = None  # free-text note
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# CRUD Endpoints
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.get("")
def list_baseline_vitals(
    limit: int = 90,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the most recent baseline entries for the active profile, newest first."""
    pid = get_pid(x_profile_id)
    query = """
        SELECT * FROM vitals_baseline
        WHERE profile_id = %s
        ORDER BY date DESC
        LIMIT %s
    """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(query, (pid, limit))
        rows = cur.fetchall()
    return [r2d(row) for row in rows]
@router.get("/by-date/{date}")
def get_baseline_by_date(
    date: str,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the baseline entry for one date, or None when no entry exists."""
    pid = get_pid(x_profile_id)
    query = """
        SELECT * FROM vitals_baseline
        WHERE profile_id = %s AND date = %s
    """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(query, (pid, date))
        row = cur.fetchone()
    if row is None:
        return None
    return r2d(row)
@router.post("")
def create_or_update_baseline(
    entry: BaselineEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Create or update a baseline entry (upsert on (profile_id, date)).

    Only fields present in the request are inserted; on conflict the
    provided columns overwrite the stored ones via EXCLUDED.

    Raises:
        HTTPException(400): when no vital field is provided.
    """
    pid = get_pid(x_profile_id)

    # Optional vital columns, included only when the request supplies them.
    optional_cols = (
        "resting_hr", "hrv", "vo2_max", "spo2", "respiratory_rate",
        "body_temperature", "resting_metabolic_rate", "note",
    )
    insert_cols = []
    update_fields = []
    # profile_id and date are always the first two parameters.
    param_values = [pid, entry.date]
    for col in optional_cols:
        value = getattr(entry, col)
        # Empty-string notes are treated like None (preserves the original
        # truthiness check used for `note`).
        if value is None or (col == "note" and not value):
            continue
        insert_cols.append(col)
        update_fields.append(f"{col} = EXCLUDED.{col}")
        param_values.append(value)

    if not insert_cols:
        raise HTTPException(400, "At least one baseline vital must be provided")

    with get_db() as conn:
        cur = get_cursor(conn)
        all_cols = ", ".join(["profile_id", "date", *insert_cols])
        # Use psycopg-style %s placeholders like every other query in this
        # file — PostgreSQL $n placeholders are NOT substituted by
        # cursor.execute() and would fail at the driver level.
        placeholders = ", ".join(["%s"] * len(param_values))
        query = f"""
            INSERT INTO vitals_baseline ({all_cols})
            VALUES ({placeholders})
            ON CONFLICT (profile_id, date)
            DO UPDATE SET {', '.join(update_fields)}, updated_at = NOW()
            RETURNING *
        """
        # Lazy %-style logging instead of print() so debug output obeys the
        # configured log level.
        logger.debug("Vitals baseline upsert query: %s | params (%d): %s",
                     query, len(param_values), param_values)
        cur.execute(query, tuple(param_values))
        return r2d(cur.fetchone())
@router.put("/{entry_id}")
def update_baseline(
    entry_id: int,
    entry: BaselineEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Update an existing baseline entry by id, scoped to the active profile.

    Only fields present in the request are changed. Also supports
    body_temperature and resting_metabolic_rate for parity with POST.

    Raises:
        HTTPException(400): when no field to update is provided.
        HTTPException(404): when the entry does not exist for this profile.
    """
    pid = get_pid(x_profile_id)
    updatable_cols = (
        "resting_hr", "hrv", "vo2_max", "spo2", "respiratory_rate",
        "body_temperature", "resting_metabolic_rate", "note",
    )
    updates = []
    values = []
    for col in updatable_cols:
        value = getattr(entry, col)
        # Empty-string notes are skipped (preserves the original truthiness
        # check used for `note`).
        if value is None or (col == "note" and not value):
            continue
        # %s placeholders, consistent with the rest of the file — $n syntax
        # is not substituted by cursor.execute().
        updates.append(f"{col} = %s")
        values.append(value)
    if not updates:
        raise HTTPException(400, "No fields to update")
    updates.append("updated_at = NOW()")
    # entry_id and pid fill the WHERE clause placeholders at the end.
    values.extend([entry_id, pid])
    with get_db() as conn:
        cur = get_cursor(conn)
        query = f"""
            UPDATE vitals_baseline
            SET {', '.join(updates)}
            WHERE id = %s AND profile_id = %s
            RETURNING *
        """
        cur.execute(query, values)
        row = cur.fetchone()
        if not row:
            raise HTTPException(404, "Entry not found")
        return r2d(row)
@router.delete("/{entry_id}")
def delete_baseline(
    entry_id: int,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Delete one baseline entry belonging to the active profile."""
    pid = get_pid(x_profile_id)
    query = """
        DELETE FROM vitals_baseline
        WHERE id = %s AND profile_id = %s
    """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(query, (entry_id, pid))
        deleted = cur.rowcount
    if deleted == 0:
        raise HTTPException(404, "Entry not found")
    return {"ok": True}
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Statistics & Trends
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.get("/stats")
def get_baseline_stats(
    days: int = 30,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Aggregate baseline vitals over the last `days` days and derive trends."""
    pid = get_pid(x_profile_id)
    cutoff_date = (datetime.now() - timedelta(days=days)).date()

    def classify_trend(short_avg, long_avg, margin):
        """Compare a 7d average to a 30d average with a +/- margin band.

        Truthiness mirrors the original behavior: a missing (None) average
        yields no trend.
        """
        if not (short_avg and long_avg):
            return None
        if short_avg < long_avg - margin:
            return 'decreasing'
        if short_avg > long_avg + margin:
            return 'increasing'
        return 'stable'

    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("""
            SELECT
                COUNT(*) as total_entries,
                -- Resting HR
                AVG(resting_hr) FILTER (WHERE date >= %s - INTERVAL '7 days') as avg_rhr_7d,
                AVG(resting_hr) FILTER (WHERE date >= %s - INTERVAL '30 days') as avg_rhr_30d,
                -- HRV
                AVG(hrv) FILTER (WHERE date >= %s - INTERVAL '7 days') as avg_hrv_7d,
                AVG(hrv) FILTER (WHERE date >= %s - INTERVAL '30 days') as avg_hrv_30d,
                -- Latest values
                (SELECT vo2_max FROM vitals_baseline WHERE profile_id = %s AND vo2_max IS NOT NULL ORDER BY date DESC LIMIT 1) as latest_vo2_max,
                AVG(spo2) FILTER (WHERE date >= %s - INTERVAL '7 days') as avg_spo2_7d
            FROM vitals_baseline
            WHERE profile_id = %s AND date >= %s
        """, (cutoff_date, cutoff_date, cutoff_date, cutoff_date, pid, cutoff_date, pid, cutoff_date))
        stats = r2d(cur.fetchone())

    # Falling resting HR week-over-month is good; rising HRV is good.
    stats['trend_rhr'] = classify_trend(stats['avg_rhr_7d'], stats['avg_rhr_30d'], 2)
    stats['trend_hrv'] = classify_trend(stats['avg_hrv_7d'], stats['avg_hrv_30d'], 5)
    return stats
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Import: Apple Health CSV
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def safe_int(value):
    """Parse *value* to int, tolerating decimal strings; return None on failure.

    Accepts ints, floats, and strings. Decimal strings like "12.7" go through
    float() first and truncate toward zero (int(float(...))). None and
    empty/whitespace-only strings yield None. Unlike the previous version,
    numeric zero parses to 0 instead of being dropped.
    """
    if value is None:
        return None
    text = str(value).strip()
    if not text:
        return None
    try:
        # Decimal form: parse as float first, then truncate toward zero.
        if '.' in text:
            return int(float(text))
        return int(text)
    except (ValueError, TypeError):
        return None
def safe_float(value):
    """Parse *value* to float; return None for None/empty/unparseable input.

    Unlike the previous version, numeric zero parses to 0.0 instead of being
    dropped by the truthiness check.
    """
    if value is None:
        return None
    text = str(value).strip()
    if not text:
        return None
    try:
        return float(text)
    except (ValueError, TypeError):
        return None
@router.post("/import/apple-health")
async def import_apple_health_baseline(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Import baseline vitals from an Apple Health CSV export.

    Supports English and German column headers. Each row upserts on
    (profile_id, date); existing rows whose source is 'manual' are never
    overwritten (the ON CONFLICT WHERE clause blocks the update).

    Returns counts of inserted/updated/skipped/errored rows plus the first
    10 error messages.
    """
    pid = get_pid(x_profile_id)
    content = await file.read()
    decoded = content.decode('utf-8')
    reader = csv.DictReader(io.StringIO(decoded))
    inserted = 0
    updated = 0
    skipped = 0
    errors = 0
    error_details = []  # Collect error messages (first 10 are returned)
    with get_db() as conn:
        cur = get_cursor(conn)
        first_row = True
        for row in reader:
            # Reset per row so error messages never report a stale date
            # carried over from a previous iteration.
            date = None
            try:
                if first_row:
                    # Log available columns once for debugging header mismatches
                    logger.info(f"CSV Columns: {list(row.keys())}")
                    first_row = False
                # Support both English and German column names
                date_raw = row.get('Start') or row.get('Datum/Uhrzeit')
                date = date_raw[:10] if date_raw else None  # keep YYYY-MM-DD prefix
                if not date:
                    logger.warning(f"Skipped row (no date): Start='{row.get('Start')}', Datum/Uhrzeit='{row.get('Datum/Uhrzeit')}'")
                    skipped += 1
                    continue
                # Extract baseline vitals (support English + German column names)
                rhr = row.get('Resting Heart Rate') or row.get('Ruhepuls (count/min)')
                hrv = row.get('Heart Rate Variability') or row.get('Herzfrequenzvariabilität (ms)')
                vo2 = row.get('VO2 Max') or row.get('VO2 max (ml/(kg·min))')
                spo2 = row.get('Oxygen Saturation') or row.get('Blutsauerstoffsättigung (%)')
                resp_rate = row.get('Respiratory Rate') or row.get('Atemfrequenz (count/min)')
                # Skip if no baseline vitals
                if not any([rhr, hrv, vo2, spo2, resp_rate]):
                    logger.warning(f"Skipped row {date} (no vitals): RHR={rhr}, HRV={hrv}, VO2={vo2}, SpO2={spo2}, RespRate={resp_rate}")
                    skipped += 1
                    continue
                # Upsert; COALESCE keeps existing values when the CSV cell is
                # empty, and the WHERE clause protects manual entries.
                cur.execute("""
                    INSERT INTO vitals_baseline (
                        profile_id, date,
                        resting_hr, hrv, vo2_max, spo2, respiratory_rate,
                        source
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, 'apple_health')
                    ON CONFLICT (profile_id, date)
                    DO UPDATE SET
                        resting_hr = COALESCE(EXCLUDED.resting_hr, vitals_baseline.resting_hr),
                        hrv = COALESCE(EXCLUDED.hrv, vitals_baseline.hrv),
                        vo2_max = COALESCE(EXCLUDED.vo2_max, vitals_baseline.vo2_max),
                        spo2 = COALESCE(EXCLUDED.spo2, vitals_baseline.spo2),
                        respiratory_rate = COALESCE(EXCLUDED.respiratory_rate, vitals_baseline.respiratory_rate),
                        updated_at = NOW()
                    WHERE vitals_baseline.source != 'manual'
                    RETURNING (xmax = 0) AS inserted
                """, (
                    pid, date,
                    safe_int(rhr),
                    safe_int(hrv),
                    safe_float(vo2),
                    safe_int(spo2),
                    safe_float(resp_rate)
                ))
                result = cur.fetchone()
                if result is None:
                    # WHERE clause prevented update (manual entry exists)
                    skipped += 1
                elif result['inserted']:
                    # (xmax = 0) is true for freshly inserted rows
                    inserted += 1
                else:
                    updated += 1
            except Exception as e:
                error_msg = f"Row {date or 'unknown'}: {str(e)}"
                error_details.append(error_msg)
                # logger.exception includes the traceback automatically
                logger.exception(error_msg)
                errors += 1
    return {
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors,
        "error_details": error_details[:10]  # Return first 10 errors
    }