Logs: - CSV column names from first row - Rows skipped due to missing date - Rows skipped due to no vitals data - Shows which fields were found/missing Helps diagnose CSV format mismatches. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
387 lines
15 KiB
Python
387 lines
15 KiB
Python
"""
|
|
Vitals Baseline Router - v9d Phase 2d Refactored
|
|
|
|
Baseline vitals measured once daily (morning, fasted):
|
|
- Resting Heart Rate (RHR)
|
|
- Heart Rate Variability (HRV)
|
|
- VO2 Max
|
|
- SpO2 (Blood Oxygen Saturation)
|
|
- Respiratory Rate
|
|
|
|
Endpoints:
|
|
- GET /api/vitals/baseline List baseline vitals
|
|
- GET /api/vitals/baseline/by-date/{date} Get entry for specific date
|
|
- POST /api/vitals/baseline Create/update baseline entry (upsert)
|
|
- PUT /api/vitals/baseline/{id} Update baseline entry
|
|
- DELETE /api/vitals/baseline/{id} Delete baseline entry
|
|
- GET /api/vitals/baseline/stats Statistics and trends
|
|
- POST /api/vitals/baseline/import/apple-health Import Apple Health CSV
|
|
"""
|
|
from fastapi import APIRouter, HTTPException, Depends, Header, UploadFile, File
|
|
from pydantic import BaseModel
|
|
from typing import Optional
|
|
from datetime import datetime, timedelta
|
|
import logging
|
|
import csv
|
|
import io
|
|
|
|
from db import get_db, get_cursor, r2d
|
|
from auth import require_auth
|
|
from routers.profiles import get_pid
|
|
|
|
router = APIRouter(prefix="/api/vitals/baseline", tags=["vitals_baseline"])
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
# Pydantic Models
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
|
|
class BaselineEntry(BaseModel):
    """Payload for creating or updating a daily baseline vitals entry.

    Every vital is optional so partial entries (e.g. RHR only) can be
    upserted; ``date`` is the only required field.
    """

    date: str  # measurement date; stored as-is, presumably ISO YYYY-MM-DD — confirm against callers
    resting_hr: Optional[int] = None  # resting heart rate (bpm assumed — confirm)
    hrv: Optional[int] = None  # heart rate variability (ms assumed — confirm)
    vo2_max: Optional[float] = None  # estimated VO2 max (units not shown in this file)
    spo2: Optional[int] = None  # blood oxygen saturation (integer percent assumed — confirm)
    respiratory_rate: Optional[float] = None  # breaths per minute assumed — confirm
    body_temperature: Optional[float] = None  # units (°C vs °F) not shown here — confirm
    resting_metabolic_rate: Optional[int] = None  # kcal/day assumed — confirm
    note: Optional[str] = None  # free-text note attached to the entry
|
|
|
|
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
# CRUD Endpoints
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
|
|
@router.get("")
def list_baseline_vitals(
    limit: int = 90,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the most recent baseline entries for the profile, newest first."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            SELECT * FROM vitals_baseline
            WHERE profile_id = %s
            ORDER BY date DESC
            LIMIT %s
            """,
            (pid, limit),
        )
        rows = cur.fetchall()
        return [r2d(row) for row in rows]
|
|
|
|
|
|
@router.get("/by-date/{date}")
def get_baseline_by_date(
    date: str,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Fetch the single baseline entry recorded on `date`; None if absent."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            SELECT * FROM vitals_baseline
            WHERE profile_id = %s AND date = %s
            """,
            (pid, date),
        )
        row = cur.fetchone()
        if row is None:
            return None
        return r2d(row)
|
|
|
|
|
|
@router.post("")
def create_or_update_baseline(
    entry: BaselineEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Create or update a baseline entry (upsert on (profile_id, date)).

    Only fields present (non-None) in the payload are written; on conflict,
    stored values are preserved for fields the payload omits.

    Raises:
        HTTPException(400): if no baseline vital field is provided.
    """
    pid = get_pid(x_profile_id)

    # Collect the provided fields from a fixed column whitelist — column names
    # never come from user input, so the f-string SQL below is injection-safe.
    provided = {}
    for col in ("resting_hr", "hrv", "vo2_max", "spo2", "respiratory_rate",
                "body_temperature", "resting_metabolic_rate"):
        val = getattr(entry, col)
        if val is not None:
            provided[col] = val
    # Truthiness (not `is not None`) kept deliberately: an empty-string note
    # counts as "not provided", matching the original behavior.
    if entry.note:
        provided["note"] = entry.note

    # At least one field must be provided
    if not provided:
        raise HTTPException(400, "At least one baseline vital must be provided")

    columns = list(provided)
    values = [pid, entry.date] + [provided[c] for c in columns]

    insert_cols = ", ".join(columns)
    insert_ph = ", ".join(["%s"] * len(columns))
    # On conflict, keep the stored value when the incoming one is NULL.
    # (EXCLUDED values here are never NULL since only non-None fields are
    # included, but COALESCE keeps the statement safe if that ever changes.)
    set_clause = ", ".join(
        f"{c} = COALESCE(EXCLUDED.{c}, vitals_baseline.{c})" for c in columns
    )

    with get_db() as conn:
        cur = get_cursor(conn)
        # BUGFIX: the original built $1/$2-style (asyncpg) placeholders but
        # executed through the same cursor API every other endpoint in this
        # module drives with %s placeholders; %s is used consistently now.
        # (Also removed an unused `placeholders` variable and the fragile
        # column-name extraction via `f.split('=')`.)
        query = f"""
            INSERT INTO vitals_baseline (profile_id, date, {insert_cols})
            VALUES (%s, %s, {insert_ph})
            ON CONFLICT (profile_id, date)
            DO UPDATE SET {set_clause}, updated_at = NOW()
            RETURNING *
        """
        cur.execute(query, values)
        return r2d(cur.fetchone())
|
|
|
|
|
|
@router.put("/{entry_id}")
def update_baseline(
    entry_id: int,
    entry: BaselineEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Update an existing baseline entry; only provided fields are changed.

    Raises:
        HTTPException(400): if no updatable field is provided.
        HTTPException(404): if no row matches (entry_id, profile).
    """
    pid = get_pid(x_profile_id)

    # Build SET clause dynamically from a fixed column whitelist (names never
    # come from user input, so the f-string SQL below is injection-safe).
    # CONSISTENCY FIX: body_temperature and resting_metabolic_rate are part of
    # BaselineEntry and handled by the POST upsert, but were silently ignored
    # here; they are now updatable too.
    updates = []
    values = []
    for col in ("resting_hr", "hrv", "vo2_max", "spo2", "respiratory_rate",
                "body_temperature", "resting_metabolic_rate"):
        val = getattr(entry, col)
        if val is not None:
            updates.append(f"{col} = %s")
            values.append(val)
    # Truthiness kept deliberately: empty-string note means "not provided".
    if entry.note:
        updates.append("note = %s")
        values.append(entry.note)

    if not updates:
        raise HTTPException(400, "No fields to update")

    updates.append("updated_at = NOW()")
    values.extend([entry_id, pid])

    with get_db() as conn:
        cur = get_cursor(conn)
        # BUGFIX: the original used $n-style (asyncpg) placeholders while the
        # rest of this module executes with %s through the same cursor API.
        query = f"""
            UPDATE vitals_baseline
            SET {', '.join(updates)}
            WHERE id = %s AND profile_id = %s
            RETURNING *
        """
        cur.execute(query, values)
        row = cur.fetchone()
        if not row:
            raise HTTPException(404, "Entry not found")
        return r2d(row)
|
|
|
|
|
|
@router.delete("/{entry_id}")
def delete_baseline(
    entry_id: int,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Delete a baseline entry owned by the active profile; 404 if absent."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            DELETE FROM vitals_baseline
            WHERE id = %s AND profile_id = %s
            """,
            (entry_id, pid),
        )
        # rowcount of 0 means nothing matched this id for this profile.
        if cur.rowcount == 0:
            raise HTTPException(404, "Entry not found")
        return {"ok": True}
|
|
|
|
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
# Statistics & Trends
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
|
|
def _classify_trend(recent, baseline, margin):
    """Classify a recent average vs a longer baseline average.

    Returns 'increasing' / 'decreasing' / 'stable', or None when either
    average is missing. `margin` is the dead-band around the baseline.
    """
    if not recent or not baseline:
        return None
    if recent > baseline + margin:
        return 'increasing'
    if recent < baseline - margin:
        return 'decreasing'
    return 'stable'


@router.get("/stats")
def get_baseline_stats(
    days: int = 30,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Get baseline vitals statistics over the last `days` days plus trends.

    Returns total entry count, 7-day and 30-day averages for RHR/HRV, the
    latest recorded VO2 max, a 7-day SpO2 average, and 7d-vs-30d trend labels.
    """
    pid = get_pid(x_profile_id)
    cutoff_date = (datetime.now() - timedelta(days=days)).date()

    with get_db() as conn:
        cur = get_cursor(conn)
        # BUGFIX: the rolling-window FILTERs were anchored at cutoff_date
        # ("date >= %s - INTERVAL '7 days'"), so with the default days=30 the
        # "7-day" averages actually covered 37 days back. They are anchored at
        # CURRENT_DATE now so avg_*_7d / avg_*_30d mean what their names say.
        cur.execute("""
            SELECT
                COUNT(*) as total_entries,
                -- Resting HR
                AVG(resting_hr) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '7 days') as avg_rhr_7d,
                AVG(resting_hr) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '30 days') as avg_rhr_30d,
                -- HRV
                AVG(hrv) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '7 days') as avg_hrv_7d,
                AVG(hrv) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '30 days') as avg_hrv_30d,
                -- Latest values
                (SELECT vo2_max FROM vitals_baseline WHERE profile_id = %s AND vo2_max IS NOT NULL ORDER BY date DESC LIMIT 1) as latest_vo2_max,
                AVG(spo2) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '7 days') as avg_spo2_7d
            FROM vitals_baseline
            WHERE profile_id = %s AND date >= %s
        """, (pid, pid, cutoff_date))

        stats = r2d(cur.fetchone())

    # Trends compare the 7d average against the 30d average.
    # RHR: a falling recent average is the good direction (dead-band 2 bpm).
    stats['trend_rhr'] = _classify_trend(stats['avg_rhr_7d'], stats['avg_rhr_30d'], margin=2)
    # HRV: a rising recent average is the good direction (dead-band 5).
    stats['trend_hrv'] = _classify_trend(stats['avg_hrv_7d'], stats['avg_hrv_30d'], margin=5)

    return stats
|
|
|
|
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
# Import: Apple Health CSV
|
|
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
|
|
|
def _parse_csv_int(raw: Optional[str]) -> Optional[int]:
    """Best-effort int parse of a CSV cell ('62' or '62.0'); None if blank/invalid."""
    try:
        return int(float(raw)) if raw else None
    except ValueError:
        return None


def _parse_csv_float(raw: Optional[str]) -> Optional[float]:
    """Best-effort float parse of a CSV cell; None if blank/invalid."""
    try:
        return float(raw) if raw else None
    except ValueError:
        return None


@router.post("/import/apple-health")
async def import_apple_health_baseline(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Import baseline vitals from an Apple Health CSV export.

    Expected columns: 'Start' (timestamp; first 10 chars used as the date),
    'Resting Heart Rate', 'Heart Rate Variability', 'VO2 Max',
    'Oxygen Saturation', 'Respiratory Rate'. Rows never overwrite entries
    whose source is 'manual'. Logs the header row and each skip reason to
    help diagnose CSV format mismatches.

    Returns counts: {"inserted", "updated", "skipped", "errors"}.
    """
    pid = get_pid(x_profile_id)

    content = await file.read()
    reader = csv.DictReader(io.StringIO(content.decode('utf-8')))

    inserted = 0
    updated = 0
    skipped = 0
    errors = 0

    with get_db() as conn:
        cur = get_cursor(conn)

        first_row = True

        for row in reader:
            try:
                if first_row:
                    # Log available columns once for debugging format issues.
                    logger.info(f"CSV Columns: {list(row.keys())}")
                    first_row = False

                start = row.get('Start')
                date = start[:10] if start else None
                if not date:
                    logger.warning(f"Skipped row (no date): Start='{row.get('Start')}'")
                    skipped += 1
                    continue

                # BUGFIX: numeric cells in Apple Health exports are often
                # float-formatted (e.g. '62.0'); the original int(...) raised
                # ValueError and counted the whole row as an error. Parse
                # tolerantly, then decide on the parsed values.
                rhr = _parse_csv_int(row.get('Resting Heart Rate'))
                hrv = _parse_csv_int(row.get('Heart Rate Variability'))
                vo2 = _parse_csv_float(row.get('VO2 Max'))
                spo2 = _parse_csv_int(row.get('Oxygen Saturation'))
                resp_rate = _parse_csv_float(row.get('Respiratory Rate'))

                # Skip rows that carry none of the baseline vitals.
                if all(v is None for v in (rhr, hrv, vo2, spo2, resp_rate)):
                    logger.warning(f"Skipped row {date} (no vitals): RHR={rhr}, HRV={hrv}, VO2={vo2}, SpO2={spo2}, RespRate={resp_rate}")
                    skipped += 1
                    continue

                # Upsert; the WHERE guard refuses to overwrite manual entries.
                # "(xmax = 0)" distinguishes a fresh insert from an update.
                cur.execute("""
                    INSERT INTO vitals_baseline (
                        profile_id, date,
                        resting_hr, hrv, vo2_max, spo2, respiratory_rate,
                        source
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, 'apple_health')
                    ON CONFLICT (profile_id, date)
                    DO UPDATE SET
                        resting_hr = COALESCE(EXCLUDED.resting_hr, vitals_baseline.resting_hr),
                        hrv = COALESCE(EXCLUDED.hrv, vitals_baseline.hrv),
                        vo2_max = COALESCE(EXCLUDED.vo2_max, vitals_baseline.vo2_max),
                        spo2 = COALESCE(EXCLUDED.spo2, vitals_baseline.spo2),
                        respiratory_rate = COALESCE(EXCLUDED.respiratory_rate, vitals_baseline.respiratory_rate),
                        updated_at = NOW()
                    WHERE vitals_baseline.source != 'manual'
                    RETURNING (xmax = 0) AS inserted
                """, (pid, date, rhr, hrv, vo2, spo2, resp_rate))

                result = cur.fetchone()
                if result is None:
                    # WHERE clause prevented update (manual entry exists)
                    skipped += 1
                elif result['inserted']:
                    inserted += 1
                else:
                    updated += 1

            except Exception as e:
                # Best-effort import: count the row and continue.
                logger.error(f"Error importing row: {e}")
                errors += 1

    return {
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors
    }
|