refactor: vitals architecture - separate baseline vs blood pressure
Some checks failed
Build Test / lint-backend (push) Waiting to run
Build Test / build-frontend (push) Waiting to run
Deploy Development / deploy (push) Has been cancelled

BREAKING CHANGE: vitals_log split into vitals_baseline + blood_pressure_log

**Architektur-Änderung:**
- Baseline-Vitals (langsam veränderlich, 1x täglich morgens)
  → vitals_baseline (RHR, HRV, VO2 Max, SpO2, Atemfrequenz)
- Kontext-abhängige Vitals (mehrfach täglich, situativ)
  → blood_pressure_log (Blutdruck + Kontext-Tagging)

**Migration 015:**
- CREATE TABLE vitals_baseline (once daily, morning measurements)
- CREATE TABLE blood_pressure_log (multiple daily, context-aware)
- Migrate data from vitals_log → new tables
- Rename vitals_log → vitals_log_backup_pre_015 (safety)
- Prepared for future: glucose_log, temperature_log (commented)

**Backend:**
- NEW: routers/vitals_baseline.py (CRUD + Apple Health import)
- NEW: routers/blood_pressure.py (CRUD + Omron import + context)
- UPDATED: main.py (register new routers, remove old vitals)
- UPDATED: insights.py (query new tables, split template vars)

**Frontend:**
- UPDATED: api.js (new endpoints für baseline + BP)
- UPDATED: Analysis.jsx (add {{bp_summary}} variable)

**Nächster Schritt:**
- Frontend: VitalsPage.jsx refactoren (3 Tabs: Morgenmessung, Blutdruck, Import)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Lars 2026-03-23 16:02:40 +01:00
parent 1619091640
commit 1866ff9ce6
7 changed files with 1004 additions and 32 deletions

View File

@ -20,7 +20,8 @@ from routers import activity, nutrition, photos, insights, prompts
from routers import admin, stats, exportdata, importdata from routers import admin, stats, exportdata, importdata
from routers import subscription, coupons, features, tiers_mgmt, tier_limits from routers import subscription, coupons, features, tiers_mgmt, tier_limits
from routers import user_restrictions, access_grants, training_types, admin_training_types from routers import user_restrictions, access_grants, training_types, admin_training_types
from routers import admin_activity_mappings, sleep, rest_days, vitals from routers import admin_activity_mappings, sleep, rest_days
from routers import vitals_baseline, blood_pressure # v9d Phase 2d Refactored
from routers import evaluation # v9d/v9e Training Type Profiles (#15) from routers import evaluation # v9d/v9e Training Type Profiles (#15)
# ── App Configuration ───────────────────────────────────────────────────────── # ── App Configuration ─────────────────────────────────────────────────────────
@ -93,7 +94,8 @@ app.include_router(admin_training_types.router) # /api/admin/training-types/*
app.include_router(admin_activity_mappings.router) # /api/admin/activity-mappings/* app.include_router(admin_activity_mappings.router) # /api/admin/activity-mappings/*
app.include_router(sleep.router) # /api/sleep/* (v9d Phase 2b) app.include_router(sleep.router) # /api/sleep/* (v9d Phase 2b)
app.include_router(rest_days.router) # /api/rest-days/* (v9d Phase 2a) app.include_router(rest_days.router) # /api/rest-days/* (v9d Phase 2a)
app.include_router(vitals.router) # /api/vitals/* (v9d Phase 2d) app.include_router(vitals_baseline.router) # /api/vitals/baseline/* (v9d Phase 2d Refactored)
app.include_router(blood_pressure.router) # /api/blood-pressure/* (v9d Phase 2d Refactored)
app.include_router(evaluation.router) # /api/evaluation/* (v9d/v9e Training Profiles #15) app.include_router(evaluation.router) # /api/evaluation/* (v9d/v9e Training Profiles #15)
# ── Health Check ────────────────────────────────────────────────────────────── # ── Health Check ──────────────────────────────────────────────────────────────

View File

@ -0,0 +1,184 @@
-- Migration 015: Vitals refactoring - split baseline vs. context-dependent vitals
-- v9d Phase 2d: architecture improvement for better data quality
-- Date: 2026-03-23
-- ══════════════════════════════════════════════════════════════════════════════
-- STEP 1: Create new tables
-- ══════════════════════════════════════════════════════════════════════════════
-- Baseline vitals (slow-changing, measured once daily, in the morning)
CREATE TABLE IF NOT EXISTS vitals_baseline (
    id SERIAL PRIMARY KEY,
    profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    date DATE NOT NULL,
    -- Core baseline vitals (ranges enforced via CHECK constraints)
    resting_hr INTEGER CHECK (resting_hr > 0 AND resting_hr < 120),
    hrv INTEGER CHECK (hrv > 0 AND hrv < 300),
    vo2_max DECIMAL(4,1) CHECK (vo2_max > 0 AND vo2_max < 100),
    spo2 INTEGER CHECK (spo2 >= 70 AND spo2 <= 100),
    respiratory_rate DECIMAL(4,1) CHECK (respiratory_rate > 0 AND respiratory_rate < 60),
    -- Future baseline vitals (prepared for expansion)
    body_temperature DECIMAL(3,1) CHECK (body_temperature > 30 AND body_temperature < 45),
    resting_metabolic_rate INTEGER CHECK (resting_metabolic_rate > 0),
    -- Metadata
    note TEXT,
    source VARCHAR(20) DEFAULT 'manual' CHECK (source IN ('manual', 'apple_health', 'garmin', 'withings')),
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW(),
    -- One baseline entry per profile and day; POST endpoint upserts on this
    CONSTRAINT unique_baseline_per_day UNIQUE(profile_id, date)
);
-- IF NOT EXISTS keeps the migration idempotent, consistent with CREATE TABLE above
CREATE INDEX IF NOT EXISTS idx_vitals_baseline_profile_date ON vitals_baseline(profile_id, date DESC);
COMMENT ON TABLE vitals_baseline IS 'v9d Phase 2d: Baseline vitals measured once daily (morning, fasted)';
COMMENT ON COLUMN vitals_baseline.resting_hr IS 'Resting heart rate (bpm) - measured in the morning before getting up';
COMMENT ON COLUMN vitals_baseline.hrv IS 'Heart rate variability (ms) - higher is better';
COMMENT ON COLUMN vitals_baseline.vo2_max IS 'VO2 Max (ml/kg/min) - estimated by Apple Watch or lab test';
COMMENT ON COLUMN vitals_baseline.spo2 IS 'Blood oxygen saturation (%) - baseline measurement';
COMMENT ON COLUMN vitals_baseline.respiratory_rate IS 'Respiratory rate (breaths/min) - baseline measurement';
-- ══════════════════════════════════════════════════════════════════════════════
-- Blood pressure log (context-dependent, multiple measurements per day)
CREATE TABLE IF NOT EXISTS blood_pressure_log (
    id SERIAL PRIMARY KEY,
    profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    measured_at TIMESTAMP NOT NULL,
    -- Blood pressure measurements (mmHg / bpm)
    systolic INTEGER NOT NULL CHECK (systolic > 0 AND systolic < 300),
    diastolic INTEGER NOT NULL CHECK (diastolic > 0 AND diastolic < 200),
    pulse INTEGER CHECK (pulse > 0 AND pulse < 250),
    -- Context tagging for correlation analysis
    context VARCHAR(30) CHECK (context IN (
        'morning_fasted',     -- fasted, right after getting up
        'after_meal',         -- after eating
        'before_training',    -- before a workout
        'after_training',     -- after a workout
        'evening',            -- in the evening
        'stress',             -- under stress
        'resting',            -- resting measurement
        'other'               -- anything else
    )),
    -- Warning flags reported by Omron devices
    irregular_heartbeat BOOLEAN DEFAULT false,
    possible_afib BOOLEAN DEFAULT false,
    -- Metadata
    note TEXT,
    source VARCHAR(20) DEFAULT 'manual' CHECK (source IN ('manual', 'omron', 'apple_health', 'withings')),
    created_at TIMESTAMP DEFAULT NOW(),
    -- One measurement per profile and timestamp; CSV import upserts on this
    CONSTRAINT unique_bp_measurement UNIQUE(profile_id, measured_at)
);
-- IF NOT EXISTS keeps the migration idempotent, consistent with CREATE TABLE above
CREATE INDEX IF NOT EXISTS idx_blood_pressure_profile_datetime ON blood_pressure_log(profile_id, measured_at DESC);
CREATE INDEX IF NOT EXISTS idx_blood_pressure_context ON blood_pressure_log(context) WHERE context IS NOT NULL;
COMMENT ON TABLE blood_pressure_log IS 'v9d Phase 2d: Blood pressure measurements (multiple per day, context-aware)';
COMMENT ON COLUMN blood_pressure_log.context IS 'Measurement context for correlation analysis';
COMMENT ON COLUMN blood_pressure_log.irregular_heartbeat IS 'Irregular heartbeat detected (Omron device)';
COMMENT ON COLUMN blood_pressure_log.possible_afib IS 'Possible atrial fibrillation (Omron device)';
-- ══════════════════════════════════════════════════════════════════════════════
-- STEP 2: Migrate existing data from vitals_log
-- ══════════════════════════════════════════════════════════════════════════════
-- Migrate baseline vitals (RHR, HRV, VO2 Max, SpO2, respiratory rate).
-- Fix: map source values the new CHECK constraint does not allow (e.g. 'omron'
-- rows that also carried morning vitals) to 'manual', so this INSERT cannot
-- fail the vitals_baseline source constraint.
INSERT INTO vitals_baseline (
    profile_id, date,
    resting_hr, hrv, vo2_max, spo2, respiratory_rate,
    note, source, created_at, updated_at
)
SELECT
    profile_id, date,
    resting_hr, hrv, vo2_max, spo2, respiratory_rate,
    note,
    CASE
        WHEN source IN ('manual', 'apple_health', 'garmin', 'withings') THEN source
        ELSE 'manual'
    END AS source,
    created_at, updated_at
FROM vitals_log
WHERE resting_hr IS NOT NULL
   OR hrv IS NOT NULL
   OR vo2_max IS NOT NULL
   OR spo2 IS NOT NULL
   OR respiratory_rate IS NOT NULL
ON CONFLICT (profile_id, date) DO NOTHING;

-- Migrate blood pressure measurements.
-- Note: vitals_log only stored a DATE, so 08:00 is assumed as the measurement
-- time (typical morning measurement).
INSERT INTO blood_pressure_log (
    profile_id, measured_at,
    systolic, diastolic, pulse,
    irregular_heartbeat, possible_afib,
    note, source, created_at
)
SELECT
    profile_id,
    (date + TIME '08:00:00')::timestamp AS measured_at,
    blood_pressure_systolic,
    blood_pressure_diastolic,
    pulse,
    irregular_heartbeat,
    possible_afib,
    note,
    -- Preserve provenance where the new CHECK constraint allows it; the old
    -- version collapsed 'apple_health'/'withings' to 'manual' unnecessarily.
    CASE
        WHEN source IN ('manual', 'omron', 'apple_health', 'withings') THEN source
        ELSE 'manual'
    END AS source,
    created_at
FROM vitals_log
WHERE blood_pressure_systolic IS NOT NULL
  AND blood_pressure_diastolic IS NOT NULL
ON CONFLICT (profile_id, measured_at) DO NOTHING;
-- ══════════════════════════════════════════════════════════════════════════════
-- STEP 3: Retire the old vitals_log table (keep a backup)
-- ══════════════════════════════════════════════════════════════════════════════
-- Rename instead of DROP so the original data survives this migration.
-- The backup can be dropped manually once the new tables are verified in
-- production (e.g. DROP TABLE vitals_log_backup_pre_015;).
ALTER TABLE vitals_log RENAME TO vitals_log_backup_pre_015;
-- Drop the old index explicitly: RENAME TABLE keeps indexes attached, so it
-- now indexes the backup table; dropping it frees the name and the storage.
DROP INDEX IF EXISTS idx_vitals_profile_date;
-- ══════════════════════════════════════════════════════════════════════════════
-- STEP 4: Prepared for future vitals types
-- ══════════════════════════════════════════════════════════════════════════════
-- Future tables (intentionally commented out; create via a new migration when
-- the corresponding feature lands):
-- Glucose log (blood sugar tracking, context-aware like blood_pressure_log)
-- CREATE TABLE glucose_log (
--     id SERIAL PRIMARY KEY,
--     profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
--     measured_at TIMESTAMP NOT NULL,
--     glucose_mg_dl INTEGER NOT NULL CHECK (glucose_mg_dl > 0 AND glucose_mg_dl < 500),
--     context VARCHAR(30) CHECK (context IN (
--         'fasted', 'before_meal', 'after_meal_1h', 'after_meal_2h', 'before_training', 'after_training', 'other'
--     )),
--     note TEXT,
--     source VARCHAR(20) DEFAULT 'manual',
--     created_at TIMESTAMP DEFAULT NOW(),
--     CONSTRAINT unique_glucose_measurement UNIQUE(profile_id, measured_at)
-- );
-- Temperature log (illness tracking; distinct from the daily baseline value)
-- CREATE TABLE temperature_log (
--     id SERIAL PRIMARY KEY,
--     profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
--     measured_at TIMESTAMP NOT NULL,
--     temperature_celsius DECIMAL(3,1) NOT NULL CHECK (temperature_celsius > 30 AND temperature_celsius < 45),
--     measurement_location VARCHAR(20) CHECK (measurement_location IN ('oral', 'ear', 'forehead', 'armpit')),
--     note TEXT,
--     created_at TIMESTAMP DEFAULT NOW(),
--     CONSTRAINT unique_temperature_measurement UNIQUE(profile_id, measured_at)
-- );
-- ══════════════════════════════════════════════════════════════════════════════
-- Migration complete
-- ══════════════════════════════════════════════════════════════════════════════

View File

@ -0,0 +1,393 @@
"""
Blood Pressure Router - v9d Phase 2d Refactored
Context-dependent blood pressure measurements (multiple times per day):
- Systolic/Diastolic Blood Pressure
- Pulse during measurement
- Context tagging (morning_fasted, after_meal, before_training, etc.)
- Warning flags (irregular heartbeat, AFib)
Endpoints:
- GET /api/blood-pressure List BP measurements
- GET /api/blood-pressure/by-date/{date} Get measurements for specific date
- POST /api/blood-pressure Create BP measurement
- PUT /api/blood-pressure/{id} Update BP measurement
- DELETE /api/blood-pressure/{id} Delete BP measurement
- GET /api/blood-pressure/stats Statistics and trends
- POST /api/blood-pressure/import/omron Import Omron CSV
"""
from fastapi import APIRouter, HTTPException, Depends, Header, UploadFile, File
from pydantic import BaseModel
from typing import Optional
from datetime import datetime, timedelta
import logging
import csv
import io
from db import get_db, get_cursor, r2d
from auth import require_auth
from routers.profiles import get_pid
router = APIRouter(prefix="/api/blood-pressure", tags=["blood_pressure"])
logger = logging.getLogger(__name__)
# German month mapping for Omron dates.
# Maps the full month name plus the abbreviated forms (with and without a
# trailing dot) that Omron CSV exports use, to a zero-padded month number
# suitable for building ISO date strings. 'Mai' has no abbreviated form.
GERMAN_MONTHS = {
    'Januar': '01', 'Jan.': '01', 'Jan': '01',
    'Februar': '02', 'Feb.': '02', 'Feb': '02',
    'März': '03', 'Mär.': '03', 'Mär': '03',
    'April': '04', 'Apr.': '04', 'Apr': '04',
    'Mai': '05',
    'Juni': '06', 'Jun.': '06', 'Jun': '06',
    'Juli': '07', 'Jul.': '07', 'Jul': '07',
    'August': '08', 'Aug.': '08', 'Aug': '08',
    'September': '09', 'Sep.': '09', 'Sep': '09',
    'Oktober': '10', 'Okt.': '10', 'Okt': '10',
    'November': '11', 'Nov.': '11', 'Nov': '11',
    'Dezember': '12', 'Dez.': '12', 'Dez': '12',
}
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Pydantic Models
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
class BPEntry(BaseModel):
    """Request payload for one blood pressure measurement.

    Value ranges and the set of allowed `context` strings are enforced by
    the CHECK constraints on blood_pressure_log, not by this model — invalid
    values surface as database errors, not 422s.
    """
    measured_at: str  # ISO format datetime
    systolic: int
    diastolic: int
    pulse: Optional[int] = None
    context: Optional[str] = None  # morning_fasted, after_meal, etc.
    irregular_heartbeat: Optional[bool] = False
    possible_afib: Optional[bool] = False
    note: Optional[str] = None
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Helper Functions
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def parse_omron_date(date_str: str, time_str: str) -> Optional[str]:
    """
    Parse the Omron German date/time format into an ISO datetime string.

    Input:  "13 März 2026", "08:30"
    Output: "2026-03-13 08:30:00", or None when the input cannot be parsed
            (previous annotation claimed `str`, but None is a real outcome).

    Unknown month names and malformed dates are logged and mapped to None so
    a single bad CSV row never aborts an import.
    """
    try:
        parts = date_str.strip().split()
        if len(parts) != 3:
            return None
        day, month_name, year = parts
        month = GERMAN_MONTHS.get(month_name)
        if not month:
            return None
        iso_datetime = f"{year}-{month}-{day.zfill(2)} {time_str.strip()}:00"
        # Round-trip through fromisoformat purely as validation; it raises on
        # impossible dates/times (e.g. day 32), which we map to None below.
        datetime.fromisoformat(iso_datetime)
        return iso_datetime
    except Exception as e:
        logger.error(f"Error parsing Omron date: {date_str} {time_str} - {e}")
        return None
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# CRUD Endpoints
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.get("")
def list_bp_measurements(
    limit: int = 90,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the most recent blood pressure measurements, newest first."""
    profile_id = get_pid(x_profile_id)
    query = """
        SELECT * FROM blood_pressure_log
        WHERE profile_id = %s
        ORDER BY measured_at DESC
        LIMIT %s
    """
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(query, (profile_id, limit))
        rows = cursor.fetchall()
    return [r2d(row) for row in rows]
@router.get("/by-date/{date}")
def get_bp_by_date(
    date: str,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return every BP measurement taken on one calendar date, oldest first."""
    profile_id = get_pid(x_profile_id)
    query = """
        SELECT * FROM blood_pressure_log
        WHERE profile_id = %s
          AND DATE(measured_at) = %s
        ORDER BY measured_at ASC
    """
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(query, (profile_id, date))
        rows = cursor.fetchall()
    return [r2d(row) for row in rows]
@router.post("")
def create_bp_measurement(
    entry: BPEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Insert a new manually entered BP measurement and return the stored row."""
    profile_id = get_pid(x_profile_id)
    params = (
        profile_id,
        entry.measured_at,
        entry.systolic,
        entry.diastolic,
        entry.pulse,
        entry.context,
        entry.irregular_heartbeat,
        entry.possible_afib,
        entry.note,
    )
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            """
            INSERT INTO blood_pressure_log (
                profile_id, measured_at,
                systolic, diastolic, pulse,
                context, irregular_heartbeat, possible_afib,
                note, source
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, 'manual')
            RETURNING *
            """,
            params,
        )
        return r2d(cursor.fetchone())
@router.put("/{entry_id}")
def update_bp_measurement(
    entry_id: int,
    entry: BPEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Replace an existing BP measurement (full overwrite, PUT semantics)."""
    profile_id = get_pid(x_profile_id)
    params = (
        entry.measured_at,
        entry.systolic,
        entry.diastolic,
        entry.pulse,
        entry.context,
        entry.irregular_heartbeat,
        entry.possible_afib,
        entry.note,
        entry_id,
        profile_id,
    )
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            """
            UPDATE blood_pressure_log
            SET measured_at = %s,
                systolic = %s,
                diastolic = %s,
                pulse = %s,
                context = %s,
                irregular_heartbeat = %s,
                possible_afib = %s,
                note = %s
            WHERE id = %s AND profile_id = %s
            RETURNING *
            """,
            params,
        )
        updated_row = cursor.fetchone()
        if not updated_row:
            raise HTTPException(404, "Entry not found")
        return r2d(updated_row)
@router.delete("/{entry_id}")
def delete_bp_measurement(
    entry_id: int,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Delete one BP measurement owned by the active profile."""
    profile_id = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "DELETE FROM blood_pressure_log WHERE id = %s AND profile_id = %s",
            (entry_id, profile_id),
        )
        # rowcount == 0 means no row matched both id and profile — treat
        # foreign rows and missing rows identically (no information leak).
        if cursor.rowcount == 0:
            raise HTTPException(404, "Entry not found")
    return {"ok": True}
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Statistics & Trends
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def _classify_bp(avg_systolic, avg_diastolic):
    """
    Classify an average blood pressure per WHO/ISH guideline thresholds.

    Returns a category string, or None when either average is missing/zero.
    A category applies only when BOTH values are below its limits, so the
    result is effectively the worse of the two individual classifications.
    """
    if not avg_systolic or not avg_diastolic:
        return None
    # (systolic limit, diastolic limit, category), ascending severity
    grades = [
        (120, 80, 'optimal'),
        (130, 85, 'normal'),
        (140, 90, 'high_normal'),
        (160, 100, 'grade_1_hypertension'),
        (180, 110, 'grade_2_hypertension'),
    ]
    for systolic_limit, diastolic_limit, category in grades:
        if avg_systolic < systolic_limit and avg_diastolic < diastolic_limit:
            return category
    return 'grade_3_hypertension'
@router.get("/stats")
def get_bp_stats(
    days: int = 30,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Blood pressure statistics over the last `days` days.

    Returns overall, 7-day and context-specific averages, warning-flag counts
    and a WHO/ISH category for the overall average.

    NOTE(review): AVG() presumably comes back as Decimal (or None when there
    are no rows); the comparisons in _classify_bp handle both — confirm with
    the cursor factory in db.get_cursor.
    """
    pid = get_pid(x_profile_id)
    cutoff_date = datetime.now() - timedelta(days=days)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("""
            SELECT
                COUNT(*) as total_measurements,
                -- Overall averages
                AVG(systolic) as avg_systolic,
                AVG(diastolic) as avg_diastolic,
                AVG(pulse) FILTER (WHERE pulse IS NOT NULL) as avg_pulse,
                -- 7-day averages
                AVG(systolic) FILTER (WHERE measured_at >= NOW() - INTERVAL '7 days') as avg_systolic_7d,
                AVG(diastolic) FILTER (WHERE measured_at >= NOW() - INTERVAL '7 days') as avg_diastolic_7d,
                -- Context-specific averages
                AVG(systolic) FILTER (WHERE context = 'morning_fasted') as avg_systolic_morning,
                AVG(diastolic) FILTER (WHERE context = 'morning_fasted') as avg_diastolic_morning,
                AVG(systolic) FILTER (WHERE context = 'evening') as avg_systolic_evening,
                AVG(diastolic) FILTER (WHERE context = 'evening') as avg_diastolic_evening,
                -- Warning flags
                COUNT(*) FILTER (WHERE irregular_heartbeat = true) as irregular_count,
                COUNT(*) FILTER (WHERE possible_afib = true) as afib_count
            FROM blood_pressure_log
            WHERE profile_id = %s AND measured_at >= %s
        """, (pid, cutoff_date))
        stats = r2d(cur.fetchone())
        # Classification extracted into a pure helper for readability/testing.
        stats['bp_category'] = _classify_bp(stats['avg_systolic'], stats['avg_diastolic'])
        return stats
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Import: Omron CSV
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.post("/import/omron")
async def import_omron_csv(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Import blood pressure measurements from an Omron CSV export.

    Accepts German ('Datum', 'Systolisch', ...) or English column headers.
    Rows are upserted on (profile_id, measured_at); manually entered rows
    are never overwritten by imported data.

    Returns counts: inserted / updated / skipped / errors.
    """
    pid = get_pid(x_profile_id)
    content = await file.read()
    # Bug fix: decode with utf-8-sig so a UTF-8 BOM (common in exports from
    # Windows tools) does not corrupt the first header name — previously a
    # BOM turned 'Datum' into '\ufeffDatum' and every row was skipped.
    decoded = content.decode('utf-8-sig')
    reader = csv.DictReader(io.StringIO(decoded))
    inserted = 0
    updated = 0
    skipped = 0
    errors = 0
    with get_db() as conn:
        cur = get_cursor(conn)
        for row in reader:
            try:
                # Omron exports German headers; fall back to English ones.
                date_str = row.get('Datum', row.get('Date'))
                time_str = row.get('Zeit', row.get('Time', '08:00'))
                if not date_str:
                    skipped += 1
                    continue
                measured_at = parse_omron_date(date_str, time_str)
                if not measured_at:
                    errors += 1
                    continue
                # Extract measurements (strings at this point; cast on insert)
                systolic = row.get('Systolisch', row.get('Systolic'))
                diastolic = row.get('Diastolisch', row.get('Diastolic'))
                pulse = row.get('Puls', row.get('Pulse'))
                if not systolic or not diastolic:
                    skipped += 1
                    continue
                # Omron warning flags arrive as localized yes/no strings.
                irregular = row.get('Unregelmäßiger Herzschlag', row.get('Irregular Heartbeat', ''))
                afib = row.get('Vorhofflimmern', row.get('AFib', ''))
                irregular_heartbeat = irregular.lower() in ['ja', 'yes', 'true', '1']
                possible_afib = afib.lower() in ['ja', 'yes', 'true', '1']
                # Derive a coarse measurement context from the time of day.
                hour = int(time_str.split(':')[0])
                if 5 <= hour < 10:
                    context = 'morning_fasted'
                elif 18 <= hour < 23:
                    context = 'evening'
                else:
                    context = 'other'
                # Upsert; the WHERE clause protects manual entries from being
                # overwritten. (xmax = 0) is the Postgres idiom distinguishing
                # a fresh insert from an update of an existing row.
                cur.execute("""
                    INSERT INTO blood_pressure_log (
                        profile_id, measured_at,
                        systolic, diastolic, pulse,
                        context, irregular_heartbeat, possible_afib,
                        source
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, 'omron')
                    ON CONFLICT (profile_id, measured_at)
                    DO UPDATE SET
                        systolic = EXCLUDED.systolic,
                        diastolic = EXCLUDED.diastolic,
                        pulse = EXCLUDED.pulse,
                        context = EXCLUDED.context,
                        irregular_heartbeat = EXCLUDED.irregular_heartbeat,
                        possible_afib = EXCLUDED.possible_afib
                    WHERE blood_pressure_log.source != 'manual'
                    RETURNING (xmax = 0) AS inserted
                """, (
                    pid, measured_at,
                    int(systolic), int(diastolic),
                    int(pulse) if pulse else None,
                    context, irregular_heartbeat, possible_afib
                ))
                result = cur.fetchone()
                if result and result['inserted']:
                    inserted += 1
                elif result:
                    updated += 1
                else:
                    # Bug fix: when the conflict target is a manual entry the
                    # DO UPDATE ... WHERE skips the write and RETURNING yields
                    # no row — count that as skipped, not as updated.
                    skipped += 1
            except Exception as e:
                logger.error(f"Error importing Omron row: {e}")
                errors += 1
    return {
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors
    }

View File

@ -82,8 +82,11 @@ def _get_profile_data(pid: str):
sleep = [r2d(r) for r in cur.fetchall()] sleep = [r2d(r) for r in cur.fetchall()]
cur.execute("SELECT * FROM rest_days WHERE profile_id=%s ORDER BY date DESC LIMIT 30", (pid,)) cur.execute("SELECT * FROM rest_days WHERE profile_id=%s ORDER BY date DESC LIMIT 30", (pid,))
rest_days = [r2d(r) for r in cur.fetchall()] rest_days = [r2d(r) for r in cur.fetchall()]
cur.execute("SELECT * FROM vitals_log WHERE profile_id=%s ORDER BY date DESC LIMIT 30", (pid,)) # v9d Phase 2d Refactored: separate baseline and BP tables
vitals = [r2d(r) for r in cur.fetchall()] cur.execute("SELECT * FROM vitals_baseline WHERE profile_id=%s ORDER BY date DESC LIMIT 30", (pid,))
vitals_baseline = [r2d(r) for r in cur.fetchall()]
cur.execute("SELECT * FROM blood_pressure_log WHERE profile_id=%s ORDER BY measured_at DESC LIMIT 90", (pid,))
blood_pressure = [r2d(r) for r in cur.fetchall()]
return { return {
"profile": prof, "profile": prof,
"weight": weight, "weight": weight,
@ -93,7 +96,8 @@ def _get_profile_data(pid: str):
"activity": activity, "activity": activity,
"sleep": sleep, "sleep": sleep,
"rest_days": rest_days, "rest_days": rest_days,
"vitals": vitals "vitals_baseline": vitals_baseline,
"blood_pressure": blood_pressure
} }
@ -115,7 +119,8 @@ def _prepare_template_vars(data: dict) -> dict:
activity = data['activity'] activity = data['activity']
sleep = data.get('sleep', []) sleep = data.get('sleep', [])
rest_days = data.get('rest_days', []) rest_days = data.get('rest_days', [])
vitals = data.get('vitals', []) vitals_baseline = data.get('vitals_baseline', [])
blood_pressure = data.get('blood_pressure', [])
vars = { vars = {
"name": prof.get('name', 'Nutzer'), "name": prof.get('name', 'Nutzer'),
@ -240,40 +245,48 @@ def _prepare_template_vars(data: dict) -> dict:
vars['rest_days_count'] = 0 vars['rest_days_count'] = 0
vars['rest_days_types'] = "keine" vars['rest_days_types'] = "keine"
# Vitals summary (v9d Phase 2d) # Vitals Baseline summary (v9d Phase 2d Refactored)
if vitals: if vitals_baseline:
n = len(vitals) n = len(vitals_baseline)
hr_data = [v for v in vitals if v.get('resting_hr')] hr_data = [v for v in vitals_baseline if v.get('resting_hr')]
hrv_data = [v for v in vitals if v.get('hrv')] hrv_data = [v for v in vitals_baseline if v.get('hrv')]
bp_data = [v for v in vitals if v.get('blood_pressure_systolic') and v.get('blood_pressure_diastolic')] vo2_data = [v for v in vitals_baseline if v.get('vo2_max')]
vo2_data = [v for v in vitals if v.get('vo2_max')]
avg_hr = sum(int(v.get('resting_hr')) for v in hr_data) / len(hr_data) if hr_data else 0 avg_hr = sum(int(v.get('resting_hr')) for v in hr_data) / len(hr_data) if hr_data else 0
avg_hrv = sum(int(v.get('hrv')) for v in hrv_data) / len(hrv_data) if hrv_data else 0 avg_hrv = sum(int(v.get('hrv')) for v in hrv_data) / len(hrv_data) if hrv_data else 0
avg_bp_sys = sum(int(v.get('blood_pressure_systolic')) for v in bp_data) / len(bp_data) if bp_data else 0
avg_bp_dia = sum(int(v.get('blood_pressure_diastolic')) for v in bp_data) / len(bp_data) if bp_data else 0
latest_vo2 = float(vo2_data[0].get('vo2_max')) if vo2_data else 0 latest_vo2 = float(vo2_data[0].get('vo2_max')) if vo2_data else 0
parts = [] parts = []
if avg_hr: parts.append(f"Ruhepuls Ø {avg_hr:.0f}bpm") if avg_hr: parts.append(f"Ruhepuls Ø {avg_hr:.0f}bpm")
if avg_hrv: parts.append(f"HRV Ø {avg_hrv:.0f}ms") if avg_hrv: parts.append(f"HRV Ø {avg_hrv:.0f}ms")
if avg_bp_sys: parts.append(f"Blutdruck Ø {avg_bp_sys:.0f}/{avg_bp_dia:.0f}mmHg")
if latest_vo2: parts.append(f"VO2 Max {latest_vo2:.1f}") if latest_vo2: parts.append(f"VO2 Max {latest_vo2:.1f}")
vars['vitals_summary'] = f"{n} Messungen: " + ", ".join(parts) if parts else "keine verwertbaren Daten" vars['vitals_summary'] = f"{n} Messungen: " + ", ".join(parts) if parts else "keine verwertbaren Daten"
vars['vitals_detail'] = vars['vitals_summary'] vars['vitals_detail'] = vars['vitals_summary']
vars['vitals_avg_hr'] = round(avg_hr) vars['vitals_avg_hr'] = round(avg_hr)
vars['vitals_avg_hrv'] = round(avg_hrv) vars['vitals_avg_hrv'] = round(avg_hrv)
vars['vitals_avg_bp'] = f"{round(avg_bp_sys)}/{round(avg_bp_dia)}" if avg_bp_sys else "k.A."
vars['vitals_vo2_max'] = round(latest_vo2, 1) if latest_vo2 else "k.A." vars['vitals_vo2_max'] = round(latest_vo2, 1) if latest_vo2 else "k.A."
else: else:
vars['vitals_summary'] = "keine Daten" vars['vitals_summary'] = "keine Daten"
vars['vitals_detail'] = "keine Daten" vars['vitals_detail'] = "keine Daten"
vars['vitals_avg_hr'] = 0 vars['vitals_avg_hr'] = 0
vars['vitals_avg_hrv'] = 0 vars['vitals_avg_hrv'] = 0
vars['vitals_avg_bp'] = "k.A."
vars['vitals_vo2_max'] = "k.A." vars['vitals_vo2_max'] = "k.A."
# Blood Pressure summary (v9d Phase 2d Refactored)
if blood_pressure:
n = len(blood_pressure)
bp_data = [bp for bp in blood_pressure if bp.get('systolic') and bp.get('diastolic')]
avg_bp_sys = sum(int(bp.get('systolic')) for bp in bp_data) / len(bp_data) if bp_data else 0
avg_bp_dia = sum(int(bp.get('diastolic')) for bp in bp_data) / len(bp_data) if bp_data else 0
vars['vitals_avg_bp'] = f"{round(avg_bp_sys)}/{round(avg_bp_dia)}" if avg_bp_sys else "k.A."
vars['bp_summary'] = f"{n} Messungen, Ø {avg_bp_sys:.0f}/{avg_bp_dia:.0f} mmHg" if avg_bp_sys else "keine Daten"
else:
vars['vitals_avg_bp'] = "k.A."
vars['bp_summary'] = "keine Daten"
return vars return vars

View File

@ -0,0 +1,374 @@
"""
Vitals Baseline Router - v9d Phase 2d Refactored
Baseline vitals measured once daily (morning, fasted):
- Resting Heart Rate (RHR)
- Heart Rate Variability (HRV)
- VO2 Max
- SpO2 (Blood Oxygen Saturation)
- Respiratory Rate
Endpoints:
- GET /api/vitals/baseline List baseline vitals
- GET /api/vitals/baseline/by-date/{date} Get entry for specific date
- POST /api/vitals/baseline Create/update baseline entry (upsert)
- PUT /api/vitals/baseline/{id} Update baseline entry
- DELETE /api/vitals/baseline/{id} Delete baseline entry
- GET /api/vitals/baseline/stats Statistics and trends
- POST /api/vitals/baseline/import/apple-health Import Apple Health CSV
"""
from fastapi import APIRouter, HTTPException, Depends, Header, UploadFile, File
from pydantic import BaseModel
from typing import Optional
from datetime import datetime, timedelta
import logging
import csv
import io
from db import get_db, get_cursor, r2d
from auth import require_auth
from routers.profiles import get_pid
router = APIRouter(prefix="/api/vitals/baseline", tags=["vitals_baseline"])
logger = logging.getLogger(__name__)
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Pydantic Models
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
class BaselineEntry(BaseModel):
    """Request payload for one day's baseline vitals.

    All vitals are optional so partial submissions are possible; the POST/PUT
    endpoints reject a payload in which no field is provided. Value ranges
    are enforced by the CHECK constraints on vitals_baseline.
    """
    date: str
    resting_hr: Optional[int] = None
    hrv: Optional[int] = None
    vo2_max: Optional[float] = None
    spo2: Optional[int] = None
    respiratory_rate: Optional[float] = None
    body_temperature: Optional[float] = None
    resting_metabolic_rate: Optional[int] = None
    note: Optional[str] = None
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# CRUD Endpoints
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.get("")
def list_baseline_vitals(
    limit: int = 90,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the most recent baseline entries, newest date first."""
    profile_id = get_pid(x_profile_id)
    query = """
        SELECT * FROM vitals_baseline
        WHERE profile_id = %s
        ORDER BY date DESC
        LIMIT %s
    """
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(query, (profile_id, limit))
        rows = cursor.fetchall()
    return [r2d(record) for record in rows]
@router.get("/by-date/{date}")
def get_baseline_by_date(
    date: str,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the baseline entry for one date, or None when absent."""
    profile_id = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            "SELECT * FROM vitals_baseline WHERE profile_id = %s AND date = %s",
            (profile_id, date),
        )
        record = cursor.fetchone()
    if record is None:
        return None
    return r2d(record)
@router.post("")
def create_or_update_baseline(
    entry: BaselineEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Create or update the baseline entry for a date (upsert on profile_id + date).

    Only provided (non-None) fields are written; values for omitted fields
    are preserved simply by not listing them in the UPDATE SET clause, which
    replaces the previous COALESCE(EXCLUDED..., ...) construction.

    Raises:
        HTTPException 400: if no baseline vital was provided at all.
    """
    pid = get_pid(x_profile_id)
    # Fixed whitelist of columns -> submitted values (no injection risk: only
    # these literal names can ever reach the SQL string).
    # NOTE: `note` deliberately uses truthiness (empty string ignored), same
    # as the previous implementation.
    provided = {}
    if entry.resting_hr is not None:
        provided['resting_hr'] = entry.resting_hr
    if entry.hrv is not None:
        provided['hrv'] = entry.hrv
    if entry.vo2_max is not None:
        provided['vo2_max'] = entry.vo2_max
    if entry.spo2 is not None:
        provided['spo2'] = entry.spo2
    if entry.respiratory_rate is not None:
        provided['respiratory_rate'] = entry.respiratory_rate
    if entry.body_temperature is not None:
        provided['body_temperature'] = entry.body_temperature
    if entry.resting_metabolic_rate is not None:
        provided['resting_metabolic_rate'] = entry.resting_metabolic_rate
    if entry.note:
        provided['note'] = entry.note
    if not provided:
        raise HTTPException(400, "At least one baseline vital must be provided")
    columns = list(provided.keys())
    # Bug fix: the previous version generated `$1, $2, ...` (asyncpg-style)
    # placeholders, which psycopg's cur.execute() does not understand — every
    # other query in this file uses `%s`.
    placeholders = ", ".join(["%s"] * (2 + len(columns)))
    set_clause = ", ".join(f"{col} = EXCLUDED.{col}" for col in columns)
    query = f"""
        INSERT INTO vitals_baseline (profile_id, date, {', '.join(columns)})
        VALUES ({placeholders})
        ON CONFLICT (profile_id, date)
        DO UPDATE SET {set_clause}, updated_at = NOW()
        RETURNING *
    """
    values = [pid, entry.date] + [provided[col] for col in columns]
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(query, values)
        return r2d(cur.fetchone())
@router.put("/{entry_id}")
def update_baseline(
    entry_id: int,
    entry: BaselineEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Partially update an existing baseline entry.

    Only fields present (non-None) in the payload are written; omitted fields
    keep their stored values. Raises 400 when the payload carries no updatable
    field, 404 when the entry does not exist or belongs to another profile.
    """
    pid = get_pid(x_profile_id)
    # Column -> submitted value. None means "not provided, leave unchanged".
    # Mirrors the column set accepted by the create/upsert endpoint — including
    # body_temperature and resting_metabolic_rate, which the previous version
    # of this handler silently ignored on PUT.
    candidates = {
        "resting_hr": entry.resting_hr,
        "hrv": entry.hrv,
        "vo2_max": entry.vo2_max,
        "spo2": entry.spo2,
        "respiratory_rate": entry.respiratory_rate,
        "body_temperature": entry.body_temperature,
        "resting_metabolic_rate": entry.resting_metabolic_rate,
        # Truthiness on purpose: an empty-string note counts as "not provided",
        # matching the original behavior (notes cannot be cleared via PUT).
        "note": entry.note if entry.note else None,
    }
    updates = []
    values = []
    idx = 1
    for column, value in candidates.items():
        if value is not None:
            # NOTE(review): this handler uses $N placeholders while the static
            # queries in this router use %s — confirm get_cursor() accepts both.
            updates.append(f"{column} = ${idx}")
            values.append(value)
            idx += 1
    if not updates:
        raise HTTPException(400, "No fields to update")
    updates.append("updated_at = NOW()")
    values.extend([entry_id, pid])
    with get_db() as conn:
        cur = get_cursor(conn)
        # Scoping by profile_id prevents cross-profile updates.
        query = f"""
            UPDATE vitals_baseline
            SET {', '.join(updates)}
            WHERE id = ${idx} AND profile_id = ${idx + 1}
            RETURNING *
        """
        cur.execute(query, values)
        row = cur.fetchone()
        if not row:
            raise HTTPException(404, "Entry not found")
        return r2d(row)
@router.delete("/{entry_id}")
def delete_baseline(
    entry_id: int,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Remove a single baseline entry owned by the active profile.

    Raises 404 when the id does not exist or belongs to another profile.
    """
    profile_id = get_pid(x_profile_id)
    with get_db() as conn:
        cursor = get_cursor(conn)
        cursor.execute(
            """
            DELETE FROM vitals_baseline
            WHERE id = %s AND profile_id = %s
        """,
            (entry_id, profile_id),
        )
        # rowcount == 0: nothing matched (wrong id or foreign profile).
        if not cursor.rowcount:
            raise HTTPException(404, "Entry not found")
    return {"ok": True}
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Statistics & Trends
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def _trend(avg_short, avg_long, threshold):
    """Classify a short-term average against a long-term one.

    Returns 'increasing' / 'decreasing' when the short average deviates from
    the long average by more than `threshold`, 'stable' inside the band, and
    None when either average is missing.
    """
    if avg_short is None or avg_long is None:
        return None
    if avg_short > avg_long + threshold:
        return 'increasing'
    if avg_short < avg_long - threshold:
        return 'decreasing'
    return 'stable'


@router.get("/stats")
def get_baseline_stats(
    days: int = 30,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Get baseline vitals statistics and 7d-vs-30d trends.

    `days` bounds the overall window (entry count and the aggregate scan);
    the 7d/30d rolling averages are always anchored at today.
    """
    pid = get_pid(x_profile_id)
    cutoff_date = (datetime.now() - timedelta(days=days)).date()
    with get_db() as conn:
        cur = get_cursor(conn)
        # Bugfix: anchor the rolling windows at CURRENT_DATE. They were
        # previously anchored at cutoff_date (e.g. `date >= cutoff - 7 days`),
        # which with days=30 made the "7d" filter span the entire window, so
        # avg_*_7d always equalled avg_*_30d and every trend came out 'stable'.
        cur.execute("""
            SELECT
                COUNT(*) as total_entries,
                -- Resting HR
                AVG(resting_hr) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '7 days') as avg_rhr_7d,
                AVG(resting_hr) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '30 days') as avg_rhr_30d,
                -- HRV
                AVG(hrv) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '7 days') as avg_hrv_7d,
                AVG(hrv) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '30 days') as avg_hrv_30d,
                -- Latest values
                (SELECT vo2_max FROM vitals_baseline WHERE profile_id = %s AND vo2_max IS NOT NULL ORDER BY date DESC LIMIT 1) as latest_vo2_max,
                AVG(spo2) FILTER (WHERE date >= CURRENT_DATE - INTERVAL '7 days') as avg_spo2_7d
            FROM vitals_baseline
            WHERE profile_id = %s AND date >= %s
        """, (pid, pid, cutoff_date))
        stats = r2d(cur.fetchone())
    # Lower resting HR is good, higher HRV is good; 2 bpm / 5 ms dead bands
    # keep day-to-day noise from flipping the trend label.
    stats['trend_rhr'] = _trend(stats['avg_rhr_7d'], stats['avg_rhr_30d'], 2)
    stats['trend_hrv'] = _trend(stats['avg_hrv_7d'], stats['avg_hrv_30d'], 5)
    return stats
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Import: Apple Health CSV
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def _csv_int(raw):
    """Parse a CSV cell to int; tolerates float-formatted values ('62.5' -> 62).

    Empty string / None -> None. The original `int(raw)` raised ValueError on
    decimal-formatted cells, which Apple Health exports frequently contain.
    """
    return int(float(raw)) if raw else None


def _csv_float(raw):
    """Parse a CSV cell to float; empty string / None -> None."""
    return float(raw) if raw else None


@router.post("/import/apple-health")
async def import_apple_health_baseline(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Import baseline vitals from Apple Health CSV export.

    Upserts one row per calendar date with source='apple_health'. Rows whose
    existing entry was created manually are never overwritten (the ON CONFLICT
    update is guarded by source != 'manual'). Returns per-row counts:
    inserted / updated / skipped / errors.
    """
    pid = get_pid(x_profile_id)
    content = await file.read()
    # utf-8-sig strips a leading BOM, which spreadsheet-produced CSV exports
    # often carry; plain UTF-8 input decodes identically.
    decoded = content.decode('utf-8-sig')
    reader = csv.DictReader(io.StringIO(decoded))
    inserted = 0
    updated = 0
    skipped = 0
    errors = 0
    with get_db() as conn:
        cur = get_cursor(conn)
        for row in reader:
            try:
                start = row.get('Start')
                if not start:
                    skipped += 1
                    continue
                date = start[:10]  # "YYYY-MM-DD hh:mm" -> date part only
                # Extract baseline vitals from Apple Health export
                rhr = row.get('Resting Heart Rate')
                hrv = row.get('Heart Rate Variability')
                vo2 = row.get('VO2 Max')
                spo2 = row.get('Oxygen Saturation')
                resp_rate = row.get('Respiratory Rate')
                # Skip rows that carry none of the baseline vitals
                if not any([rhr, hrv, vo2, spo2, resp_rate]):
                    skipped += 1
                    continue
                # NOTE(review): spo2 is stored as int — assumes the export uses
                # integer percentages (97), not fractions (0.97); confirm.
                cur.execute("""
                    INSERT INTO vitals_baseline (
                        profile_id, date,
                        resting_hr, hrv, vo2_max, spo2, respiratory_rate,
                        source
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, 'apple_health')
                    ON CONFLICT (profile_id, date)
                    DO UPDATE SET
                        resting_hr = COALESCE(EXCLUDED.resting_hr, vitals_baseline.resting_hr),
                        hrv = COALESCE(EXCLUDED.hrv, vitals_baseline.hrv),
                        vo2_max = COALESCE(EXCLUDED.vo2_max, vitals_baseline.vo2_max),
                        spo2 = COALESCE(EXCLUDED.spo2, vitals_baseline.spo2),
                        respiratory_rate = COALESCE(EXCLUDED.respiratory_rate, vitals_baseline.respiratory_rate),
                        updated_at = NOW()
                    WHERE vitals_baseline.source != 'manual'
                    RETURNING (xmax = 0) AS inserted
                """, (
                    pid, date,
                    _csv_int(rhr),
                    _csv_int(hrv),
                    _csv_float(vo2),
                    _csv_int(spo2),
                    _csv_float(resp_rate)
                ))
                result = cur.fetchone()
                # RETURNING yields no row when the conflict target exists but
                # the source != 'manual' guard blocked the update — nothing was
                # written, so count it as skipped (was miscounted as updated).
                if result is None:
                    skipped += 1
                elif result['inserted']:
                    inserted += 1
                else:
                    updated += 1
            except Exception as e:
                logger.error(f"Error importing row: {e}")
                errors += 1
    return {
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors
    }

View File

@ -61,7 +61,7 @@ function PromptEditor({ prompt, onSave, onCancel }) {
'{{sleep_summary}}','{{sleep_detail}}','{{sleep_avg_duration}}','{{sleep_avg_quality}}', '{{sleep_summary}}','{{sleep_detail}}','{{sleep_avg_duration}}','{{sleep_avg_quality}}',
'{{rest_days_summary}}','{{rest_days_count}}','{{rest_days_types}}', '{{rest_days_summary}}','{{rest_days_count}}','{{rest_days_types}}',
'{{vitals_summary}}','{{vitals_detail}}','{{vitals_avg_hr}}','{{vitals_avg_hrv}}', '{{vitals_summary}}','{{vitals_detail}}','{{vitals_avg_hr}}','{{vitals_avg_hrv}}',
'{{vitals_avg_bp}}','{{vitals_vo2_max}}'] '{{vitals_avg_bp}}','{{vitals_vo2_max}}','{{bp_summary}}']
return ( return (
<div className="card section-gap"> <div className="card section-gap">

View File

@ -257,23 +257,29 @@ export const api = {
getRestDaysStats: (weeks=4) => req(`/rest-days/stats?weeks=${weeks}`), getRestDaysStats: (weeks=4) => req(`/rest-days/stats?weeks=${weeks}`),
validateActivity: (date, activityType) => req('/rest-days/validate-activity', json({date, activity_type: activityType})), validateActivity: (date, activityType) => req('/rest-days/validate-activity', json({date, activity_type: activityType})),
// Vitals (v9d Phase 2d) // Vitals Baseline (v9d Phase 2d Refactored - once daily, morning)
listVitals: (l=90) => req(`/vitals?limit=${l}`), listBaseline: (l=90) => req(`/vitals/baseline?limit=${l}`),
getVitalsByDate: (date) => req(`/vitals/by-date/${date}`), getBaselineByDate: (date) => req(`/vitals/baseline/by-date/${date}`),
createVitals: (d) => req('/vitals', json(d)), createBaseline: (d) => req('/vitals/baseline', json(d)),
updateVitals: (id,d) => req(`/vitals/${id}`, jput(d)), updateBaseline: (id,d) => req(`/vitals/baseline/${id}`, jput(d)),
deleteVitals: (id) => req(`/vitals/${id}`, {method:'DELETE'}), deleteBaseline: (id) => req(`/vitals/baseline/${id}`, {method:'DELETE'}),
getVitalsStats: (days=30) => req(`/vitals/stats?days=${days}`), getBaselineStats: (days=30) => req(`/vitals/baseline/stats?days=${days}`),
importBaselineAppleHealth: (file) => {
// Vitals Import (v9d Phase 2d)
importVitalsOmron: (file) => {
const fd = new FormData() const fd = new FormData()
fd.append('file', file) fd.append('file', file)
return req('/vitals/import/omron', {method:'POST', body:fd}) return req('/vitals/baseline/import/apple-health', {method:'POST', body:fd})
}, },
importVitalsAppleHealth: (file) => {
// Blood Pressure (v9d Phase 2d Refactored - multiple daily, context-aware)
listBloodPressure: (l=90) => req(`/blood-pressure?limit=${l}`),
getBPByDate: (date) => req(`/blood-pressure/by-date/${date}`),
createBloodPressure:(d) => req('/blood-pressure', json(d)),
updateBloodPressure:(id,d) => req(`/blood-pressure/${id}`, jput(d)),
deleteBloodPressure:(id) => req(`/blood-pressure/${id}`, {method:'DELETE'}),
getBPStats: (days=30) => req(`/blood-pressure/stats?days=${days}`),
importBPOmron: (file) => {
const fd = new FormData() const fd = new FormData()
fd.append('file', file) fd.append('file', file)
return req('/vitals/import/apple-health', {method:'POST', body:fd}) return req('/blood-pressure/import/omron', {method:'POST', body:fd})
}, },
} }