mitai-jinkendo/backend/routers/blood_pressure.py
Lars 1866ff9ce6
Some checks failed
Build Test / lint-backend (push) Waiting to run
Build Test / build-frontend (push) Waiting to run
Deploy Development / deploy (push) Has been cancelled
refactor: vitals architecture - separate baseline vs blood pressure
BREAKING CHANGE: vitals_log split into vitals_baseline + blood_pressure_log

**Architektur-Änderung:**
- Baseline-Vitals (langsam veränderlich, 1x täglich morgens)
  → vitals_baseline (RHR, HRV, VO2 Max, SpO2, Atemfrequenz)
- Kontext-abhängige Vitals (mehrfach täglich, situativ)
  → blood_pressure_log (Blutdruck + Kontext-Tagging)

**Migration 015:**
- CREATE TABLE vitals_baseline (once daily, morning measurements)
- CREATE TABLE blood_pressure_log (multiple daily, context-aware)
- Migrate data from vitals_log → new tables
- Rename vitals_log → vitals_log_backup_pre_015 (safety)
- Prepared for future: glucose_log, temperature_log (commented)

**Backend:**
- NEW: routers/vitals_baseline.py (CRUD + Apple Health import)
- NEW: routers/blood_pressure.py (CRUD + Omron import + context)
- UPDATED: main.py (register new routers, remove old vitals)
- UPDATED: insights.py (query new tables, split template vars)

**Frontend:**
- UPDATED: api.js (new endpoints für baseline + BP)
- UPDATED: Analysis.jsx (add {{bp_summary}} variable)

**Nächster Schritt:**
- Frontend: VitalsPage.jsx refactoren (3 Tabs: Morgenmessung, Blutdruck, Import)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-23 16:02:40 +01:00

394 lines
15 KiB
Python

"""
Blood Pressure Router - v9d Phase 2d Refactored
Context-dependent blood pressure measurements (multiple times per day):
- Systolic/Diastolic Blood Pressure
- Pulse during measurement
- Context tagging (morning_fasted, after_meal, before_training, etc.)
- Warning flags (irregular heartbeat, AFib)
Endpoints:
- GET /api/blood-pressure List BP measurements
- GET /api/blood-pressure/by-date/{date} Get measurements for specific date
- POST /api/blood-pressure Create BP measurement
- PUT /api/blood-pressure/{id} Update BP measurement
- DELETE /api/blood-pressure/{id} Delete BP measurement
- GET /api/blood-pressure/stats Statistics and trends
- POST /api/blood-pressure/import/omron Import Omron CSV
"""
from fastapi import APIRouter, HTTPException, Depends, Header, UploadFile, File
from pydantic import BaseModel
from typing import Optional
from datetime import datetime, timedelta
import logging
import csv
import io
from db import get_db, get_cursor, r2d
from auth import require_auth
from routers.profiles import get_pid
# Router serving all /api/blood-pressure endpoints; registered in main.py.
router = APIRouter(prefix="/api/blood-pressure", tags=["blood_pressure"])
# Module-level logger (named after this module) for import/parse diagnostics.
logger = logging.getLogger(__name__)
# German month mapping for Omron dates.
# Maps German month names — full, and abbreviated with/without a trailing
# dot — to two-digit month numbers, as they appear in the 'Datum' column of
# Omron CSV exports. "Mai" has no abbreviated form, hence the single entry.
GERMAN_MONTHS = {
'Januar': '01', 'Jan.': '01', 'Jan': '01',
'Februar': '02', 'Feb.': '02', 'Feb': '02',
'März': '03', 'Mär.': '03', 'Mär': '03',
'April': '04', 'Apr.': '04', 'Apr': '04',
'Mai': '05',
'Juni': '06', 'Jun.': '06', 'Jun': '06',
'Juli': '07', 'Jul.': '07', 'Jul': '07',
'August': '08', 'Aug.': '08', 'Aug': '08',
'September': '09', 'Sep.': '09', 'Sep': '09',
'Oktober': '10', 'Okt.': '10', 'Okt': '10',
'November': '11', 'Nov.': '11', 'Nov': '11',
'Dezember': '12', 'Dez.': '12', 'Dez': '12',
}
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Pydantic Models
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
class BPEntry(BaseModel):
    """Request payload for creating or updating a blood pressure measurement."""

    measured_at: str  # ISO-format datetime of when the reading was taken
    systolic: int
    diastolic: int
    pulse: Optional[int] = None  # pulse (bpm) during the measurement, if recorded
    context: Optional[str] = None  # measurement context, e.g. morning_fasted, after_meal
    irregular_heartbeat: Optional[bool] = False  # device flagged irregular heartbeat
    possible_afib: Optional[bool] = False  # device flagged possible atrial fibrillation
    note: Optional[str] = None  # free-text note
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Helper Functions
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def parse_omron_date(date_str: str, time_str: str) -> Optional[str]:
    """Parse Omron's German date/time format into an ISO datetime string.

    Example: ("13 März 2026", "08:30") -> "2026-03-13 08:30:00".

    Args:
        date_str: day, German month name, year separated by spaces.
        time_str: "HH:MM" time of the measurement.

    Returns:
        ISO datetime string, or None when the input cannot be parsed.
        (Fixed: the original annotation claimed a plain ``str`` return,
        but every failure path returns None.)
    """
    try:
        parts = date_str.strip().split()
        if len(parts) != 3:
            return None
        day, month_name, year = parts
        month = GERMAN_MONTHS.get(month_name)
        if not month:
            return None
        iso_datetime = f"{year}-{month}-{day.zfill(2)} {time_str}:00"
        # Round-trip through datetime to reject impossible dates/times.
        datetime.fromisoformat(iso_datetime)
        return iso_datetime
    except Exception as e:
        logger.error(f"Error parsing Omron date: {date_str} {time_str} - {e}")
        return None
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# CRUD Endpoints
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.get("")
def list_bp_measurements(
    limit: int = 90,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the most recent BP measurements for the active profile.

    Ordered newest-first and capped at `limit` rows (default 90).
    """
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            SELECT * FROM blood_pressure_log
            WHERE profile_id = %s
            ORDER BY measured_at DESC
            LIMIT %s
            """,
            (pid, limit),
        )
        rows = cur.fetchall()
    return [r2d(row) for row in rows]
@router.get("/by-date/{date}")
def get_bp_by_date(
    date: str,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return every BP measurement taken on one calendar date, oldest first."""
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            SELECT * FROM blood_pressure_log
            WHERE profile_id = %s
            AND DATE(measured_at) = %s
            ORDER BY measured_at ASC
            """,
            (pid, date),
        )
        rows = cur.fetchall()
    return [r2d(row) for row in rows]
@router.post("")
def create_bp_measurement(
    entry: BPEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Insert a manually entered BP measurement and return the stored row.

    Rows created here get source='manual', which protects them from being
    overwritten by device imports.
    """
    pid = get_pid(x_profile_id)
    values = (
        pid, entry.measured_at,
        entry.systolic, entry.diastolic, entry.pulse,
        entry.context, entry.irregular_heartbeat, entry.possible_afib,
        entry.note,
    )
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            INSERT INTO blood_pressure_log (
                profile_id, measured_at,
                systolic, diastolic, pulse,
                context, irregular_heartbeat, possible_afib,
                note, source
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, 'manual')
            RETURNING *
            """,
            values,
        )
        created = cur.fetchone()
    return r2d(created)
@router.put("/{entry_id}")
def update_bp_measurement(
    entry_id: int,
    entry: BPEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Replace the fields of an existing BP measurement.

    Raises:
        HTTPException 404: no row matches this id for the active profile.
    """
    pid = get_pid(x_profile_id)
    values = (
        entry.measured_at,
        entry.systolic, entry.diastolic, entry.pulse,
        entry.context, entry.irregular_heartbeat, entry.possible_afib,
        entry.note,
        entry_id, pid,
    )
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            UPDATE blood_pressure_log
            SET measured_at = %s,
                systolic = %s,
                diastolic = %s,
                pulse = %s,
                context = %s,
                irregular_heartbeat = %s,
                possible_afib = %s,
                note = %s
            WHERE id = %s AND profile_id = %s
            RETURNING *
            """,
            values,
        )
        updated = cur.fetchone()
        if not updated:
            raise HTTPException(404, "Entry not found")
    return r2d(updated)
@router.delete("/{entry_id}")
def delete_bp_measurement(
    entry_id: int,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Delete one BP measurement owned by the active profile.

    Raises:
        HTTPException 404: the row does not exist for this profile.
    """
    pid = get_pid(x_profile_id)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(
            """
            DELETE FROM blood_pressure_log
            WHERE id = %s AND profile_id = %s
            """,
            (entry_id, pid),
        )
        # rowcount == 0 means the WHERE clause matched nothing.
        if not cur.rowcount:
            raise HTTPException(404, "Entry not found")
    return {"ok": True}
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Statistics & Trends
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
def _classify_bp(avg_systolic, avg_diastolic):
    """Classify average blood pressure per WHO/ISH guideline brackets.

    Returns one of 'optimal', 'normal', 'high_normal',
    'grade_1_hypertension', 'grade_2_hypertension', 'grade_3_hypertension',
    or None when either average is missing (no measurements in range).
    """
    if not avg_systolic or not avg_diastolic:
        return None
    # Ordered best-to-worst: the first bracket BOTH values fit under wins,
    # so the worse of the two readings determines the category.
    if avg_systolic < 120 and avg_diastolic < 80:
        return 'optimal'
    if avg_systolic < 130 and avg_diastolic < 85:
        return 'normal'
    if avg_systolic < 140 and avg_diastolic < 90:
        return 'high_normal'
    if avg_systolic < 160 and avg_diastolic < 100:
        return 'grade_1_hypertension'
    if avg_systolic < 180 and avg_diastolic < 110:
        return 'grade_2_hypertension'
    return 'grade_3_hypertension'


@router.get("/stats")
def get_bp_stats(
    days: int = 30,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Aggregate BP statistics for the last `days` days (default 30).

    Returns overall, 7-day, and context-specific (morning/evening) averages,
    warning-flag counts, and a WHO/ISH `bp_category` derived from the
    overall averages via _classify_bp.
    """
    pid = get_pid(x_profile_id)
    cutoff_date = datetime.now() - timedelta(days=days)
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute("""
            SELECT
            COUNT(*) as total_measurements,
            -- Overall averages
            AVG(systolic) as avg_systolic,
            AVG(diastolic) as avg_diastolic,
            AVG(pulse) FILTER (WHERE pulse IS NOT NULL) as avg_pulse,
            -- 7-day averages
            AVG(systolic) FILTER (WHERE measured_at >= NOW() - INTERVAL '7 days') as avg_systolic_7d,
            AVG(diastolic) FILTER (WHERE measured_at >= NOW() - INTERVAL '7 days') as avg_diastolic_7d,
            -- Context-specific averages
            AVG(systolic) FILTER (WHERE context = 'morning_fasted') as avg_systolic_morning,
            AVG(diastolic) FILTER (WHERE context = 'morning_fasted') as avg_diastolic_morning,
            AVG(systolic) FILTER (WHERE context = 'evening') as avg_systolic_evening,
            AVG(diastolic) FILTER (WHERE context = 'evening') as avg_diastolic_evening,
            -- Warning flags
            COUNT(*) FILTER (WHERE irregular_heartbeat = true) as irregular_count,
            COUNT(*) FILTER (WHERE possible_afib = true) as afib_count
            FROM blood_pressure_log
            WHERE profile_id = %s AND measured_at >= %s
        """, (pid, cutoff_date))
        stats = r2d(cur.fetchone())
    stats['bp_category'] = _classify_bp(stats['avg_systolic'], stats['avg_diastolic'])
    return stats
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Import: Omron CSV
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
@router.post("/import/omron")
async def import_omron_csv(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Import blood pressure measurements from an Omron CSV export.

    Accepts German or English column headers. Each row is upserted on
    (profile_id, measured_at); rows previously entered manually
    (source = 'manual') are never overwritten. The measurement context is
    inferred from the time of day.

    Returns:
        dict with counters: inserted, updated, skipped, errors.
    """
    pid = get_pid(x_profile_id)
    content = await file.read()
    # 'utf-8-sig' strips an optional leading BOM (common in Omron/Excel CSV
    # exports). With plain 'utf-8' the BOM fuses into the first header name,
    # so row.get('Datum') would silently miss and every row would be skipped.
    decoded = content.decode('utf-8-sig')
    reader = csv.DictReader(io.StringIO(decoded))
    inserted = 0
    updated = 0
    skipped = 0
    errors = 0
    with get_db() as conn:
        cur = get_cursor(conn)
        for row in reader:
            try:
                # Omron exports German headers; fall back to English ones.
                date_str = row.get('Datum', row.get('Date'))
                time_str = row.get('Zeit', row.get('Time', '08:00'))
                if not date_str:
                    skipped += 1
                    continue
                measured_at = parse_omron_date(date_str, time_str)
                if not measured_at:
                    errors += 1
                    continue
                # Extract measurements
                systolic = row.get('Systolisch', row.get('Systolic'))
                diastolic = row.get('Diastolisch', row.get('Diastolic'))
                pulse = row.get('Puls', row.get('Pulse'))
                if not systolic or not diastolic:
                    skipped += 1
                    continue
                # Warning flags. csv.DictReader yields None (not '') for
                # fields missing from a short row, so guard before .lower().
                irregular = row.get('Unregelmäßiger Herzschlag', row.get('Irregular Heartbeat', '')) or ''
                afib = row.get('Vorhofflimmern', row.get('AFib', '')) or ''
                irregular_heartbeat = irregular.lower() in ['ja', 'yes', 'true', '1']
                possible_afib = afib.lower() in ['ja', 'yes', 'true', '1']
                # Infer measurement context from the hour of day.
                hour = int(time_str.split(':')[0])
                if 5 <= hour < 10:
                    context = 'morning_fasted'
                elif 18 <= hour < 23:
                    context = 'evening'
                else:
                    context = 'other'
                # Upsert. '(xmax = 0)' is true only for freshly inserted rows
                # (PostgreSQL system column), distinguishing insert vs update.
                # The DO UPDATE ... WHERE clause protects manual entries; when
                # it skips the update, RETURNING yields no row and the row is
                # counted under 'updated'.
                cur.execute("""
                    INSERT INTO blood_pressure_log (
                        profile_id, measured_at,
                        systolic, diastolic, pulse,
                        context, irregular_heartbeat, possible_afib,
                        source
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, 'omron')
                    ON CONFLICT (profile_id, measured_at)
                    DO UPDATE SET
                        systolic = EXCLUDED.systolic,
                        diastolic = EXCLUDED.diastolic,
                        pulse = EXCLUDED.pulse,
                        context = EXCLUDED.context,
                        irregular_heartbeat = EXCLUDED.irregular_heartbeat,
                        possible_afib = EXCLUDED.possible_afib
                    WHERE blood_pressure_log.source != 'manual'
                    RETURNING (xmax = 0) AS inserted
                """, (
                    pid, measured_at,
                    int(systolic), int(diastolic),
                    int(pulse) if pulse else None,
                    context, irregular_heartbeat, possible_afib
                ))
                result = cur.fetchone()
                if result and result['inserted']:
                    inserted += 1
                else:
                    updated += 1
            except Exception as e:
                logger.error(f"Error importing Omron row: {e}")
                errors += 1
    return {
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors
    }