chore: remove deprecated vitals.py (-684 lines)
This file was replaced by the refactored vitals system: - vitals_baseline.py (morning measurements) - blood_pressure.py (BP tracking with context) Migration 015 completed the split in v9d Phase 2d. File was no longer imported in main.py. Cleanup result: -684 lines of dead code
This commit is contained in:
parent
12d516c881
commit
56933431f6
|
|
@ -1,684 +0,0 @@
|
|||
"""
|
||||
Vitals Router - Resting HR + HRV Tracking
|
||||
v9d Phase 2: Vitals Module
|
||||
|
||||
Endpoints:
|
||||
- GET /api/vitals List vitals (with limit)
|
||||
- GET /api/vitals/by-date/{date} Get vitals for specific date
|
||||
- POST /api/vitals Create/update vitals (upsert)
|
||||
- PUT /api/vitals/{id} Update vitals
|
||||
- DELETE /api/vitals/{id} Delete vitals
|
||||
- GET /api/vitals/stats Get vitals statistics
|
||||
- POST /api/vitals/import/omron Import Omron CSV
|
||||
- POST /api/vitals/import/apple-health Import Apple Health CSV
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException, Depends, Header, UploadFile, File
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
import csv
|
||||
import io
|
||||
from dateutil import parser as date_parser
|
||||
|
||||
from db import get_db, get_cursor, r2d
|
||||
from auth import require_auth
|
||||
|
||||
router = APIRouter(prefix="/api/vitals", tags=["vitals"])
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# German month mapping for Omron dates.
# Maps both the full German month name ("Januar") and Omron's abbreviated
# export form ("Jan.") to a zero-padded month number string, as consumed by
# parse_omron_date(). Short month names (März, Mai, Juni, Juli) have no
# abbreviated variant in Omron CSV exports, so only the full form is listed.
GERMAN_MONTHS = {
    'Januar': '01', 'Jan.': '01',
    'Februar': '02', 'Feb.': '02',
    'März': '03',
    'April': '04', 'Apr.': '04',
    'Mai': '05',
    'Juni': '06',
    'Juli': '07',
    'August': '08', 'Aug.': '08',
    'September': '09', 'Sep.': '09',
    'Oktober': '10', 'Okt.': '10',
    'November': '11', 'Nov.': '11',
    'Dezember': '12', 'Dez.': '12'
}
|
||||
|
||||
|
||||
class VitalsEntry(BaseModel):
    """Request payload for POST /api/vitals (create/upsert).

    (profile_id, date) is the upsert key; all measurements are optional,
    but the create endpoint requires at least one baseline vital.
    """

    # Measurement date, ISO format "YYYY-MM-DD".
    date: str
    # Baseline vitals (stored in vitals_baseline post-Migration-015).
    resting_hr: Optional[int] = None          # resting heart rate, bpm
    hrv: Optional[int] = None                 # heart rate variability, ms
    # Legacy blood-pressure fields — accepted for backward compatibility,
    # but the create endpoint ignores them (use /api/blood-pressure).
    blood_pressure_systolic: Optional[int] = None    # mmHg
    blood_pressure_diastolic: Optional[int] = None   # mmHg
    pulse: Optional[int] = None                      # bpm
    vo2_max: Optional[float] = None           # ml/(kg·min)
    spo2: Optional[int] = None                # blood oxygen saturation, %
    respiratory_rate: Optional[float] = None  # breaths per minute
    irregular_heartbeat: Optional[bool] = None  # device-detected flag
    possible_afib: Optional[bool] = None        # device-detected flag
    note: Optional[str] = None                # free-text note
|
||||
|
||||
|
||||
class VitalsUpdate(BaseModel):
    """Request payload for PUT /api/vitals/{id} (partial update).

    Every field is optional; only non-None fields are written by the
    update endpoint (None means "leave unchanged", so a value cannot be
    cleared back to NULL via this model).
    """

    # New measurement date, ISO format "YYYY-MM-DD".
    date: Optional[str] = None
    resting_hr: Optional[int] = None          # resting heart rate, bpm
    hrv: Optional[int] = None                 # heart rate variability, ms
    blood_pressure_systolic: Optional[int] = None    # mmHg
    blood_pressure_diastolic: Optional[int] = None   # mmHg
    pulse: Optional[int] = None                      # bpm
    vo2_max: Optional[float] = None           # ml/(kg·min)
    spo2: Optional[int] = None                # blood oxygen saturation, %
    respiratory_rate: Optional[float] = None  # breaths per minute
    irregular_heartbeat: Optional[bool] = None  # device-detected flag
    possible_afib: Optional[bool] = None        # device-detected flag
    note: Optional[str] = None                # free-text note
|
||||
|
||||
|
||||
def get_pid(x_profile_id: Optional[str], session: dict) -> str:
    """Resolve the active profile id from the authenticated session.

    The X-Profile-Id header parameter is accepted for signature
    compatibility but deliberately ignored: the session is the single
    source of truth, so a client cannot spoof another profile.
    """
    profile_id: str = session['profile_id']
    return profile_id
|
||||
|
||||
|
||||
@router.get("")
def list_vitals(
    limit: int = 90,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return up to *limit* vitals entries for the current profile, newest first."""
    pid = get_pid(x_profile_id, session)
    query = """
            SELECT id, profile_id, date, resting_hr, hrv,
                   blood_pressure_systolic, blood_pressure_diastolic, pulse,
                   vo2_max, spo2, respiratory_rate,
                   irregular_heartbeat, possible_afib,
                   note, source, created_at, updated_at
            FROM vitals_log
            WHERE profile_id = %s
            ORDER BY date DESC
            LIMIT %s
            """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(query, (pid, limit))
        rows = cur.fetchall()
    # Convert DB rows to plain dicts for JSON serialization.
    return [r2d(row) for row in rows]
|
||||
|
||||
|
||||
@router.get("/by-date/{date}")
def get_vitals_by_date(
    date: str,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Return the vitals entry recorded on *date*, or 404 if none exists."""
    pid = get_pid(x_profile_id, session)
    query = """
            SELECT id, profile_id, date, resting_hr, hrv,
                   blood_pressure_systolic, blood_pressure_diastolic, pulse,
                   vo2_max, spo2, respiratory_rate,
                   irregular_heartbeat, possible_afib,
                   note, source, created_at, updated_at
            FROM vitals_log
            WHERE profile_id = %s AND date = %s
            """
    with get_db() as conn:
        cur = get_cursor(conn)
        cur.execute(query, (pid, date))
        row = cur.fetchone()
    if row is None:
        raise HTTPException(404, "Keine Vitalwerte für dieses Datum gefunden")
    return r2d(row)
|
||||
|
||||
|
||||
@router.post("")
def create_vitals(
    entry: VitalsEntry,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Create or update a vitals entry (upsert on (profile_id, date)).

    Post-Migration-015: routes to vitals_baseline (RHR, HRV, VO2Max,
    SpO2, respiratory rate). Blood-pressure fields on the payload are
    ignored here — BP measurements should use /api/blood-pressure.

    Returns the stored row padded with the legacy BP columns (always
    None) so pre-migration clients keep working.

    Raises:
        HTTPException 400: no baseline vital provided.
    """
    pid = get_pid(x_profile_id, session)

    # Validation: at least one baseline vital must be provided.
    # Compare against None explicitly — a plain truthiness check would
    # wrongly reject a legitimate 0 value as "missing".
    has_baseline = any(
        value is not None
        for value in (entry.resting_hr, entry.hrv, entry.vo2_max,
                      entry.spo2, entry.respiratory_rate)
    )

    if not has_baseline:
        raise HTTPException(400, "Mindestens ein Vitalwert muss angegeben werden (RHR, HRV, VO2Max, SpO2, oder Atemfrequenz)")

    with get_db() as conn:
        cur = get_cursor(conn)

        # Upsert into vitals_baseline (Migration 015). COALESCE keeps the
        # stored value when the incoming field is NULL, so a partial upsert
        # never erases an existing measurement.
        cur.execute(
            """
            INSERT INTO vitals_baseline (
                profile_id, date, resting_hr, hrv,
                vo2_max, spo2, respiratory_rate,
                note, source
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, 'manual')
            ON CONFLICT (profile_id, date)
            DO UPDATE SET
                resting_hr = COALESCE(EXCLUDED.resting_hr, vitals_baseline.resting_hr),
                hrv = COALESCE(EXCLUDED.hrv, vitals_baseline.hrv),
                vo2_max = COALESCE(EXCLUDED.vo2_max, vitals_baseline.vo2_max),
                spo2 = COALESCE(EXCLUDED.spo2, vitals_baseline.spo2),
                respiratory_rate = COALESCE(EXCLUDED.respiratory_rate, vitals_baseline.respiratory_rate),
                note = COALESCE(EXCLUDED.note, vitals_baseline.note),
                updated_at = CURRENT_TIMESTAMP
            RETURNING id, profile_id, date, resting_hr, hrv,
                      vo2_max, spo2, respiratory_rate,
                      note, source, created_at, updated_at
            """,
            (pid, entry.date, entry.resting_hr, entry.hrv,
             entry.vo2_max, entry.spo2, entry.respiratory_rate,
             entry.note)
        )
        row = cur.fetchone()
        conn.commit()

    logger.info(f"[VITALS] Upserted baseline vitals for {pid} on {entry.date}")

    # Return in legacy vitals_log shape for backward compatibility: BP
    # columns now live in a separate table, so they are reported as None.
    result = r2d(row)
    result['blood_pressure_systolic'] = None
    result['blood_pressure_diastolic'] = None
    result['pulse'] = None
    result['irregular_heartbeat'] = None
    result['possible_afib'] = None

    return result
|
||||
|
||||
|
||||
@router.put("/{vitals_id}")
def update_vitals(
    vitals_id: int,
    updates: VitalsUpdate,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Update an existing vitals entry.

    Only non-None fields of the payload are written; the SET clause is
    built dynamically from them.

    Raises:
        HTTPException 404: entry missing or owned by another profile.
        HTTPException 400: no field provided.
    """
    pid = get_pid(x_profile_id, session)

    with get_db() as conn:
        cur = get_cursor(conn)

        # Check ownership before touching the row.
        cur.execute(
            "SELECT id FROM vitals_log WHERE id = %s AND profile_id = %s",
            (vitals_id, pid)
        )
        if not cur.fetchone():
            raise HTTPException(404, "Eintrag nicht gefunden")

        # Build the update dynamically from a fixed column whitelist —
        # column names never come from user input, only values do (which
        # go through %s placeholders), so this stays injection-safe while
        # replacing ~60 lines of duplicated per-field if-blocks.
        updatable = (
            "date", "resting_hr", "hrv",
            "blood_pressure_systolic", "blood_pressure_diastolic", "pulse",
            "vo2_max", "spo2", "respiratory_rate",
            "irregular_heartbeat", "possible_afib", "note",
        )
        fields = []
        values = []
        for column in updatable:
            value = getattr(updates, column)
            if value is not None:
                fields.append(f"{column} = %s")
                values.append(value)

        if not fields:
            raise HTTPException(400, "Keine Änderungen angegeben")

        fields.append("updated_at = CURRENT_TIMESTAMP")
        values.append(vitals_id)

        query = f"""
            UPDATE vitals_log
            SET {', '.join(fields)}
            WHERE id = %s
            RETURNING id, profile_id, date, resting_hr, hrv,
                      blood_pressure_systolic, blood_pressure_diastolic, pulse,
                      vo2_max, spo2, respiratory_rate,
                      irregular_heartbeat, possible_afib,
                      note, source, created_at, updated_at
        """

        cur.execute(query, values)
        row = cur.fetchone()
        conn.commit()

        return r2d(row)
|
||||
|
||||
|
||||
@router.delete("/{vitals_id}")
def delete_vitals(
    vitals_id: int,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """Delete a vitals entry owned by the current profile (404 otherwise)."""
    pid = get_pid(x_profile_id, session)

    with get_db() as conn:
        cur = get_cursor(conn)

        # Ownership check and delete in one statement: RETURNING yields a
        # row only when something was actually removed.
        cur.execute(
            "DELETE FROM vitals_log WHERE id = %s AND profile_id = %s RETURNING id",
            (vitals_id, pid)
        )
        deleted = cur.fetchone()
        if deleted is None:
            raise HTTPException(404, "Eintrag nicht gefunden")

        conn.commit()
        logger.info(f"[VITALS] Deleted vitals {vitals_id} for {pid}")
        return {"message": "Eintrag gelöscht"}
|
||||
|
||||
|
||||
@router.get("/stats")
def get_vitals_stats(
    days: int = 30,
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Get vitals statistics over the last N days.

    Returns:
    - avg resting HR / HRV / BP / SpO2 (7d and 30d windows)
    - trend (increasing/decreasing/stable) for resting HR and HRV,
      from first-half vs second-half averages of the last 14 days
    - latest entry and latest VO2 max on record
    """
    pid = get_pid(x_profile_id, session)

    def _round1(value):
        # AVG() yields NULL (-> None) when no rows matched; pass that
        # through, but let a genuine 0 average survive (the previous
        # truthiness check collapsed 0 to None).
        return round(value, 1) if value is not None else None

    def _trend(entries, key, threshold):
        # Compare first half vs second half of the window; "stable" when
        # the window is too small, a half has no data, or the difference
        # stays within +/- threshold. Falsy (None/0) readings are excluded,
        # matching the original filtering.
        if len(entries) < 4:
            return "stable"
        mid = len(entries) // 2
        first = [e[key] for e in entries[:mid] if e[key]]
        second = [e[key] for e in entries[mid:] if e[key]]
        if not first or not second:
            return "stable"
        diff = sum(second) / len(second) - sum(first) / len(first)
        if diff > threshold:
            return "increasing"
        if diff < -threshold:
            return "decreasing"
        return "stable"

    with get_db() as conn:
        cur = get_cursor(conn)

        # Get latest entry within the requested period.
        # NOTE: "%s * INTERVAL '1 day'" keeps the placeholder outside the
        # quoted literal — the previous "INTERVAL '%s days'" only worked
        # because psycopg2 renders ints unquoted, which is fragile.
        cur.execute(
            """
            SELECT date, resting_hr, hrv
            FROM vitals_log
            WHERE profile_id = %s AND date >= CURRENT_DATE - %s * INTERVAL '1 day'
            ORDER BY date DESC
            LIMIT 1
            """,
            (pid, days)
        )
        latest = cur.fetchone()

        # Aggregates over fixed 7/30-day windows, plus a row count over the
        # requested period (at least 30 days so the 30d averages always
        # have their full window available).
        cur.execute(
            """
            SELECT
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '7 days' THEN resting_hr END) as avg_hr_7d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '30 days' THEN resting_hr END) as avg_hr_30d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '7 days' THEN hrv END) as avg_hrv_7d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '30 days' THEN hrv END) as avg_hrv_30d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '7 days' THEN blood_pressure_systolic END) as avg_bp_sys_7d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '30 days' THEN blood_pressure_systolic END) as avg_bp_sys_30d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '7 days' THEN blood_pressure_diastolic END) as avg_bp_dia_7d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '30 days' THEN blood_pressure_diastolic END) as avg_bp_dia_30d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '7 days' THEN spo2 END) as avg_spo2_7d,
                AVG(CASE WHEN date >= CURRENT_DATE - INTERVAL '30 days' THEN spo2 END) as avg_spo2_30d,
                COUNT(*) as total_entries
            FROM vitals_log
            WHERE profile_id = %s AND date >= CURRENT_DATE - %s * INTERVAL '1 day'
            """,
            (pid, max(days, 30))
        )
        stats_row = cur.fetchone()

        # Most recent VO2 max on record (not limited to the period).
        cur.execute(
            """
            SELECT vo2_max
            FROM vitals_log
            WHERE profile_id = %s AND vo2_max IS NOT NULL
            ORDER BY date DESC
            LIMIT 1
            """,
            (pid,)
        )
        vo2_row = cur.fetchone()
        latest_vo2 = vo2_row['vo2_max'] if vo2_row else None

        # Entries for trend calculation (last 14 days, oldest first).
        cur.execute(
            """
            SELECT date, resting_hr, hrv
            FROM vitals_log
            WHERE profile_id = %s AND date >= CURRENT_DATE - INTERVAL '14 days'
            ORDER BY date ASC
            """,
            (pid,)
        )
        entries = [r2d(r) for r in cur.fetchall()]

        return {
            "latest": r2d(latest) if latest else None,
            "avg_resting_hr_7d": _round1(stats_row['avg_hr_7d']),
            "avg_resting_hr_30d": _round1(stats_row['avg_hr_30d']),
            "avg_hrv_7d": _round1(stats_row['avg_hrv_7d']),
            "avg_hrv_30d": _round1(stats_row['avg_hrv_30d']),
            "avg_bp_systolic_7d": _round1(stats_row['avg_bp_sys_7d']),
            "avg_bp_systolic_30d": _round1(stats_row['avg_bp_sys_30d']),
            "avg_bp_diastolic_7d": _round1(stats_row['avg_bp_dia_7d']),
            "avg_bp_diastolic_30d": _round1(stats_row['avg_bp_dia_30d']),
            "avg_spo2_7d": _round1(stats_row['avg_spo2_7d']),
            "avg_spo2_30d": _round1(stats_row['avg_spo2_30d']),
            "latest_vo2_max": float(latest_vo2) if latest_vo2 is not None else None,
            "total_entries": stats_row['total_entries'],
            "trend_resting_hr": _trend(entries, 'resting_hr', 2),
            "trend_hrv": _trend(entries, 'hrv', 5),
            "period_days": days
        }
|
||||
|
||||
|
||||
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# Import Endpoints
|
||||
# ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
|
||||
|
||||
def parse_omron_date(date_str: str) -> str:
    """Convert an Omron German date string to ISO "YYYY-MM-DD".

    Examples:
        - "13 März 2026" -> "2026-03-13"
        - "28 Feb. 2026" -> "2026-02-28"

    Raises:
        ValueError: not three whitespace-separated tokens, or the month
            name is not a known German month / Omron abbreviation.
    """
    tokens = date_str.strip().split()
    if len(tokens) != 3:
        raise ValueError(f"Invalid date format: {date_str}")

    day, month_str, year = tokens

    # Map the German month name (or Omron abbreviation) to its number.
    month = GERMAN_MONTHS.get(month_str)
    if month is None:
        raise ValueError(f"Unknown month: {month_str}")

    return f"{year}-{month}-{day.zfill(2)}"
|
||||
|
||||
|
||||
@router.post("/import/omron")
async def import_omron_csv(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Import Omron blood pressure CSV export.

    Expected format:
    Datum,Zeit,Systolisch (mmHg),Diastolisch (mmHg),Puls (bpm),...

    Upserts one vitals_log row per date; rows without any BP/pulse data
    are skipped, and per-row parse failures are collected rather than
    aborting the import. Returns insert/update/skip counts plus up to
    the first 10 error messages.
    """
    pid = get_pid(x_profile_id, session)

    # Read file fully into memory (Omron exports are small).
    content = await file.read()
    content_str = content.decode('utf-8')

    # Parse CSV by header names so column order does not matter.
    reader = csv.DictReader(io.StringIO(content_str))

    inserted = 0
    updated = 0
    skipped = 0
    errors = []

    with get_db() as conn:
        cur = get_cursor(conn)

        # start=2: line 1 of the file is the header, so the first data row
        # is file line 2 — keeps error messages aligned with the file.
        for row_num, row in enumerate(reader, start=2):
            try:
                # Parse date (German format, e.g. "13 März 2026").
                date_str = parse_omron_date(row['Datum'])

                # Parse values ('-' marks a missing measurement in exports).
                systolic = int(row['Systolisch (mmHg)']) if row['Systolisch (mmHg)'] and row['Systolisch (mmHg)'] != '-' else None
                diastolic = int(row['Diastolisch (mmHg)']) if row['Diastolisch (mmHg)'] and row['Diastolisch (mmHg)'] != '-' else None
                pulse = int(row['Puls (bpm)']) if row['Puls (bpm)'] and row['Puls (bpm)'] != '-' else None

                # Skip if no data at all in this row.
                if not systolic and not diastolic and not pulse:
                    skipped += 1
                    continue

                # Parse flags (optional columns): any non-blank, non-dash
                # marker counts as the flag being set.
                irregular = row.get('Unregelmäßiger Herzschlag festgestellt', '').strip() not in ('', '-', ' ')
                afib = row.get('Mögliches AFib', '').strip() not in ('', '-', ' ')

                # Upsert: COALESCE keeps existing values when the import has
                # none; a 'manual' source is never overwritten by 'omron'.
                cur.execute(
                    """
                    INSERT INTO vitals_log (
                        profile_id, date, blood_pressure_systolic, blood_pressure_diastolic,
                        pulse, irregular_heartbeat, possible_afib, source
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, 'omron')
                    ON CONFLICT (profile_id, date)
                    DO UPDATE SET
                        blood_pressure_systolic = COALESCE(EXCLUDED.blood_pressure_systolic, vitals_log.blood_pressure_systolic),
                        blood_pressure_diastolic = COALESCE(EXCLUDED.blood_pressure_diastolic, vitals_log.blood_pressure_diastolic),
                        pulse = COALESCE(EXCLUDED.pulse, vitals_log.pulse),
                        irregular_heartbeat = COALESCE(EXCLUDED.irregular_heartbeat, vitals_log.irregular_heartbeat),
                        possible_afib = COALESCE(EXCLUDED.possible_afib, vitals_log.possible_afib),
                        source = CASE WHEN vitals_log.source = 'manual' THEN vitals_log.source ELSE 'omron' END,
                        updated_at = CURRENT_TIMESTAMP
                    RETURNING (xmax = 0) AS inserted
                    """,
                    (pid, date_str, systolic, diastolic, pulse, irregular, afib)
                )

                # xmax = 0 means the row version was freshly inserted
                # (PostgreSQL system column), i.e. INSERT vs UPDATE path.
                result = cur.fetchone()
                if result['inserted']:
                    inserted += 1
                else:
                    updated += 1

            except Exception as e:
                # Best-effort import: record the row error and keep going.
                errors.append(f"Zeile {row_num}: {str(e)}")
                logger.error(f"[OMRON-IMPORT] Error at row {row_num}: {e}")
                continue

        conn.commit()

    logger.info(f"[OMRON-IMPORT] {pid}: {inserted} inserted, {updated} updated, {skipped} skipped, {len(errors)} errors")

    return {
        "message": "Omron CSV Import abgeschlossen",
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors[:10]  # Limit to first 10 errors
    }
|
||||
|
||||
|
||||
@router.post("/import/apple-health")
async def import_apple_health_csv(
    file: UploadFile = File(...),
    x_profile_id: Optional[str] = Header(default=None),
    session: dict = Depends(require_auth)
):
    """
    Import Apple Health vitals CSV export.

    Expected columns:
    - Datum/Uhrzeit
    - Ruhepuls (count/min)
    - Herzfrequenzvariabilität (ms)
    - VO2 max (ml/(kg·min))
    - Blutsauerstoffsättigung (%)
    - Atemfrequenz (count/min)

    Upserts one vitals_log row per date; rows with no usable vitals are
    skipped and per-row parse failures are collected rather than aborting
    the import. Returns insert/update/skip counts plus up to the first
    10 error messages.
    """
    pid = get_pid(x_profile_id, session)

    # Read file fully into memory.
    content = await file.read()
    content_str = content.decode('utf-8')

    # Parse CSV by header names so column order does not matter.
    reader = csv.DictReader(io.StringIO(content_str))

    inserted = 0
    updated = 0
    skipped = 0
    errors = []

    with get_db() as conn:
        cur = get_cursor(conn)

        # start=2: line 1 of the file is the header row.
        for row_num, row in enumerate(reader, start=2):
            try:
                # Extract the date part ("2026-02-21 00:00:00" -> "2026-02-21").
                # Guard against an empty cell first: ''.split()[0] would raise
                # IndexError, miscounting the row as an error instead of a skip.
                raw_timestamp = row.get('Datum/Uhrzeit', '').strip()
                date_str = raw_timestamp.split()[0] if raw_timestamp else ''
                if not date_str:
                    skipped += 1
                    continue

                # Parse values (columns might be absent or empty). Apple
                # exports counts as floats, so int fields go through float().
                resting_hr = None
                hrv = None
                vo2_max = None
                spo2 = None
                respiratory_rate = None

                if 'Ruhepuls (count/min)' in row and row['Ruhepuls (count/min)']:
                    resting_hr = int(float(row['Ruhepuls (count/min)']))

                if 'Herzfrequenzvariabilität (ms)' in row and row['Herzfrequenzvariabilität (ms)']:
                    hrv = int(float(row['Herzfrequenzvariabilität (ms)']))

                if 'VO2 max (ml/(kg·min))' in row and row['VO2 max (ml/(kg·min))']:
                    vo2_max = float(row['VO2 max (ml/(kg·min))'])

                if 'Blutsauerstoffsättigung (%)' in row and row['Blutsauerstoffsättigung (%)']:
                    spo2 = int(float(row['Blutsauerstoffsättigung (%)']))

                if 'Atemfrequenz (count/min)' in row and row['Atemfrequenz (count/min)']:
                    respiratory_rate = float(row['Atemfrequenz (count/min)'])

                # Skip if no vitals data.
                if not any([resting_hr, hrv, vo2_max, spo2, respiratory_rate]):
                    skipped += 1
                    continue

                # Upsert: COALESCE keeps existing values when the import has
                # none; a 'manual' source is never overwritten.
                cur.execute(
                    """
                    INSERT INTO vitals_log (
                        profile_id, date, resting_hr, hrv, vo2_max, spo2,
                        respiratory_rate, source
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, 'apple_health')
                    ON CONFLICT (profile_id, date)
                    DO UPDATE SET
                        resting_hr = COALESCE(EXCLUDED.resting_hr, vitals_log.resting_hr),
                        hrv = COALESCE(EXCLUDED.hrv, vitals_log.hrv),
                        vo2_max = COALESCE(EXCLUDED.vo2_max, vitals_log.vo2_max),
                        spo2 = COALESCE(EXCLUDED.spo2, vitals_log.spo2),
                        respiratory_rate = COALESCE(EXCLUDED.respiratory_rate, vitals_log.respiratory_rate),
                        source = CASE WHEN vitals_log.source = 'manual' THEN vitals_log.source ELSE 'apple_health' END,
                        updated_at = CURRENT_TIMESTAMP
                    RETURNING (xmax = 0) AS inserted
                    """,
                    (pid, date_str, resting_hr, hrv, vo2_max, spo2, respiratory_rate)
                )

                # xmax = 0 -> freshly inserted row version (INSERT path).
                result = cur.fetchone()
                if result['inserted']:
                    inserted += 1
                else:
                    updated += 1

            except Exception as e:
                # Best-effort import: record the row error and keep going.
                errors.append(f"Zeile {row_num}: {str(e)}")
                logger.error(f"[APPLE-HEALTH-IMPORT] Error at row {row_num}: {e}")
                continue

        conn.commit()

    logger.info(f"[APPLE-HEALTH-IMPORT] {pid}: {inserted} inserted, {updated} updated, {skipped} skipped, {len(errors)} errors")

    return {
        "message": "Apple Health CSV Import abgeschlossen",
        "inserted": inserted,
        "updated": updated,
        "skipped": skipped,
        "errors": errors[:10]  # Limit to first 10 errors
    }
|
||||
Loading…
Reference in New Issue
Block a user