9b
This commit is contained in:
parent
c4cead9e27
commit
0a871fea22
12
.env.example
12
.env.example
|
|
@ -1,9 +1,9 @@
|
||||||
# ── Datenbank ──────────────────────────────────────────────────
|
# ── Datenbank (PostgreSQL v9b+) ────────────────────────────────
|
||||||
# v9 (PostgreSQL):
|
DB_HOST=postgres
|
||||||
DB_PASSWORD=sicheres_passwort_hier
|
DB_PORT=5432
|
||||||
|
DB_NAME=mitai
|
||||||
# v8 (SQLite, legacy):
|
DB_USER=mitai
|
||||||
# DATA_DIR=/app/data
|
DB_PASSWORD=CHANGE_ME_STRONG_PASSWORD_HERE
|
||||||
|
|
||||||
# ── KI ─────────────────────────────────────────────────────────
|
# ── KI ─────────────────────────────────────────────────────────
|
||||||
# OpenRouter (empfohlen):
|
# OpenRouter (empfohlen):
|
||||||
|
|
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -58,3 +58,6 @@ coverage/
|
||||||
# Temp
|
# Temp
|
||||||
tmp/
|
tmp/
|
||||||
*.tmp
|
*.tmp
|
||||||
|
|
||||||
|
# .claude Konfiguration
|
||||||
|
.claude/
|
||||||
151
CLAUDE.md
151
CLAUDE.md
|
|
@ -293,3 +293,154 @@ Wortmarke: Jin(light) + ken(bold #1D9E75) + do(light)
|
||||||
/add-endpoint → Neuen API-Endpoint hinzufügen
|
/add-endpoint → Neuen API-Endpoint hinzufügen
|
||||||
/db-add-column → Neue DB-Spalte hinzufügen
|
/db-add-column → Neue DB-Spalte hinzufügen
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Jinkendo App-Familie & Markenarchitektur
|
||||||
|
|
||||||
|
### Philosophie
|
||||||
|
**Jinkendo** (人拳道) = Jin (人 Mensch) + Ken (拳 Faust) + Do (道 Weg)
|
||||||
|
"Der menschliche Weg der Kampfkunst" – ruhig aber kraftvoll, Selbstwahrnehmung, Meditation, zielorientiert
|
||||||
|
|
||||||
|
### App-Familie (Subdomain-Architektur)
|
||||||
|
```
|
||||||
|
mitai.jinkendo.de → Körper-Tracker (身体 = eigener Körper) ← DIESE APP
|
||||||
|
miken.jinkendo.de → Meditation (眉間 = drittes Auge)
|
||||||
|
ikigai.jinkendo.de → Lebenssinn/Ziele (生き甲斐)
|
||||||
|
shinkan.jinkendo.de → Kampfsport (真観 = wahre Wahrnehmung)
|
||||||
|
kenkou.jinkendo.de → Gesundheit allgemein (健康) – für später aufsparen
|
||||||
|
```
|
||||||
|
|
||||||
|
### Registrierte Domains
|
||||||
|
- jinkendo.de, jinkendo.com, jinkendo.life – alle registriert bei Strato
|
||||||
|
|
||||||
|
## v9b Detailplan – Freemium Tier-System
|
||||||
|
|
||||||
|
### Tier-Modell
|
||||||
|
```
|
||||||
|
free → Selbst-Registrierung, 14-Tage Trial, eingeschränkt
|
||||||
|
basic → Kernfunktionen (Abo Stufe 1)
|
||||||
|
premium → Alles inkl. KI und Connectoren (Abo Stufe 2)
|
||||||
|
selfhosted → Lars' Heimversion, keine Einschränkungen
|
||||||
|
```
|
||||||
|
|
||||||
|
### Geplante DB-Erweiterungen (profiles Tabelle)
|
||||||
|
```sql
|
||||||
|
tier TEXT DEFAULT 'free'
|
||||||
|
trial_ends_at TEXT -- ISO datetime
|
||||||
|
sub_valid_until TEXT -- ISO datetime
|
||||||
|
email_verified INTEGER DEFAULT 0
|
||||||
|
email_verify_token TEXT
|
||||||
|
invited_by TEXT -- profile_id FK
|
||||||
|
invitation_token TEXT
|
||||||
|
```
|
||||||
|
|
||||||
|
### Tier-Limits (geplant)
|
||||||
|
| Feature | free | basic | premium | selfhosted |
|
||||||
|
|---------|------|-------|---------|------------|
|
||||||
|
| Gewicht-Einträge | 30 | unbegrenzt | unbegrenzt | unbegrenzt |
|
||||||
|
| KI-Analysen/Monat | 0 | 3 | unbegrenzt | unbegrenzt |
|
||||||
|
| Ernährung Import | ❌ | ✅ | ✅ | ✅ |
|
||||||
|
| Export | ❌ | ✅ | ✅ | ✅ |
|
||||||
|
| Fitness-Connectoren | ❌ | ❌ | ✅ | ✅ |
|
||||||
|
|
||||||
|
### Registrierungs-Flow (geplant)
|
||||||
|
```
|
||||||
|
1. Selbst-Registrierung: Name + E-Mail + Passwort
|
||||||
|
2. Auto-Trial: tier='free', trial_ends_at=now+14d
|
||||||
|
3. E-Mail-Bestätigung → email_verified=1
|
||||||
|
4. Trial läuft ab → Upgrade-Prompt
|
||||||
|
5. Einladungslinks: Admin generiert Token → direkt basic-Tier
|
||||||
|
6. Stripe Integration: später (v9b ohne Stripe, nur Tier-Logik)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Infrastruktur Details
|
||||||
|
|
||||||
|
### Heimnetzwerk
|
||||||
|
```
|
||||||
|
Internet
|
||||||
|
→ Fritz!Box 7530 AX (DynDNS: privat.stommer.com)
|
||||||
|
→ Synology NAS (192.168.2.63, Reverse Proxy + Let's Encrypt)
|
||||||
|
→ Raspberry Pi 5 (192.168.2.49, Docker)
|
||||||
|
→ MiniPC (192.168.2.144, Gitea auf Port 3000)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Synology Reverse Proxy Regeln
|
||||||
|
```
|
||||||
|
mitai.jinkendo.de → HTTP 192.168.2.49:3002 (Prod Frontend)
|
||||||
|
dev.mitai.jinkendo.de → HTTP 192.168.2.49:3099 (Dev Frontend)
|
||||||
|
```
|
||||||
|
|
||||||
|
### AdGuard DNS Rewrites (für internes Routing)
|
||||||
|
```
|
||||||
|
mitai.jinkendo.de → 192.168.2.63
|
||||||
|
dev.mitai.jinkendo.de → 192.168.2.63
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fritz!Box DNS-Rebind Ausnahmen
|
||||||
|
```
|
||||||
|
jinkendo.de
|
||||||
|
mitai.jinkendo.de
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pi Verzeichnisstruktur
|
||||||
|
```
|
||||||
|
/home/lars/docker/
|
||||||
|
├── bodytrack/ → Prod (main branch, docker-compose.yml)
|
||||||
|
└── bodytrack-dev/ → Dev (develop branch, docker-compose.dev-env.yml)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Gitea Runner
|
||||||
|
```
|
||||||
|
Runner: raspberry-pi (auf Pi installiert)
|
||||||
|
Service: /etc/systemd/system/gitea-runner.service
|
||||||
|
Binary: /home/lars/gitea-runner/act_runner
|
||||||
|
```
|
||||||
|
|
||||||
|
### Container Namen
|
||||||
|
```
|
||||||
|
Prod: mitai-api, mitai-ui
|
||||||
|
Dev: dev-mitai-api, dev-mitai-ui
|
||||||
|
```
|
||||||
|
|
||||||
|
## Bekannte Probleme & Lösungen
|
||||||
|
|
||||||
|
### dayjs.week() – NIEMALS verwenden
|
||||||
|
```javascript
|
||||||
|
// ❌ Falsch:
|
||||||
|
const week = dayjs(date).week()
|
||||||
|
|
||||||
|
// ✅ Richtig (ISO 8601):
|
||||||
|
const weekNum = (() => {
|
||||||
|
const dt = new Date(date)
|
||||||
|
dt.setHours(0,0,0,0)
|
||||||
|
dt.setDate(dt.getDate()+4-(dt.getDay()||7))
|
||||||
|
const y = new Date(dt.getFullYear(),0,1)
|
||||||
|
return Math.ceil(((dt-y)/86400000+1)/7)
|
||||||
|
})()
|
||||||
|
```
|
||||||
|
|
||||||
|
### session=Depends(require_auth) – Korrekte Platzierung
|
||||||
|
```python
|
||||||
|
# ❌ Falsch (führt zu NameError oder ungeschütztem Endpoint):
|
||||||
|
def endpoint(x_profile_id: Optional[str] = Header(default=None, session=Depends(require_auth))):
|
||||||
|
|
||||||
|
# ✅ Richtig (separater Parameter):
|
||||||
|
def endpoint(x_profile_id: Optional[str] = Header(default=None),
|
||||||
|
session: dict = Depends(require_auth)):
|
||||||
|
```
|
||||||
|
|
||||||
|
### Recharts Bar fill=function – nicht unterstützt
|
||||||
|
```jsx
|
||||||
|
// ❌ Falsch:
|
||||||
|
<Bar fill={(entry) => entry.color}/>
|
||||||
|
|
||||||
|
// ✅ Richtig:
|
||||||
|
<Bar fill="#1D9E75"/>
|
||||||
|
```
|
||||||
|
|
||||||
|
### SQLite neue Spalten hinzufügen
|
||||||
|
```python
|
||||||
|
# In _safe_alters Liste hinzufügen (NICHT direkt ALTER TABLE):
|
||||||
|
_safe_alters = [
|
||||||
|
("profiles", "neue_spalte TEXT DEFAULT NULL"),
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,22 @@
|
||||||
FROM python:3.12-slim

# Install PostgreSQL client for psql (needed for startup.sh)
RUN apt-get update && apt-get install -y postgresql-client && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Create directories
RUN mkdir -p /app/data /app/photos

# Make startup script executable
RUN chmod +x /app/startup.sh

# Use startup script instead of direct uvicorn
CMD ["/app/startup.sh"]
|
||||||
|
|
|
||||||
150
backend/db.py
Normal file
150
backend/db.py
Normal file
|
|
@ -0,0 +1,150 @@
|
||||||
|
"""
|
||||||
|
PostgreSQL Database Connector for Mitai Jinkendo (v9b)
|
||||||
|
|
||||||
|
Provides connection pooling and helper functions for database operations.
|
||||||
|
Compatible drop-in replacement for the previous SQLite get_db() pattern.
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from typing import Optional, Dict, Any, List
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2.extras import RealDictCursor
|
||||||
|
import psycopg2.pool
|
||||||
|
|
||||||
|
|
||||||
|
# Global connection pool — created lazily by init_pool() (called from get_db()
# on first use); None until then.
_pool: Optional[psycopg2.pool.SimpleConnectionPool] = None
|
||||||
|
|
||||||
|
|
||||||
|
def init_pool():
    """Create the global PostgreSQL connection pool if it does not exist yet.

    Connection settings come from the DB_* environment variables with the
    project defaults (host "postgres", port 5432, db/user "mitai").
    Calling this again after the pool exists is a no-op.
    """
    global _pool
    if _pool is not None:
        return

    host = os.getenv("DB_HOST", "postgres")
    port = os.getenv("DB_PORT", "5432")
    _pool = psycopg2.pool.SimpleConnectionPool(
        minconn=1,
        maxconn=10,
        host=host,
        port=int(port),
        database=os.getenv("DB_NAME", "mitai"),
        user=os.getenv("DB_USER", "mitai"),
        password=os.getenv("DB_PASSWORD", ""),
    )
    print(f"✓ PostgreSQL connection pool initialized ({host}:{port})")
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
def get_db():
    """Context manager yielding a pooled PostgreSQL connection.

    Lazily initializes the pool on first use.  Commits when the managed
    block finishes without error; rolls back and re-raises on exception.
    The connection is always returned to the pool.

    Usage:
        with get_db() as conn:
            cur = conn.cursor()
            cur.execute("SELECT * FROM profiles")
            rows = cur.fetchall()
    """
    if _pool is None:
        init_pool()

    connection = _pool.getconn()
    try:
        yield connection
        # Commit inside the try: a failing commit also triggers rollback.
        connection.commit()
    except Exception:
        connection.rollback()
        raise
    finally:
        _pool.putconn(connection)
|
||||||
|
|
||||||
|
|
||||||
|
def get_cursor(conn):
    """Return a cursor whose rows act like dicts ({'column': value, ...}).

    Uses psycopg2's RealDictCursor factory, mirroring the dict-style access
    the code previously got from sqlite3.Row.
    """
    return conn.cursor(cursor_factory=RealDictCursor)
|
||||||
|
|
||||||
|
|
||||||
|
def r2d(row) -> Optional[Dict[str, Any]]:
    """Convert a database row to a plain dict (compatibility helper).

    Args:
        row: RealDictRow from psycopg2 (or any mapping), possibly None/empty

    Returns:
        dict copy of the row, or None when the row is falsy
    """
    if not row:
        return None
    return dict(row)
|
||||||
|
|
||||||
|
|
||||||
|
def execute_one(conn, query: str, params: tuple = ()) -> Optional[Dict[str, Any]]:
    """Run a query and return the first row as a dict, or None.

    Args:
        conn: Database connection from get_db()
        query: SQL query with %s placeholders
        params: Tuple of parameters

    Returns:
        {'column': value, ...} for the first row, or None when no row matched

    Example:
        profile = execute_one(conn, "SELECT * FROM profiles WHERE id=%s", (pid,))
        if profile:
            print(profile['name'])
    """
    with get_cursor(conn) as cur:
        cur.execute(query, params)
        return r2d(cur.fetchone())
|
||||||
|
|
||||||
|
|
||||||
|
def execute_all(conn, query: str, params: tuple = ()) -> List[Dict[str, Any]]:
    """Run a query and return every row as a list of dicts.

    Args:
        conn: Database connection from get_db()
        query: SQL query with %s placeholders
        params: Tuple of parameters

    Returns:
        One dict per row (empty list when nothing matched)

    Example:
        weights = execute_all(conn,
            "SELECT * FROM weight_log WHERE profile_id=%s ORDER BY date DESC",
            (pid,)
        )
        for w in weights:
            print(w['date'], w['weight'])
    """
    with get_cursor(conn) as cur:
        cur.execute(query, params)
        return [r2d(record) for record in cur.fetchall()]
|
||||||
|
|
||||||
|
|
||||||
|
def execute_write(conn, query: str, params: tuple = ()) -> None:
    """Execute an INSERT/UPDATE/DELETE statement.

    The commit happens in get_db()'s context manager, not here.

    Args:
        conn: Database connection from get_db()
        query: SQL query with %s placeholders
        params: Tuple of parameters

    Example:
        execute_write(conn,
            "UPDATE profiles SET name=%s WHERE id=%s",
            ("New Name", pid)
        )
    """
    with get_cursor(conn) as cur:
        cur.execute(query, params)
|
||||||
2233
backend/main.py
2233
backend/main.py
File diff suppressed because it is too large
Load Diff
369
backend/migrate_to_postgres.py
Normal file
369
backend/migrate_to_postgres.py
Normal file
|
|
@ -0,0 +1,369 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
SQLite → PostgreSQL Migration Script für Mitai Jinkendo (v9a → v9b)
|
||||||
|
|
||||||
|
Migrates all data from SQLite to PostgreSQL with type conversions and validation.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
# Inside Docker container:
|
||||||
|
python migrate_to_postgres.py
|
||||||
|
|
||||||
|
# Or locally with custom paths:
|
||||||
|
DATA_DIR=./data DB_HOST=localhost python migrate_to_postgres.py
|
||||||
|
|
||||||
|
Environment Variables:
|
||||||
|
SQLite Source:
|
||||||
|
DATA_DIR (default: ./data)
|
||||||
|
|
||||||
|
PostgreSQL Target:
|
||||||
|
DB_HOST (default: postgres)
|
||||||
|
DB_PORT (default: 5432)
|
||||||
|
DB_NAME (default: mitai)
|
||||||
|
DB_USER (default: mitai)
|
||||||
|
DB_PASSWORD (required)
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import sqlite3
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Any, List, Optional
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2.extras import execute_values, RealDictCursor
|
||||||
|
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# CONFIGURATION
|
||||||
|
# ================================================================
|
||||||
|
|
||||||
|
# SQLite Source — path can be overridden via DATA_DIR for local runs
DATA_DIR = Path(os.getenv("DATA_DIR", "./data"))
SQLITE_DB = DATA_DIR / "bodytrack.db"

# PostgreSQL Target — same DB_* env defaults as the runtime connector
PG_CONFIG = {
    'host': os.getenv("DB_HOST", "postgres"),
    'port': int(os.getenv("DB_PORT", "5432")),
    'database': os.getenv("DB_NAME", "mitai"),
    'user': os.getenv("DB_USER", "mitai"),
    'password': os.getenv("DB_PASSWORD", "")  # required; validated in main()
}

# Tables to migrate (in order - respects foreign keys)
TABLES = [
    'profiles',
    'sessions',
    'ai_usage',
    'ai_prompts',
    'weight_log',
    'circumference_log',
    'caliper_log',
    'nutrition_log',
    'activity_log',
    'photos',
    'ai_insights',
]

# Columns that need INTEGER (0/1) → BOOLEAN conversion
# (SQLite has no native boolean type; PostgreSQL columns are BOOLEAN)
BOOLEAN_COLUMNS = {
    'profiles': ['ai_enabled', 'export_enabled'],
    'ai_prompts': ['active'],
}
|
||||||
|
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# CONVERSION HELPERS
|
||||||
|
# ================================================================
|
||||||
|
|
||||||
|
def convert_value(value: Any, column: str, table: str) -> Any:
    """Convert one SQLite value into a PostgreSQL-compatible value.

    Args:
        value: Raw value from SQLite
        column: Column name
        table: Table name

    Returns:
        None unchanged; 0/1 integers become bools for columns listed in
        BOOLEAN_COLUMNS; everything else passes through untouched
        (PostgreSQL handles TEXT timestamps, UUIDs and numerics directly).
    """
    if value is None:
        return None
    # INTEGER → BOOLEAN conversion for the configured columns
    if column in BOOLEAN_COLUMNS.get(table, ()):
        return bool(value)
    return value
|
||||||
|
|
||||||
|
|
||||||
|
def convert_row(row: Dict[str, Any], table: str) -> Dict[str, Any]:
    """Apply convert_value() to every column of one SQLite row.

    Args:
        row: {'column': value, ...} as read from SQLite
        table: Table name (needed for the boolean-column lookup)

    Returns:
        New dict with PostgreSQL-compatible values
    """
    converted: Dict[str, Any] = {}
    for column, value in row.items():
        converted[column] = convert_value(value, column, table)
    return converted
|
||||||
|
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# MIGRATION LOGIC
|
||||||
|
# ================================================================
|
||||||
|
|
||||||
|
def get_sqlite_rows(table: str) -> List[Dict[str, Any]]:
    """Fetch every row of a SQLite table as a list of dicts.

    Args:
        table: Table name (comes from the trusted TABLES list, so the
               f-string query is safe here)

    Returns:
        One dict per row; an empty list (with a warning printed) when the
        table does not exist in the source DB — some tables are new in v9b.
    """
    conn = sqlite3.connect(SQLITE_DB)
    conn.row_factory = sqlite3.Row
    cur = conn.cursor()

    try:
        result = cur.execute(f"SELECT * FROM {table}")
        return [dict(record) for record in result.fetchall()]
    except sqlite3.OperationalError as e:
        # Table doesn't exist in SQLite (OK, might be new in v9b)
        print(f" ⚠ Table '{table}' not found in SQLite: {e}")
        return []
    finally:
        conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_table(pg_conn, table: str) -> Dict[str, int]:
    """
    Migrate one table from SQLite to PostgreSQL.

    Fetches all rows from SQLite, applies type conversions, batch-inserts
    them into PostgreSQL and re-counts the target table for verification.

    Fixes over the original: the PostgreSQL cursor is now closed via a
    context manager (it was leaked before), and the unused `placeholders`
    local was removed.

    Args:
        pg_conn: PostgreSQL connection
        table: Table name (trusted, from the TABLES list)

    Returns:
        Dictionary with stats: {'sqlite_count': N, 'postgres_count': M}
    """
    print(f" Migrating '{table}'...", end=' ', flush=True)

    # Fetch from SQLite
    sqlite_rows = get_sqlite_rows(table)
    sqlite_count = len(sqlite_rows)

    if sqlite_count == 0:
        print("(empty)")
        return {'sqlite_count': 0, 'postgres_count': 0}

    # Convert rows (e.g. INTEGER 0/1 → BOOLEAN)
    converted_rows = [convert_row(row, table) for row in sqlite_rows]

    # Column order comes from the first row; all rows share one schema.
    columns = list(converted_rows[0].keys())
    cols_str = ', '.join(columns)

    # Build INSERT query (VALUES %s is expanded by execute_values)
    query = f"INSERT INTO {table} ({cols_str}) VALUES %s"

    # Prepare values (list of tuples, in column order)
    values = [
        tuple(row[col] for col in columns)
        for row in converted_rows
    ]

    # Context manager guarantees the cursor is closed even on error.
    with pg_conn.cursor() as pg_cur:
        # Batch insert with execute_values (faster than executemany)
        try:
            execute_values(pg_cur, query, values, page_size=100)
        except psycopg2.Error as e:
            print(f"\n ✗ Insert failed: {e}")
            raise

        # Verify row count.  NOTE(review): this counts pre-existing rows
        # too; main() warns before migrating into a non-empty database.
        pg_cur.execute(f"SELECT COUNT(*) FROM {table}")
        postgres_count = pg_cur.fetchone()[0]

    print(f"✓ {sqlite_count} rows → {postgres_count} rows")

    return {
        'sqlite_count': sqlite_count,
        'postgres_count': postgres_count
    }
|
||||||
|
|
||||||
|
|
||||||
|
def verify_migration(pg_conn, stats: Dict[str, Dict[str, int]]):
    """
    Verify migration integrity.

    Compares SQLite vs. PostgreSQL row counts per table, then prints sample
    rows (first profile, latest weight entry) for a manual sanity check.
    Exits the process with status 1 when any count mismatches.

    Args:
        pg_conn: PostgreSQL connection
        stats: Migration stats per table, as returned by migrate_table()
    """
    print("\n═══════════════════════════════════════════════════════════")
    print("VERIFICATION")
    print("═══════════════════════════════════════════════════════════")

    all_ok = True

    # Per-table row-count comparison
    for table, counts in stats.items():
        sqlite_count = counts['sqlite_count']
        postgres_count = counts['postgres_count']

        status = "✓" if sqlite_count == postgres_count else "✗"
        print(f" {status} {table:20s} SQLite: {sqlite_count:5d} → PostgreSQL: {postgres_count:5d}")

        if sqlite_count != postgres_count:
            all_ok = False

    # Sample some data so a human can eyeball the values
    print("\n───────────────────────────────────────────────────────────")
    print("SAMPLE DATA (first profile)")
    print("───────────────────────────────────────────────────────────")

    cur = pg_conn.cursor(cursor_factory=RealDictCursor)
    cur.execute("SELECT * FROM profiles LIMIT 1")
    profile = cur.fetchone()

    if profile:
        for key, value in dict(profile).items():
            print(f" {key:20s} = {value}")
    else:
        print(" (no profiles found)")

    print("\n───────────────────────────────────────────────────────────")
    print("SAMPLE DATA (latest weight entry)")
    print("───────────────────────────────────────────────────────────")

    cur.execute("SELECT * FROM weight_log ORDER BY date DESC LIMIT 1")
    weight = cur.fetchone()

    if weight:
        for key, value in dict(weight).items():
            print(f" {key:20s} = {value}")
    else:
        print(" (no weight entries found)")

    print("\n═══════════════════════════════════════════════════════════")

    if all_ok:
        print("✓ MIGRATION SUCCESSFUL - All row counts match!")
    else:
        # Hard failure: leave the process with a non-zero exit code so
        # CI / shell scripts can detect the broken migration.
        print("✗ MIGRATION FAILED - Row count mismatch detected!")
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# MAIN
|
||||||
|
# ================================================================
|
||||||
|
|
||||||
|
def main():
    """
    Entry point: migrate all data from SQLite to PostgreSQL.

    Flow: check the SQLite file exists → require DB_PASSWORD → connect to
    PostgreSQL → verify the schema is initialized (profiles table present)
    → warn interactively if the target already holds profiles → migrate
    every table in TABLES inside one transaction → verify row counts.
    Exits non-zero on any failure.
    """
    print("═══════════════════════════════════════════════════════════")
    print("MITAI JINKENDO - SQLite → PostgreSQL Migration (v9a → v9b)")
    print("═══════════════════════════════════════════════════════════\n")

    # Check SQLite DB exists
    if not SQLITE_DB.exists():
        print(f"✗ SQLite database not found: {SQLITE_DB}")
        print(f" Set DATA_DIR environment variable if needed.")
        sys.exit(1)

    print(f"✓ SQLite source: {SQLITE_DB}")
    print(f"✓ PostgreSQL target: {PG_CONFIG['user']}@{PG_CONFIG['host']}:{PG_CONFIG['port']}/{PG_CONFIG['database']}\n")

    # Check PostgreSQL password (fail fast before attempting to connect)
    if not PG_CONFIG['password']:
        print("✗ DB_PASSWORD environment variable not set!")
        sys.exit(1)

    # Connect to PostgreSQL
    print("Connecting to PostgreSQL...", end=' ', flush=True)
    try:
        pg_conn = psycopg2.connect(**PG_CONFIG)
        print("✓")
    except psycopg2.Error as e:
        print(f"\n✗ Connection failed: {e}")
        print("\nTroubleshooting:")
        print(" - Is PostgreSQL running? (docker compose ps)")
        print(" - Is DB_PASSWORD correct?")
        print(" - Is the schema initialized? (schema.sql loaded?)")
        sys.exit(1)

    # Check if schema is initialized (the profiles table must exist)
    print("Checking PostgreSQL schema...", end=' ', flush=True)
    cur = pg_conn.cursor()
    cur.execute("""
        SELECT COUNT(*) FROM information_schema.tables
        WHERE table_schema = 'public' AND table_name = 'profiles'
    """)
    if cur.fetchone()[0] == 0:
        print("\n✗ Schema not initialized!")
        print("\nRun this first:")
        print(" docker compose exec backend python -c \"from main import init_db; init_db()\"")
        print(" Or manually load schema.sql")
        sys.exit(1)
    print("✓")

    # Check if PostgreSQL is empty — re-running the migration against a
    # populated target would duplicate every row.
    print("Checking if PostgreSQL is empty...", end=' ', flush=True)
    cur.execute("SELECT COUNT(*) FROM profiles")
    existing_profiles = cur.fetchone()[0]
    if existing_profiles > 0:
        print(f"\n⚠ WARNING: PostgreSQL already has {existing_profiles} profiles!")
        response = input(" Continue anyway? This will create duplicates! (yes/no): ")
        if response.lower() != 'yes':
            print("Migration cancelled.")
            sys.exit(0)
    else:
        print("✓")

    print("\n───────────────────────────────────────────────────────────")
    print("MIGRATION")
    print("───────────────────────────────────────────────────────────")

    stats = {}

    try:
        for table in TABLES:
            stats[table] = migrate_table(pg_conn, table)

        # Commit all changes — one transaction for the whole migration,
        # so a failure in any table leaves the target untouched.
        pg_conn.commit()
        print("\n✓ All changes committed to PostgreSQL")

    except Exception as e:
        print(f"\n✗ Migration failed: {e}")
        print("Rolling back...")
        pg_conn.rollback()
        pg_conn.close()
        sys.exit(1)

    # Verification (exits non-zero itself on a count mismatch)
    verify_migration(pg_conn, stats)

    # Cleanup
    pg_conn.close()

    print("\n✓ Migration complete!")
    print("\nNext steps:")
    print(" 1. Test login with existing credentials")
    print(" 2. Check Dashboard (weight chart, stats)")
    print(" 3. Verify KI-Analysen work")
    print(" 4. If everything works: commit + push to develop")
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
|
|
@ -7,3 +7,4 @@ aiofiles==23.2.1
|
||||||
pydantic==2.7.1
|
pydantic==2.7.1
|
||||||
bcrypt==4.1.3
|
bcrypt==4.1.3
|
||||||
slowapi==0.1.9
|
slowapi==0.1.9
|
||||||
|
psycopg2-binary==2.9.9
|
||||||
|
|
|
||||||
260
backend/schema.sql
Normal file
260
backend/schema.sql
Normal file
|
|
@ -0,0 +1,260 @@
|
||||||
|
-- ================================================================
|
||||||
|
-- MITAI JINKENDO v9b – PostgreSQL Schema
|
||||||
|
-- ================================================================
|
||||||
|
-- Migration from SQLite to PostgreSQL
|
||||||
|
-- Includes v9b Tier System features
|
||||||
|
-- ================================================================
|
||||||
|
|
||||||
|
-- Enable UUID Extension
|
||||||
|
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||||
|
|
||||||
|
-- ================================================================
|
||||||
|
-- CORE TABLES
|
||||||
|
-- ================================================================
|
||||||
|
|
||||||
|
-- ── Profiles Table ──────────────────────────────────────────────
-- User/Profile management with auth and permissions
CREATE TABLE IF NOT EXISTS profiles (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name VARCHAR(255) NOT NULL DEFAULT 'Nutzer',
    avatar_color VARCHAR(7) DEFAULT '#1D9E75',
    photo_id UUID,                         -- no FK constraint here; presumably references photos — confirm
    sex VARCHAR(1) DEFAULT 'm' CHECK (sex IN ('m', 'w', 'd')),
    dob DATE,
    height NUMERIC(5,2) DEFAULT 178,       -- presumably centimetres — confirm against app
    goal_weight NUMERIC(5,2),
    goal_bf_pct NUMERIC(4,2),

    -- Auth & Permissions
    role VARCHAR(20) DEFAULT 'user' CHECK (role IN ('user', 'admin')),
    pin_hash TEXT,
    auth_type VARCHAR(20) DEFAULT 'pin' CHECK (auth_type IN ('pin', 'email')),
    session_days INTEGER DEFAULT 30,
    ai_enabled BOOLEAN DEFAULT TRUE,
    ai_limit_day INTEGER,                  -- NOTE(review): NULL looks like "no daily limit" — confirm
    export_enabled BOOLEAN DEFAULT TRUE,
    email VARCHAR(255) UNIQUE,

    -- v9b: Tier System
    tier VARCHAR(20) DEFAULT 'free' CHECK (tier IN ('free', 'basic', 'premium', 'selfhosted')),
    tier_expires_at TIMESTAMP WITH TIME ZONE,
    trial_ends_at TIMESTAMP WITH TIME ZONE,
    invited_by UUID REFERENCES profiles(id),  -- self-reference: inviting profile

    -- Timestamps
    created TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Partial index: only rows that actually have an e-mail address
CREATE INDEX IF NOT EXISTS idx_profiles_email ON profiles(email) WHERE email IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_profiles_tier ON profiles(tier);
|
||||||
|
|
||||||
|
-- ── Sessions Table ──────────────────────────────────────────────
|
||||||
|
-- Auth token management
|
||||||
|
CREATE TABLE IF NOT EXISTS sessions (
|
||||||
|
token VARCHAR(64) PRIMARY KEY,
|
||||||
|
profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
|
||||||
|
expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||||
|
created TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_sessions_profile_id ON sessions(profile_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_sessions_expires_at ON sessions(expires_at);
|
||||||
|
|
||||||
|
-- ── AI Usage Tracking ───────────────────────────────────────────
|
||||||
|
-- Daily AI call limits per profile
|
||||||
|
CREATE TABLE IF NOT EXISTS ai_usage (
|
||||||
|
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||||
|
profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
|
||||||
|
date DATE NOT NULL,
|
||||||
|
call_count INTEGER DEFAULT 0,
|
||||||
|
UNIQUE(profile_id, date)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_ai_usage_profile_date ON ai_usage(profile_id, date);
|
||||||
|
|
||||||
|
-- ================================================================
|
||||||
|
-- TRACKING TABLES
|
||||||
|
-- ================================================================
|
||||||
|
|
||||||
|
-- ── Weight Log ──────────────────────────────────────────────────
|
||||||
|
CREATE TABLE IF NOT EXISTS weight_log (
|
||||||
|
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||||
|
profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
|
||||||
|
date DATE NOT NULL,
|
||||||
|
weight NUMERIC(5,2) NOT NULL,
|
||||||
|
note TEXT,
|
||||||
|
source VARCHAR(20) DEFAULT 'manual',
|
||||||
|
created TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_weight_log_profile_date ON weight_log(profile_id, date DESC);
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS idx_weight_log_profile_date_unique ON weight_log(profile_id, date);
|
||||||
|
|
||||||
|
-- ── Circumference Log ───────────────────────────────────────────
-- Body circumference measurements, eight sites per entry.
CREATE TABLE IF NOT EXISTS circumference_log (
    id         UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    date       DATE NOT NULL,
    -- Measurement sites (unit not stated in schema; presumably cm — TODO confirm)
    c_neck     NUMERIC(5,2),
    c_chest    NUMERIC(5,2),
    c_waist    NUMERIC(5,2),
    c_belly    NUMERIC(5,2),
    c_hip      NUMERIC(5,2),
    c_thigh    NUMERIC(5,2),
    c_calf     NUMERIC(5,2),
    c_arm      NUMERIC(5,2),
    notes      TEXT,
    -- NOTE(review): photo_id carries no FOREIGN KEY — the photos table is
    -- created later in this schema; consider adding the constraint via
    -- ALTER TABLE after photos exists.
    photo_id   UUID,
    created    TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_circumference_profile_date ON circumference_log(profile_id, date DESC);
|
||||||
|
|
||||||
|
-- ── Caliper Log ─────────────────────────────────────────────────
-- Skinfold (caliper) measurements plus derived body-composition values.
CREATE TABLE IF NOT EXISTS caliper_log (
    id            UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    profile_id    UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    date          DATE NOT NULL,
    sf_method     VARCHAR(20) DEFAULT 'jackson3',  -- calculation method identifier
    -- Skinfold sites (unit not stated in schema; presumably mm — TODO confirm)
    sf_chest      NUMERIC(5,2),
    sf_axilla     NUMERIC(5,2),
    sf_triceps    NUMERIC(5,2),
    sf_subscap    NUMERIC(5,2),
    sf_suprailiac NUMERIC(5,2),
    sf_abdomen    NUMERIC(5,2),
    sf_thigh      NUMERIC(5,2),
    sf_calf_med   NUMERIC(5,2),
    sf_lowerback  NUMERIC(5,2),
    sf_biceps     NUMERIC(5,2),
    -- Derived values
    body_fat_pct  NUMERIC(4,2),
    lean_mass     NUMERIC(5,2),
    fat_mass      NUMERIC(5,2),
    notes         TEXT,
    created       TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_caliper_profile_date ON caliper_log(profile_id, date DESC);
|
||||||
|
|
||||||
|
-- ── Nutrition Log ───────────────────────────────────────────────
-- Daily nutrition intake: calories plus macronutrients.
-- NOTE(review): unlike weight_log there is no UNIQUE(profile_id, date),
-- so multiple rows per profile per day are allowed — confirm intended.
CREATE TABLE IF NOT EXISTS nutrition_log (
    id         UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    date       DATE NOT NULL,
    kcal       NUMERIC(7,2),
    protein_g  NUMERIC(6,2),
    fat_g      NUMERIC(6,2),
    carbs_g    NUMERIC(6,2),
    source     VARCHAR(20) DEFAULT 'csv',
    created    TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_nutrition_profile_date ON nutrition_log(profile_id, date DESC);
|
||||||
|
|
||||||
|
-- ── Activity Log ────────────────────────────────────────────────
-- Training sessions and activities; one row per session.
CREATE TABLE IF NOT EXISTS activity_log (
    id            UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    profile_id    UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    date          DATE NOT NULL,
    start_time    TIME,
    end_time      TIME,
    activity_type VARCHAR(50) NOT NULL,
    duration_min  NUMERIC(6,2),
    kcal_active   NUMERIC(7,2),
    kcal_resting  NUMERIC(7,2),
    hr_avg        NUMERIC(5,2),
    hr_max        NUMERIC(5,2),
    distance_km   NUMERIC(7,2),
    rpe           INTEGER CHECK (rpe >= 1 AND rpe <= 10),  -- presumably rating of perceived exertion — TODO confirm
    source        VARCHAR(20) DEFAULT 'manual',
    notes         TEXT,
    created       TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_activity_profile_date ON activity_log(profile_id, date DESC);
|
||||||
|
|
||||||
|
-- ── Photos ──────────────────────────────────────────────────────
-- Progress photos; path points to the stored image file.
CREATE TABLE IF NOT EXISTS photos (
    id         UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    date       DATE,           -- optional capture/entry date
    path       TEXT NOT NULL,
    created    TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_photos_profile_date ON photos(profile_id, date DESC);
|
||||||
|
|
||||||
|
-- ================================================================
|
||||||
|
-- AI TABLES
|
||||||
|
-- ================================================================
|
||||||
|
|
||||||
|
-- ── AI Insights ─────────────────────────────────────────────────
-- Stored AI-generated analysis results; scope distinguishes the analysis kind.
CREATE TABLE IF NOT EXISTS ai_insights (
    id         UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    profile_id UUID NOT NULL REFERENCES profiles(id) ON DELETE CASCADE,
    scope      VARCHAR(50) NOT NULL,
    content    TEXT NOT NULL,
    created    TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_ai_insights_profile_scope ON ai_insights(profile_id, scope, created DESC);
|
||||||
|
|
||||||
|
-- ── AI Prompts ──────────────────────────────────────────────────
-- Configurable AI prompt templates, addressed by slug.
CREATE TABLE IF NOT EXISTS ai_prompts (
    id          UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name        VARCHAR(255) NOT NULL,
    slug        VARCHAR(100) NOT NULL UNIQUE,
    description TEXT,
    template    TEXT NOT NULL,
    active      BOOLEAN DEFAULT TRUE,
    sort_order  INTEGER DEFAULT 0,
    created     TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated     TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- The UNIQUE constraint on slug already creates a backing index, so the
-- original separate idx_ai_prompts_slug index was redundant and is dropped.
CREATE INDEX IF NOT EXISTS idx_ai_prompts_active_sort ON ai_prompts(active, sort_order);
|
||||||
|
|
||||||
|
-- ================================================================
|
||||||
|
-- TRIGGERS
|
||||||
|
-- ================================================================
|
||||||
|
|
||||||
|
-- Auto-update timestamp trigger function: stamps NEW.updated on every row
-- update so callers never have to set it manually.
CREATE OR REPLACE FUNCTION update_updated_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- FIX: PostgreSQL does not support CREATE TRIGGER IF NOT EXISTS — the
-- original statements fail with a syntax error and abort the schema load.
-- CREATE OR REPLACE TRIGGER (PostgreSQL 14+; this stack runs postgres:16)
-- keeps the statements idempotent for repeated schema loads.
CREATE OR REPLACE TRIGGER trigger_profiles_updated
    BEFORE UPDATE ON profiles
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_timestamp();

CREATE OR REPLACE TRIGGER trigger_ai_prompts_updated
    BEFORE UPDATE ON ai_prompts
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_timestamp();
|
||||||
|
|
||||||
|
-- ================================================================
|
||||||
|
-- COMMENTS (Documentation)
|
||||||
|
-- ================================================================
|
||||||
|
|
||||||
|
-- Table-level documentation
COMMENT ON TABLE profiles IS 'User profiles with auth, permissions, and tier system';
COMMENT ON TABLE sessions IS 'Active auth tokens';
COMMENT ON TABLE ai_usage IS 'Daily AI call tracking per profile';
COMMENT ON TABLE weight_log IS 'Weight measurements';
COMMENT ON TABLE circumference_log IS 'Body circumference measurements (8 points)';
COMMENT ON TABLE caliper_log IS 'Skinfold measurements with body fat calculations';
COMMENT ON TABLE nutrition_log IS 'Daily nutrition intake (calories + macros)';
COMMENT ON TABLE activity_log IS 'Training sessions and activities';
COMMENT ON TABLE photos IS 'Progress photos';
COMMENT ON TABLE ai_insights IS 'AI-generated analysis results';
COMMENT ON TABLE ai_prompts IS 'Configurable AI prompt templates';

-- Column-level documentation (tier / trial / invitation fields on profiles)
COMMENT ON COLUMN profiles.tier IS 'Subscription tier: free, basic, premium, selfhosted';
COMMENT ON COLUMN profiles.trial_ends_at IS 'Trial expiration timestamp (14 days from registration)';
COMMENT ON COLUMN profiles.tier_expires_at IS 'Paid tier expiration timestamp';
COMMENT ON COLUMN profiles.invited_by IS 'Profile ID of inviter (for beta invitations)';
|
||||||
73
backend/startup.sh
Normal file
73
backend/startup.sh
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
#!/bin/bash
# MITAI backend entrypoint: wait for PostgreSQL, load the schema on first run,
# auto-migrate legacy SQLite data once, then hand control to uvicorn.
#
# Required environment: DB_HOST, DB_USER, DB_NAME, DB_PASSWORD.
# FIX: -u fails fast on unset required variables; pipefail catches failures in
# pipelines; psql schema load now uses ON_ERROR_STOP so SQL errors are not
# silently reported as success.
set -euo pipefail

echo "═══════════════════════════════════════════════════════════"
echo "MITAI JINKENDO - Backend Startup (v9b)"
echo "═══════════════════════════════════════════════════════════"

# ── PostgreSQL Connection Check ───────────────────────────────
echo ""
echo "Checking PostgreSQL connection..."

MAX_RETRIES=30
RETRY_COUNT=0

# Poll with a trivial psql session until the server accepts connections.
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -c '\q' 2>/dev/null; do
    RETRY_COUNT=$((RETRY_COUNT + 1))
    if [ "$RETRY_COUNT" -ge "$MAX_RETRIES" ]; then
        echo "✗ PostgreSQL not ready after ${MAX_RETRIES} attempts"
        echo "  Exiting..."
        exit 1
    fi
    echo "  Waiting for PostgreSQL... (attempt $RETRY_COUNT/$MAX_RETRIES)"
    sleep 2
done

echo "✓ PostgreSQL ready"

# ── Schema Initialization ──────────────────────────────────────
echo ""
echo "Checking database schema..."

# Check if profiles table exists
TABLE_EXISTS=$(PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -tAc \
    "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='public' AND table_name='profiles'")

if [ "$TABLE_EXISTS" = "0" ]; then
    echo "  Schema not found, initializing..."
    # ON_ERROR_STOP makes psql exit non-zero on the first SQL error; without
    # it, a broken schema.sql would be reported as loaded successfully and
    # the container would start against a half-created schema.
    PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -f /app/schema.sql
    echo "✓ Schema loaded from schema.sql"
else
    echo "✓ Schema already exists"
fi

# ── Auto-Migration (SQLite → PostgreSQL) ───────────────────────
echo ""
echo "Checking for SQLite data migration..."

SQLITE_DB="/app/data/bodytrack.db"
PROFILE_COUNT=$(PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -tAc \
    "SELECT COUNT(*) FROM profiles")

# Migrate only when legacy data exists AND PostgreSQL is still empty.
if [ -f "$SQLITE_DB" ] && [ "$PROFILE_COUNT" = "0" ]; then
    echo "  SQLite database found and PostgreSQL is empty"
    echo "  Starting automatic migration..."
    python /app/migrate_to_postgres.py
    echo "✓ Migration completed"
elif [ -f "$SQLITE_DB" ] && [ "$PROFILE_COUNT" != "0" ]; then
    echo "⚠ SQLite DB exists but PostgreSQL already has $PROFILE_COUNT profiles"
    echo "  Skipping migration (already migrated)"
elif [ ! -f "$SQLITE_DB" ]; then
    echo "✓ No SQLite database found (fresh install or already migrated)"
else
    echo "✓ No migration needed"
fi

# ── Start Application ──────────────────────────────────────────
echo ""
echo "═══════════════════════════════════════════════════════════"
echo "Starting FastAPI application..."
echo "═══════════════════════════════════════════════════════════"
echo ""

# exec replaces the shell so uvicorn receives container signals directly.
exec uvicorn main:app --host 0.0.0.0 --port 8000
|
||||||
|
|
@ -1,24 +1,55 @@
|
||||||
services:
|
services:
|
||||||
|
postgres-dev:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
container_name: dev-mitai-postgres
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: mitai_dev
|
||||||
|
POSTGRES_USER: mitai_dev
|
||||||
|
POSTGRES_PASSWORD: dev_password_change_me
|
||||||
|
volumes:
|
||||||
|
- mitai_dev_postgres_data:/var/lib/postgresql/data
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:5433:5432"
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U mitai_dev"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
backend:
|
backend:
|
||||||
build: ./backend
|
build: ./backend
|
||||||
container_name: dev-mitai-api
|
container_name: dev-mitai-api
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- "8099:8000"
|
- "8099:8000"
|
||||||
|
depends_on:
|
||||||
|
postgres-dev:
|
||||||
|
condition: service_healthy
|
||||||
volumes:
|
volumes:
|
||||||
- bodytrack_bodytrack-data:/app/data
|
|
||||||
- bodytrack_bodytrack-photos:/app/photos
|
- bodytrack_bodytrack-photos:/app/photos
|
||||||
environment:
|
environment:
|
||||||
|
# Database
|
||||||
|
- DB_HOST=postgres-dev
|
||||||
|
- DB_PORT=5432
|
||||||
|
- DB_NAME=mitai_dev
|
||||||
|
- DB_USER=mitai_dev
|
||||||
|
- DB_PASSWORD=dev_password_change_me
|
||||||
|
|
||||||
|
# AI
|
||||||
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
|
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
|
||||||
- OPENROUTER_MODEL=${OPENROUTER_MODEL:-anthropic/claude-sonnet-4}
|
- OPENROUTER_MODEL=${OPENROUTER_MODEL:-anthropic/claude-sonnet-4}
|
||||||
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
|
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
|
||||||
|
|
||||||
|
# Email
|
||||||
- SMTP_HOST=${SMTP_HOST}
|
- SMTP_HOST=${SMTP_HOST}
|
||||||
- SMTP_PORT=${SMTP_PORT:-587}
|
- SMTP_PORT=${SMTP_PORT:-587}
|
||||||
- SMTP_USER=${SMTP_USER}
|
- SMTP_USER=${SMTP_USER}
|
||||||
- SMTP_PASS=${SMTP_PASS}
|
- SMTP_PASS=${SMTP_PASS}
|
||||||
- SMTP_FROM=${SMTP_FROM}
|
- SMTP_FROM=${SMTP_FROM}
|
||||||
|
|
||||||
|
# App
|
||||||
- APP_URL=${APP_URL_DEV:-https://dev.mitai.jinkendo.de}
|
- APP_URL=${APP_URL_DEV:-https://dev.mitai.jinkendo.de}
|
||||||
- DATA_DIR=/app/data
|
|
||||||
- PHOTOS_DIR=/app/photos
|
- PHOTOS_DIR=/app/photos
|
||||||
- ALLOWED_ORIGINS=${ALLOWED_ORIGINS_DEV:-*}
|
- ALLOWED_ORIGINS=${ALLOWED_ORIGINS_DEV:-*}
|
||||||
- ENVIRONMENT=development
|
- ENVIRONMENT=development
|
||||||
|
|
@ -33,7 +64,6 @@ services:
|
||||||
- backend
|
- backend
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
bodytrack_bodytrack-data:
|
mitai_dev_postgres_data:
|
||||||
external: true
|
|
||||||
bodytrack_bodytrack-photos:
|
bodytrack_bodytrack-photos:
|
||||||
external: true
|
external: true
|
||||||
|
|
|
||||||
|
|
@ -1,24 +1,55 @@
|
||||||
services:
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
container_name: mitai-postgres
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: ${DB_NAME:-mitai}
|
||||||
|
POSTGRES_USER: ${DB_USER:-mitai}
|
||||||
|
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||||
|
volumes:
|
||||||
|
- mitai_postgres_data:/var/lib/postgresql/data
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:5432:5432"
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-mitai}"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
backend:
|
backend:
|
||||||
build: ./backend
|
build: ./backend
|
||||||
container_name: mitai-api
|
container_name: mitai-api
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- "8002:8000"
|
- "8002:8000"
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
volumes:
|
volumes:
|
||||||
- bodytrack_bodytrack-data:/app/data
|
|
||||||
- bodytrack_bodytrack-photos:/app/photos
|
- bodytrack_bodytrack-photos:/app/photos
|
||||||
environment:
|
environment:
|
||||||
|
# Database
|
||||||
|
- DB_HOST=${DB_HOST:-postgres}
|
||||||
|
- DB_PORT=${DB_PORT:-5432}
|
||||||
|
- DB_NAME=${DB_NAME:-mitai}
|
||||||
|
- DB_USER=${DB_USER:-mitai}
|
||||||
|
- DB_PASSWORD=${DB_PASSWORD}
|
||||||
|
|
||||||
|
# AI
|
||||||
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
|
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
|
||||||
- OPENROUTER_MODEL=${OPENROUTER_MODEL:-anthropic/claude-sonnet-4}
|
- OPENROUTER_MODEL=${OPENROUTER_MODEL:-anthropic/claude-sonnet-4}
|
||||||
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
|
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
|
||||||
|
|
||||||
|
# Email
|
||||||
- SMTP_HOST=${SMTP_HOST}
|
- SMTP_HOST=${SMTP_HOST}
|
||||||
- SMTP_PORT=${SMTP_PORT:-587}
|
- SMTP_PORT=${SMTP_PORT:-587}
|
||||||
- SMTP_USER=${SMTP_USER}
|
- SMTP_USER=${SMTP_USER}
|
||||||
- SMTP_PASS=${SMTP_PASS}
|
- SMTP_PASS=${SMTP_PASS}
|
||||||
- SMTP_FROM=${SMTP_FROM}
|
- SMTP_FROM=${SMTP_FROM}
|
||||||
|
|
||||||
|
# App
|
||||||
- APP_URL=${APP_URL}
|
- APP_URL=${APP_URL}
|
||||||
- DATA_DIR=/app/data
|
|
||||||
- PHOTOS_DIR=/app/photos
|
- PHOTOS_DIR=/app/photos
|
||||||
- ALLOWED_ORIGINS=${ALLOWED_ORIGINS:-*}
|
- ALLOWED_ORIGINS=${ALLOWED_ORIGINS:-*}
|
||||||
- ENVIRONMENT=production
|
- ENVIRONMENT=production
|
||||||
|
|
@ -33,7 +64,7 @@ services:
|
||||||
- backend
|
- backend
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
bodytrack_bodytrack-data:
|
mitai_postgres_data:
|
||||||
external: true
|
name: mitai_postgres_data
|
||||||
bodytrack_bodytrack-photos:
|
bodytrack_bodytrack-photos:
|
||||||
external: true
|
external: true
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue
Block a user