feat: v9c Phase 1 - Feature consolidation & cleanup migration
All checks were successful
Deploy Development / deploy (push) Successful in 33s
Build Test / lint-backend (push) Successful in 0s
Build Test / build-frontend (push) Successful in 13s

PHASE 1: Cleanup & Analyse
- Feature-Konsolidierung: export_csv/json/zip → data_export (1 Feature)
- Umbenennung: csv_import → data_import
- Auto-Migration bei Container-Start (apply_v9c_migration.py)
- Diagnose-Script (check_features.sql)

Lessons Learned angewendet:
- Ein Feature für Export, nicht drei
- Migration ist idempotent (kann mehrfach laufen)
- Zeigt BEFORE/AFTER State im Log

Finaler Feature-Katalog (10 statt 13):
- Data: weight, circumference, caliper, nutrition, activity, photos
- AI: ai_calls, ai_pipeline
- Export/Import: data_export, data_import

Tier Limits:
- FREE: 30 data entries, 0 AI/export/import
- BASIC: unlimited data, 3 AI/month, 5 export/month, 3 import/month
- PREMIUM/SELFHOSTED: unlimited

Migration läuft automatisch auf dev UND prod beim Container-Start.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Lars 2026-03-20 18:57:39 +01:00
parent 7040931816
commit 73bea5ee86
4 changed files with 477 additions and 1245 deletions

1415
CLAUDE.md

File diff suppressed because it is too large Load Diff

View File

@ -60,6 +60,9 @@ def apply_migration():
if not migration_needed(conn):
print("[v9c Migration] Already applied, skipping.")
conn.close()
# Even if main migration is done, check cleanup
apply_cleanup_migration()
return
print("[v9c Migration] Applying subscription system migration...")
@ -128,10 +131,123 @@ def apply_migration():
cur.close()
conn.close()
# After successful migration, apply cleanup
apply_cleanup_migration()
except Exception as e:
print(f"[v9c Migration] ❌ Error: {e}")
raise
def cleanup_features_needed(conn):
    """Return True when the v9c feature cleanup still has work to do.

    The cleanup is pending as long as any of the legacy export features
    (export_csv / export_json / export_zip) are present, or the old
    csv_import feature has not yet been renamed to data_import.

    Args:
        conn: open DB connection whose cursor yields dict-like rows.

    Returns:
        bool: True if the cleanup migration must run, False otherwise.
    """
    cursor = conn.cursor()
    # Legacy export features that should have been consolidated away.
    cursor.execute("""
        SELECT COUNT(*) as count FROM features
        WHERE id IN ('export_csv', 'export_json', 'export_zip')
    """)
    legacy_exports = cursor.fetchone()['count']
    # The pre-rename import feature.
    cursor.execute("""
        SELECT COUNT(*) as count FROM features
        WHERE id = 'csv_import'
    """)
    legacy_import = cursor.fetchone()['count']
    cursor.close()
    # Any surviving legacy row means the cleanup has not been applied yet.
    return bool(legacy_exports or legacy_import)
def _log_features_before(conn):
    """Print the feature catalogue as it looks BEFORE the cleanup runs."""
    cur = conn.cursor()
    cur.execute("SELECT id, name FROM features ORDER BY category, id")
    features_before = [f"{r['id']} ({r['name']})" for r in cur.fetchall()]
    print(f"[v9c Cleanup] Features BEFORE: {len(features_before)} features")
    for f in features_before:
        print(f" - {f}")
    cur.close()


def _log_features_after(conn):
    """Print the post-cleanup feature catalogue (grouped by category) and
    the tier limits of the consolidated data_export/data_import features."""
    cur = conn.cursor()
    cur.execute("SELECT id, name, category FROM features ORDER BY category, id")
    features_after = cur.fetchall()
    print(f"[v9c Cleanup] Features AFTER: {len(features_after)} features")
    # Group by category for readable log output; uncategorised rows
    # fall into an 'other' bucket.
    categories = {}
    for feat in features_after:
        cat = feat['category'] or 'other'
        categories.setdefault(cat, []).append(f"{feat['id']} ({feat['name']})")
    for cat, feats in sorted(categories.items()):
        print(f" {cat.upper()}:")
        for f in feats:
            print(f" - {f}")
    # Verify tier_limits now reference the consolidated features.
    cur.execute("""
        SELECT tier_id, feature_id, limit_value
        FROM tier_limits
        WHERE feature_id IN ('data_export', 'data_import')
        ORDER BY tier_id, feature_id
    """)
    limits = cur.fetchall()
    print("[v9c Cleanup] Tier limits for data_export/data_import:")
    for lim in limits:
        # NULL limit_value means unlimited (see v9c_cleanup_features.sql).
        limit_str = 'unlimited' if lim['limit_value'] is None else lim['limit_value']
        print(f" {lim['tier_id']}.{lim['feature_id']} = {limit_str}")
    cur.close()


def apply_cleanup_migration():
    """Apply the v9c feature cleanup migration (export/import consolidation).

    Executes migrations/v9c_cleanup_features.sql, which consolidates
    export_csv/export_json/export_zip into data_export and renames
    csv_import to data_import. Idempotent: skips when
    cleanup_features_needed() reports nothing to do. Logs the feature
    catalogue BEFORE and AFTER the migration.

    Raises:
        Exception: re-raised after logging when the migration fails; the
            open transaction is rolled back and the connection closed first
            (the previous version leaked the connection on any error).
    """
    print("[v9c Cleanup] Checking if cleanup migration is needed...")
    conn = None
    try:
        conn = get_db_connection()
        if not cleanup_features_needed(conn):
            print("[v9c Cleanup] Already applied, skipping.")
            return
        print("[v9c Cleanup] Applying feature consolidation...")
        _log_features_before(conn)
        # Read the cleanup migration SQL from the migrations directory
        # that lives next to this script.
        cleanup_path = os.path.join(
            os.path.dirname(__file__),
            "migrations",
            "v9c_cleanup_features.sql"
        )
        if not os.path.exists(cleanup_path):
            # A missing file is non-fatal: log a warning and leave the
            # database untouched.
            print(f"[v9c Cleanup] ⚠️ Cleanup migration file not found: {cleanup_path}")
            return
        with open(cleanup_path, 'r', encoding='utf-8') as f:
            cleanup_sql = f.read()
        # Execute the whole cleanup script in a single transaction.
        cur = conn.cursor()
        cur.execute(cleanup_sql)
        conn.commit()
        cur.close()
        _log_features_after(conn)
        print("[v9c Cleanup] ✅ Feature cleanup completed successfully!")
    except Exception as e:
        print(f"[v9c Cleanup] ❌ Error: {e}")
        if conn is not None:
            try:
                # Do not leave a half-applied transaction open.
                conn.rollback()
            except Exception:
                pass  # rollback failure must not mask the original error
        raise
    finally:
        # Fix: the connection previously leaked whenever an exception
        # occurred between get_db_connection() and the explicit close().
        if conn is not None:
            conn.close()
# Entry point for running the migration by hand
# (per the commit notes it also runs automatically at container start).
if __name__ == "__main__":
    apply_migration()

View File

@ -0,0 +1,50 @@
-- ============================================================================
-- Feature Check Script - Diagnose vor/nach Migration
-- ============================================================================
-- Usage: psql -U mitai_dev -d mitai_dev -f check_features.sql
-- ============================================================================
\echo '=== CURRENT FEATURES ==='
SELECT id, name, category, limit_type, reset_period, default_limit, active
FROM features
ORDER BY category, id;
\echo ''
\echo '=== TIER LIMITS MATRIX ==='
SELECT
f.id as feature,
f.category,
MAX(CASE WHEN tl.tier_id = 'free' THEN COALESCE(tl.limit_value::text, '') END) as free,
MAX(CASE WHEN tl.tier_id = 'basic' THEN COALESCE(tl.limit_value::text, '') END) as basic,
MAX(CASE WHEN tl.tier_id = 'premium' THEN COALESCE(tl.limit_value::text, '') END) as premium,
MAX(CASE WHEN tl.tier_id = 'selfhosted' THEN COALESCE(tl.limit_value::text, '') END) as selfhosted
FROM features f
LEFT JOIN tier_limits tl ON f.id = tl.feature_id
GROUP BY f.id, f.category
ORDER BY f.category, f.id;
\echo ''
\echo '=== FEATURE COUNT BY CATEGORY ==='
SELECT category, COUNT(*) as count
FROM features
WHERE active = true
GROUP BY category
ORDER BY category;
\echo ''
\echo '=== ORPHANED TIER LIMITS (feature not exists) ==='
SELECT tl.tier_id, tl.feature_id, tl.limit_value
FROM tier_limits tl
LEFT JOIN features f ON tl.feature_id = f.id
WHERE f.id IS NULL;
\echo ''
\echo '=== USER FEATURE USAGE (current usage tracking) ==='
SELECT
p.name as user,
ufu.feature_id,
ufu.usage_count,
ufu.reset_at
FROM user_feature_usage ufu
JOIN profiles p ON ufu.profile_id = p.id
ORDER BY p.name, ufu.feature_id;

View File

@ -0,0 +1,141 @@
-- ============================================================================
-- v9c Cleanup: Feature-Konsolidierung
-- ============================================================================
-- Created: 2026-03-20
-- Purpose: Konsolidiere Export-Features (export_csv/json/zip → data_export)
-- und Import-Features (csv_import → data_import)
--
-- Idempotent: Kann mehrfach ausgeführt werden
--
-- Lessons Learned:
-- "Ein Feature für Export, nicht drei (csv/json/zip)"
-- ============================================================================
-- ============================================================================
-- 1. Rename csv_import to data_import
-- ============================================================================
UPDATE features
SET
id = 'data_import',
name = 'Daten importieren',
description = 'CSV-Import (FDDB, Apple Health) + ZIP-Backup-Import'
WHERE id = 'csv_import';
-- Update tier_limits references
UPDATE tier_limits
SET feature_id = 'data_import'
WHERE feature_id = 'csv_import';
-- Update user_feature_restrictions references
UPDATE user_feature_restrictions
SET feature_id = 'data_import'
WHERE feature_id = 'csv_import';
-- Update user_feature_usage references
UPDATE user_feature_usage
SET feature_id = 'data_import'
WHERE feature_id = 'csv_import';
-- ============================================================================
-- 2. Remove old export_csv/json/zip features
-- ============================================================================
-- Remove tier_limits for old features
DELETE FROM tier_limits
WHERE feature_id IN ('export_csv', 'export_json', 'export_zip');
-- Remove user restrictions for old features
DELETE FROM user_feature_restrictions
WHERE feature_id IN ('export_csv', 'export_json', 'export_zip');
-- Remove usage tracking for old features
DELETE FROM user_feature_usage
WHERE feature_id IN ('export_csv', 'export_json', 'export_zip');
-- Remove old feature definitions
DELETE FROM features
WHERE id IN ('export_csv', 'export_json', 'export_zip');
-- ============================================================================
-- 3. Ensure data_export exists and is properly configured
-- ============================================================================
INSERT INTO features (id, name, description, category, limit_type, reset_period, default_limit, active)
VALUES ('data_export', 'Daten exportieren', 'CSV/JSON/ZIP Export', 'export', 'count', 'monthly', 0, true)
ON CONFLICT (id) DO UPDATE SET
name = EXCLUDED.name,
description = EXCLUDED.description,
category = EXCLUDED.category,
limit_type = EXCLUDED.limit_type,
reset_period = EXCLUDED.reset_period;
-- ============================================================================
-- 4. Ensure data_import exists and is properly configured
-- ============================================================================
INSERT INTO features (id, name, description, category, limit_type, reset_period, default_limit, active)
VALUES ('data_import', 'Daten importieren', 'CSV-Import (FDDB, Apple Health) + ZIP-Backup-Import', 'import', 'count', 'monthly', 0, true)
ON CONFLICT (id) DO UPDATE SET
name = EXCLUDED.name,
description = EXCLUDED.description,
category = EXCLUDED.category,
limit_type = EXCLUDED.limit_type,
reset_period = EXCLUDED.reset_period;
-- ============================================================================
-- 5. Update tier_limits for data_export (consolidate from old features)
-- ============================================================================
-- FREE tier: no export
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('free', 'data_export', 0)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- BASIC tier: 5 exports/month
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('basic', 'data_export', 5)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- PREMIUM tier: unlimited
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('premium', 'data_export', NULL)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- SELFHOSTED tier: unlimited
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('selfhosted', 'data_export', NULL)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- ============================================================================
-- 6. Update tier_limits for data_import
-- ============================================================================
-- FREE tier: no import
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('free', 'data_import', 0)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- BASIC tier: 3 imports/month
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('basic', 'data_import', 3)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- PREMIUM tier: unlimited
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('premium', 'data_import', NULL)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- SELFHOSTED tier: unlimited
INSERT INTO tier_limits (tier_id, feature_id, limit_value)
VALUES ('selfhosted', 'data_import', NULL)
ON CONFLICT (tier_id, feature_id) DO UPDATE SET limit_value = EXCLUDED.limit_value;
-- ============================================================================
-- Cleanup complete
-- ============================================================================
-- Final feature list:
-- Data: weight_entries, circumference_entries, caliper_entries,
-- nutrition_entries, activity_entries, photos
-- AI: ai_calls, ai_pipeline
-- Export/Import: data_export, data_import
--
-- Total: 10 features (down from 13)
-- ============================================================================