mindnet/app/core/ingestion/ingestion_processor.py

"""
FILE: app/core/ingestion/ingestion_processor.py
DESCRIPTION: Der zentrale IngestionService (Orchestrator).
WP-24c: Integration der Symmetrie-Logik (Automatische inverse Kanten).
WP-25a: Integration der Mixture of Experts (MoE) Architektur.
WP-15b: Two-Pass Workflow mit globalem AUTHORITY-SET.
AUDIT v3.3.0: Einführung der Global Authority Map. Verhindert
zuverlässig das Überschreiben expliziter Kanten.
VERSION: 3.3.0 (WP-24c: Multi-Pass Authority Enforcement)
STATUS: Active
"""
import logging
import asyncio
import os
import re
from typing import Dict, List, Optional, Tuple, Any, Set
# Core module imports
from app.core.parser import (
    read_markdown, pre_scan_markdown, normalize_frontmatter,
    validate_required_frontmatter, NoteContext
)
from app.core.chunking import assemble_chunks
# WP-24c: Import for the deterministic UUID precomputation
from app.core.graph.graph_utils import _mk_edge_id
# Database layer
from app.core.database.qdrant import QdrantConfig, get_client, ensure_collections, ensure_payload_indexes
from app.core.database.qdrant_points import points_for_chunks, points_for_note, points_for_edges, upsert_batch
from qdrant_client.http import models as rest
# Services
from app.services.embeddings_client import EmbeddingsClient
from app.services.edge_registry import registry as edge_registry
from app.services.llm_service import LLMService
# Package-internal imports
from .ingestion_utils import load_type_registry, resolve_note_type, get_chunk_config_by_profile
from .ingestion_db import fetch_note_payload, artifacts_missing, purge_artifacts, is_explicit_edge_present
from .ingestion_validation import validate_edge_candidate
from .ingestion_note_payload import make_note_payload
from .ingestion_chunk_payload import make_chunk_payloads
# Fallback for edges (structural linking)
try:
    from app.core.graph.graph_derive_edges import build_edges_for_note
except ImportError:
    def build_edges_for_note(*args, **kwargs):
        return []

logger = logging.getLogger(__name__)
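
# Minimal usage sketch (illustrative only; the prefix and file paths below are
# assumptions, not values defined in this module):
#
#     service = IngestionService(collection_prefix="mindnet")
#     results = asyncio.run(service.run_batch(
#         ["vault/00_Inbox/note_a.md", "vault/00_Inbox/note_b.md"],
#         vault_root="vault",
#     ))
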
class IngestionService:
    def __init__(self, collection_prefix: Optional[str] = None):
        """Initializes the service using the new database infrastructure."""
        from app.config import get_settings
        self.settings = get_settings()
        # --- LOGGING CLEANUP (business focus) ---
        # Suppress library noise in the console and the log file
        for lib in ["httpx", "httpcore", "qdrant_client", "urllib3", "openai"]:
            logging.getLogger(lib).setLevel(logging.WARNING)
        self.prefix = collection_prefix or self.settings.COLLECTION_PREFIX
        self.cfg = QdrantConfig.from_env()
        self.cfg.prefix = self.prefix
        self.client = get_client(self.cfg)
        self.registry = load_type_registry()
        self.embedder = EmbeddingsClient()
        self.llm = LLMService()
        # WP-25a: Resolve the vector dimension via the embedding profile (MoE)
        embed_cfg = self.llm.profiles.get("embedding_expert", {})
        self.dim = embed_cfg.get("dimensions") or self.settings.VECTOR_SIZE
        self.active_hash_mode = self.settings.CHANGE_DETECTION_MODE
        self.batch_cache: Dict[str, NoteContext] = {}  # Global context cache
        # WP-24c: Global store of all explicit edge IDs across the entire vault
        self.vault_authority_ids: Set[str] = set()
        try:
            # Delegate to the modularized schema logic
            ensure_collections(self.client, self.prefix, self.dim)
            ensure_payload_indexes(self.client, self.prefix)
        except Exception as e:
            logger.warning(f"DB initialization warning: {e}")
    def _resolve_target_id(self, target_raw: str) -> Optional[str]:
        """
        Resolves a target string (title, ID, or path) against the batch_cache.
        This is the central filter against junk links.
        """
        if not target_raw:
            return None
        # Direct look-up in the 3-way index (ID, title, filename)
        ctx = self.batch_cache.get(target_raw)
        return ctx.note_id if ctx else None
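
    # Illustrative look-ups (assuming the pre-scan cached a note with ID
    # "202401011200ABCD", title "Example Note", in file "Example Note.md"):
    #   _resolve_target_id("202401011200ABCD")  -> "202401011200ABCD"  (by ID)
    #   _resolve_target_id("Example Note")      -> "202401011200ABCD"  (by title)
    #   _resolve_target_id("No Such Note")      -> None  (junk link filtered out)
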
    async def run_batch(self, file_paths: List[str], vault_root: str) -> List[Dict[str, Any]]:
        """
        WP-15b: Two-pass ingestion workflow with global authority mapping.
        """
        self.vault_authority_ids.clear()
        self.batch_cache.clear()
        logger.info(f"🔍 [Pass 1] Pre-scanning {len(file_paths)} files & building the authority map...")
        # Step 1: Populate the context cache (the basis for ID resolution)
        for path in file_paths:
            try:
                ctx = pre_scan_markdown(path, registry=self.registry)
                if ctx:
                    self.batch_cache[ctx.note_id] = ctx
                    self.batch_cache[ctx.title] = ctx
                    fname = os.path.splitext(os.path.basename(path))[0]
                    self.batch_cache[fname] = ctx
            except Exception as e:
                logger.warning(f" ⚠️ Pre-scan failed for {path}: {e}")
        # Step 2: Register all explicit links across the entire vault.
        # We precompute the UUIDs of all manual links so they can be protected later.
        for note_id, ctx in self.batch_cache.items():
            # Only consider the note-ID entries (regex for the date-based ID)
            if not re.match(r'^\d{12}', note_id):
                continue
            if hasattr(ctx, 'links'):
                for link in ctx.links:
                    t_id = self._resolve_target_id(link.get("to"))
                    if t_id:
                        # Canonicalize the link type
                        kind = edge_registry.resolve(link.get("kind", "related_to"))
                        # Generate the unique ID (exactly as it would land in Qdrant)
                        edge_id = _mk_edge_id(kind, ctx.note_id, t_id, "note")
                        self.vault_authority_ids.add(edge_id)
        logger.info(f"✅ Context ready. Authority map holds {len(self.vault_authority_ids)} protected manual edges.")
        # Step 3: Process the files (pass 2)
        results = []
        for p in file_paths:
            res = await self.process_file(p, vault_root, apply=True, purge_before=True)
            results.append(res)
        logger.info("--- ✅ BATCH IMPORT FINISHED ---")
        return results
    async def process_file(self, file_path: str, vault_root: str, **kwargs) -> Dict[str, Any]:
        """Transforms a Markdown file while protecting the authority edges."""
        apply = kwargs.get("apply", False)
        force_replace = kwargs.get("force_replace", False)
        purge_before = kwargs.get("purge_before", False)
        note_scope_refs = kwargs.get("note_scope_refs", False)
        hash_source = kwargs.get("hash_source", "parsed")
        hash_normalize = kwargs.get("hash_normalize", "canonical")
        result = {"path": file_path, "status": "skipped", "changed": False, "error": None}
        # 1. Parse & lifecycle gate
        try:
            parsed = read_markdown(file_path)
            if not parsed:
                return {**result, "error": "Empty file"}
            fm = normalize_frontmatter(parsed.frontmatter)
            validate_required_frontmatter(fm)
        except Exception as e:
            return {**result, "error": f"Validation failed: {str(e)}"}
        ingest_cfg = self.registry.get("ingestion_settings", {})
        ignore_list = ingest_cfg.get("ignore_statuses", ["system", "template", "archive", "hidden"])
        current_status = fm.get("status", "draft").lower().strip()
        if current_status in ignore_list:
            return {**result, "status": "skipped", "reason": "lifecycle_filter"}
        # 2. Payload & change detection
        note_type = resolve_note_type(self.registry, fm.get("type"))
        note_pl = make_note_payload(
            parsed, vault_root=vault_root, file_path=file_path,
            hash_source=hash_source, hash_normalize=hash_normalize,
            types_cfg=self.registry
        )
        note_id = note_pl["note_id"]
        logger.info(f"📄 Processing: '{note_id}' (type: {note_type})")
        old_payload = None if force_replace else fetch_note_payload(self.client, self.prefix, note_id)
        check_key = f"{self.active_hash_mode}:{hash_source}:{hash_normalize}"
        old_hash = (old_payload or {}).get("hashes", {}).get(check_key)
        new_hash = note_pl.get("hashes", {}).get(check_key)
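        # Illustrative: with CHANGE_DETECTION_MODE == "sha256" (an assumed value)
        # and the defaults above, check_key would read "sha256:parsed:canonical".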
        c_miss, e_miss = artifacts_missing(self.client, self.prefix, note_id)
        if not (force_replace or not old_payload or old_hash != new_hash or c_miss or e_miss):
            return {**result, "status": "unchanged", "note_id": note_id}
        if not apply:
            return {**result, "status": "dry-run", "changed": True, "note_id": note_id}
        # 3. Deep processing (chunking, validation, embedding)
        try:
            body_text = getattr(parsed, "body", "") or ""
            edge_registry.ensure_latest()
            profile = note_pl.get("chunk_profile", "sliding_standard")
            chunk_cfg = get_chunk_config_by_profile(self.registry, profile, note_type)
            enable_smart = chunk_cfg.get("enable_smart_edge_allocation", False)
            chunks = await assemble_chunks(note_id, body_text, note_type, config=chunk_cfg)
            # --- WP-25a: MoE semantic edge validation ---
            for ch in chunks:
                new_pool = []
                for cand in getattr(ch, "candidate_pool", []):
                    if cand.get("provenance") == "global_pool" and enable_smart:
                        is_valid = await validate_edge_candidate(
                            ch.text, cand, self.batch_cache, self.llm, profile_name="ingest_validator"
                        )
                        label = cand.get('target_id') or cand.get('note_id') or "Unknown"
                        logger.info(f" 🧠 [SMART EDGE] {label} -> {'✅ OK' if is_valid else '❌ SKIP'}")
                        if is_valid:
                            new_pool.append(cand)
                    else:
                        new_pool.append(cand)
                ch.candidate_pool = new_pool
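            # Net effect: only candidates injected from the global pool are gated
            # by the LLM validator; explicit and locally derived candidates pass
            # through unchanged.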
            chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks, file_path=file_path, types_cfg=self.registry)
            vecs = await self.embedder.embed_documents([c.get("window") or "" for c in chunk_pls]) if chunk_pls else []
            # Aggregate all final edges
            raw_edges = build_edges_for_note(
                note_id, chunk_pls,
                note_level_references=note_pl.get("references", []),
                include_note_scope_refs=note_scope_refs
            )
            # --- WP-24c: Symmetry injection with authority protection ---
            final_edges = []
            # PHASE 1: Explicit edges (priority)
            for e in raw_edges:
                t_id = self._resolve_target_id(e.get("target_id"))
                if not t_id:
                    continue  # Anti-junk: only allow edges to notes that actually exist
                resolved_kind = edge_registry.resolve(
                    e.get("kind", "related_to"),
                    provenance=e.get("provenance", "explicit"),
                    context={"file": file_path, "note_id": note_id}
                )
                e.update({
                    "kind": resolved_kind, "target_id": t_id,
                    "origin_note_id": note_id, "virtual": False, "confidence": 1.0
                })
                final_edges.append(e)
            # PHASE 2: Symmetric edges (inverse)
            explicit_only = [x for x in final_edges if not x.get("virtual")]
            for e in explicit_only:
                kind = e["kind"]
                inv_kind = edge_registry.get_inverse(kind)
                t_id = e["target_id"]
                if inv_kind and t_id and t_id != note_id:
                    # Compute the ID of the potential virtual edge
                    potential_id = _mk_edge_id(inv_kind, t_id, note_id, "note")
                    # AUTHORITY CHECK: was this relation set manually anywhere in the vault?
                    if potential_id not in self.vault_authority_ids:
                        # Additional check against authority already persisted in the DB
                        if not is_explicit_edge_present(self.client, self.prefix, potential_id):
                            inv_edge = e.copy()
                            inv_edge.update({
                                "note_id": t_id, "target_id": note_id, "kind": inv_kind,
                                "virtual": True, "provenance": "structure", "confidence": 1.0,
                                "origin_note_id": note_id
                            })
                            final_edges.append(inv_edge)
                            logger.info(f" 🔄 [SYMMETRY] Inverse edge: {t_id} --({inv_kind})--> {note_id}")
            edges = final_edges
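            # Illustrative (assuming the registry maps "parent_of" -> "child_of"):
            # an explicit edge A --parent_of--> B yields a virtual inverse edge
            # B --child_of--> A, unless an edge with the same deterministic ID was
            # declared manually somewhere in the vault or is already persisted.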
            # 4. DB upsert
            if apply:
                if purge_before and old_payload:
                    purge_artifacts(self.client, self.prefix, note_id)
                # Persist the main note
                n_name, n_pts = points_for_note(self.prefix, note_pl, None, self.dim)
                upsert_batch(self.client, n_name, n_pts)
                if chunk_pls and vecs:
                    c_pts = points_for_chunks(self.prefix, chunk_pls, vecs)[1]
                    upsert_batch(self.client, f"{self.prefix}_chunks", c_pts)
                if edges:
                    e_pts = points_for_edges(self.prefix, edges)[1]
                    upsert_batch(self.client, f"{self.prefix}_edges", e_pts)
            logger.info(f" ✨ Done: {len(chunk_pls)} chunks, {len(edges)} edges.")
            return {
                "path": file_path, "status": "success", "changed": True, "note_id": note_id,
                "chunks_count": len(chunk_pls), "edges_count": len(edges)
            }
        except Exception as e:
            logger.error(f"❌ Error processing {file_path}: {e}", exc_info=True)
            return {**result, "error": str(e)}
    async def create_from_text(self, markdown_content: str, filename: str, vault_root: str, folder: str = "00_Inbox") -> Dict[str, Any]:
        """Creates a note from a text stream."""
        target_path = os.path.join(vault_root, folder, filename)
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        with open(target_path, "w", encoding="utf-8") as f:
            f.write(markdown_content)
        await asyncio.sleep(0.1)
        return await self.process_file(file_path=target_path, vault_root=vault_root, apply=True, force_replace=True, purge_before=True)
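
# Minimal sketch for the text-stream path (illustrative only; the content,
# filename, and vault_root are placeholders, and the frontmatter keys shown are
# assumptions -- the actually required keys are whatever
# validate_required_frontmatter enforces):
#
#     result = asyncio.run(service.create_from_text(
#         "---\ntype: note\nstatus: draft\n---\n# Hello",
#         filename="hello.md",
#         vault_root="vault",
#     ))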