""" app/core/ingestion.py Zentraler Service für die Transformation von Markdown-Dateien in Qdrant-Objekte (Notes, Chunks, Edges). Dient als Shared Logic für: 1. CLI-Imports (scripts/import_markdown.py) 2. API-Uploads (WP-11) """ import os import json from typing import Dict, List, Optional, Tuple, Any, Set from app.core.parser import ( read_markdown, normalize_frontmatter, validate_required_frontmatter, ) from app.core.note_payload import make_note_payload from app.core.chunker import assemble_chunks from app.core.chunk_payload import make_chunk_payloads # Fallback Imports wie im Original-Skript try: from app.core.derive_edges import build_edges_for_note except ImportError: from app.core.edges import build_edges_for_note # type: ignore from app.core.qdrant import QdrantConfig, get_client, ensure_collections, ensure_payload_indexes from app.core.qdrant_points import ( points_for_chunks, points_for_note, points_for_edges, upsert_batch, ) # Optionales Embedding try: from app.core.embed import embed_texts except ImportError: embed_texts = None # --- Helper für Type-Registry (ausgelagert aus Script) --- def load_type_registry(custom_path: Optional[str] = None) -> dict: import yaml path = custom_path or os.getenv("MINDNET_TYPES_FILE", "config/types.yaml") if not os.path.exists(path): # Fallback auf Root-Ebene (für Tests/CLI) if os.path.exists("types.yaml"): path = "types.yaml" else: return {} try: with open(path, "r", encoding="utf-8") as f: return yaml.safe_load(f) or {} except Exception: return {} def resolve_note_type(requested: Optional[str], reg: dict) -> str: types = reg.get("types", {}) if requested and requested in types: return requested return "concept" # Default Fallback def effective_chunk_profile(note_type: str, reg: dict) -> str: # 1. Specific Type t_cfg = reg.get("types", {}).get(note_type, {}) if t_cfg and t_cfg.get("chunk_profile"): return t_cfg.get("chunk_profile") # 2. Defaults return reg.get("defaults", {}).get("chunk_profile", "default") def effective_retriever_weight(note_type: str, reg: dict) -> float: t_cfg = reg.get("types", {}).get(note_type, {}) if t_cfg and "retriever_weight" in t_cfg: return float(t_cfg["retriever_weight"]) return float(reg.get("defaults", {}).get("retriever_weight", 1.0)) class IngestionService: def __init__(self, collection_prefix: str = "mindnet"): self.prefix = collection_prefix self.cfg = QdrantConfig.from_env() self.cfg.prefix = collection_prefix # Override env if needed self.client = get_client(self.cfg) self.dim = self.cfg.dim # Registry laden self.registry = load_type_registry() # Init DB Checks ensure_collections(self.client, self.prefix, self.dim) ensure_payload_indexes(self.client, self.prefix) def process_file( self, file_path: str, vault_root: str, force_replace: bool = False, apply: bool = False, purge_before: bool = False, note_scope_refs: bool = False, hash_mode: str = "body", hash_source: str = "parsed", hash_normalize: str = "canonical" ) -> Dict[str, Any]: """ Verarbeitet eine einzelne Datei. Return: Summary Dict (Erfolg, Änderungen, Stats). """ result = { "path": file_path, "status": "skipped", "changed": False, "error": None } # 1. Parse & Frontmatter try: parsed = read_markdown(file_path) if not parsed: return {**result, "error": "Empty or unreadable file"} fm = normalize_frontmatter(parsed.frontmatter) validate_required_frontmatter(fm) except Exception as e: return {**result, "error": f"Validation failed: {str(e)}"} # 2. 
        # 2. Type & config resolution
        note_type = resolve_note_type(fm.get("type"), self.registry)
        fm["type"] = note_type
        fm["chunk_profile"] = effective_chunk_profile(note_type, self.registry)

        # Weight resolution (frontmatter override > registry)
        weight = fm.get("retriever_weight")
        if weight is None:
            weight = effective_retriever_weight(note_type, self.registry)
        fm["retriever_weight"] = float(weight)

        # 3. Build the note payload
        try:
            note_pl = make_note_payload(
                parsed,
                vault_root=vault_root,
                hash_mode=hash_mode,
                hash_normalize=hash_normalize,
                hash_source=hash_source,
                file_path=file_path,
            )
            # Ensure fulltext & weight are present
            if not note_pl.get("fulltext"):
                note_pl["fulltext"] = getattr(parsed, "body", "") or ""
            note_pl["retriever_weight"] = fm["retriever_weight"]
            note_id = note_pl["note_id"]
        except Exception as e:
            return {**result, "error": f"Payload build failed: {str(e)}"}

        # 4. Change detection (hash check)
        # Fetch the old payload from Qdrant unless we are forcing a replace
        old_payload = None
        if not force_replace:
            old_payload = self._fetch_note_payload(note_id)
        has_old = old_payload is not None

        # Hashes are keyed by the mode/source/normalize combination,
        # e.g. "body:parsed:canonical" for the defaults.
        key_current = f"{hash_mode}:{hash_source}:{hash_normalize}"
        old_hash = (old_payload or {}).get("hashes", {}).get(key_current)
        new_hash = note_pl.get("hashes", {}).get(key_current)
        hash_changed = (old_hash != new_hash)

        # Check for missing artifacts (chunks/edges)
        chunks_missing, edges_missing = self._artifacts_missing(note_id)

        should_write = force_replace or (not has_old) or hash_changed or chunks_missing or edges_missing
        if not should_write:
            return {**result, "status": "unchanged", "note_id": note_id}

        if not apply:
            return {**result, "status": "dry-run", "changed": True, "note_id": note_id}

        # 5. Processing (chunking, embedding, edges)
        try:
            body_text = getattr(parsed, "body", "") or ""
            chunks = assemble_chunks(fm["id"], body_text, fm["type"])
            chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks, note_text=body_text)

            # Embeddings; zero vectors serve as placeholders when no
            # embedding backend is available.
            if embed_texts and chunk_pls:
                texts = [c.get("window") or c.get("text") or "" for c in chunk_pls]
                vecs = embed_texts(texts)
            else:
                vecs = [[0.0] * self.dim for _ in chunk_pls]

            # Edges
            note_refs = note_pl.get("references") or []
            edges = build_edges_for_note(
                note_id,
                chunk_pls,
                note_level_references=note_refs,
                include_note_scope_refs=note_scope_refs,
            )
        except Exception as e:
            return {**result, "error": f"Processing failed: {str(e)}"}
        # 6. Upsert action
        if purge_before:
            # Purge unconditionally: gating on has_old would skip the purge
            # whenever force_replace=True (the old payload is never fetched
            # then), and _purge_artifacts is safe when nothing exists.
            self._purge_artifacts(note_id)

        # Upsert note
        n_name, n_pts = points_for_note(self.prefix, note_pl, None, self.dim)
        upsert_batch(self.client, n_name, n_pts)

        # Upsert chunks
        if chunk_pls:
            c_name, c_pts = points_for_chunks(self.prefix, chunk_pls, vecs)
            upsert_batch(self.client, c_name, c_pts)

        # Upsert edges
        if edges:
            e_name, e_pts = points_for_edges(self.prefix, edges)
            upsert_batch(self.client, e_name, e_pts)

        return {
            "path": file_path,
            "status": "success",
            "changed": True,
            "note_id": note_id,
            "chunks_count": len(chunk_pls),
            "edges_count": len(edges),
        }

    # --- Internal Qdrant helpers ---

    def _fetch_note_payload(self, note_id: str) -> Optional[dict]:
        from qdrant_client.http import models as rest

        col = f"{self.prefix}_notes"
        f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
        pts, _ = self.client.scroll(collection_name=col, scroll_filter=f, limit=1, with_payload=True)
        return pts[0].payload if pts else None

    def _artifacts_missing(self, note_id: str) -> Tuple[bool, bool]:
        from qdrant_client.http import models as rest

        c_col = f"{self.prefix}_chunks"
        e_col = f"{self.prefix}_edges"
        f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])

        # Check chunks
        c_pts, _ = self.client.scroll(collection_name=c_col, scroll_filter=f, limit=1)
        # Check edges
        e_pts, _ = self.client.scroll(collection_name=e_col, scroll_filter=f, limit=1)

        return (not bool(c_pts)), (not bool(e_pts))

    def _purge_artifacts(self, note_id: str):
        from qdrant_client.http import models as rest

        f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
        selector = rest.FilterSelector(filter=f)
        for suffix in ["chunks", "edges"]:
            try:
                self.client.delete(collection_name=f"{self.prefix}_{suffix}", points_selector=selector)
            except Exception:
                pass

    def create_from_text(
        self,
        markdown_content: str,
        filename: str,
        vault_root: str,
        folder: str = "00_Inbox",
    ) -> Dict[str, Any]:
        """
        WP-11 persistence: safely writes the text to disk and indexes it.
        Creates directories automatically.
        """
        # 1. Prepare the target folder
        target_dir = os.path.join(vault_root, folder)
        try:
            os.makedirs(target_dir, exist_ok=True)
        except Exception as e:
            return {"status": "error", "error": f"Could not create folder {target_dir}: {e}"}

        # 2. Sanitize the filename
        safe_filename = os.path.basename(filename)
        if not safe_filename.endswith(".md"):
            safe_filename += ".md"
        file_path = os.path.join(target_dir, safe_filename)

        # 3. Write to disk
        try:
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(markdown_content)
        except Exception as e:
            return {"status": "error", "error": f"Disk write failed at {file_path}: {str(e)}"}

        # 4. Index (single-file upsert)
        return self.process_file(
            file_path=file_path,
            vault_root=vault_root,
            apply=True,
            force_replace=True,
            purge_before=True,
        )
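
# --- Usage sketch (hypothetical, not part of the service) ---
# A minimal illustration of how this module is meant to be driven, assuming
# a local vault at ./vault and Qdrant settings coming from the environment.
# The file path and the note content are made up for demonstration; the real
# callers are scripts/import_markdown.py and the WP-11 upload endpoint.
if __name__ == "__main__":
    svc = IngestionService(collection_prefix="mindnet")

    # Dry run first: with apply=False the service reports whether the note
    # would change, but writes nothing to Qdrant.
    summary = svc.process_file(
        file_path="vault/00_Inbox/example.md",  # hypothetical path
        vault_root="vault",
        apply=False,
    )
    print(summary)  # e.g. {"status": "dry-run", "changed": True, ...}

    # Create-and-index in one step, as the WP-11 upload path does
    # (force_replace + purge_before are set internally).
    created = svc.create_from_text(
        markdown_content="---\nid: example\ntype: concept\n---\nHello.",
        filename="example.md",
        vault_root="vault",
    )
    print(created)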