diff --git a/scripts/import_markdown.py b/scripts/import_markdown.py
index 6fa2a01..546b4e9 100644
--- a/scripts/import_markdown.py
+++ b/scripts/import_markdown.py
@@ -2,8 +2,48 @@
 # -*- coding: utf-8 -*-
 """
 Script: scripts/import_markdown.py — Markdown → Qdrant (Notes, Chunks, Edges)
-Version: 3.6.0
+Version: 3.6.1
 Date: 2025-09-09
+
+Overview
+--------
+- Reads Markdown files and creates Notes/Chunks/Edges **idempotently**.
+- Robust change detection: multiple hash variants are stored side by side in the note
+  (option C). Comparison is **mode-exact** against `hashes["<mode>:<source>:<normalize>"]`,
+  so switching the comparison mode does **not** cause mass changes.
+- Baseline mode: with `--baseline-modes`, **missing** hash variants are silently
+  backfilled into the `hashes` field (upserts Notes ONLY; legacy hash fields stay untouched).
+
+Hash/compare configuration
+--------------------------
+- Comparison mode: `--hash-mode body|frontmatter|full`
+  - or ENV: `MINDNET_HASH_MODE` / `MINDNET_HASH_COMPARE` (Body|Frontmatter|Full)
+- Source: `--hash-source parsed|raw` (ENV: `MINDNET_HASH_SOURCE`, default parsed)
+- Normalization: `--hash-normalize canonical|none` (ENV: `MINDNET_HASH_NORMALIZE`, default canonical)
+- Optional: `--compare-text` (or `MINDNET_COMPARE_TEXT=true`) additionally compares the parsed body text directly.
+
+Other ENV / Qdrant
+------------------
+- QDRANT_URL | QDRANT_HOST/QDRANT_PORT | QDRANT_API_KEY
+- COLLECTION_PREFIX (default: mindnet)
+- VECTOR_DIM (default: 384)
+- MINDNET_NOTE_SCOPE_REFS: true|false (default: false)
+
+Usage examples
+--------------
+  # Default (body, parsed, canonical)
+  python3 -m scripts.import_markdown --vault ./vault
+
+  # Baseline (silently backfills hashes for the parsed:canonical triple)
+  MINDNET_HASH_SOURCE=parsed MINDNET_HASH_NORMALIZE=canonical \
+  python3 -m scripts.import_markdown --vault ./vault --apply --baseline-modes
+
+  # Frontmatter comparison without mass changes (after the baseline run)
+  MINDNET_HASH_COMPARE=Frontmatter \
+  python3 -m scripts.import_markdown --vault ./vault
+
+  # Most sensitive (raw + none), plus direct text comparison
+  python3 -m scripts.import_markdown --vault ./vault --apply --hash-source raw --hash-normalize none --compare-text
 """
 
 from __future__ import annotations
@@ -45,6 +85,7 @@ try:
 except Exception:
     embed_texts = None
 
+
 # ---------------------------------------------------------------------
 # Helpers
 # ---------------------------------------------------------------------
@@ -80,12 +121,29 @@ def fetch_existing_note_payload(client, prefix: str, note_id: str) -> Optional[D
     return points[0].payload or {}
 
 def purge_note_artifacts(client, prefix: str, note_id: str) -> None:
+    """
+    Deletes all chunks & edges of a note via a filter selector (compatible with current Qdrant clients).
+    """
     _, chunks_col, edges_col = collections(prefix)
-    f_chunks = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
-    client.delete(collection_name=chunks_col, points_selector=f_chunks, wait=True)
-    f_edges = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.ValueList(list=[]))) # type: ignore
-    # Note: newer Qdrant clients support filters on delete via 'points_selector'.
-    # If your client balks here, use a scroll+delete implementation. (Not expanded here for space reasons.)
+    filt = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
+
+    try:
+        client.delete(
+            collection_name=chunks_col,
+            points_selector=rest.FilterSelector(filter=filt),
+            wait=True
+        )
+    except Exception as e:
+        print(json.dumps({"note_id": note_id, "warn": f"delete chunks via filter failed: {e}"}))
+
+    try:
+        client.delete(
+            collection_name=edges_col,
+            points_selector=rest.FilterSelector(filter=filt),
+            wait=True
+        )
+    except Exception as e:
+        print(json.dumps({"note_id": note_id, "warn": f"delete edges via filter failed: {e}"}))
 
 def _normalize_rel_path(abs_path: str, vault_root: str) -> str:
     try:
@@ -103,7 +161,8 @@ def _resolve_mode(val: Optional[str]) -> str:
     return "body"
 
 def _env(key: str, default: str) -> str:
-    return os.environ.get(key, default).strip().lower()
+    return (os.environ.get(key) or default).strip().lower()
+
 
 # ---------------------------------------------------------------------
 # Main
 # ---------------------------------------------------------------------
@@ -135,9 +194,9 @@ def main() -> None:
                     help="Silently backfill missing hash variants in the 'hashes' field (upserts Notes ONLY)")
     args = ap.parse_args()
 
-    mode = _resolve_mode(args.hash_mode)
-    src = _env("MINDNET_HASH_SOURCE", args.hash_source or "parsed")
-    norm = _env("MINDNET_HASH_NORMALIZE", args.hash_normalize or "canonical")
+    mode = _resolve_mode(args.hash_mode)  # body|frontmatter|full
+    src = _env("MINDNET_HASH_SOURCE", args.hash_source or "parsed")  # parsed|raw
+    norm = _env("MINDNET_HASH_NORMALIZE", args.hash_normalize or "canonical")  # canonical|none
     note_scope_refs_env = (_env("MINDNET_NOTE_SCOPE_REFS", "false") == "true")
     note_scope_refs = args.note_scope_refs or note_scope_refs_env
     compare_text = args.compare_text or (_env("MINDNET_COMPARE_TEXT", "false") == "true")
@@ -157,6 +216,7 @@ def main() -> None:
     processed = 0
     for path in files:
+        # -------- Parse & Validate --------
         try:
             parsed = read_markdown(path)
         except Exception as e:
@@ -178,7 +238,7 @@ def main() -> None:
 
         processed += 1
 
-        # Recompute payload (returns 'hashes' incl. the parsed:canonical triple)
+        # -------- Build new payload (includes 'hashes') --------
         note_pl = make_note_payload(
             parsed,
             vault_root=root,
@@ -195,37 +255,38 @@ def main() -> None:
             print(json.dumps({"path": path, "error": "Missing note_id after payload build"}))
             continue
 
-        # Fetch the old payload
+        # -------- Fetch old payload --------
         old_payload = None if args.force_replace else fetch_existing_note_payload(client, cfg.prefix, note_id)
         old_hashes = (old_payload or {}).get("hashes") or {}
         old_hash_exact = old_hashes.get(key_current)
 
-        # New hash for the current mode from the new payload
+        # New hash (current mode) from the new payload
        new_hash_exact = (note_pl.get("hashes") or {}).get(key_current)
 
         needs_baseline = (old_hash_exact is None)
 
-        # Change detection
+        # Change detection: only when a baseline exists and the hash differs,
+        # or when force/text_changed explicitly signals a change.
         hash_changed = (old_hash_exact is not None and new_hash_exact is not None and old_hash_exact != new_hash_exact)
+
         text_changed = False
         if compare_text:
             old_text = (old_payload or {}).get("fulltext") or ""
             new_text = note_pl.get("fulltext") or ""
             text_changed = (old_text != new_text)
 
-        # Important: a mode switch alone does *not* count as a change (option C).
-        # changed => only when a baseline exists and the hash differs, OR we have force-replace/text_changed.
         changed = args.force_replace or hash_changed or text_changed
 
         # Should the baseline be backfilled silently?
         do_baseline_only = (args.baseline_modes and needs_baseline and not changed)
 
-        # Prepare edges/chunks (only if we may actually write)
+        # -------- Optional: prepare chunks / embeddings / edges --------
+        # Only needed when we actually write changes (not for baseline-only).
         chunks = []
         chunk_pls = []
         edges = []
         vecs = []
-        if changed or args.apply:
+        if changed and args.apply:
             try:
                 chunks = assemble_chunks(fm["id"], getattr(parsed, "body", "") or "", fm.get("type", "concept"))
                 chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks)
@@ -254,7 +315,7 @@ def main() -> None:
                 print(json.dumps({"path": path, "note_id": note_id, "error": f"build_edges_for_note failed: {e}"}))
                 continue
 
-        # Summary
+        # -------- Summary --------
         summary = {
             "note_id": note_id,
             "title": fm.get("title"),
@@ -273,7 +334,7 @@ def main() -> None:
         }
         print(json.dumps(summary, ensure_ascii=False))
 
-        # Write?
+        # -------- Writes --------
         if not args.apply:
             continue
 
@@ -282,7 +343,7 @@ def main() -> None:
             merged_hashes = {}
             merged_hashes.update(old_hashes)
             merged_hashes.update(note_pl.get("hashes") or {})
-            # Upsert Notes only; keep legacy hash fields as in the old payload
+            # Keep legacy hash fields unchanged, if present
             if old_payload:
                 note_pl["hash_fulltext"] = old_payload.get("hash_fulltext", note_pl.get("hash_fulltext"))
                 note_pl["hash_signature"] = old_payload.get("hash_signature", note_pl.get("hash_signature"))
@@ -310,5 +371,6 @@ def main() -> None:
 
     print(f"Done. Processed notes: {processed}")
 
+
 if __name__ == "__main__":
     main()
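
The removed comment in `purge_note_artifacts` pointed at a scroll+delete fallback for clients whose `delete()` rejects filter selectors, but left it unexpanded. A minimal sketch of that fallback, assuming the same `rest` models import as the script; the helper name `purge_via_scroll` is hypothetical, not part of the diff:

def purge_via_scroll(client, collection: str, note_id: str, batch: int = 256) -> None:
    # Fallback sketch: page through matching point IDs, then delete by explicit
    # ID list (works on older clients without FilterSelector support on delete).
    filt = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    while True:
        # Always re-scroll the first page of *remaining* matches; since we delete
        # as we go, offset-based pagination is unnecessary and cannot skip points.
        points, _ = client.scroll(
            collection_name=collection,
            scroll_filter=filt,
            limit=batch,
            with_payload=False,
            with_vectors=False,
        )
        if not points:
            break
        client.delete(
            collection_name=collection,
            points_selector=rest.PointIdsList(points=[p.id for p in points]),
            wait=True,
        )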
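
To make the "option C" semantics concrete: the mode-exact comparison reports a change only when the exact hash variant exists on both sides and differs; a variant missing from the old payload yields `needs_baseline` instead. A small self-contained illustration, assuming a `"<mode>:<source>:<normalize>"` key format (the real format comes from `make_note_payload` and is an assumption here):

def change_state(old_hashes: dict, new_hashes: dict, key: str) -> tuple[bool, bool]:
    # Assumed key format: "<mode>:<source>:<normalize>", e.g. "body:parsed:canonical".
    old_h, new_h = old_hashes.get(key), new_hashes.get(key)
    hash_changed = old_h is not None and new_h is not None and old_h != new_h
    needs_baseline = old_h is None  # missing variant => baseline candidate, not a change
    return hash_changed, needs_baseline

old = {"body:parsed:canonical": "abc"}
new = {"body:parsed:canonical": "abc", "frontmatter:parsed:canonical": "def"}

# Switching the compare mode to frontmatter: no baseline stored yet => not "changed".
print(change_state(old, new, "frontmatter:parsed:canonical"))  # (False, True)
# Same mode, same content => no change either.
print(change_state(old, new, "body:parsed:canonical"))         # (False, False)

This is why a mode switch alone never triggers mass re-imports: the first run after a switch only flags `needs_baseline`, which `--baseline-modes` then backfills without touching chunks or edges.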