#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script: scripts/import_markdown.py — Markdown → Qdrant (Notes, Chunks, Edges)
Version: 3.6.0
Datum: 2025-09-09
"""
from __future__ import annotations
import argparse
import difflib
import json
import os
import sys
from typing import Dict, List, Optional, Tuple
from dotenv import load_dotenv
from qdrant_client.http import models as rest
from app.core.parser import (
    read_markdown,
    normalize_frontmatter,
    validate_required_frontmatter,
)
from app.core.note_payload import make_note_payload
from app.core.chunker import assemble_chunks
from app.core.chunk_payload import make_chunk_payloads
from app.core.edges import build_edges_for_note
from app.core.qdrant import (
    QdrantConfig,
    get_client,
    ensure_collections,
    ensure_payload_indexes,
)
from app.core.qdrant_points import (
    points_for_chunks,
    points_for_note,
    points_for_edges,
    upsert_batch,
)
try:
    from app.core.embed import embed_texts  # optional
except Exception:
    embed_texts = None
# ---------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------
def iter_md(root: str) -> List[str]:
    out: List[str] = []
    for dirpath, _, filenames in os.walk(root):
        for fn in filenames:
            if not fn.lower().endswith(".md"):
                continue
            p = os.path.join(dirpath, fn)
            pn = p.replace("\\", "/")
            if any(ex in pn for ex in ["/.obsidian/", "/_backup_frontmatter/", "/_imported/"]):
                continue
            out.append(p)
    return sorted(out)
def collections(prefix: str) -> Tuple[str, str, str]:
    return f"{prefix}_notes", f"{prefix}_chunks", f"{prefix}_edges"
def fetch_existing_note_payload(client, prefix: str, note_id: str) -> Optional[Dict]:
    notes_col, _, _ = collections(prefix)
    f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    points, _ = client.scroll(
        collection_name=notes_col,
        scroll_filter=f,
        with_payload=True,
        with_vectors=False,
        limit=1,
    )
    if not points:
        return None
    return points[0].payload or {}
def purge_note_artifacts(client, prefix: str, note_id: str) -> None:
    _, chunks_col, edges_col = collections(prefix)
    f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    client.delete(collection_name=chunks_col, points_selector=f, wait=True)
    client.delete(collection_name=edges_col, points_selector=f, wait=True)
    # Note: newer Qdrant clients accept a filter for delete via 'points_selector'.
    # If your client balks at this, use a scroll+delete implementation (see the sketch below).
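# Fallback sketch for older qdrant-client versions (an assumption, not part of
# the original flow): collect matching point IDs via scroll and delete them
# explicitly, batch by batch, until no matches remain.
def _purge_via_scroll(client, collection: str, note_id: str, batch: int = 256) -> None:
    f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    while True:
        points, _ = client.scroll(
            collection_name=collection,
            scroll_filter=f,
            with_payload=False,
            with_vectors=False,
            limit=batch,
        )
        if not points:
            break
        client.delete(
            collection_name=collection,
            points_selector=rest.PointIdsList(points=[p.id for p in points]),
            wait=True,
        )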
def _normalize_rel_path(abs_path: str, vault_root: str) -> str:
    try:
        rel = os.path.relpath(abs_path, vault_root)
    except Exception:
        rel = abs_path
    return rel.replace("\\", "/").lstrip("/")
def _resolve_mode(val: Optional[str]) -> str:
    v = (val or os.environ.get("MINDNET_HASH_MODE") or os.environ.get("MINDNET_HASH_COMPARE") or "body").strip().lower()
    if v in ("full", "fulltext", "body+frontmatter", "bodyplusfrontmatter"):
        return "full"
    if v in ("frontmatter", "fm"):
        return "frontmatter"
    return "body"
def _env(key: str, default: str) -> str:
    return os.environ.get(key, default).strip().lower()
# ---------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------
def main() -> None:
    load_dotenv()
    ap = argparse.ArgumentParser()
    ap.add_argument("--vault", required=True, help="Path to the Obsidian vault (root folder)")
    ap.add_argument("--apply", action="store_true", help="Write to Qdrant; without this flag, dry run only")
    ap.add_argument("--purge-before-upsert", action="store_true",
                    help="Delete the CHANGED note's chunks & edges before upsert")
    ap.add_argument("--note-id", help="Process only a specific note ID")
    ap.add_argument("--embed-note", action="store_true", help="Optional: embed the note's full text")
    ap.add_argument("--force-replace", action="store_true",
                    help="Ignore change detection and always upsert (+ optional purge)")
    ap.add_argument("--hash-mode", choices=["body", "frontmatter", "full"], default=None,
                    help="Comparison mode (body | frontmatter | full)")
    ap.add_argument("--hash-normalize", choices=["canonical", "none"], default=None)
    ap.add_argument("--hash-source", choices=["parsed", "raw"], default=None,
                    help="Source for the hash computation (default: parsed)")
    ap.add_argument("--note-scope-refs", action="store_true",
                    help="(Optional) additionally emit references:note (default: off)")
    ap.add_argument("--debug-hash-diff", action="store_true",
                    help="Show a short diff between the old and new body")
    ap.add_argument("--compare-text", action="store_true",
                    help="Additionally compare the parsed fulltext directly (beyond the hash)")
    ap.add_argument("--baseline-modes", action="store_true",
                    help="Silently backfill missing hash variants in the 'hashes' field (upserts notes ONLY)")
    args = ap.parse_args()
    mode = _resolve_mode(args.hash_mode)
    src = _env("MINDNET_HASH_SOURCE", args.hash_source or "parsed")
    norm = _env("MINDNET_HASH_NORMALIZE", args.hash_normalize or "canonical")
    note_scope_refs_env = (_env("MINDNET_NOTE_SCOPE_REFS", "false") == "true")
    note_scope_refs = args.note_scope_refs or note_scope_refs_env
    compare_text = args.compare_text or (_env("MINDNET_COMPARE_TEXT", "false") == "true")
    cfg = QdrantConfig.from_env()
    client = get_client(cfg)
    ensure_collections(client, cfg.prefix, cfg.dim)
    ensure_payload_indexes(client, cfg.prefix)
    root = os.path.abspath(args.vault)
    files = iter_md(root)
    if not files:
        print("No Markdown files found.", file=sys.stderr)
        sys.exit(2)
    key_current = f"{mode}:{src}:{norm}"
    processed = 0
    for path in files:
        try:
            parsed = read_markdown(path)
        except Exception as e:
            print(json.dumps({"path": path, "error": f"read_markdown failed: {e}"}))
            continue
        if parsed is None:
            print(json.dumps({"path": path, "error": "read_markdown returned None"}))
            continue
        try:
            fm = normalize_frontmatter(parsed.frontmatter)
            validate_required_frontmatter(fm)
        except Exception as e:
            print(json.dumps({"path": path, "error": f"Frontmatter invalid: {e}"}))
            continue
        if args.note_id and fm.get("id") != args.note_id:
            continue
        processed += 1
        # Recompute the payload (yields 'hashes' incl. the parsed:canonical triple)
        note_pl = make_note_payload(
            parsed,
            vault_root=root,
            hash_mode=mode,
            hash_normalize=norm,
            hash_source=src,
            file_path=path,
        )
        if not note_pl.get("fulltext"):
            note_pl["fulltext"] = getattr(parsed, "body", "") or ""
        note_id = note_pl.get("note_id") or fm.get("id")
        if not note_id:
            print(json.dumps({"path": path, "error": "Missing note_id after payload build"}))
            continue
        # Fetch the old payload
        old_payload = None if args.force_replace else fetch_existing_note_payload(client, cfg.prefix, note_id)
        old_hashes = (old_payload or {}).get("hashes") or {}
        old_hash_exact = old_hashes.get(key_current)
        # New hash for the current mode, taken from the new payload
        new_hash_exact = (note_pl.get("hashes") or {}).get(key_current)
        needs_baseline = (old_hash_exact is None)
        # Change detection
        hash_changed = (old_hash_exact is not None and new_hash_exact is not None and old_hash_exact != new_hash_exact)
        text_changed = False
        if compare_text:
            old_text = (old_payload or {}).get("fulltext") or ""
            new_text = note_pl.get("fulltext") or ""
            text_changed = (old_text != new_text)
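        # Sketch for --debug-hash-diff (an assumption: the flag is declared above
        # but otherwise unused, and difflib is imported for this purpose). Prints
        # a short unified diff between the stored and the freshly parsed body.
        if args.debug_hash_diff and old_payload is not None:
            diff = difflib.unified_diff(
                ((old_payload or {}).get("fulltext") or "").splitlines(),
                (note_pl.get("fulltext") or "").splitlines(),
                fromfile="old", tofile="new", lineterm="", n=1,
            )
            print("\n".join(list(diff)[:40]))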
        # Important: a mode switch alone does *not* count as a change (option C).
        # changed => only if a baseline exists and the hash differs, OR force-replace/text_changed applies.
        changed = args.force_replace or hash_changed or text_changed
        # Should the baseline be backfilled "silently"?
        do_baseline_only = (args.baseline_modes and needs_baseline and not changed)
        # Prepare edges/chunks (only when we might actually write)
        chunks = []
        chunk_pls = []
        edges = []
        vecs = []
        if changed or args.apply:
            try:
                chunks = assemble_chunks(fm["id"], getattr(parsed, "body", "") or "", fm.get("type", "concept"))
                chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks)
            except Exception as e:
                print(json.dumps({"path": path, "note_id": note_id, "error": f"chunk build failed: {e}"}))
                continue
            if embed_texts:
                try:
                    vecs = embed_texts([getattr(c, "text", "") for c in chunks])  # type: ignore[attr-defined]
                except Exception as e:
                    print(json.dumps({"path": path, "note_id": note_id, "warn": f"embed_texts failed, using zeros: {e}"}))
                    vecs = [[0.0] * cfg.dim for _ in chunks]
            else:
                vecs = [[0.0] * cfg.dim for _ in chunks]
            try:
                note_refs = note_pl.get("references") or []
                edges = build_edges_for_note(
                    note_id,
                    chunk_pls,
                    note_refs,
                    include_note_scope_refs=note_scope_refs,
                )
            except Exception as e:
                print(json.dumps({"path": path, "note_id": note_id, "error": f"build_edges_for_note failed: {e}"}))
                continue
        # Summary
        summary = {
            "note_id": note_id,
            "title": fm.get("title"),
            "chunks": len(chunk_pls),
            "edges": len(edges),
            "changed": changed,
            "needs_baseline_for_mode": needs_baseline,
            "decision": ("baseline-only" if args.apply and do_baseline_only else
                         "apply" if args.apply and changed else
                         "apply-skip-unchanged" if args.apply and not changed else
                         "dry-run"),
            "path": note_pl["path"],
            "hash_mode": mode,
            "hash_normalize": norm,
            "hash_source": src,
        }
        print(json.dumps(summary, ensure_ascii=False))
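        # Illustrative example of one summary line (hypothetical values):
        # {"note_id": "n-001", "title": "Example", "chunks": 4, "edges": 7,
        #  "changed": false, "needs_baseline_for_mode": true, "decision": "dry-run",
        #  "path": "notes/example.md", "hash_mode": "body",
        #  "hash_normalize": "canonical", "hash_source": "parsed"}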
        # Write?
        if not args.apply:
            continue
        # BASELINE-ONLY: backfill the missing key without touching legacy fields
        if do_baseline_only:
            merged_hashes = {}
            merged_hashes.update(old_hashes)
            merged_hashes.update(note_pl.get("hashes") or {})
            # Upsert notes only; legacy hash fields stay as in the old payload
            if old_payload:
                note_pl["hash_fulltext"] = old_payload.get("hash_fulltext", note_pl.get("hash_fulltext"))
                note_pl["hash_signature"] = old_payload.get("hash_signature", note_pl.get("hash_signature"))
            note_pl["hashes"] = merged_hashes
            notes_name, note_pts = points_for_note(cfg.prefix, note_pl, None, cfg.dim)
            upsert_batch(client, notes_name, note_pts)
            continue
        # Write normal changes (notes + chunks + edges)
        if not changed:
            continue
        if args.purge_before_upsert:
            try:
                purge_note_artifacts(client, cfg.prefix, note_id)
            except Exception as e:
                print(json.dumps({"path": path, "note_id": note_id, "warn": f"purge failed: {e}"}))
        notes_name, note_pts = points_for_note(cfg.prefix, note_pl, None, cfg.dim)
        upsert_batch(client, notes_name, note_pts)
        chunks_name, chunk_pts = points_for_chunks(cfg.prefix, chunk_pls, vecs)
        upsert_batch(client, chunks_name, chunk_pts)
        edges_name, edge_pts = points_for_edges(cfg.prefix, edges)
        upsert_batch(client, edges_name, edge_pts)
    print(f"Done. Processed notes: {processed}")
if __name__ == "__main__":
    main()