402 lines
16 KiB
Python
402 lines
16 KiB
Python
#!/usr/bin/env python3
|
|
# -*- coding: utf-8 -*-
|
|
"""
|
|
Script: scripts/import_markdown.py — Markdown → Qdrant (Notes, Chunks, Edges)
|
|
Version: 3.5.2
|
|
Datum: 2025-09-09
|
|
|
|
Kurzbeschreibung
|
|
----------------
|
|
- Liest Markdown-Dateien ein, erzeugt Notes/Chunks/Edges **idempotent**.
|
|
- **Änderungserkennung** (nur Inhalte, keine FS-Zeitstempel) konfigurierbar:
|
|
* ``--hash-mode``: body | frontmatter | body+frontmatter | full (Alias)
|
|
- Env: ``MINDNET_HASH_MODE`` **oder** ``MINDNET_HASH_COMPARE`` (Body/Frontmatter/Full)
|
|
* ``--hash-normalize``: canonical | none (Default: canonical)
|
|
* ``--hash-source``: parsed (Default) | raw
|
|
- "raw" hasht den **ungeparsten** Body (Frontmatter via Regex entfernt).
|
|
* Optional: ``--compare-text`` (oder ENV ``MINDNET_COMPARE_TEXT=true``)
|
|
- parsed ``fulltext`` zusätzlich direkt vergleichen (falls Normalisierung Unterschiede glättet).
|
|
* Signaturabgleich:
|
|
- Wenn sich die Signatur (z. B. body→full, parsed→raw, canonical→none) zwischen Alt/Neu unterscheidet,
|
|
gilt die Note als **geändert** (Einmal-Update, um die neue Signatur zu persistieren).
|
|
|
|
Robustheit
|
|
----------
|
|
- Rückgaben aus ``make_note_payload`` werden **koerziert** (Tuple, Mapping, Pydantic v1/v2, Objekt) → ``dict``.
|
|
- Bei Nicht-Erfolg präzise Debug-Ausgabe (Typname + kurzer Preview).
|
|
- Defensive Fehlerbehandlung in allen Schritten (Parsing, Chunks, Edges, Upserts).
|
|
|
|
ENV / Qdrant
|
|
------------
|
|
- QDRANT_URL | QDRANT_HOST/QDRANT_PORT | QDRANT_API_KEY
|
|
- COLLECTION_PREFIX (Default: mindnet)
|
|
- VECTOR_DIM (Default: 384)
|
|
- MINDNET_NOTE_SCOPE_REFS: true|false (Default: false)
|
|
- MINDNET_COMPARE_TEXT: true|false (Default: false)
|
|
|
|
Aufruf-Beispiele
|
|
----------------
|
|
# Standard (nur Body, kanonisiert, parsed-Quelle)
|
|
python3 -m scripts.import_markdown --vault ./vault
|
|
|
|
# Sensibel (jede Kleinigkeit): raw-Quelle + keine Normalisierung
|
|
python3 -m scripts.import_markdown --vault ./vault --apply --hash-source raw --hash-normalize none
|
|
|
|
# Full-Vergleich (Body+Frontmatter)
|
|
MINDNET_HASH_COMPARE=Full python3 -m scripts.import_markdown --vault ./vault --apply
|
|
|
|
# Zusätzlich Body-Text direkt vergleichen (max. Sicherheit)
|
|
python3 -m scripts.import_markdown --vault ./vault --apply --compare-text
|
|
"""
|
|
from __future__ import annotations
|
|
|
|
import argparse
|
|
import difflib
|
|
import json
|
|
import os
|
|
import sys
|
|
from typing import Dict, List, Optional, Tuple, Any
|
|
from collections.abc import Mapping
|
|
|
|
from dotenv import load_dotenv
|
|
from qdrant_client.http import models as rest
|
|
|
|
from app.core.parser import (
|
|
read_markdown,
|
|
normalize_frontmatter,
|
|
validate_required_frontmatter,
|
|
)
|
|
from app.core.note_payload import make_note_payload
|
|
from app.core.chunker import assemble_chunks
|
|
from app.core.chunk_payload import make_chunk_payloads
|
|
from app.core.edges import build_edges_for_note
|
|
from app.core.qdrant import (
|
|
QdrantConfig,
|
|
get_client,
|
|
ensure_collections,
|
|
ensure_payload_indexes,
|
|
)
|
|
from app.core.qdrant_points import (
|
|
points_for_chunks,
|
|
points_for_note,
|
|
points_for_edges,
|
|
upsert_batch,
|
|
)
|
|
|
|
try:
|
|
from app.core.embed import embed_texts # optional
|
|
except Exception:
|
|
embed_texts = None
|
|
|
|
|
|
# ---------------------------------------------------------------------
|
|
# Helpers
|
|
# ---------------------------------------------------------------------
|
|
|
|
def iter_md(root: str) -> List[str]:
|
|
out: List[str] = []
|
|
for dirpath, _, filenames in os.walk(root):
|
|
for fn in filenames:
|
|
if not fn.lower().endswith(".md"):
|
|
continue
|
|
p = os.path.join(dirpath, fn)
|
|
pn = p.replace("\\", "/")
|
|
if any(ex in pn for ex in ["/.obsidian/", "/_backup_frontmatter/", "/_imported/"]):
|
|
continue
|
|
out.append(p)
|
|
return sorted(out)
|
|
|
|
def collections(prefix: str) -> Tuple[str, str, str]:
|
|
return f"{prefix}_notes", f"{prefix}_chunks", f"{prefix}_edges"
|
|
|
|
def fetch_existing_note_payload(client, prefix: str, note_id: str) -> Optional[Dict]:
|
|
notes_col, _, _ = collections(prefix)
|
|
f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
|
|
points, _ = client.scroll(
|
|
collection_name=notes_col,
|
|
scroll_filter=f,
|
|
with_payload=True,
|
|
with_vectors=False,
|
|
limit=1,
|
|
)
|
|
if not points:
|
|
return None
|
|
return points[0].payload or {}
|
|
|
|
def purge_note_artifacts(client, prefix: str, note_id: str) -> None:
|
|
_, chunks_col, edges_col = collections(prefix)
|
|
f_chunks = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
|
|
client.delete(collection_name=chunks_col, points_selector=f_chunks, wait=True)
|
|
f_edges = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
|
|
client.delete(collection_name=edges_col, points_selector=f_edges, wait=True)
|
|
|
|
def _normalize_rel_path(abs_path: str, vault_root: str) -> str:
|
|
try:
|
|
rel = os.path.relpath(abs_path, vault_root)
|
|
except Exception:
|
|
rel = abs_path
|
|
return rel.replace("\\", "/").lstrip("/")
|
|
|
|
def _coerce_to_dict(obj: Any) -> Optional[Dict[str, Any]]:
|
|
"""
|
|
Versucht, verschiedenartige Rückgaben (Mapping, Tuple, Pydantic, Objekt) in ein dict zu konvertieren.
|
|
- dict → dict
|
|
- Mapping → dict(obj)
|
|
- (dict, ...) oder [dict, ...] → erster dict-ähnlicher Eintrag
|
|
- Pydantic v2: .model_dump()
|
|
- Pydantic v1: .dict()
|
|
- Objekt mit __dict__ → dict(__dict__)
|
|
- sonst: None
|
|
"""
|
|
if obj is None:
|
|
return None
|
|
if isinstance(obj, dict):
|
|
return obj
|
|
if isinstance(obj, Mapping):
|
|
try:
|
|
return dict(obj)
|
|
except Exception:
|
|
pass
|
|
if isinstance(obj, (list, tuple)):
|
|
for it in obj:
|
|
d = _coerce_to_dict(it)
|
|
if isinstance(d, dict):
|
|
return d
|
|
return None
|
|
# Pydantic v2
|
|
md = getattr(obj, "model_dump", None)
|
|
if callable(md):
|
|
try:
|
|
return md()
|
|
except Exception:
|
|
pass
|
|
# Pydantic v1
|
|
dd = getattr(obj, "dict", None)
|
|
if callable(dd):
|
|
try:
|
|
return dd()
|
|
except Exception:
|
|
pass
|
|
# generischer Fallback
|
|
dct = getattr(obj, "__dict__", None)
|
|
if isinstance(dct, dict):
|
|
return dict(dct)
|
|
return None
|
|
|
|
|
|
# ---------------------------------------------------------------------
|
|
# Main
|
|
# ---------------------------------------------------------------------
|
|
|
|
def main() -> None:
|
|
load_dotenv()
|
|
ap = argparse.ArgumentParser()
|
|
ap.add_argument("--vault", required=True, help="Pfad zum Obsidian-Vault (Root-Ordner)")
|
|
ap.add_argument("--apply", action="store_true", help="Schreibt in Qdrant; ohne Flag nur Dry-Run")
|
|
ap.add_argument("--purge-before-upsert", action="store_true",
|
|
help="Vor Upsert Chunks & Edges der GEÄNDERTEN Note löschen")
|
|
ap.add_argument("--note-id", help="Nur eine bestimmte Note-ID verarbeiten")
|
|
ap.add_argument("--embed-note", action="store_true", help="Optional: Note-Volltext einbetten")
|
|
ap.add_argument("--force-replace", action="store_true",
|
|
help="Änderungserkennung ignorieren und immer upserten (+ optional Purge)")
|
|
ap.add_argument("--hash-mode", choices=["body", "frontmatter", "body+frontmatter", "full"], default=None,
|
|
help="Vergleichsmodus: Body | Frontmatter | body+frontmatter (Alias: full)")
|
|
ap.add_argument("--hash-normalize", choices=["canonical", "none"], default=None)
|
|
ap.add_argument("--hash-source", choices=["parsed", "raw"], default=None,
|
|
help="Quelle für die Hash-Berechnung (Default: parsed)")
|
|
ap.add_argument("--note-scope-refs", action="store_true",
|
|
help="(Optional) erzeugt zusätzlich references:note (Default: aus)")
|
|
ap.add_argument("--debug-hash-diff", action="store_true",
|
|
help="Zeigt bei Bedarf einen kurzen Diff zwischen altem und neuem Body")
|
|
ap.add_argument("--compare-text", action="store_true",
|
|
help="Parsed fulltext zusätzlich direkt vergleichen (über Hash hinaus)")
|
|
args = ap.parse_args()
|
|
|
|
note_scope_refs_env = (os.environ.get("MINDNET_NOTE_SCOPE_REFS", "false").strip().lower() == "true")
|
|
note_scope_refs = args.note_scope_refs or note_scope_refs_env
|
|
|
|
compare_text = args.compare_text or (os.environ.get("MINDNET_COMPARE_TEXT", "false").strip().lower() == "true")
|
|
|
|
cfg = QdrantConfig.from_env()
|
|
client = get_client(cfg)
|
|
ensure_collections(client, cfg.prefix, cfg.dim)
|
|
ensure_payload_indexes(client, cfg.prefix)
|
|
|
|
root = os.path.abspath(args.vault)
|
|
files = iter_md(root)
|
|
if not files:
|
|
print("Keine Markdown-Dateien gefunden.", file=sys.stderr)
|
|
sys.exit(2)
|
|
|
|
processed = 0
|
|
for path in files:
|
|
# ----------------- robustes Parsing -----------------
|
|
try:
|
|
parsed = read_markdown(path)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "error": f"read_markdown failed: {e}"}))
|
|
continue
|
|
if parsed is None:
|
|
print(json.dumps({"path": path, "error": "read_markdown returned None"}))
|
|
continue
|
|
|
|
try:
|
|
fm = normalize_frontmatter(parsed.frontmatter)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "error": f"normalize_frontmatter failed: {e}"}))
|
|
continue
|
|
|
|
try:
|
|
validate_required_frontmatter(fm)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "error": f"Frontmatter invalid: {e}"}))
|
|
continue
|
|
|
|
if args.note_id and fm.get("id") != args.note_id:
|
|
continue
|
|
|
|
processed += 1
|
|
|
|
# -------------- Note-Payload (defensiv + Koerzierung) --------------
|
|
try:
|
|
note_pl_raw = make_note_payload(
|
|
parsed,
|
|
vault_root=root,
|
|
hash_mode=args.hash_mode,
|
|
hash_normalize=args.hash_normalize,
|
|
hash_source=args.hash_source,
|
|
file_path=path,
|
|
)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": fm.get("id"), "error": f"make_note_payload failed: {e}"}))
|
|
continue
|
|
|
|
note_pl = _coerce_to_dict(note_pl_raw)
|
|
if not isinstance(note_pl, dict):
|
|
preview = repr(note_pl_raw)
|
|
if len(preview) > 240:
|
|
preview = preview[:240] + "…"
|
|
print(json.dumps({
|
|
"path": path,
|
|
"note_id": fm.get("id"),
|
|
"error": "make_note_payload returned non-dict",
|
|
"returned_type": type(note_pl_raw).__name__,
|
|
"preview": preview
|
|
}))
|
|
continue
|
|
|
|
# fulltext sicherstellen + Pfad normalisieren
|
|
if not note_pl.get("fulltext"):
|
|
note_pl["fulltext"] = getattr(parsed, "body", "") or ""
|
|
if note_pl.get("path"):
|
|
note_pl["path"] = _normalize_rel_path(
|
|
os.path.join(root, note_pl["path"]) if not os.path.isabs(note_pl["path"]) else note_pl["path"], root
|
|
)
|
|
else:
|
|
p_path = getattr(parsed, "path", None) or path
|
|
note_pl["path"] = _normalize_rel_path(p_path, root)
|
|
|
|
note_id = note_pl.get("note_id") or fm.get("id")
|
|
if not note_id:
|
|
print(json.dumps({"path": path, "error": "Missing note_id after payload build"}))
|
|
continue
|
|
|
|
# -------------- Change-Detection --------------
|
|
old_payload = None if args.force_replace else fetch_existing_note_payload(client, cfg.prefix, note_id)
|
|
old_hash = None if not old_payload else old_payload.get("hash_fulltext")
|
|
old_sig = (old_payload or {}).get("hash_signature")
|
|
new_hash = note_pl.get("hash_fulltext")
|
|
new_sig = note_pl.get("hash_signature")
|
|
|
|
sig_changed = bool(old_sig) and bool(new_sig) and (old_sig.split(":")[:3] != new_sig.split(":")[:3])
|
|
hash_changed = (old_hash != new_hash)
|
|
|
|
text_changed = False
|
|
if compare_text:
|
|
old_text = (old_payload or {}).get("fulltext") or ""
|
|
new_text = note_pl.get("fulltext") or ""
|
|
text_changed = (old_text != new_text)
|
|
|
|
changed = args.force_replace or sig_changed or hash_changed or text_changed
|
|
|
|
# -------------- Chunks / Embeddings / Edges --------------
|
|
try:
|
|
chunks = assemble_chunks(fm["id"], getattr(parsed, "body", "") or "", fm.get("type", "concept"))
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": note_id, "error": f"assemble_chunks failed: {e}"}))
|
|
continue
|
|
|
|
try:
|
|
chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": note_id, "error": f"make_chunk_payloads failed: {e}"}))
|
|
continue
|
|
|
|
if embed_texts:
|
|
try:
|
|
vecs = embed_texts([getattr(c, "text", "") for c in chunks]) # type: ignore[attr-defined]
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": note_id, "warn": f"embed_texts failed, using zeros: {e}"}))
|
|
vecs = [[0.0] * cfg.dim for _ in chunks]
|
|
else:
|
|
vecs = [[0.0] * cfg.dim for _ in chunks]
|
|
|
|
try:
|
|
note_refs = note_pl.get("references") or []
|
|
edges = build_edges_for_note(
|
|
note_id,
|
|
chunk_pls,
|
|
note_refs,
|
|
include_note_scope_refs=note_scope_refs,
|
|
)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": note_id, "error": f"build_edges_for_note failed: {e}"}))
|
|
continue
|
|
|
|
# -------------- Zusammenfassung / Ausgabe --------------
|
|
summary = {
|
|
"note_id": note_id,
|
|
"title": fm.get("title"),
|
|
"chunks": len(chunk_pls),
|
|
"edges": len(edges),
|
|
"changed": changed,
|
|
"decision": ("apply" if args.apply and changed else
|
|
"apply-skip-unchanged" if args.apply and not changed else
|
|
"dry-run"),
|
|
"path": note_pl["path"],
|
|
"hash_mode": args.hash_mode or os.environ.get("MINDNET_HASH_MODE") or os.environ.get("MINDNET_HASH_COMPARE", "body"),
|
|
"hash_normalize": args.hash_normalize or os.environ.get("MINDNET_HASH_NORMALIZE", "canonical"),
|
|
"hash_source": args.hash_source or os.environ.get("MINDNET_HASH_SOURCE", "parsed"),
|
|
"hash_signature": note_pl.get("hash_signature"),
|
|
"sig_changed": sig_changed,
|
|
"hash_changed": hash_changed,
|
|
"text_changed": text_changed,
|
|
}
|
|
print(json.dumps(summary, ensure_ascii=False))
|
|
|
|
# -------------- Upserts --------------
|
|
if not args.apply:
|
|
continue
|
|
|
|
try:
|
|
if changed and args.purge_before_upsert:
|
|
purge_note_artifacts(client, cfg.prefix, note_id)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": note_id, "warn": f"purge failed: {e}"}))
|
|
|
|
try:
|
|
notes_name, note_pts = points_for_note(cfg.prefix, note_pl, None, cfg.dim)
|
|
upsert_batch(client, notes_name, note_pts)
|
|
chunks_name, chunk_pts = points_for_chunks(cfg.prefix, chunk_pls, vecs)
|
|
upsert_batch(client, chunks_name, chunk_pts)
|
|
edges_name, edge_pts = points_for_edges(cfg.prefix, edges)
|
|
upsert_batch(client, edges_name, edge_pts)
|
|
except Exception as e:
|
|
print(json.dumps({"path": path, "note_id": note_id, "error": f"upsert failed: {e}"}))
|
|
|
|
print(f"Done. Processed notes: {processed}")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|