#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script: scripts/import_markdown.py - Markdown -> Qdrant (Notes, Chunks, Edges)
Version: 3.7.3
Date: 2025-11-08
Summary
-------
- Reads Markdown files and creates Notes/Chunks/Edges idempotently.
- Change detection "Option C": multiple hash variants are stored in the note
  payload (field `hashes` with keys `<mode>:<source>:<normalize>`; see the
  example below). Comparison uses ONLY the current-mode key, so switching
  modes no longer triggers a bulk "changed".
- "First import" fix: if Qdrant is empty for the note, it is treated as changed.
- `--baseline-modes`: silently adds any missing hash variants (notes only).
- `--sync-deletes`: selective deletes (dry-run + apply).
- `--only-path`: processes exactly one file (useful for diagnostics).
- NEW in 3.7.3: the type registry (if present) is loaded and the derived
  `retriever_weight` is written to both the note and the chunk payloads. If the
  registry defines a `chunk_profile`, it is injected into the frontmatter for
  payloads (this does NOT change the chunking behavior here).
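
Example `hashes` entry in a note payload (keys per the scheme above; hash
values illustrative):

    "hashes": {
        "body:parsed:canonical": "3fa97c…",
        "frontmatter:parsed:canonical": "b2014d…"
    }
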
Hash/Compare configuration
--------------------------
- Compare mode:
    --hash-mode body|frontmatter|full
    or ENV: MINDNET_HASH_MODE | MINDNET_HASH_COMPARE
- Source:
    --hash-source parsed|raw (ENV: MINDNET_HASH_SOURCE, default: parsed)
- Normalization:
    --hash-normalize canonical|none (ENV: MINDNET_HASH_NORMALIZE, default: canonical)
- Optional: --compare-text (or ENV MINDNET_COMPARE_TEXT=true) compares the
  parsed body text in addition to the hash.
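
These three settings compose the compare key `<mode>:<source>:<normalize>`;
the defaults above therefore compare under `body:parsed:canonical`.
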
Qdrant / ENV
------------
- QDRANT_URL | QDRANT_HOST/QDRANT_PORT | QDRANT_API_KEY
- COLLECTION_PREFIX (Default: mindnet), can be overridden with --prefix
- VECTOR_DIM (Default: 384)
- MINDNET_NOTE_SCOPE_REFS: true|false (Default: false)
- MINDNET_TYPES_PATH: optional path to config/types.yaml
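
Example `.env` for a local setup (values illustrative):

    QDRANT_URL=http://localhost:6333
    COLLECTION_PREFIX=mindnet
    VECTOR_DIM=384
    MINDNET_NOTE_SCOPE_REFS=false
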
Examples
--------
# Standard (Body, parsed, canonical)
python3 -m scripts.import_markdown --vault ./vault
# First import after truncate (create case)
python3 -m scripts.import_markdown --vault ./vault --apply --purge-before-upsert
# Single file (diagnostics)
python3 -m scripts.import_markdown --vault ./vault --only-path ./vault/30_projects/project-demo.md --apply
# Sync-Deletes (Dry-Run -> Apply)
python3 -m scripts.import_markdown --vault ./vault --sync-deletes
python3 -m scripts.import_markdown --vault ./vault --sync-deletes --apply
"""
from __future__ import annotations

import argparse
import json
import os
import sys
from typing import Dict, List, Optional, Tuple, Any, Set

from dotenv import load_dotenv
from qdrant_client.http import models as rest

from app.core.parser import (
    read_markdown,
    normalize_frontmatter,
    validate_required_frontmatter,
)
from app.core.note_payload import make_note_payload
from app.core.chunker import assemble_chunks
from app.core.chunk_payload import make_chunk_payloads
try:
    from app.core.derive_edges import build_edges_for_note
except Exception:  # pragma: no cover
    from app.core.edges import build_edges_for_note  # type: ignore
from app.core.qdrant import (
    QdrantConfig,
    get_client,
    ensure_collections,
    ensure_payload_indexes,
)
from app.core.qdrant_points import (
    points_for_chunks,
    points_for_note,
    points_for_edges,
    upsert_batch,
)

# Optional embedding backend; without it, zero vectors are written for chunks.
# NOTE: this module path is an assumption -- adjust it to wherever
# `embed_texts` is defined in this project.
try:
    from app.core.embeddings import embed_texts  # type: ignore
except Exception:  # pragma: no cover
    embed_texts = None
# ---- Type-Registry (optional) ------------------------------------------------
try:
    # Expected API
    from app.core.type_registry import load_type_registry  # type: ignore
except Exception:  # pragma: no cover
    def load_type_registry(path: str) -> dict:
        """Fallback loader if module is absent. Returns empty dict."""
        return {}
# ---------------------------------------------------------------------
# Helper
# ---------------------------------------------------------------------
def iter_md(root: str) -> List[str]:
    out: List[str] = []
    for dirpath, _, filenames in os.walk(root):
        for fn in filenames:
            if not fn.lower().endswith(".md"):
                continue
            p = os.path.join(dirpath, fn)
            pn = p.replace("\\", "/")
            if any(ex in pn for ex in ["/.obsidian/", "/_backup_frontmatter/", "/_imported/"]):
                continue
            out.append(p)
    return sorted(out)

def collections(prefix: str) -> Tuple[str, str, str]:
    return f"{prefix}_notes", f"{prefix}_chunks", f"{prefix}_edges"

def fetch_existing_note_payload(client, prefix: str, note_id: str) -> Optional[Dict]:
    notes_col, _, _ = collections(prefix)
    f = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    points, _ = client.scroll(
        collection_name=notes_col,
        scroll_filter=f,
        with_payload=True,
        with_vectors=False,
        limit=1,
    )
    if not points:
        return None
    return points[0].payload or {}

def list_qdrant_note_ids(client, prefix: str) -> Set[str]:
    notes_col, _, _ = collections(prefix)
    out: Set[str] = set()
    next_page = None
    while True:
        pts, next_page = client.scroll(
            collection_name=notes_col,
            with_payload=True,
            with_vectors=False,
            limit=256,
            offset=next_page,
        )
        if not pts:
            break
        for p in pts:
            pl = p.payload or {}
            nid = pl.get("note_id")
            if isinstance(nid, str):
                out.add(nid)
        if next_page is None:
            break
    return out
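
# `purge_note_artifacts` removes only a note's chunks and edges (the note point
# itself survives and is re-upserted afterwards), while `delete_note_everywhere`
# removes the note from all three collections, edges first.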
def purge_note_artifacts(client, prefix: str, note_id: str) -> None:
    _, chunks_col, edges_col = collections(prefix)
    filt = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    for col in (chunks_col, edges_col):
        try:
            client.delete(
                collection_name=col,
                points_selector=rest.FilterSelector(filter=filt),
                wait=True,
            )
        except Exception as e:
            print(json.dumps({"note_id": note_id, "warn": f"delete in {col} via filter failed: {e}"}))

def delete_note_everywhere(client, prefix: str, note_id: str) -> None:
    notes_col, chunks_col, edges_col = collections(prefix)
    filt = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
    for col in (edges_col, chunks_col, notes_col):
        try:
            client.delete(
                collection_name=col,
                points_selector=rest.FilterSelector(filter=filt),
                wait=True,
            )
        except Exception as e:
            print(json.dumps({"note_id": note_id, "warn": f"delete in {col} failed: {e}"}))

def _resolve_mode(val: Optional[str]) -> str:
    v = (val or os.environ.get("MINDNET_HASH_MODE") or os.environ.get("MINDNET_HASH_COMPARE") or "body").strip().lower()
    if v in ("full", "fulltext", "body+frontmatter", "bodyplusfrontmatter"):
        return "full"
    if v in ("frontmatter", "fm"):
        return "frontmatter"
    return "body"


def _env(key: str, default: str) -> str:
    return (os.environ.get(key) or default).strip().lower()

# --- Type-Registry helpers (pure) --------------------------------------------
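# Registry shape consumed by these helpers (illustrative; the real
# config/types.yaml may carry more fields):
#
#   types:
#     concept: {chunk_profile: medium, retriever_weight: 1.0}
#     project: {chunk_profile: long, retriever_weight: 1.2}
#
# Unknown note types fall back to the "concept" entry.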
def _effective_chunk_profile(note_type: str, registry: dict) -> Optional[str]:
    try:
        types = (registry or {}).get("types", {}) if isinstance(registry, dict) else {}
        cfg = types.get(note_type) or types.get("concept") or {}
        prof = cfg.get("chunk_profile")
        if isinstance(prof, str) and prof in {"short", "medium", "long"}:
            return prof
    except Exception:
        pass
    return None

def _effective_retriever_weight(note_type: str, registry: dict) -> Optional[float]:
    try:
        types = (registry or {}).get("types", {}) if isinstance(registry, dict) else {}
        cfg = types.get(note_type) or types.get("concept") or {}
        w = cfg.get("retriever_weight")
        if w is None:
            return None
        return float(w)
    except Exception:
        # be tolerant
        return None

# ---------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------
def main() -> None:
    load_dotenv()
    ap = argparse.ArgumentParser()
    ap.add_argument("--vault", required=True, help="Path to the Obsidian vault (root folder)")
    ap.add_argument("--apply", action="store_true", help="Write to Qdrant; otherwise dry-run")
    ap.add_argument("--purge-before-upsert", action="store_true",
                    help="Before upsert, delete chunks & edges of the CHANGED note")
    ap.add_argument("--note-id", help="Process only a specific note-id")
    ap.add_argument("--only-path", help="Process exactly this Markdown path (ignores --note-id)")
    ap.add_argument("--embed-note", action="store_true", help="Optionally embed the full note text")
    ap.add_argument("--force-replace", action="store_true",
                    help="Ignore change detection and always upsert (+ optional purge)")
    ap.add_argument("--hash-mode", choices=["body", "frontmatter", "full"], default=None,
                    help="Compare mode (body | frontmatter | full)")
    ap.add_argument("--hash-normalize", choices=["canonical", "none"], default=None)
    ap.add_argument("--hash-source", choices=["parsed", "raw"], default=None,
                    help="Source for hash calculation (default: parsed)")
    ap.add_argument("--note-scope-refs", action="store_true",
                    help="(Optional) also create references:note/backlink:note edges (default: off)")
    ap.add_argument("--debug-hash-diff", action="store_true",
                    help="(reserved) optional body diff")
    ap.add_argument("--compare-text", action="store_true",
                    help="Additionally compare the parsed fulltext (beyond the hash)")
    ap.add_argument("--baseline-modes", action="store_true",
                    help="Silently add missing hash variants to 'hashes' (note upserts only)")
    ap.add_argument("--sync-deletes", action="store_true",
                    help="Delete notes/chunks/edges that exist in Qdrant but are missing in the vault "
                         "(dry-run; use --apply to execute)")
    ap.add_argument("--prefix", help="Collection prefix (overrides ENV COLLECTION_PREFIX)")
    args = ap.parse_args()
    mode = _resolve_mode(args.hash_mode)  # body|frontmatter|full
    src = _env("MINDNET_HASH_SOURCE", args.hash_source or "parsed")  # parsed|raw
    norm = _env("MINDNET_HASH_NORMALIZE", args.hash_normalize or "canonical")  # canonical|none
    note_scope_refs_env = (_env("MINDNET_NOTE_SCOPE_REFS", "false") == "true")
    note_scope_refs = args.note_scope_refs or note_scope_refs_env
    compare_text = args.compare_text or (_env("MINDNET_COMPARE_TEXT", "false") == "true")
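    # Precedence note: for --hash-mode the CLI flag wins over the ENV vars,
    # but for source/normalize a set ENV variable overrides the CLI flag
    # (the flag only supplies the fallback default passed to _env()).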
    # Prepare the Qdrant client
    cfg = QdrantConfig.from_env()
    if args.prefix:
        cfg.prefix = args.prefix.strip()
    client = get_client(cfg)
    ensure_collections(client, cfg.prefix, cfg.dim)
    ensure_payload_indexes(client, cfg.prefix)

    # Load the type registry (optional)
    types_path = os.environ.get("MINDNET_TYPES_PATH") or os.path.join(os.getcwd(), "config", "types.yaml")
    try:
        type_registry = load_type_registry(types_path) or {}
    except Exception as e:  # tolerant
        print(json.dumps({"warn": f"type-registry load failed ({types_path}): {type(e).__name__}: {e}"}))
        type_registry = {}
    root = os.path.abspath(args.vault)

    # Build the file list
    if args.only_path:
        only = os.path.abspath(args.only_path)
        files = [only]
    else:
        files = iter_md(root)
    if not files:
        print("No Markdown files found.", file=sys.stderr)
        sys.exit(2)
    # Optional: sync deletes upfront
    if args.sync_deletes:
        vault_note_ids: Set[str] = set()
        for path in files:
            try:
                parsed = read_markdown(path)
                if not parsed:
                    continue
                fm = normalize_frontmatter(parsed.frontmatter)
                nid = fm.get("id")
                if isinstance(nid, str):
                    vault_note_ids.add(nid)
            except Exception:
                continue
        qdrant_note_ids = list_qdrant_note_ids(client, cfg.prefix)
        to_delete = sorted(qdrant_note_ids - vault_note_ids)
        print(json.dumps({
            "action": "sync-deletes",
            "prefix": cfg.prefix,
            "qdrant_total": len(qdrant_note_ids),
            "vault_total": len(vault_note_ids),
            "to_delete_count": len(to_delete),
"to_delete": to_delete[:50] + ([""] if len(to_delete) > 50 else [])
        }, ensure_ascii=False))
        if args.apply and to_delete:
            for nid in to_delete:
                print(json.dumps({"action": "delete", "note_id": nid, "decision": "apply"}))
                delete_note_everywhere(client, cfg.prefix, nid)
    key_current = f"{mode}:{src}:{norm}"
    processed = 0
    for path in files:
        # -------- Parse & validate --------
        try:
            parsed = read_markdown(path)
        except Exception as e:
            print(json.dumps({"path": path, "error": f"read_markdown failed: {type(e).__name__}: {e}"}))
            continue
        if parsed is None:
            print(json.dumps({"path": path, "error": "read_markdown returned None"}))
            continue
        try:
            fm = normalize_frontmatter(parsed.frontmatter)
            validate_required_frontmatter(fm)
        except Exception as e:
            print(json.dumps({"path": path, "error": f"Frontmatter invalid: {type(e).__name__}: {e}"}))
            continue
        if args.note_id and not args.only_path and fm.get("id") != args.note_id:
            continue
        processed += 1
        # -------- Type-registry derivation (tolerant) --------
        note_type = (fm.get("type") or "concept").strip().lower()
        prof = _effective_chunk_profile(note_type, type_registry)
        if prof and not fm.get("chunk_profile"):
            fm["chunk_profile"] = prof
        weight = _effective_retriever_weight(note_type, type_registry)
        if weight is not None:
            fm["retriever_weight"] = weight
        # -------- Build the new payload (includes 'hashes') --------
        note_pl = make_note_payload(
            parsed,
            vault_root=root,
            hash_mode=mode,
            hash_normalize=norm,
            hash_source=src,
            file_path=path,
        )
        if not note_pl.get("fulltext"):
            note_pl["fulltext"] = getattr(parsed, "body", "") or ""
        if weight is not None:
            note_pl["retriever_weight"] = weight
        note_id = note_pl.get("note_id") or fm.get("id")
        if not note_id:
            print(json.dumps({"path": path, "error": "Missing note_id after payload build"}))
            continue
        # -------- Fetch the old payload --------
        old_payload = None if args.force_replace else fetch_existing_note_payload(client, cfg.prefix, note_id)
        has_old = old_payload is not None
        old_hashes = (old_payload or {}).get("hashes") or {}
        old_hash_exact = old_hashes.get(key_current)
        new_hash_exact = (note_pl.get("hashes") or {}).get(key_current)
        needs_baseline = (old_hash_exact is None)
        hash_changed = (old_hash_exact is not None and new_hash_exact is not None and old_hash_exact != new_hash_exact)
        text_changed = False
        if compare_text:
            old_text = (old_payload or {}).get("fulltext") or ""
            new_text = note_pl.get("fulltext") or ""
            text_changed = (old_text != new_text)
        changed = args.force_replace or (not has_old) or hash_changed or text_changed
        do_baseline_only = (args.baseline_modes and has_old and needs_baseline and not changed)
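        # Decision matrix:
        #   changed       -> forced, first import, current-mode hash differs,
        #                    or (with --compare-text) the fulltext differs
        #   baseline-only -> note exists and is unchanged, but its payload is
        #                    missing the current-mode hash key (--baseline-modes)
        #   otherwise     -> skip (unchanged)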
        # -------- Chunks / embeddings --------
        chunk_pls: List[Dict[str, Any]] = []
        try:
            body_text = getattr(parsed, "body", "") or ""
            chunks = assemble_chunks(fm["id"], body_text, note_type)
            chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks, note_text=body_text)
            if weight is not None:
                for pl in chunk_pls:
                    if pl.get("retriever_weight") is None:
                        pl["retriever_weight"] = weight
        except Exception as e:
            print(json.dumps({"path": path, "note_id": note_id, "error": f"chunk build failed: {type(e).__name__}: {e}"}))
            continue
        vecs: List[List[float]] = [[0.0] * cfg.dim for _ in chunk_pls]
        if embed_texts and chunk_pls:
            try:
                texts_for_embed = [(pl.get("window") or pl.get("text") or "") for pl in chunk_pls]
                vecs = embed_texts(texts_for_embed)
            except Exception as e:
                print(json.dumps({"path": path, "note_id": note_id, "warn": f"embed_texts failed, using zeros: {e}"}))
        # -------- Edges (robust) --------
        edges: List[Dict[str, Any]] = []
        edges_failed = False
        if changed and (not do_baseline_only):
            try:
                note_refs = note_pl.get("references") or []
                edges = build_edges_for_note(
                    note_id,
                    chunk_pls,
                    note_level_references=note_refs,
                    include_note_scope_refs=note_scope_refs,
                )
            except Exception as e:
                edges_failed = True
                edges = []
                print(json.dumps({"path": path, "note_id": note_id, "warn": f"build_edges_for_note failed, skipping edges: {type(e).__name__}: {e}"}))
        # -------- Summary --------
        summary = {
            "note_id": note_id,
            "title": fm.get("title"),
            "type": note_type,
            "chunk_profile": fm.get("chunk_profile"),
            "retriever_weight": weight,
            "chunks": len(chunk_pls),
            "edges": len(edges),
            "edges_failed": edges_failed,
            "changed": changed,
            "needs_baseline_for_mode": needs_baseline,
            "decision": ("baseline-only" if args.apply and do_baseline_only else
                         "apply" if args.apply and changed else
                         "apply-skip-unchanged" if args.apply and not changed else
                         "dry-run"),
            "path": note_pl["path"],
            "hash_mode": mode,
            "hash_normalize": norm,
            "hash_source": src,
            "prefix": cfg.prefix,
        }
        print(json.dumps(summary, ensure_ascii=False))
        # -------- Writes --------
        if not args.apply:
            continue
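        # Baseline-only write: keep the existing hash variants, merge in the
        # newly computed ones, carry over the legacy hash fields, and re-upsert
        # just the note point (chunks/edges stay untouched).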
        if do_baseline_only:
            merged_hashes = {}
            merged_hashes.update(old_hashes)
            merged_hashes.update(note_pl.get("hashes") or {})
            if old_payload:
                note_pl["hash_fulltext"] = old_payload.get("hash_fulltext", note_pl.get("hash_fulltext"))
                note_pl["hash_signature"] = old_payload.get("hash_signature", note_pl.get("hash_signature"))
            note_pl["hashes"] = merged_hashes
            notes_name, note_pts = points_for_note(cfg.prefix, note_pl, None, cfg.dim)
            upsert_batch(client, notes_name, note_pts)
            continue
        if not changed:
            continue
        if args.purge_before_upsert and has_old:
            try:
                purge_note_artifacts(client, cfg.prefix, note_id)
            except Exception as e:
                print(json.dumps({"path": path, "note_id": note_id, "warn": f"purge failed: {e}"}))
        notes_name, note_pts = points_for_note(cfg.prefix, note_pl, None, cfg.dim)
        upsert_batch(client, notes_name, note_pts)
        if chunk_pls:
            chunks_name, chunk_pts = points_for_chunks(cfg.prefix, chunk_pls, vecs)
            upsert_batch(client, chunks_name, chunk_pts)
        if edges:
            edges_name, edge_pts = points_for_edges(cfg.prefix, edges)
            upsert_batch(client, edges_name, edge_pts)
print(f"Done. Processed notes: {processed}")
if __name__ == "__main__":
main()