scripts/import_markdown.py updated
All checks were successful — Deploy mindnet to llm-node / deploy (push): successful in 3s
This commit is contained in: parent ebca9b4caf → commit 18cdbab885
@@ -2,29 +2,25 @@
 # -*- coding: utf-8 -*-
 """
 Script: scripts/import_markdown.py — Markdown → Qdrant (Notes, Chunks, Edges)
-Version: 3.7.1
+Version: 3.7.2
 Date: 2025-09-30
 
 Summary
 -------
 - Reads Markdown files and produces Notes/Chunks/Edges **idempotently**.
-- Robust change detection ("option C"): multiple hashes are stored in parallel on the note
-  (field `hashes` with keys `<mode>:<source>:<normalize>`). The comparison is **mode-exact**
-  against this key, so switching the compare mode no longer triggers **mass changes**.
-- **First-import fix:** with an empty Qdrant, the create case automatically counts as changed.
-- **--baseline-modes:** silently backfill missing hash variants (upsert notes only).
-- **--sync-deletes:** safely delete points in Qdrant that are missing from the vault (dry run + apply).
-- **--prefix**: CLI override for COLLECTION_PREFIX.
+- Change detection "option C": several hash variants are stored in parallel on the note
+  (field `hashes` with keys `<mode>:<source>:<normalize>`); the comparison
+  uses ONLY the current mode key — a mode switch no longer triggers mass changes.
+- "First-import fix": with an empty Qdrant, the create case automatically counts as changed.
+- `--baseline-modes`: silently backfill missing hash variants (upsert notes only).
+- `--sync-deletes`: targeted delete synchronization (dry run + apply).
+- `--only-path`: import exactly **one** file (path) — useful for diagnostics.
 
-New in 3.7.1
--------------
-- Chunk payloads now contain
-  * `window` (window incl. overlap, for embeddings)
-  * `text` (overlap-free segment, for lossless reconstruction)
-  * `start`, `end`, `overlap_left`, `overlap_right`
-- Embeddings are built from `window` (more context, better retrieval quality).
-- Offsets are passed along when building the chunk payloads (`note_text`).
+New in 3.7.1/3.7.2
+------------------
+- Chunk payloads: `window` (for embeddings), `text` (overlap-free, losslessly reconstructable),
+  `start/end/overlap_*`. Embeddings use `window`.
+- **3.7.2:** Edge errors no longer abort the whole note; the note and its chunks are written anyway.
 
 Hash/compare configuration
 --------------------------
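
Not part of the commit — a minimal sketch of the `<mode>:<source>:<normalize>` key scheme described above; the digests and the helper name are illustrative:

def mode_key(mode: str, source: str, normalize: str) -> str:
    # e.g. mode_key("fulltext", "parsed", "canonical") -> "fulltext:parsed:canonical"
    return f"{mode}:{source}:{normalize}"

# A note payload stores one digest per variant in parallel:
hashes = {
    "fulltext:parsed:canonical": "digest-a",       # placeholder digests
    "frontmatter:parsed:canonical": "digest-b",
}
# Comparison is mode-exact: only the key for the currently active mode is read,
# so switching the compare mode never invalidates the other stored variants.
current = hashes.get(mode_key("fulltext", "parsed", "canonical"))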
@@ -53,20 +49,12 @@ Examples
 # First import after truncate (create case)
 python3 -m scripts.import_markdown --vault ./vault --apply --purge-before-upsert
 
 # Silently backfill the baseline for parsed:canonical
 MINDNET_HASH_SOURCE=parsed MINDNET_HASH_NORMALIZE=canonical \
 python3 -m scripts.import_markdown --vault ./vault --apply --baseline-modes
 
 # Frontmatter comparison without mass changes (after baseline)
 MINDNET_HASH_COMPARE=Frontmatter \
 python3 -m scripts.import_markdown --vault ./vault
+
+# Just one file (diagnostics)
+python3 -m scripts.import_markdown --vault ./vault --only-path ./vault/30_projects/project-demo.md --apply
 
 # Sync deletes (dry run → apply)
 python3 -m scripts.import_markdown --vault ./vault --sync-deletes
 python3 -m scripts.import_markdown --vault ./vault --sync-deletes --apply
 
 # Set the prefix explicitly
 python3 -m scripts.import_markdown --vault ./vault --prefix mindnet
 """
 from __future__ import annotations
@@ -87,12 +75,10 @@ from app.core.parser import (
 from app.core.note_payload import make_note_payload
 from app.core.chunker import assemble_chunks
 from app.core.chunk_payload import make_chunk_payloads
 # Compatible with both module names:
 try:
     from app.core.derive_edges import build_edges_for_note
 except Exception:  # pragma: no cover
     from app.core.edges import build_edges_for_note  # type: ignore
 
 from app.core.qdrant import (
     QdrantConfig,
     get_client,
@@ -150,7 +136,6 @@ def fetch_existing_note_payload(client, prefix: str, note_id: str) -> Optional[D
 
 
 def list_qdrant_note_ids(client, prefix: str) -> Set[str]:
     """Reads all note_ids from the notes collection (via scroll)."""
     notes_col, _, _ = collections(prefix)
     out: Set[str] = set()
     next_page = None
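
Not part of the commit — the scroll loop itself lies outside this hunk. A sketch of how the function presumably continues, relying on qdrant-client's pagination contract (`scroll` returns a `(points, next_page)` tuple); the page size is an assumption:

    while True:
        points, next_page = client.scroll(
            collection_name=notes_col,
            limit=256,              # page size: assumed value
            offset=next_page,
            with_payload=True,
            with_vectors=False,
        )
        for p in points:
            nid = (p.payload or {}).get("note_id")
            if nid:
                out.add(nid)
        if next_page is None:   # no further page -> done
            break
    return out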
@@ -175,36 +160,22 @@ def list_qdrant_note_ids(client, prefix: str) -> Set[str]:
 
 
 def purge_note_artifacts(client, prefix: str, note_id: str) -> None:
     """
     Deletes all chunks & edges of a note via a filter selector (compatible with current Qdrant clients).
     """
     _, chunks_col, edges_col = collections(prefix)
     filt = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
 
-    try:
-        client.delete(
-            collection_name=chunks_col,
-            points_selector=rest.FilterSelector(filter=filt),
-            wait=True
-        )
-    except Exception as e:
-        print(json.dumps({"note_id": note_id, "warn": f"delete chunks via filter failed: {e}"}))
-
-    try:
-        client.delete(
-            collection_name=edges_col,
-            points_selector=rest.FilterSelector(filter=filt),
-            wait=True
-        )
-    except Exception as e:
-        print(json.dumps({"note_id": note_id, "warn": f"delete edges via filter failed: {e}"}))
+    for col in (chunks_col, edges_col):
+        try:
+            client.delete(
+                collection_name=col,
+                points_selector=rest.FilterSelector(filter=filt),
+                wait=True
+            )
+        except Exception as e:
+            print(json.dumps({"note_id": note_id, "warn": f"delete in {col} via filter failed: {e}"}))
 
 
 def delete_note_everywhere(client, prefix: str, note_id: str) -> None:
     """Deletes the note + its chunks + edges via filter."""
     notes_col, chunks_col, edges_col = collections(prefix)
     filt = rest.Filter(must=[rest.FieldCondition(key="note_id", match=rest.MatchValue(value=note_id))])
     # Order: edges/chunks first, then the note
     for col in (edges_col, chunks_col, notes_col):
         try:
             client.delete(
@@ -241,6 +212,7 @@ def main() -> None:
     ap.add_argument("--purge-before-upsert", action="store_true",
                     help="Before upserting, delete chunks & edges of the CHANGED note")
     ap.add_argument("--note-id", help="Process only one specific note ID")
+    ap.add_argument("--only-path", help="Process exactly this Markdown path (ignores --note-id)")
     ap.add_argument("--embed-note", action="store_true", help="Optional: embed the note full text")
     ap.add_argument("--force-replace", action="store_true",
                     help="Ignore change detection and always upsert (+ optional purge)")
@@ -250,9 +222,9 @@ def main() -> None:
     ap.add_argument("--hash-source", choices=["parsed", "raw"], default=None,
                     help="Source for the hash computation (default: parsed)")
     ap.add_argument("--note-scope-refs", action="store_true",
-                    help="(optional) additionally emits references:note (default: off)")
+                    help="(optional) additionally emits references:note/backlink:note (default: off)")
     ap.add_argument("--debug-hash-diff", action="store_true",
-                    help="(reserved) optional body diff (not needed with option C)")
+                    help="(reserved) optional body diff")
     ap.add_argument("--compare-text", action="store_true",
                     help="Additionally compare the parsed full text directly (beyond the hash)")
     ap.add_argument("--baseline-modes", action="store_true",
@@ -266,7 +238,7 @@ def main() -> None:
     src = _env("MINDNET_HASH_SOURCE", args.hash_source or "parsed")            # parsed|raw
     norm = _env("MINDNET_HASH_NORMALIZE", args.hash_normalize or "canonical")  # canonical|none
     note_scope_refs_env = (_env("MINDNET_NOTE_SCOPE_REFS", "false") == "true")
-    note_scope_refs = args.note_scoop_refs if hasattr(args, "note_scoop_refs") else (args.note_scope_refs or note_scope_refs_env)  # typo-safe
+    note_scope_refs = args.note_scope_refs or note_scope_refs_env
     compare_text = args.compare_text or (_env("MINDNET_COMPARE_TEXT", "false") == "true")
 
     cfg = QdrantConfig.from_env()
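
Not part of the commit — `_env` is defined elsewhere in the script. A sketch of the precedence the calls above imply, assuming `_env` is a plain environment lookup with a fallback default:

import os

def _env(name: str, default: str) -> str:
    # assumed shape of the helper: env var if set, else the passed default
    return os.environ.get(name, default)

os.environ["MINDNET_HASH_SOURCE"] = "raw"
src = _env("MINDNET_HASH_SOURCE", "parsed")  # -> "raw": the env var overrides CLI/default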
@@ -277,14 +249,19 @@ def main() -> None:
     ensure_payload_indexes(client, cfg.prefix)
 
     root = os.path.abspath(args.vault)
-    files = iter_md(root)
-
+
+    # Determine the file list
+    if args.only_path:
+        only = os.path.abspath(args.only_path)
+        files = [only]
+    else:
+        files = iter_md(root)
     if not files:
         print("No Markdown files found.", file=sys.stderr)
         sys.exit(2)
 
     # Optional: sync deletes up front
     if args.sync_deletes:
         # Collect vault note IDs
         vault_note_ids: Set[str] = set()
         for path in files:
             try:
@@ -297,7 +274,6 @@ def main() -> None:
                 vault_note_ids.add(nid)
             except Exception:
                 continue
-
         # Collect Qdrant note IDs
         qdrant_note_ids = list_qdrant_note_ids(client, cfg.prefix)
         to_delete = sorted(qdrant_note_ids - vault_note_ids)
         print(json.dumps({
@@ -312,7 +288,6 @@ def main() -> None:
             for nid in to_delete:
                 print(json.dumps({"action": "delete", "note_id": nid, "decision": "apply"}))
                 delete_note_everywhere(client, cfg.prefix, nid)
-        # Then continue with the normal import
 
     key_current = f"{mode}:{src}:{norm}"
@@ -322,7 +297,7 @@ def main() -> None:
         try:
             parsed = read_markdown(path)
         except Exception as e:
-            print(json.dumps({"path": path, "error": f"read_markdown failed: {e}"}))
+            print(json.dumps({"path": path, "error": f"read_markdown failed: {type(e).__name__}: {e}"}))
             continue
         if parsed is None:
             print(json.dumps({"path": path, "error": "read_markdown returned None"}))
@@ -332,10 +307,10 @@ def main() -> None:
             fm = normalize_frontmatter(parsed.frontmatter)
             validate_required_frontmatter(fm)
         except Exception as e:
-            print(json.dumps({"path": path, "error": f"Frontmatter invalid: {e}"}))
+            print(json.dumps({"path": path, "error": f"Frontmatter invalid: {type(e).__name__}: {e}"}))
             continue
 
-        if args.note_id and fm.get("id") != args.note_id:
+        if args.note_id and not args.only_path and fm.get("id") != args.note_id:
             continue
 
         processed += 1
@@ -363,16 +338,9 @@ def main() -> None:
 
         old_hashes = (old_payload or {}).get("hashes") or {}
         old_hash_exact = old_hashes.get(key_current)
-
-        # New hash (current mode) from the new payload
         new_hash_exact = (note_pl.get("hashes") or {}).get(key_current)
 
         needs_baseline = (old_hash_exact is None)
-
-        # Change detection:
-        # - CREATE: if there is NO old payload -> changed=True
-        # - UPDATE: baseline exists AND the hash differs
-        # - force/text_changed as before
         hash_changed = (old_hash_exact is not None and new_hash_exact is not None and old_hash_exact != new_hash_exact)
 
         text_changed = False
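
Not part of the commit — a worked example of the decision matrix the removed comments describe; the digests are hypothetical, and `force_replace`/`text_changed` are omitted for brevity:

cases = [
    # (has_old, old_hash, new_hash)
    (False, None,  "abc"),  # CREATE: no old payload        -> changed
    (True,  None,  "abc"),  # mode key missing in old note  -> baseline, not changed
    (True,  "abc", "abc"),  # same digest                   -> unchanged
    (True,  "abc", "xyz"),  # digest differs                -> changed
]
for has_old, old_h, new_h in cases:
    hash_changed = old_h is not None and new_h is not None and old_h != new_h
    changed = (not has_old) or hash_changed
    needs_baseline = has_old and old_h is None   # baseline-only also requires "not changed"
    print(f"has_old={has_old} old={old_h} new={new_h} -> changed={changed}, needs_baseline={needs_baseline}")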
@@ -382,49 +350,43 @@ def main() -> None:
             text_changed = (old_text != new_text)
 
         changed = args.force_replace or (not has_old) or hash_changed or text_changed
 
         # Baseline-only: only if an old payload exists AND the key is missing AND nothing else changed
         do_baseline_only = (args.baseline_modes and has_old and needs_baseline and not changed)
 
-        # -------- Optional: prepare chunks / embeddings / edges --------
+        # -------- Chunks / Embeddings --------
         chunk_pls: List[Dict[str, Any]] = []
-        edges: List[Dict[str, Any]] = []
-        vecs: List[List[float]] = []
-        if changed and (not do_baseline_only):
-            try:
-                # Assemble chunks
-                body_text = getattr(parsed, "body", "") or ""
-                chunks = assemble_chunks(fm["id"], body_text, fm.get("type", "concept"))
-                # Chunk payloads incl. offsets/overlap (new in 3.7.1)
-                chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks, note_text=body_text)
-            except Exception as e:
-                print(json.dumps({"path": path, "note_id": note_id, "error": f"chunk build failed: {e}"}))
-                continue
-
-            # Embeddings: from window (fallback: text)
-            if embed_texts and chunk_pls:
-                try:
-                    texts_for_embed = [(pl.get("window") or pl.get("text") or "") for pl in chunk_pls]
-                    vecs = embed_texts(texts_for_embed)
-                except Exception as e:
-                    print(json.dumps({"path": path, "note_id": note_id, "warn": f"embed_texts failed, using zeros: {e}"}))
-                    vecs = [[0.0] * cfg.dim for _ in chunk_pls]
-            else:
-                vecs = [[0.0] * cfg.dim for _ in chunk_pls]
-
-            # Edges
-            try:
-                note_refs = note_pl.get("references") or []
-                edges = build_edges_for_note(
-                    note_id,
-                    chunk_pls,
-                    note_refs,
-                    note_level_references=note_refs,
-                    include_note_scope_refs=note_scope_refs,
-                )
-            except Exception as e:
-                print(json.dumps({"path": path, "note_id": note_id, "error": f"build_edges_for_note failed: {e}"}))
-                continue
+        try:
+            body_text = getattr(parsed, "body", "") or ""
+            chunks = assemble_chunks(fm["id"], body_text, fm.get("type", "concept"))
+            chunk_pls = make_chunk_payloads(fm, note_pl["path"], chunks, note_text=body_text)
+        except Exception as e:
+            print(json.dumps({"path": path, "note_id": note_id, "error": f"chunk build failed: {type(e).__name__}: {e}"}))
+            continue
+
+        vecs: List[List[float]] = [[0.0] * cfg.dim for _ in chunk_pls]
+        if embed_texts and chunk_pls:
+            try:
+                texts_for_embed = [(pl.get("window") or pl.get("text") or "") for pl in chunk_pls]
+                vecs = embed_texts(texts_for_embed)
+            except Exception as e:
+                print(json.dumps({"path": path, "note_id": note_id, "warn": f"embed_texts failed, using zeros: {e}"}))
+
+        # -------- Edges (robust) --------
+        edges: List[Dict[str, Any]] = []
+        edges_failed = False
+        if changed and (not do_baseline_only):
+            try:
+                note_refs = note_pl.get("references") or []
+                edges = build_edges_for_note(
+                    note_id,
+                    chunk_pls,
+                    note_refs,
+                    note_level_references=note_refs,
+                    include_note_scope_refs=note_scope_refs,
+                )
+            except Exception as e:
+                edges_failed = True
+                edges = []
+                # IMPORTANT: we no longer abort — the note & chunks are still written.
+                print(json.dumps({"path": path, "note_id": note_id, "warn": f"build_edges_for_note failed, skipping edges: {type(e).__name__}: {e}"}))
 
         # -------- Summary --------
         summary = {
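
Not part of the commit — an illustration of the 3.7.1 window/text split with hypothetical payloads: `text` segments are overlap-free, so joining them restores the body losslessly, while `window` carries the overlap and feeds the embeddings:

chunk_pls = [
    {"start": 0, "end": 20, "overlap_left": 0, "overlap_right": 5,
     "text": "First segment text. ", "window": "First segment text. Secon"},
    {"start": 20, "end": 40, "overlap_left": 5, "overlap_right": 0,
     "text": "Second segment text.", "window": "ext. Second segment text."},
]
body = "".join(pl["text"] for pl in chunk_pls)                       # lossless reconstruction
texts_for_embed = [pl["window"] or pl["text"] for pl in chunk_pls]   # embedding input
assert body == "First segment text. Second segment text."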
@@ -432,6 +394,7 @@ def main() -> None:
             "title": fm.get("title"),
             "chunks": len(chunk_pls),
             "edges": len(edges),
+            "edges_failed": edges_failed,
             "changed": changed,
             "needs_baseline_for_mode": needs_baseline,
             "decision": ("baseline-only" if args.apply and do_baseline_only else
@@ -450,12 +413,10 @@ def main() -> None:
         if not args.apply:
             continue
 
         # BASELINE-ONLY: backfill the missing key without touching legacy fields
         if do_baseline_only:
             merged_hashes = {}
             merged_hashes.update(old_hashes)
             merged_hashes.update(note_pl.get("hashes") or {})
             # Leave legacy hash fields unchanged if present
             if old_payload:
                 note_pl["hash_fulltext"] = old_payload.get("hash_fulltext", note_pl.get("hash_fulltext"))
                 note_pl["hash_signature"] = old_payload.get("hash_signature", note_pl.get("hash_signature"))
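
Not part of the commit — the effect of the merge above on hypothetical values: existing mode keys survive, the freshly computed key is added, and the new value wins on collision:

old_hashes = {"fulltext:parsed:canonical": "a1"}
new_hashes = {"frontmatter:parsed:canonical": "b2"}
merged_hashes = {}
merged_hashes.update(old_hashes)
merged_hashes.update(new_hashes)
assert merged_hashes == {
    "fulltext:parsed:canonical": "a1",        # preserved baseline
    "frontmatter:parsed:canonical": "b2",     # newly backfilled key
}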
@@ -464,7 +425,6 @@ def main() -> None:
             upsert_batch(client, notes_name, note_pts)
             continue
 
-        # Normal CREATE/UPDATE
         if not changed:
             continue
 