# mindnet/scripts/backfill_edges.py
# Lars a9d4c2ec0e
# Some checks failed
# Deploy mindnet to llm-node / deploy (push) Failing after 2s
# scripts/backfill_edges.py updated
# 2025-09-04 09:43:57 +02:00
#
# 84 lines
# 2.9 KiB
# Python

# scripts/backfill_edges.py
#!/usr/bin/env python3
from __future__ import annotations
import argparse, glob, json, os
from typing import List, Tuple
from app.core.parser import read_markdown  # returns (frontmatter_dict, body_str)
from app.core.qdrant import QdrantConfig, get_client, ensure_collections
from app.core.qdrant_points import points_for_edges, upsert_batch
from app.core.derive_edges import build_note_index, derive_wikilink_edges
def make_note_stub(path: str, fm: dict, body: str) -> dict:
    """Build a minimal note payload used only for link resolution and edges.

    Carries just what the edge derivation needs:
    - note_id (taken from frontmatter), title, path
    - fulltext = body (so [[...]] links anywhere in the document are found)
    """
    resolved_id = fm.get("id") or fm.get("note_id")
    # Fall back to the file name (without extension) when no title is set.
    fallback_title = os.path.basename(path).rsplit(".", 1)[0]
    return {
        "note_id": resolved_id,
        "title": fm.get("title") or fallback_title,
        "path": path.replace("\\", "/"),
        "fulltext": body,
    }
def iter_notes(vault: str, excludes: List[str]) -> List[Tuple[dict, List[dict]]]:
    """Collect (note_stub, chunks_for_link_scan) pairs from all vault markdown files.

    For the backfill a single chunk (= the whole body) per note is enough,
    since we only need to find [[...]] wikilinks anywhere in the document.

    Notes without a stable frontmatter ID are skipped (edges could not be
    referenced meaningfully); unreadable files are skipped with a message.
    """
    files = glob.glob(os.path.join(vault, "**/*.md"), recursive=True)
    out: List[Tuple[dict, List[dict]]] = []
    for path in files:
        # Fix: normalize separators BEFORE the exclude test. The excludes
        # use forward slashes ("/.obsidian/"), but on Windows glob yields
        # backslash-separated paths, so the original substring check never
        # matched and excluded folders were scanned anyway.
        norm = path.replace("\\", "/")
        if any(ex in norm for ex in excludes):
            continue
        try:
            fm, body = read_markdown(path)
            stub = make_note_stub(path=os.path.relpath(path, vault), fm=fm, body=body)
            if not stub.get("note_id"):
                # Without a stable ID we cannot reference edges meaningfully.
                continue
            chunk = {
                "chunk_id": f"{stub['note_id']}#1",
                "note_id": stub["note_id"],
                "text": body,
            }
            out.append((stub, [chunk]))
        except Exception as e:
            # Best-effort backfill: report and keep going on broken notes.
            print(f"skip {path}: {e}")
    return out
def main():
    """CLI entry point: scan a vault, derive wikilink edges, upsert them to Qdrant."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--vault", required=True)
    parser.add_argument("--exclude", nargs="*", default=["/.obsidian/", "/_backup_frontmatter/"])
    args = parser.parse_args()

    cfg = QdrantConfig.from_env()
    client = get_client(cfg)
    ensure_collections(client, cfg.prefix, cfg.dim)

    # 1) Collect note stubs, each with a single whole-body chunk for the scan.
    note_tuples = iter_notes(args.vault, args.exclude)

    # 2) Build the index used to resolve link targets.
    idx = build_note_index([stub for stub, _ in note_tuples])

    # 3) Derive edges from every note's wikilinks.
    all_edges = []
    for stub, chunks in note_tuples:
        all_edges.extend(derive_wikilink_edges(stub, chunks, idx))

    # 4) Upsert the edge points and report the count as JSON.
    edges_col, edge_pts = points_for_edges(cfg.prefix, all_edges)
    upsert_batch(client, edges_col, edge_pts)
    print(json.dumps({"edges_upserted": len(edge_pts)}, ensure_ascii=False))


if __name__ == "__main__":
    main()