# -*- coding: utf-8 -*-
"""
Modul: app/core/chunk_payload.py
Version: 2.0.0
Datum: 2025-09-09

Kurzbeschreibung
----------------
Erzeugt **Chunk-Payloads** für die Qdrant-Collection `<prefix>_chunks` auf Basis der
vom Chunker gelieferten `Chunk`-Objekte. Ziele:
- *Verlustfreie Rekonstruktion*: Jeder Chunk enthält seinen **Text** (payload["text"]).
- *Schnelle Abfragen*: Wichtige Note-Metadaten werden gespiegelt, um Filter ohne Join zu erlauben.
- *Graph-Kompatibilität*: Wikilinks und externe Links werden extrahiert; Nachbarschaften werden übernommen.
- *Monitoring*: Token- und Längenmetriken sowie Text-Hash erleichtern Audits und Re-Embeddings.

Kompatibilität
--------------
- **Abwärtskompatibel** zur bisherigen `make_chunk_payloads`-Signatur.
- Zusätzliche Felder stören bestehende Upserts nicht (Payload ist schema-flexibel).
- Erwartet, dass `Chunk` u. a. die Attribute `id`, `index`, `text`, `char_start`, `char_end`,
  `section_title`, `section_path`, `neighbors_prev`, `neighbors_next` liefert.

CLI (Schnelltest)
-----------------
# Preview aus einer Markdown-Datei
python3 -m app.core.chunk_payload --from-file ./test_vault/20_experiences/exp-two.md --vault-root ./test_vault

# Nur IDs & Tokenmengen
python3 -m app.core.chunk_payload --from-file ./test_vault/20_experiences/exp-two.md --vault-root ./test_vault --summary

Felder (Auszug)
---------------
id              : "<note_id>#cNN"
scope           : "chunk"
note_id         : "<note_id>"
note_title      : str
note_type       : str
note_status     : str
area, project   : optional
tags            : list[str]
note_path       : str (relativ, Slashes normalisiert)
chunk_index     : int
section_title   : str | None
section_path    : str | None
char_start      : int | None
char_end        : int | None
char_len        : int
token_est       : int (≈ len(text)/4)
neighbors       : {"prev": str|None, "next": str|None}
text            : str (Chunk-Text, **Pflicht**)
text_sha256     : str "sha256:<hex>"
lang            : optional
wikilinks       : list[str]
external_links  : list[{"href": str, "label": str|None}]
references      : list[{"target_id": str, "kind": "wikilink"}]
embed_model     : optional (durchreichbar)
embed_dim       : optional
embed_version   : optional
"""
from __future__ import annotations

import argparse
import hashlib
import json
import os
import re
from typing import Any, Dict, List, Optional, Tuple

# IMPORTANT: no self- or cross-imports that create cycles!
# The core function only needs extract_wikilinks as a type-independent helper.
try:
    # Package import (normal operation)
    from app.core.chunker import Chunk
    from app.core.parser import extract_wikilinks, read_markdown, normalize_frontmatter, validate_required_frontmatter
except Exception:  # pragma: no cover
    # Relative import (local tests)
    from .chunker import Chunk  # type: ignore
    from .parser import extract_wikilinks, read_markdown, normalize_frontmatter, validate_required_frontmatter  # type: ignore
# ---------------------------------------------------------------------------
|
||||
# Utils
|
||||
# Utilities
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _drop_empty(d: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Entfernt leere/None-Felder aus einem Dict (für saubere Payloads)."""
|
||||
return {k: v for k, v in d.items() if v not in (None, [], {}, "")}
|
||||
# Markdown inline link: [label](href "optional title")
RE_MD_LINK = re.compile(r"\[([^\]]*)\]\(([^)\s]+)(?:\s+\"([^\"]+)\")?\)")
# URL schemes treated as "external" links.
RE_HTTP_SCHEMES = ("http://", "https://", "mailto:", "obsidian://", "tel:")
def _estimate_tokens(text: str) -> int:
|
||||
"""Grobe Token-Schätzung (≈ 1 Token pro 4 Zeichen)."""
|
||||
return max(0, int(round(len((text or '').strip()) / 4)))
|
||||
|
||||
def _sha256_text(text: str) -> str:
|
||||
h = hashlib.sha256()
|
||||
h.update((text or "").encode("utf-8"))
|
||||
return "sha256:" + h.hexdigest()
|
||||
|
||||
def _normalize_rel_path(path: Optional[str], vault_root: Optional[str]) -> Optional[str]:
|
||||
if not path:
|
||||
return None
|
||||
p = str(path)
|
||||
p = p.replace("\\", "/")
|
||||
if vault_root and os.path.isabs(p):
|
||||
try:
|
||||
p = os.path.relpath(p, vault_root)
|
||||
except Exception:
|
||||
pass
|
||||
p = p.replace("\\", "/").lstrip("/")
|
||||
return p
|
||||
|
||||
def _extract_external_links(text: str) -> List[Dict[str, Optional[str]]]:
    """Find Markdown links ``[label](href "title")`` with allowed schemes.

    Returns a list of ``{"href": ..., "label": ...}``; the label falls back to
    the optional title when the link text is empty. Empty input yields [].
    """
    out: List[Dict[str, Optional[str]]] = []
    if not text:
        return out
    for m in RE_MD_LINK.finditer(text):
        label = (m.group(1) or "").strip() or None
        href = (m.group(2) or "").strip()
        title = (m.group(3) or "").strip() or None
        if any(href.startswith(s) for s in RE_HTTP_SCHEMES):
            out.append({"href": href, "label": label or title})
    return out
# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------

def make_chunk_payloads(note_meta: Dict, path: str, chunks: List[Chunk]) -> List[Dict]:
    """
    Build payload dicts for all chunks of a note.

    Parameters
    ----------
    note_meta : Dict
        Normalized note frontmatter (at least: id, title, type, status, tags;
        optionally area, project, lang).
    path : str
        Path of the note (should be vault-relative; normalized here defensively).
    chunks : List[Chunk]
        Chunks produced by the chunker.

    Returns
    -------
    List[Dict]
        One payload per chunk, ready for Qdrant upserts.
    """
    res: List[Dict] = []
    rel_path = _normalize_rel_path(path, vault_root=None)

    for ch in chunks:
        # 'text' may be None/'' for empty sections -> handle robustly
        text: str = getattr(ch, "text", "") or ""
        wikilinks = extract_wikilinks(text)
        ext_links = _extract_external_links(text)

        payload: Dict = {
            "id": getattr(ch, "id", None),
            "scope": "chunk",
            "note_id": note_meta.get("id"),
            "note_title": note_meta.get("title"),
            # mirrored note metadata for fast filtering without a join:
            "note_type": note_meta.get("type"),
            "note_status": note_meta.get("status"),
            "area": note_meta.get("area"),
            "project": note_meta.get("project"),
            "tags": note_meta.get("tags"),
            # path
            "note_path": rel_path,
            "path": rel_path,  # back-compat
            # order & section
            "chunk_index": getattr(ch, "index", None),
            "token_count": getattr(ch, "token_count", None),
            "section_title": getattr(ch, "section_title", None),
            "section_path": getattr(ch, "section_path", None),
            # position within the note body
            "char_start": getattr(ch, "char_start", None),
            "char_end": getattr(ch, "char_end", None),
            # falls back to len(text) when the offsets are missing/zero
            "char_len": max(0, int(getattr(ch, "char_end", 0) or 0) - int(getattr(ch, "char_start", 0) or 0)) or len(text),
            # neighbors
            "neighbors": {
                "prev": getattr(ch, "neighbors_prev", None),
                "next": getattr(ch, "neighbors_next", None),
            },
            # content & metrics (text is REQUIRED for export/reconstruction)
            "text": text,
            "text_sha256": _sha256_text(text),
            "token_est": _estimate_tokens(text),
            # language
            "lang": note_meta.get("lang"),
            # links
            "wikilinks": wikilinks,
            "external_links": ext_links,
            "references": [{"target_id": w, "kind": "wikilink"} for w in wikilinks],
        }

        # Drop None/empty fields, but **text** and **neighbors** always survive
        # (text may legitimately be empty, e.g. for image-only chunks).
        cleaned: Dict = {}
        for k, v in payload.items():
            if v in (None, [], {}) and k not in ("text", "neighbors"):
                continue
            cleaned[k] = v

        res.append(cleaned)

    return res
# ---------------------------------------------------------------------------
# CLI for quick manual testing
# ---------------------------------------------------------------------------

def _cli() -> None:
    """Build chunk payloads from a single Markdown file and print them as JSON."""
    ap = argparse.ArgumentParser(description="Chunk-Payloads aus einer einzelnen Markdown-Datei erzeugen")
    ap.add_argument("--from-file", required=True, help="Pfad zur Markdown-Datei")
    ap.add_argument("--vault-root", default=None, help="Vault-Wurzel (zur Pfad-Relativierung)")
    ap.add_argument("--summary", action="store_true", help="Nur kurze Übersicht je Chunk ausgeben")
    args = ap.parse_args()

    parsed = read_markdown(args.from_file)
    fm = normalize_frontmatter(parsed.frontmatter)
    validate_required_frontmatter(fm)

    # lazy import to keep module initialization cycle-free
    try:
        from app.core.chunker import assemble_chunks
    except Exception:
        from .chunker import assemble_chunks  # type: ignore

    chunks = assemble_chunks(fm["id"], parsed.body, fm.get("type", "concept"))
    rel = _normalize_rel_path(parsed.path, args.vault_root)

    pls = make_chunk_payloads(fm, rel or parsed.path, chunks)

    if args.summary:
        out = []
        for p in pls:
            out.append({
                "id": p.get("id"),
                "chunk_index": p.get("chunk_index"),
                "token_est": p.get("token_est"),
                "wikilinks": p.get("wikilinks"),
                "ext_links": [e.get("href") for e in p.get("external_links", [])],
                "prev": (p.get("neighbors") or {}).get("prev"),
                "next": (p.get("neighbors") or {}).get("next"),
            })
        print(json.dumps(out, ensure_ascii=False, indent=2))
    else:
        print(json.dumps(pls, ensure_ascii=False, indent=2))
if __name__ == "__main__":  # pragma: no cover
    _cli()