app/core/chunk_payload.py aktualisiert
All checks were successful
Deploy mindnet to llm-node / deploy (push) Successful in 2s
All checks were successful
Deploy mindnet to llm-node / deploy (push) Successful in 2s
This commit is contained in:
parent
597090bc45
commit
bdbc4a1bf7
|
|
@ -1,50 +1,30 @@
|
||||||
# chunk_payload.py
|
# chunk_payload.py
|
||||||
"""
|
|
||||||
Mindnet - Chunk Payload Builder
|
|
||||||
Version: 1.4.3
|
|
||||||
Beschreibung:
|
|
||||||
- Robust gegenüber alten/neuen Aufrufsignaturen (toleriert *args, **kwargs).
|
|
||||||
- Liest Typ-Defaults aus ./config/config.yaml oder ./config/types.yaml.
|
|
||||||
- Baut Chunks aus vorhandenen note.chunks (falls vorhanden) oder fällt auf
|
|
||||||
eine einfache, profilabhängige Absatzbündelung zurück.
|
|
||||||
- Setzt in jedem Chunk-Payload:
|
|
||||||
- note_id, chunk_id (deterministisch), index, title, type, path
|
|
||||||
- text (nie leer), retriever_weight, chunk_profile
|
|
||||||
- Garantiert JSON-serialisierbare Payloads.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from typing import Any, Dict, List, Optional
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
import os
|
import os, json, pathlib, re, yaml, hashlib
|
||||||
import json
|
|
||||||
import pathlib
|
|
||||||
import re
|
|
||||||
import yaml
|
|
||||||
import hashlib
|
|
||||||
|
|
||||||
|
# Matches a leading YAML frontmatter block ("---\n...\n---\n") at the very
# start of a file so it can be stripped before chunking (non-greedy, DOTALL).
FRONTMATTER_RE = re.compile(r"^---\s*\n.*?\n---\s*\n?", re.DOTALL)
|
||||||
|
|
||||||
def _as_dict(note: Any) -> Dict[str, Any]:
|
def _as_dict(note: Any) -> Dict[str, Any]:
|
||||||
if isinstance(note, dict):
|
if isinstance(note, dict):
|
||||||
return note
|
return note
|
||||||
d: Dict[str, Any] = {}
|
d: Dict[str, Any] = {}
|
||||||
for attr in (
|
for attr in ("id","note_id","title","path","frontmatter","meta","metadata","body","text","content","raw","markdown","type","chunks"):
|
||||||
"id",
|
|
||||||
"note_id",
|
|
||||||
"title",
|
|
||||||
"path",
|
|
||||||
"frontmatter",
|
|
||||||
"meta",
|
|
||||||
"body",
|
|
||||||
"text",
|
|
||||||
"type",
|
|
||||||
"chunks",
|
|
||||||
):
|
|
||||||
if hasattr(note, attr):
|
if hasattr(note, attr):
|
||||||
d[attr] = getattr(note, attr)
|
d[attr] = getattr(note, attr)
|
||||||
if "frontmatter" not in d and hasattr(note, "metadata"):
|
fm = d.get("frontmatter") or d.get("meta") or d.get("metadata") or {}
|
||||||
d["frontmatter"] = getattr(note, "metadata")
|
d["frontmatter"] = fm if isinstance(fm, dict) else {}
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
def _pick_args(*args, **kwargs) -> Tuple[Optional[str], Optional[Dict[str,Any]]]:
|
||||||
|
path = kwargs.get("path")
|
||||||
|
types_cfg = kwargs.get("types_config")
|
||||||
|
for a in args:
|
||||||
|
if path is None and isinstance(a, (str, pathlib.Path)):
|
||||||
|
path = str(a)
|
||||||
|
if types_cfg is None and isinstance(a, dict):
|
||||||
|
types_cfg = a
|
||||||
|
return path, types_cfg
|
||||||
|
|
||||||
def _load_types_config(explicit: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
|
def _load_types_config(explicit: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
|
||||||
if isinstance(explicit, dict):
|
if isinstance(explicit, dict):
|
||||||
|
|
@ -59,51 +39,70 @@ def _load_types_config(explicit: Optional[Dict[str, Any]] = None) -> Dict[str, A
|
||||||
return data if isinstance(data, dict) else {}
|
return data if isinstance(data, dict) else {}
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
|
||||||
def _get_front(n: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
fm = n.get("frontmatter") or n.get("meta") or {}
|
|
||||||
return fm if isinstance(fm, dict) else {}
|
|
||||||
|
|
||||||
|
|
||||||
def _coalesce(*vals):
|
def _coalesce(*vals):
|
||||||
for v in vals:
|
for v in vals:
|
||||||
if v is not None:
|
if v is not None:
|
||||||
return v
|
return v
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def _text_from_note(n: Dict[str, Any], path_hint: Optional[str]) -> str:
|
||||||
|
# häufige Felder
|
||||||
|
cand = [
|
||||||
|
n.get("body"),
|
||||||
|
n.get("text"),
|
||||||
|
n.get("markdown"),
|
||||||
|
n.get("raw"),
|
||||||
|
]
|
||||||
|
content = n.get("content")
|
||||||
|
if isinstance(content, str):
|
||||||
|
cand.append(content)
|
||||||
|
elif isinstance(content, dict):
|
||||||
|
for k in ("text","body","raw","markdown","content"):
|
||||||
|
v = content.get(k)
|
||||||
|
if isinstance(v, str):
|
||||||
|
cand.append(v)
|
||||||
|
for t in cand:
|
||||||
|
if isinstance(t, str) and t.strip():
|
||||||
|
return t
|
||||||
|
|
||||||
def _body(n: Dict[str, Any]) -> str:
|
# Fallback: Datei lesen und Frontmatter entfernen
|
||||||
b = n.get("body")
|
p = n.get("path") or path_hint
|
||||||
if isinstance(b, str):
|
if p:
|
||||||
return b
|
try:
|
||||||
t = n.get("text")
|
pth = pathlib.Path(p)
|
||||||
return t if isinstance(t, str) else ""
|
if pth.exists():
|
||||||
|
txt = pth.read_text(encoding="utf-8", errors="ignore")
|
||||||
|
if txt:
|
||||||
|
return FRONTMATTER_RE.sub("", txt).strip()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def _iter_chunks(n: Dict[str, Any], profile: str, fulltext: str) -> List[Dict[str, Any]]:
|
||||||
def _iter_chunks(n: Dict[str, Any], profile: str) -> List[Dict[str, Any]]:
|
# 1) vorhandene Chunks nehmen, wenn sinnvoll
|
||||||
# 1) Bereits vorhandene Chunks bevorzugen
|
|
||||||
existing = n.get("chunks")
|
existing = n.get("chunks")
|
||||||
if isinstance(existing, list) and existing:
|
|
||||||
out: List[Dict[str, Any]] = []
|
out: List[Dict[str, Any]] = []
|
||||||
|
if isinstance(existing, list) and existing:
|
||||||
for i, c in enumerate(existing):
|
for i, c in enumerate(existing):
|
||||||
|
text = ""
|
||||||
if isinstance(c, dict):
|
if isinstance(c, dict):
|
||||||
text = c.get("text") or ""
|
text = c.get("text") or c.get("body") or c.get("raw") or ""
|
||||||
else:
|
elif isinstance(c, str):
|
||||||
text = str(c) if c is not None else ""
|
text = c
|
||||||
if not text:
|
if text and text.strip():
|
||||||
continue
|
|
||||||
out.append({"index": i, "text": text})
|
out.append({"index": i, "text": text})
|
||||||
if out:
|
if out:
|
||||||
return out
|
return out
|
||||||
|
|
||||||
# 2) Fallback: naive, profilabhängige Absatz-Bündelung
|
# 2) Fallback: profilabhängige Bündelung
|
||||||
size = {"short": 600, "medium": 1200, "long": 2400}.get(str(profile), 1200)
|
if not isinstance(profile, str):
|
||||||
text = _body(n)
|
profile = "medium"
|
||||||
if not text:
|
size = {"short": 600, "medium": 1200, "long": 2400}.get(profile, 1200)
|
||||||
|
if not fulltext:
|
||||||
return []
|
return []
|
||||||
paras = re.split(r"\n{2,}", text)
|
paras = re.split(r"\n{2,}", fulltext)
|
||||||
chunks: List[str] = []
|
|
||||||
buf = ""
|
buf = ""
|
||||||
|
chunks: List[str] = []
|
||||||
for p in paras:
|
for p in paras:
|
||||||
p = (p or "").strip()
|
p = (p or "").strip()
|
||||||
if not p:
|
if not p:
|
||||||
|
|
@ -111,29 +110,22 @@ def _iter_chunks(n: Dict[str, Any], profile: str) -> List[Dict[str, Any]]:
|
||||||
if len(buf) + (2 if buf else 0) + len(p) <= size:
|
if len(buf) + (2 if buf else 0) + len(p) <= size:
|
||||||
buf = (buf + "\n\n" + p).strip() if buf else p
|
buf = (buf + "\n\n" + p).strip() if buf else p
|
||||||
else:
|
else:
|
||||||
if buf:
|
if buf: chunks.append(buf)
|
||||||
chunks.append(buf)
|
|
||||||
if len(p) <= size:
|
if len(p) <= size:
|
||||||
buf = p
|
buf = p
|
||||||
else:
|
else:
|
||||||
for i in range(0, len(p), size):
|
for i in range(0, len(p), size):
|
||||||
chunks.append(p[i : i + size])
|
chunks.append(p[i:i+size])
|
||||||
buf = ""
|
buf = ""
|
||||||
if buf:
|
if buf: chunks.append(buf)
|
||||||
chunks.append(buf)
|
|
||||||
return [{"index": i, "text": c} for i, c in enumerate(chunks)]
|
return [{"index": i, "text": c} for i, c in enumerate(chunks)]
|
||||||
|
|
||||||
|
|
||||||
def make_chunk_payloads(note: Any, *args, **kwargs) -> List[Dict[str, Any]]:
|
def make_chunk_payloads(note: Any, *args, **kwargs) -> List[Dict[str, Any]]:
|
||||||
"""
|
|
||||||
Build payloads for chunks. Tolerates legacy positional arguments.
|
|
||||||
Returns list[dict] (ein Payload pro Chunk).
|
|
||||||
"""
|
|
||||||
n = _as_dict(note)
|
n = _as_dict(note)
|
||||||
types_cfg = kwargs.get("types_config") or (args[0] if args else None)
|
path_arg, types_cfg_explicit = _pick_args(*args, **kwargs)
|
||||||
types_cfg = _load_types_config(types_cfg)
|
types_cfg = _load_types_config(types_cfg_explicit)
|
||||||
|
|
||||||
fm = _get_front(n)
|
fm = n.get("frontmatter") or {}
|
||||||
note_type = str(fm.get("type") or n.get("type") or "note")
|
note_type = str(fm.get("type") or n.get("type") or "note")
|
||||||
cfg_for_type = types_cfg.get(note_type, {}) if isinstance(types_cfg, dict) else {}
|
cfg_for_type = types_cfg.get(note_type, {}) if isinstance(types_cfg, dict) else {}
|
||||||
|
|
||||||
|
|
@ -142,43 +134,34 @@ def make_chunk_payloads(note: Any, *args, **kwargs) -> List[Dict[str, Any]]:
|
||||||
except Exception:
|
except Exception:
|
||||||
default_rw = 1.0
|
default_rw = 1.0
|
||||||
|
|
||||||
retriever_weight = _coalesce(
|
retriever_weight = _coalesce(fm.get("retriever_weight"), cfg_for_type.get("retriever_weight"), default_rw)
|
||||||
fm.get("retriever_weight"),
|
|
||||||
cfg_for_type.get("retriever_weight"),
|
|
||||||
default_rw,
|
|
||||||
)
|
|
||||||
try:
|
try:
|
||||||
retriever_weight = float(retriever_weight)
|
retriever_weight = float(retriever_weight)
|
||||||
except Exception:
|
except Exception:
|
||||||
retriever_weight = default_rw
|
retriever_weight = default_rw
|
||||||
|
|
||||||
chunk_profile = _coalesce(
|
chunk_profile = _coalesce(fm.get("chunk_profile"), cfg_for_type.get("chunk_profile"), os.environ.get("MINDNET_DEFAULT_CHUNK_PROFILE","medium"))
|
||||||
fm.get("chunk_profile"),
|
chunk_profile = chunk_profile if isinstance(chunk_profile, str) else "medium"
|
||||||
cfg_for_type.get("chunk_profile"),
|
|
||||||
os.environ.get("MINDNET_DEFAULT_CHUNK_PROFILE", "medium"),
|
|
||||||
)
|
|
||||||
if not isinstance(chunk_profile, str):
|
|
||||||
chunk_profile = "medium"
|
|
||||||
|
|
||||||
note_id = n.get("note_id") or n.get("id") or fm.get("id")
|
note_id = n.get("note_id") or n.get("id") or fm.get("id")
|
||||||
title = n.get("title") or fm.get("title") or ""
|
title = n.get("title") or fm.get("title") or ""
|
||||||
path = n.get("path")
|
path = n.get("path") or path_arg
|
||||||
if isinstance(path, pathlib.Path):
|
if isinstance(path, pathlib.Path):
|
||||||
path = str(path)
|
path = str(path)
|
||||||
|
path = path or "" # immer vorhanden
|
||||||
|
|
||||||
chunks = _iter_chunks(n, chunk_profile)
|
fulltext = _text_from_note(n, path)
|
||||||
|
chunks = _iter_chunks(n, chunk_profile, fulltext)
|
||||||
|
|
||||||
payloads: List[Dict[str, Any]] = []
|
payloads: List[Dict[str, Any]] = []
|
||||||
for c in chunks:
|
for c in chunks:
|
||||||
idx = c.get("index", len(payloads))
|
idx = c.get("index", len(payloads))
|
||||||
text = c.get("text") if isinstance(c, dict) else (str(c) if c is not None else "")
|
text = c.get("text") if isinstance(c, dict) else (str(c) if c is not None else "")
|
||||||
if not isinstance(text, str):
|
text = text if isinstance(text, str) else str(text or "")
|
||||||
text = str(text or "")
|
|
||||||
|
|
||||||
# deterministische chunk_id
|
|
||||||
key = f"{note_id}|{idx}"
|
key = f"{note_id}|{idx}"
|
||||||
h = hashlib.sha1(key.encode("utf-8")).hexdigest()[:12]
|
h = hashlib.sha1(key.encode("utf-8")).hexdigest()[:12] if note_id else hashlib.sha1(f"{path}|{idx}".encode("utf-8")).hexdigest()[:12]
|
||||||
chunk_id = f"{note_id}-{idx:03d}-{h}" if note_id else h
|
chunk_id = f"{note_id}-{idx:03d}-{h}" if note_id else f"{h}"
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"note_id": note_id,
|
"note_id": note_id,
|
||||||
|
|
@ -186,13 +169,11 @@ def make_chunk_payloads(note: Any, *args, **kwargs) -> List[Dict[str, Any]]:
|
||||||
"index": idx,
|
"index": idx,
|
||||||
"title": title,
|
"title": title,
|
||||||
"type": note_type,
|
"type": note_type,
|
||||||
"path": path,
|
"path": path, # <- garantiert vorhanden
|
||||||
"text": text,
|
"text": text, # <- nie leer, sonst werden keine Chunks erzeugt
|
||||||
"retriever_weight": retriever_weight,
|
"retriever_weight": retriever_weight,
|
||||||
"chunk_profile": chunk_profile,
|
"chunk_profile": chunk_profile,
|
||||||
}
|
}
|
||||||
|
|
||||||
# JSON-Serialisierbarkeit sicherstellen
|
|
||||||
json.loads(json.dumps(payload, ensure_ascii=False))
|
json.loads(json.dumps(payload, ensure_ascii=False))
|
||||||
payloads.append(payload)
|
payloads.append(payload)
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue
Block a user