Add graph export and chain traversal commands; enhance linting options
Some checks are pending
Node.js build / build (20.x) (push) Waiting to run
Node.js build / build (22.x) (push) Waiting to run

- Introduced commands for exporting graph data and displaying chains from the current note.
- Enhanced linting functionality with options for showing canonical hints and specifying chain traversal direction.
- Added new utility functions for graph traversal and index building.
- Updated settings interface to include new options for user configuration.
This commit is contained in:
Lars 2026-01-15 12:27:16 +01:00
parent 9b8550c387
commit d577283af6
20 changed files with 2008 additions and 208 deletions

134
src/export/exportGraph.ts Normal file
View File

@ -0,0 +1,134 @@
import type { App, TFile } from "obsidian";
import type { Vocabulary } from "../vocab/Vocabulary";
import type { ParsedEdge } from "../parser/types";
import type { ExportBundle, ExportNode, ExportEdge } from "./types";
import { parseEdgesFromCallouts } from "../parser/parseEdgesFromCallouts";
/**
 * Export the edge graph of the whole vault to a JSON bundle.
 *
 * Scans every markdown file, parses `[!edge]` callouts, resolves each raw
 * edge type through the vocabulary, and writes an ExportBundle
 * (nodes + edges + vocabulary stats) to `outputPath`.
 *
 * @param app - Obsidian app instance (vault access).
 * @param vocabulary - Resolves raw edge types to canonical/inverse types.
 * @param outputPath - Vault-relative path of the JSON file to write.
 */
export async function exportGraph(
  app: App,
  vocabulary: Vocabulary,
  outputPath: string = "_system/exports/graph_export.json"
): Promise<void> {
  const nodes: ExportNode[] = [];
  const edges: ExportEdge[] = [];
  const nodeMap = new Map<string, ExportNode>();

  const markdownFiles = app.vault.getMarkdownFiles();

  // Build the target-lookup table once (previously a linear scan per target,
  // i.e. O(files x targets)). A file is reachable under both its full name
  // ("note.md") and its basename ("note"); first file wins on duplicate keys,
  // matching the previous first-match scan order.
  const fileByKey = new Map<string, TFile>();
  for (const mdFile of markdownFiles) {
    if (!fileByKey.has(mdFile.name)) fileByKey.set(mdFile.name, mdFile);
    if (!fileByKey.has(mdFile.basename)) fileByKey.set(mdFile.basename, mdFile);
  }

  /** Ensure a node exists for `id`; creates and registers it on first use. */
  const ensureNode = (id: string, title: string, path: string): void => {
    if (!nodeMap.has(id)) {
      const node: ExportNode = { id, title, path };
      nodeMap.set(id, node);
      nodes.push(node);
    }
  };

  for (const file of markdownFiles) {
    try {
      const content = await app.vault.read(file);
      const parsedEdges = parseEdgesFromCallouts(content);

      // Node for the source file itself.
      const nodeId = file.path;
      ensureNode(nodeId, file.basename, file.path);

      for (const parsedEdge of parsedEdges) {
        const normalized = vocabulary.normalize(parsedEdge.rawType);

        for (const target of parsedEdge.targets) {
          if (!target) continue;

          // Strip the alias suffix ("|...") first, then the heading suffix
          // ("#..."). Bug fix: the alias separator was previously not
          // handled, so targets like "Note|alias" never resolved to a file.
          const withoutAlias = target.split("|")[0];
          if (!withoutAlias) continue;
          const firstPart = withoutAlias.split("#")[0];
          if (!firstPart) continue;
          const baseName = firstPart.trim();
          if (!baseName) continue;

          const targetPathWithMd = baseName.endsWith(".md") ? baseName : `${baseName}.md`;
          const targetFile: TFile | null =
            fileByKey.get(targetPathWithMd) ?? fileByKey.get(baseName) ?? null;

          // Resolved targets are keyed by vault path; unresolved ones get a
          // placeholder node keyed (and titled) by the raw base name.
          let targetNodeId: string;
          if (targetFile) {
            targetNodeId = targetFile.path;
            ensureNode(targetNodeId, targetFile.basename, targetFile.path);
          } else {
            targetNodeId = baseName;
            ensureNode(targetNodeId, baseName, baseName); // placeholder path
          }

          edges.push({
            source: nodeId,
            target: targetNodeId,
            rawType: parsedEdge.rawType,
            canonicalType: normalized.canonical,
            inverseType: normalized.inverse,
            sourcePath: file.path,
            lineStart: parsedEdge.lineStart,
            lineEnd: parsedEdge.lineEnd,
          });
        }
      }
    } catch (error) {
      // Keep going: a single unreadable file must not abort the export.
      console.error(`Error processing file ${file.path}:`, error);
    }
  }

  const stats = vocabulary.getStats();
  const bundle: ExportBundle = {
    nodes,
    edges,
    exportedAt: new Date().toISOString(),
    vocabularyStats: {
      canonicalCount: stats.canonicalCount,
      aliasCount: stats.aliasCount,
    },
  };

  await app.vault.adapter.write(outputPath, JSON.stringify(bundle, null, 2));
}

26
src/export/types.ts Normal file
View File

@ -0,0 +1,26 @@
/** A graph node in the export bundle (one per markdown note or placeholder). */
export interface ExportNode {
  id: string; // file path or note name
  title?: string;
  path: string; // vault-relative path
}

/** A directed, typed edge between two export nodes. */
export interface ExportEdge {
  source: string; // node id
  target: string; // node id
  rawType: string; // original edge type from markdown
  canonicalType: string | null; // resolved canonical type
  inverseType: string | null; // resolved inverse type (if available)
  sourcePath: string; // vault-relative path of source file
  lineStart?: number; // 0-based line number
  lineEnd?: number; // 0-based line number
}

/** Top-level JSON payload written by the graph export command. */
export interface ExportBundle {
  nodes: ExportNode[];
  edges: ExportEdge[];
  exportedAt: string; // ISO timestamp
  vocabularyStats?: {
    canonicalCount: number;
    aliasCount: number;
  };
}

144
src/graph/GraphBuilder.ts Normal file
View File

@ -0,0 +1,144 @@
import type { App, TFile } from "obsidian";
import type { Vocabulary } from "../vocab/Vocabulary";
import type { ParsedEdge } from "../parser/types";
import type { GraphBuildResult, NodeMeta, EdgeRecord } from "./types";
import { parseEdgesFromCallouts } from "../parser/parseEdgesFromCallouts";
import { extractFrontmatterId } from "../parser/parseFrontmatter";
import { normalizeTargetToBasename } from "./resolveTarget";
/**
* Build graph from vault markdown files.
* Uses frontmatter.id as the primary node identifier.
*/
export async function buildGraph(
app: App,
vocabulary: Vocabulary
): Promise<GraphBuildResult> {
const filePathToId = new Map<string, string>();
const basenameLowerToPath = new Map<string, string>();
const idToMeta = new Map<string, NodeMeta>();
const edges: EdgeRecord[] = [];
const warnings: GraphBuildResult["warnings"] = {
missingFrontmatterId: [],
missingTargetFile: [],
missingTargetId: [],
};
// Get all markdown files
const markdownFiles = app.vault.getMarkdownFiles();
// First pass: build node maps
for (const file of markdownFiles) {
try {
const content = await app.vault.read(file);
const id = extractFrontmatterId(content);
// Always add to basenameLowerToPath for target resolution
const basenameLower = file.basename.toLowerCase();
basenameLowerToPath.set(basenameLower, file.path);
if (!id) {
warnings.missingFrontmatterId.push(file.path);
continue; // Skip files without ID (don't add to filePathToId or idToMeta)
}
// Extract optional title from frontmatter (simple extraction)
let title: string | undefined;
const titleMatch = content.match(/^title\s*:\s*(.+)$/m);
if (titleMatch && titleMatch[1]) {
let titleValue = titleMatch[1].trim();
if ((titleValue.startsWith('"') && titleValue.endsWith('"')) ||
(titleValue.startsWith("'") && titleValue.endsWith("'"))) {
titleValue = titleValue.slice(1, -1);
}
title = titleValue;
}
// Populate maps
filePathToId.set(file.path, id);
const meta: NodeMeta = {
id,
path: file.path,
basename: file.basename,
title,
};
idToMeta.set(id, meta);
} catch (error) {
console.error(`Error processing file ${file.path}:`, error);
// Continue with other files
}
}
// Second pass: build edges
for (const file of markdownFiles) {
try {
const content = await app.vault.read(file);
const srcId = filePathToId.get(file.path);
if (!srcId) {
// File has no ID, skip edge processing
continue;
}
const parsedEdges = parseEdgesFromCallouts(content);
for (const parsedEdge of parsedEdges) {
const normalized = vocabulary.normalize(parsedEdge.rawType);
for (const target of parsedEdge.targets) {
if (!target) continue;
// Normalize target to basename
const resolvedBase = normalizeTargetToBasename(target);
const targetPath = basenameLowerToPath.get(resolvedBase.toLowerCase());
if (!targetPath) {
warnings.missingTargetFile.push({
srcPath: file.path,
target: target,
});
continue;
}
// Check if target file has an ID
const dstId = filePathToId.get(targetPath);
if (!dstId) {
// File exists but has no frontmatter ID
warnings.missingTargetId.push({
srcPath: file.path,
targetPath: targetPath,
});
continue;
}
// Create edge record
const edge: EdgeRecord = {
srcId,
dstId,
rawType: parsedEdge.rawType,
canonicalType: normalized.canonical,
inverseType: normalized.inverse,
srcPath: file.path,
dstPath: targetPath,
lineStart: parsedEdge.lineStart,
lineEnd: parsedEdge.lineEnd,
rawTarget: target,
};
edges.push(edge);
}
}
} catch (error) {
console.error(`Error processing edges for file ${file.path}:`, error);
// Continue with other files
}
}
return {
filePathToId,
basenameLowerToPath,
idToMeta,
edges,
warnings,
};
}

View File

@ -0,0 +1,54 @@
import type { EdgeRecord } from "./types";
/** Adjacency index over edge records: per-node outgoing/incoming buckets. */
export interface GraphIndex {
  outgoing: Map<string, EdgeRecord[]>;
  incoming: Map<string, EdgeRecord[]>;
}

/**
 * Build an adjacency index from a flat list of edges for fast traversal.
 * Each bucket is sorted for deterministic ordering: outgoing edges by
 * destination id, incoming edges by source id, with the source path as
 * tie-breaker in both cases.
 */
export function buildIndex(edges: EdgeRecord[]): GraphIndex {
  const outgoing = new Map<string, EdgeRecord[]>();
  const incoming = new Map<string, EdgeRecord[]>();

  // Append to a keyed bucket, creating the bucket on first use.
  const append = (map: Map<string, EdgeRecord[]>, key: string, edge: EdgeRecord): void => {
    const bucket = map.get(key);
    if (bucket) {
      bucket.push(edge);
    } else {
      map.set(key, [edge]);
    }
  };

  for (const edge of edges) {
    append(outgoing, edge.srcId, edge);
    append(incoming, edge.dstId, edge);
  }

  for (const bucket of outgoing.values()) {
    bucket.sort(
      (a, b) => a.dstId.localeCompare(b.dstId) || a.srcPath.localeCompare(b.srcPath)
    );
  }
  for (const bucket of incoming.values()) {
    bucket.sort(
      (a, b) => a.srcId.localeCompare(b.srcId) || a.srcPath.localeCompare(b.srcPath)
    );
  }

  return { outgoing, incoming };
}

View File

@ -0,0 +1,100 @@
import type { GraphBuildResult, NodeMeta } from "./types";
import type { Path } from "./traverse";
/** Inputs for rendering a chain traversal report. */
export interface ChainReportOptions {
  startId: string;
  startMeta: NodeMeta;
  paths: Path[];
  direction: "forward" | "backward" | "both";
  maxHops: number;
  maxPaths: number;
  warnings: GraphBuildResult["warnings"];
  idToMeta: Map<string, NodeMeta>;
}

/**
 * Render a chain traversal report as a markdown document.
 * Sections: start-node header, traversal config, warning counts (only when
 * any warnings exist), and one subsection per discovered path with node
 * labels interleaved with edge labels.
 */
export function renderChainReport(opts: ChainReportOptions): string {
  const out: string[] = [];
  const emit = (...rows: string[]): void => {
    out.push(...rows);
  };

  // Header with start-node identity.
  emit("# Chain Report", "");
  emit(`**Start Node:** ${opts.startMeta.basename} (id=${opts.startId})`);
  if (opts.startMeta.title) emit(`**Title:** ${opts.startMeta.title}`);
  if (opts.startMeta.path) emit(`**Path:** ${opts.startMeta.path}`);
  emit(
    "",
    `**Traversal Config:**`,
    `- Direction: ${opts.direction}`,
    `- Max Hops: ${opts.maxHops}`,
    `- Max Paths: ${opts.maxPaths}`,
    ""
  );

  // Warnings section only when something was flagged during the build.
  const w = opts.warnings;
  const warningCount =
    w.missingFrontmatterId.length + w.missingTargetFile.length + w.missingTargetId.length;
  if (warningCount > 0) {
    emit(`## Warnings Summary`, "");
    if (w.missingFrontmatterId.length > 0) {
      emit(`- Missing frontmatter ID: ${w.missingFrontmatterId.length} file(s)`);
    }
    if (w.missingTargetFile.length > 0) {
      emit(`- Missing target files: ${w.missingTargetFile.length} reference(s)`);
    }
    if (w.missingTargetId.length > 0) {
      emit(`- Missing target IDs: ${w.missingTargetId.length} reference(s)`);
    }
    emit("");
  }

  // One subsection per path: node label, then the edge into the next node.
  emit(`## Paths (${opts.paths.length})`, "");
  opts.paths.forEach((path, i) => {
    if (!path) return;
    emit(`### Path #${i + 1}`, "");
    path.nodes.forEach((nodeId, j) => {
      if (!nodeId) return;
      const meta = opts.idToMeta.get(nodeId);
      emit(meta ? `${meta.basename} (id=${nodeId})` : `Unknown (id=${nodeId})`);
      const edge = j < path.edges.length ? path.edges[j] : undefined;
      if (edge) {
        emit(
          edge.canonicalType
            ? `-- raw:"${edge.rawType}" (canonical:"${edge.canonicalType}") -->`
            : `-- raw:"${edge.rawType}" -->`
        );
      }
    });
    emit("");
  });

  return out.join("\n");
}

View File

@ -0,0 +1,24 @@
/**
 * Reduce an Obsidian wiki-link target to its bare note basename.
 * Drops an alias suffix ("|...") first, then a heading suffix ("#..."),
 * and trims surrounding whitespace.
 *
 * Examples:
 *  - "foo"         -> "foo"
 *  - "foo|bar"     -> "foo"
 *  - "foo#sec"     -> "foo"
 *  - "foo#sec|bar" -> "foo"
 *  - "foo|bar#sec" -> "foo"
 */
export function normalizeTargetToBasename(target: string): string {
  const beforeAlias = target.split("|")[0];
  if (!beforeAlias) return target.trim(); // nothing before the alias separator
  const beforeHeading = beforeAlias.split("#")[0];
  return beforeHeading ? beforeHeading.trim() : target.trim();
}

View File

@ -0,0 +1,215 @@
import type { GraphIndex } from "./GraphIndex";
import type { EdgeRecord } from "./types";
// NOTE(review): PathStep and PathEdge are exported but not referenced within
// this module — presumably consumed elsewhere; confirm before removing.

/** A single node visited along a traversal path. */
export interface PathStep {
  nodeId: string;
}

/** A fully-detailed edge along a traversal path. */
export interface PathEdge {
  rawType: string;
  canonicalType: string;
  dstId: string;
  srcId: string;
  lineStart: number;
  srcPath: string;
}

/** A traversal result: visited node ids plus the edges taken, in order. */
export interface Path {
  startId: string;
  nodes: string[];
  // edges[i] describes the hop from nodes[i] into nodes[i + 1].
  edges: Array<{ rawType: string; canonicalType: string; to: string }>;
}
/**
 * Breadth-first forward traversal from `startId` over outgoing edges.
 * Returns paths of up to `maxHops` edges (so maxHops=2 means up to 3 nodes),
 * capped at `maxPaths`. Edges with an unresolved canonical type are skipped,
 * as are edges outside `allowedCanonicals` (when given) and edges that would
 * revisit a node already on the path. Only completed paths are reported:
 * those that hit the hop budget and those ending at a node with no outgoing
 * edges at all (a node whose continuations are all filtered out yields no
 * path — preserved from the original behavior).
 */
export function traverseForward(
  index: GraphIndex,
  startId: string,
  maxHops: number,
  maxPaths: number = 200,
  allowedCanonicals?: Set<string>
): Path[] {
  type Frontier = {
    nodes: string[];
    edges: Array<{ rawType: string; canonicalType: string; to: string }>;
  };

  const results: Path[] = [];
  const frontier: Frontier[] = [{ nodes: [startId], edges: [] }];

  const record = (item: Frontier): void => {
    results.push({ startId, nodes: item.nodes, edges: item.edges });
  };

  while (frontier.length > 0 && results.length < maxPaths) {
    const item = frontier.shift();
    if (!item) break;
    const tail = item.nodes[item.nodes.length - 1];
    if (!tail) break;

    // Hop count = number of edges taken so far.
    if (item.nodes.length - 1 >= maxHops) {
      if (item.nodes.length > 1) record(item);
      continue;
    }

    const candidates = index.outgoing.get(tail) || [];
    if (candidates.length === 0) {
      // Dead end — keep as a complete path if at least one hop was made.
      if (item.nodes.length > 1) record(item);
      continue;
    }

    for (const edge of candidates) {
      if (!edge.canonicalType) continue; // unresolved edge type
      if (allowedCanonicals && !allowedCanonicals.has(edge.canonicalType)) continue;
      if (item.nodes.includes(edge.dstId)) continue; // cycle guard

      const extended: Frontier = {
        nodes: [...item.nodes, edge.dstId],
        edges: [
          ...item.edges,
          { rawType: edge.rawType, canonicalType: edge.canonicalType, to: edge.dstId },
        ],
      };
      if (extended.nodes.length - 1 >= maxHops) {
        record(extended); // hop budget reached — path complete
      } else {
        frontier.push(extended);
      }
    }
  }

  return results.slice(0, maxPaths);
}
/**
 * Breadth-first backward traversal from `startId` over incoming edges.
 * Returns paths of up to `maxHops` edges (so maxHops=2 means up to 3 nodes),
 * capped at `maxPaths`. `nodes` lists the start node first, then successive
 * predecessors (each step moves to an edge's srcId). Edges with an unresolved
 * canonical type are skipped, as are edges outside `allowedCanonicals` (when
 * given) and edges that would revisit a node already on the path. Only
 * completed paths are reported: those that hit the hop budget and those
 * ending at a node with no incoming edges at all.
 */
export function traverseBackward(
  index: GraphIndex,
  startId: string,
  maxHops: number,
  maxPaths: number = 200,
  allowedCanonicals?: Set<string>
): Path[] {
  type Frontier = {
    nodes: string[];
    edges: Array<{ rawType: string; canonicalType: string; to: string }>;
  };

  const results: Path[] = [];
  const frontier: Frontier[] = [{ nodes: [startId], edges: [] }];

  const record = (item: Frontier): void => {
    results.push({ startId, nodes: item.nodes, edges: item.edges });
  };

  while (frontier.length > 0 && results.length < maxPaths) {
    const item = frontier.shift();
    if (!item) break;
    const tail = item.nodes[item.nodes.length - 1];
    if (!tail) break;

    // Hop count = number of edges taken so far.
    if (item.nodes.length - 1 >= maxHops) {
      if (item.nodes.length > 1) record(item);
      continue;
    }

    const candidates = index.incoming.get(tail) || [];
    if (candidates.length === 0) {
      // Dead end — keep as a complete path if at least one hop was made.
      if (item.nodes.length > 1) record(item);
      continue;
    }

    for (const edge of candidates) {
      if (!edge.canonicalType) continue; // unresolved edge type
      if (allowedCanonicals && !allowedCanonicals.has(edge.canonicalType)) continue;
      if (item.nodes.includes(edge.srcId)) continue; // cycle guard

      // Backward step: the edge's srcId is the next node on the path.
      const extended: Frontier = {
        nodes: [...item.nodes, edge.srcId],
        edges: [
          ...item.edges,
          { rawType: edge.rawType, canonicalType: edge.canonicalType, to: edge.srcId },
        ],
      };
      if (extended.nodes.length - 1 >= maxHops) {
        record(extended); // hop budget reached — path complete
      } else {
        frontier.push(extended);
      }
    }
  }

  return results.slice(0, maxPaths);
}

31
src/graph/types.ts Normal file
View File

@ -0,0 +1,31 @@
/** Metadata for a graph node, keyed by its frontmatter id. */
export interface NodeMeta {
  id: string;
  path: string;
  basename: string;
  title?: string;
}

/** One directed, typed edge parsed from an [!edge] callout. */
export interface EdgeRecord {
  srcId: string;
  dstId: string;
  rawType: string;
  canonicalType: string | null;
  inverseType: string | null;
  srcPath: string;
  dstPath: string;
  lineStart: number;
  lineEnd: number;
  rawTarget: string; // original [[...]] content for debugging
}

/** Result of a full-vault graph build, including non-fatal warnings. */
export interface GraphBuildResult {
  filePathToId: Map<string, string>;
  basenameLowerToPath: Map<string, string>;
  idToMeta: Map<string, NodeMeta>;
  edges: EdgeRecord[];
  warnings: {
    missingFrontmatterId: string[]; // file paths
    missingTargetFile: Array<{ srcPath: string; target: string }>;
    missingTargetId: Array<{ srcPath: string; targetPath: string }>;
  };
}

View File

@ -1,26 +1,30 @@
import type { App, TFile } from "obsidian";
import type { App } from "obsidian";
import type { ParsedEdge } from "../parser/types";
import type { Vocabulary } from "../vocab/Vocabulary";
import type { Finding, QuickFix } from "./types";
import type { Finding } from "./types";
import { parseEdgesFromCallouts } from "../parser/parseEdgesFromCallouts";
const EDGE_HEADER_RE = /^\s*(>+)\s*\[!edge\]\s*(.+?)\s*$/i;
export interface LintOptions {
showCanonicalHints?: boolean;
}
/**
* Pure function to lint parsed edges against vocabulary and file existence.
* This can be tested independently.
*/
export function lintEdges(
export function lintParsedEdges(
parsedEdges: ParsedEdge[],
vocabulary: Vocabulary,
existingFilesSet: Set<string>
existingFileNamesSet: Set<string>,
opts: LintOptions = {}
): Finding[] {
const findings: Finding[] = [];
const { showCanonicalHints = false } = opts;
for (const edge of parsedEdges) {
const normalized = vocabulary.normalize(edge.rawType);
// Check for unknown edge type
// R1: Check for unknown edge type (ERROR)
if (normalized.canonical === null) {
findings.push({
ruleId: "unknown_edge_type",
@ -31,26 +35,36 @@ export function lintEdges(
lineEnd: edge.lineEnd,
evidence: edge.rawType,
});
continue;
}
// Check for alias not normalized
const rawLower = edge.rawType.trim().toLowerCase();
const canonicalLower = normalized.canonical.toLowerCase();
if (rawLower !== canonicalLower) {
} else {
// Optional: Show canonical hints (INFO)
if (showCanonicalHints) {
findings.push({
ruleId: "alias_not_normalized",
severity: "WARN",
message: `Edge type "${edge.rawType}" should be normalized to "${normalized.canonical}"`,
ruleId: "canonical_hint",
severity: "INFO",
message: `Edge type resolved: raw='${edge.rawType}' canonical='${normalized.canonical}'`,
filePath: "", // Will be set by caller
lineStart: edge.lineStart,
lineEnd: edge.lineStart,
evidence: edge.rawType,
quickFixes: [], // Will be populated by caller with file context
});
}
}
// R3: Check for edges without targets (WARN)
if (edge.targets.length === 0) {
findings.push({
ruleId: "edge_without_target",
severity: "WARN",
message: `Edge type "${edge.rawType}" has no target notes`,
filePath: "", // Will be set by caller
lineStart: edge.lineStart,
lineEnd: edge.lineEnd,
evidence: edge.rawType,
});
}
// Check for missing target notes
// R2: Check for missing target notes (WARN)
// Check targets regardless of whether edge type is known or unknown
for (const target of edge.targets) {
if (!target) continue;
@ -65,7 +79,7 @@ export function lintEdges(
const markdownFileName = baseName.endsWith(".md") ? baseName : `${baseName}.md`;
if (!existingFilesSet.has(markdownFileName) && !existingFilesSet.has(baseName)) {
if (!existingFileNamesSet.has(markdownFileName) && !existingFileNamesSet.has(baseName)) {
findings.push({
ruleId: "missing_target_note",
severity: "WARN",
@ -91,7 +105,8 @@ export class LintEngine {
*/
static async lintCurrentNote(
app: App,
vocabulary: Vocabulary
vocabulary: Vocabulary,
options: LintOptions = {}
): Promise<Finding[]> {
const activeFile = app.workspace.getActiveFile();
@ -109,121 +124,27 @@ export class LintEngine {
// Parse edges
const parsedEdges = parseEdgesFromCallouts(content);
// Build set of existing markdown files in vault
const existingFilesSet = new Set<string>();
// Build set of existing markdown file names in vault
const existingFileNamesSet = new Set<string>();
const markdownFiles = app.vault.getMarkdownFiles();
for (const file of markdownFiles) {
existingFilesSet.add(file.name);
existingFileNamesSet.add(file.name);
// Also add without .md extension for matching
if (file.name.endsWith(".md")) {
const baseName = file.name.slice(0, -3);
existingFilesSet.add(baseName);
existingFileNamesSet.add(baseName);
}
}
// Run pure linting logic
const findings = lintEdges(parsedEdges, vocabulary, existingFilesSet);
const findings = lintParsedEdges(parsedEdges, vocabulary, existingFileNamesSet, options);
// Set filePath and add quickfixes
// Set filePath for all findings
const filePath = activeFile.path;
const lines = content.split(/\r?\n/);
for (const finding of findings) {
finding.filePath = filePath;
// Add quickfix for alias_not_normalized
if (finding.ruleId === "alias_not_normalized" && finding.lineStart !== undefined) {
const lineIndex = finding.lineStart;
const line = lines[lineIndex];
if (line) {
const normalized = vocabulary.normalize(finding.evidence || "");
if (normalized.canonical) {
finding.quickFixes = [
createNormalizeQuickFix(
app,
activeFile,
content,
lineIndex,
finding.evidence || "",
normalized.canonical
),
];
}
}
}
}
return findings;
}
}
/**
* Create a quickfix that normalizes an edge type in the file.
*/
function createNormalizeQuickFix(
app: App,
file: TFile,
currentContent: string,
lineIndex: number,
rawType: string,
canonical: string
): QuickFix {
return {
id: "normalize_edge_type",
title: `Normalize to "${canonical}"`,
apply: async () => {
const { Notice } = await import("obsidian");
const lines = currentContent.split(/\r?\n/);
const line = lines[lineIndex];
if (!line) {
new Notice("Line not found");
return;
}
// Match the edge header pattern
const match = line.match(EDGE_HEADER_RE);
if (!match || !match[2]) {
new Notice("Edge header pattern not found on line");
return;
}
// Find the position of the raw type in the line
// match[2] is the captured type, but we need to find where it appears in the original line
const edgeMarker = "[!edge]";
const edgeIndex = line.indexOf(edgeMarker);
if (edgeIndex === -1) {
new Notice("Edge marker not found on line");
return;
}
// Find the type after [!edge]
const afterEdge = line.substring(edgeIndex + edgeMarker.length);
const typeMatch = afterEdge.match(/^\s+(\S+)/);
if (!typeMatch || typeMatch[1] !== rawType.trim()) {
new Notice("Type token not found at expected position");
return;
}
// Replace the raw type with canonical
const beforeType = line.substring(0, edgeIndex + edgeMarker.length + typeMatch[0].indexOf(typeMatch[1]));
const afterType = line.substring(beforeType.length + typeMatch[1].length);
const newLine = beforeType + canonical + afterType;
// Safety check: verify the new line still matches the pattern
const verifyMatch = newLine.match(EDGE_HEADER_RE);
if (!verifyMatch) {
new Notice("Quickfix would produce invalid line - skipping");
return;
}
// Update the line
lines[lineIndex] = newLine;
const newContent = lines.join("\n");
// Write back to file
await app.vault.modify(file, newContent);
},
};
}

View File

@ -5,6 +5,12 @@ import { parseEdgeVocabulary } from "./vocab/parseEdgeVocabulary";
import { Vocabulary } from "./vocab/Vocabulary";
import { LintEngine } from "./lint/LintEngine";
import { MindnetSettingTab } from "./ui/MindnetSettingTab";
import { exportGraph } from "./export/exportGraph";
import { buildGraph } from "./graph/GraphBuilder";
import { buildIndex } from "./graph/GraphIndex";
import { traverseForward, traverseBackward, type Path } from "./graph/traverse";
import { renderChainReport } from "./graph/renderChainReport";
import { extractFrontmatterId } from "./parser/parseFrontmatter";
export default class MindnetCausalAssistantPlugin extends Plugin {
settings: MindnetSettings;
@ -58,7 +64,11 @@ export default class MindnetCausalAssistantPlugin extends Plugin {
return;
}
const findings = await LintEngine.lintCurrentNote(this.app, vocabulary);
const findings = await LintEngine.lintCurrentNote(
this.app,
vocabulary,
{ showCanonicalHints: this.settings.showCanonicalHints }
);
// Count findings by severity
const errorCount = findings.filter(f => f.severity === "ERROR").length;
@ -71,10 +81,7 @@ export default class MindnetCausalAssistantPlugin extends Plugin {
// Log findings to console
console.log("=== Lint Findings ===");
for (const finding of findings) {
const quickfixInfo = finding.quickFixes && finding.quickFixes.length > 0
? ` [QuickFix: ${finding.quickFixes.map(qf => qf.title).join(", ")}]`
: "";
console.log(`[${finding.severity}] ${finding.ruleId}: ${finding.message} (${finding.filePath}:${finding.lineStart}${quickfixInfo})`);
console.log(`[${finding.severity}] ${finding.ruleId}: ${finding.message} (${finding.filePath}:${finding.lineStart})`);
}
} catch (e) {
const msg = e instanceof Error ? e.message : String(e);
@ -83,6 +90,139 @@ export default class MindnetCausalAssistantPlugin extends Plugin {
}
},
});
this.addCommand({
id: "mindnet-export-graph",
name: "Mindnet: Export graph",
callback: async () => {
try {
const vocabulary = await this.ensureVocabularyLoaded();
if (!vocabulary) {
return;
}
const outputPath = "_system/exports/graph_export.json";
await exportGraph(this.app, vocabulary, outputPath);
new Notice(`Graph exported to ${outputPath}`);
console.log(`Graph exported: ${outputPath}`);
} catch (e) {
const msg = e instanceof Error ? e.message : String(e);
new Notice(`Failed to export graph: ${msg}`);
console.error(e);
}
},
});
this.addCommand({
id: "mindnet-show-chains-from-current-note",
name: "Mindnet: Show chains from current note",
callback: async () => {
try {
const vocabulary = await this.ensureVocabularyLoaded();
if (!vocabulary) {
return;
}
const activeFile = this.app.workspace.getActiveFile();
if (!activeFile) {
new Notice("No active file");
return;
}
if (activeFile.extension !== "md") {
new Notice("Active file is not a markdown file");
return;
}
// Extract start ID from frontmatter
const content = await this.app.vault.read(activeFile);
const startId = extractFrontmatterId(content);
if (!startId) {
new Notice("Current note has no frontmatter ID. Add 'id: <value>' to frontmatter.");
return;
}
// Build graph
const graph = await buildGraph(this.app, vocabulary);
// Get start node meta
const startMeta = graph.idToMeta.get(startId);
if (!startMeta) {
new Notice(`Start node ID '${startId}' not found in graph`);
return;
}
// Build index
const index = buildIndex(graph.edges);
// Run traversal
let allPaths: Path[] = [];
if (this.settings.chainDirection === "forward" || this.settings.chainDirection === "both") {
const forwardPaths = traverseForward(
index,
startId,
this.settings.maxHops,
200
);
allPaths = [...allPaths, ...forwardPaths];
}
if (this.settings.chainDirection === "backward" || this.settings.chainDirection === "both") {
const backwardPaths = traverseBackward(
index,
startId,
this.settings.maxHops,
200
);
allPaths = [...allPaths, ...backwardPaths];
}
// Render report
const report = renderChainReport({
startId,
startMeta,
paths: allPaths,
direction: this.settings.chainDirection,
maxHops: this.settings.maxHops,
maxPaths: 200,
warnings: graph.warnings,
idToMeta: graph.idToMeta,
});
// Write report file
const reportPath = "_system/exports/chain_report.md";
await this.app.vault.adapter.write(reportPath, report);
// Open report
const reportFile = this.app.vault.getAbstractFileByPath(reportPath);
if (reportFile && reportFile instanceof TFile) {
await this.app.workspace.openLinkText(reportPath, "", true);
}
// Show summary
const uniqueNodes = new Set<string>();
for (const path of allPaths) {
for (const nodeId of path.nodes) {
uniqueNodes.add(nodeId);
}
}
const totalWarnings =
graph.warnings.missingFrontmatterId.length +
graph.warnings.missingTargetFile.length +
graph.warnings.missingTargetId.length;
new Notice(
`Chains: ${allPaths.length} paths, ${uniqueNodes.size} nodes, ${totalWarnings} warnings`
);
} catch (e) {
const msg = e instanceof Error ? e.message : String(e);
new Notice(`Failed to generate chain report: ${msg}`);
console.error(e);
}
},
});
}
onunload(): void {

View File

@ -0,0 +1,51 @@
/**
 * Extract the `id` value from a markdown file's YAML frontmatter.
 * Only the region between the opening and closing `---` lines is inspected;
 * the opening delimiter must be the very first line. Quoted values are
 * unquoted; numeric YAML values come back as strings.
 * Returns null when there is no frontmatter or no `id` key.
 */
export function extractFrontmatterId(markdown: string): string | null {
  const rows = markdown.split(/\r?\n/);

  // Frontmatter must open on the very first line.
  if (rows.length === 0 || rows[0]?.trim() !== "---") {
    return null;
  }

  // Locate the closing delimiter (skipping the opening line).
  const closeAt = rows.findIndex((row, i) => i > 0 && row.trim() === "---");
  if (closeAt === -1) {
    return null; // unterminated frontmatter
  }

  const yamlBlock = rows.slice(1, closeAt).join("\n");

  // Minimal YAML handling: find a top-level "id: value" line.
  const match = yamlBlock.match(/^id\s*:\s*(.+)$/m);
  if (!match || !match[1]) {
    return null;
  }

  let value = match[1].trim();
  const quoted =
    (value.startsWith('"') && value.endsWith('"')) ||
    (value.startsWith("'") && value.endsWith("'"));
  if (quoted) {
    value = value.slice(1, -1);
  }

  // Normalize to string (covers numeric YAML ids).
  return String(value);
}

View File

@ -3,6 +3,8 @@ export interface MindnetSettings {
graphSchemaPath: string; // vault-relativ (später)
maxHops: number;
strictMode: boolean;
showCanonicalHints: boolean;
chainDirection: "forward" | "backward" | "both";
}
export const DEFAULT_SETTINGS: MindnetSettings = {
@ -10,6 +12,8 @@ export interface MindnetSettings {
graphSchemaPath: "_system/dictionary/graph_schema.md",
maxHops: 3,
strictMode: false,
showCanonicalHints: false,
chainDirection: "forward",
};
/**

View File

@ -0,0 +1,137 @@
import { describe, it, expect } from "vitest";
import type { ParsedEdge } from "../../parser/types";
import type { ExportEdge } from "../../export/types";
import { Vocabulary } from "../../vocab/Vocabulary";
import { parseEdgeVocabulary } from "../../vocab/parseEdgeVocabulary";
// Tests for the ParsedEdge -> ExportEdge mapping used by the graph export:
// the raw (user-written) type is always preserved, while canonical and
// inverse types are resolved via the vocabulary (both null for unknown types).
describe("exportGraph edge mapping", () => {
  // Create a minimal vocabulary for testing
  const vocabMd = `
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
| :--- | :--- | :--- | :--- |
| \`caused_by\` | \`resulted_in\` | \`wegen\`, \`ausgelöst_durch\` | Test |
| \`impacts\` | \`impacted_by\` | *(Kein Alias)* | Test |
`;
  const vocabulary = new Vocabulary(parseEdgeVocabulary(vocabMd));

  // Mirrors the per-target mapping exportGraph performs for one parsed edge.
  function createExportEdge(
    parsedEdge: ParsedEdge,
    source: string,
    target: string
  ): ExportEdge {
    const normalized = vocabulary.normalize(parsedEdge.rawType);
    return {
      source,
      target,
      rawType: parsedEdge.rawType,
      canonicalType: normalized.canonical,
      inverseType: normalized.inverse,
      sourcePath: source,
      lineStart: parsedEdge.lineStart,
      lineEnd: parsedEdge.lineEnd,
    };
  }

  it("maps edge with canonical type correctly", () => {
    const parsedEdge: ParsedEdge = {
      rawType: "caused_by",
      targets: ["TargetNote"],
      lineStart: 5,
      lineEnd: 7,
    };
    const exportEdge = createExportEdge(parsedEdge, "source.md", "target.md");
    expect(exportEdge.rawType).toBe("caused_by");
    expect(exportEdge.canonicalType).toBe("caused_by");
    expect(exportEdge.inverseType).toBe("resulted_in");
    expect(exportEdge.source).toBe("source.md");
    expect(exportEdge.target).toBe("target.md");
    expect(exportEdge.lineStart).toBe(5);
    expect(exportEdge.lineEnd).toBe(7);
  });

  it("maps edge with alias correctly", () => {
    const parsedEdge: ParsedEdge = {
      rawType: "wegen",
      targets: ["TargetNote"],
      lineStart: 10,
      lineEnd: 10,
    };
    const exportEdge = createExportEdge(parsedEdge, "source.md", "target.md");
    expect(exportEdge.rawType).toBe("wegen");
    expect(exportEdge.canonicalType).toBe("caused_by");
    expect(exportEdge.inverseType).toBe("resulted_in");
  });

  it("maps edge with unknown type correctly", () => {
    const parsedEdge: ParsedEdge = {
      rawType: "unknown_type",
      targets: ["TargetNote"],
      lineStart: 0,
      lineEnd: 0,
    };
    const exportEdge = createExportEdge(parsedEdge, "source.md", "target.md");
    expect(exportEdge.rawType).toBe("unknown_type");
    expect(exportEdge.canonicalType).toBe(null);
    expect(exportEdge.inverseType).toBe(null);
  });

  it("preserves rawType even when canonical is resolved", () => {
    const parsedEdge: ParsedEdge = {
      rawType: "ausgelöst_durch",
      targets: ["TargetNote"],
      lineStart: 3,
      lineEnd: 3,
    };
    const exportEdge = createExportEdge(parsedEdge, "source.md", "target.md");
    expect(exportEdge.rawType).toBe("ausgelöst_durch");
    expect(exportEdge.canonicalType).toBe("caused_by");
    expect(exportEdge.inverseType).toBe("resulted_in");
  });

  it("handles edge without inverse type", () => {
    // Use impacts which has no aliases but has inverse
    const parsedEdge: ParsedEdge = {
      rawType: "impacts",
      targets: ["TargetNote"],
      lineStart: 0,
      lineEnd: 0,
    };
    const exportEdge = createExportEdge(parsedEdge, "source.md", "target.md");
    expect(exportEdge.rawType).toBe("impacts");
    expect(exportEdge.canonicalType).toBe("impacts");
    expect(exportEdge.inverseType).toBe("impacted_by");
  });

  it("handles multiple targets per edge", () => {
    // One ParsedEdge with N targets fans out to N ExportEdges that all share
    // the same raw/canonical type but differ in target.
    const parsedEdge: ParsedEdge = {
      rawType: "caused_by",
      targets: ["Target1", "Target2", "Target3"],
      lineStart: 0,
      lineEnd: 5,
    };
    const edge1 = createExportEdge(parsedEdge, "source.md", "target1.md");
    const edge2 = createExportEdge(parsedEdge, "source.md", "target2.md");
    const edge3 = createExportEdge(parsedEdge, "source.md", "target3.md");
    expect(edge1.target).toBe("target1.md");
    expect(edge2.target).toBe("target2.md");
    expect(edge3.target).toBe("target3.md");
    expect(edge1.rawType).toBe("caused_by");
    expect(edge2.rawType).toBe("caused_by");
    expect(edge3.rawType).toBe("caused_by");
    expect(edge1.canonicalType).toBe("caused_by");
    expect(edge2.canonicalType).toBe("caused_by");
    expect(edge3.canonicalType).toBe("caused_by");
  });
});

View File

@ -0,0 +1,369 @@
import { describe, it, expect } from "vitest";
import type { GraphBuildResult } from "../../graph/types";
import { Vocabulary } from "../../vocab/Vocabulary";
import { parseEdgeVocabulary } from "../../vocab/parseEdgeVocabulary";
import { normalizeTargetToBasename } from "../../graph/resolveTarget";
import { extractFrontmatterId } from "../../parser/parseFrontmatter";
import { parseEdgesFromCallouts } from "../../parser/parseEdgesFromCallouts";
/** In-memory stand-in for a vault markdown file, used by the graph-builder tests. */
interface FileFixture {
  // Vault-relative file path, e.g. "notes/foo.md".
  path: string;
  // File name without extension; used (lower-cased) for wiki-link target resolution.
  basename: string;
  // Full markdown content, including any YAML frontmatter.
  content: string;
}
/**
* Pure function to build graph from in-memory fixtures.
* This can be tested without Obsidian App.
*/
function buildGraphFromFixtures(
fixtures: FileFixture[],
vocabulary: Vocabulary
): GraphBuildResult {
const filePathToId = new Map<string, string>();
const basenameLowerToPath = new Map<string, string>();
const idToMeta = new Map<string, { id: string; path: string; basename: string; title?: string }>();
const edges: GraphBuildResult["edges"] = [];
const warnings: GraphBuildResult["warnings"] = {
missingFrontmatterId: [],
missingTargetFile: [],
missingTargetId: [],
};
// First pass: build node maps
for (const file of fixtures) {
const id = extractFrontmatterId(file.content);
// Always add to basenameLowerToPath for target resolution
const basenameLower = file.basename.toLowerCase();
basenameLowerToPath.set(basenameLower, file.path);
if (!id) {
warnings.missingFrontmatterId.push(file.path);
continue;
}
// Extract optional title
let title: string | undefined;
const titleMatch = file.content.match(/^title\s*:\s*(.+)$/m);
if (titleMatch && titleMatch[1]) {
let titleValue = titleMatch[1].trim();
if ((titleValue.startsWith('"') && titleValue.endsWith('"')) ||
(titleValue.startsWith("'") && titleValue.endsWith("'"))) {
titleValue = titleValue.slice(1, -1);
}
title = titleValue;
}
filePathToId.set(file.path, id);
idToMeta.set(id, {
id,
path: file.path,
basename: file.basename,
title,
});
}
// Second pass: build edges
for (const file of fixtures) {
const srcId = filePathToId.get(file.path);
if (!srcId) {
continue;
}
const parsedEdges = parseEdgesFromCallouts(file.content);
for (const parsedEdge of parsedEdges) {
const normalized = vocabulary.normalize(parsedEdge.rawType);
for (const target of parsedEdge.targets) {
if (!target) continue;
const resolvedBase = normalizeTargetToBasename(target);
const targetPath = basenameLowerToPath.get(resolvedBase.toLowerCase());
if (!targetPath) {
warnings.missingTargetFile.push({
srcPath: file.path,
target: target,
});
continue;
}
const dstId = filePathToId.get(targetPath);
if (!dstId) {
warnings.missingTargetId.push({
srcPath: file.path,
targetPath: targetPath,
});
continue;
}
edges.push({
srcId,
dstId,
rawType: parsedEdge.rawType,
canonicalType: normalized.canonical,
inverseType: normalized.inverse,
srcPath: file.path,
dstPath: targetPath,
lineStart: parsedEdge.lineStart,
lineEnd: parsedEdge.lineEnd,
rawTarget: target,
});
}
}
}
return {
filePathToId,
basenameLowerToPath,
idToMeta,
edges,
warnings,
};
}
// End-to-end tests for the two-pass fixture graph builder: node-map
// construction, target resolution, and the three warning categories.
describe("buildGraphFromFixtures", () => {
  const vocabMd = `
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
| :--- | :--- | :--- | :--- |
| \`caused_by\` | \`resulted_in\` | \`wegen\` | Test |
`;
  const vocabulary = new Vocabulary(parseEdgeVocabulary(vocabMd));

  it("skips files without frontmatter id", () => {
    const fixtures: FileFixture[] = [
      {
        path: "file1.md",
        basename: "file1",
        content: `---
title: File 1
---
Content.
`,
      },
      {
        path: "file2.md",
        basename: "file2",
        content: `---
id: node-2
title: File 2
---
Content.
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    expect(result.warnings.missingFrontmatterId).toEqual(["file1.md"]);
    expect(result.idToMeta.has("node-2")).toBe(true);
    expect(result.idToMeta.has("node-1")).toBe(false);
  });

  it("resolves targets with aliases and headings", () => {
    // "[[target#section|Alias]]" must resolve to target.md; rawTarget keeps
    // the original link text.
    const fixtures: FileFixture[] = [
      {
        path: "source.md",
        basename: "source",
        content: `---
id: src-1
---
> [!edge] caused_by
> [[target#section|Alias]]
`,
      },
      {
        path: "target.md",
        basename: "target",
        content: `---
id: tgt-1
---
Content.
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    expect(result.edges.length).toBe(1);
    const edge = result.edges[0];
    if (!edge) throw new Error("Expected edge");
    expect(edge.srcId).toBe("src-1");
    expect(edge.dstId).toBe("tgt-1");
    expect(edge.rawTarget).toBe("target#section|Alias");
    expect(edge.canonicalType).toBe("caused_by");
  });

  it("creates edges with correct srcId and dstId", () => {
    const fixtures: FileFixture[] = [
      {
        path: "a.md",
        basename: "a",
        content: `---
id: node-a
---
> [!edge] caused_by
> [[b]]
`,
      },
      {
        path: "b.md",
        basename: "b",
        content: `---
id: node-b
---
Content.
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    expect(result.edges.length).toBe(1);
    const edge = result.edges[0];
    if (!edge) throw new Error("Expected edge");
    expect(edge.srcId).toBe("node-a");
    expect(edge.dstId).toBe("node-b");
    expect(edge.srcPath).toBe("a.md");
    expect(edge.dstPath).toBe("b.md");
  });

  it("warns about missing target file", () => {
    const fixtures: FileFixture[] = [
      {
        path: "source.md",
        basename: "source",
        content: `---
id: src-1
---
> [!edge] caused_by
> [[missing]]
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    expect(result.edges.length).toBe(0);
    expect(result.warnings.missingTargetFile.length).toBe(1);
    expect(result.warnings.missingTargetFile[0]?.target).toBe("missing");
  });

  it("warns about target file without id", () => {
    const fixtures: FileFixture[] = [
      {
        path: "source.md",
        basename: "source",
        content: `---
id: src-1
---
> [!edge] caused_by
> [[target]]
`,
      },
      {
        path: "target.md",
        basename: "target",
        content: `---
title: Target
---
Content without id.
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    // Target file exists but has no ID, so it's added to basenameLowerToPath
    // but not to filePathToId, so we get missingTargetId warning
    expect(result.edges.length).toBe(0);
    expect(result.warnings.missingTargetId.length).toBe(1);
    const warning = result.warnings.missingTargetId[0];
    if (!warning) throw new Error("Expected missingTargetId warning");
    expect(warning.targetPath).toBe("target.md");
    expect(warning.srcPath).toBe("source.md");
  });

  it("handles unknown edge types", () => {
    // Unknown types still produce an edge; canonical/inverse stay null.
    const fixtures: FileFixture[] = [
      {
        path: "a.md",
        basename: "a",
        content: `---
id: node-a
---
> [!edge] unknown_type
> [[b]]
`,
      },
      {
        path: "b.md",
        basename: "b",
        content: `---
id: node-b
---
Content.
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    expect(result.edges.length).toBe(1);
    const edge = result.edges[0];
    if (!edge) throw new Error("Expected edge");
    expect(edge.rawType).toBe("unknown_type");
    expect(edge.canonicalType).toBe(null);
    expect(edge.inverseType).toBe(null);
  });

  it("handles aliases in edge types", () => {
    const fixtures: FileFixture[] = [
      {
        path: "a.md",
        basename: "a",
        content: `---
id: node-a
---
> [!edge] wegen
> [[b]]
`,
      },
      {
        path: "b.md",
        basename: "b",
        content: `---
id: node-b
---
Content.
`,
      },
    ];
    const result = buildGraphFromFixtures(fixtures, vocabulary);
    expect(result.edges.length).toBe(1);
    const edge = result.edges[0];
    if (!edge) throw new Error("Expected edge");
    expect(edge.rawType).toBe("wegen");
    expect(edge.canonicalType).toBe("caused_by");
    expect(edge.inverseType).toBe("resulted_in");
  });
});

View File

@ -0,0 +1,116 @@
import { describe, it, expect } from "vitest";
import type { EdgeRecord } from "../../graph/types";
import { buildIndex } from "../../graph/GraphIndex";
// Tests for the adjacency index: outgoing/incoming maps keyed by node id,
// with deterministic (sorted) edge ordering per node.
describe("buildIndex", () => {
  it("builds outgoing map correctly", () => {
    const edges: EdgeRecord[] = [
      {
        srcId: "a",
        dstId: "b",
        rawType: "caused_by",
        canonicalType: "caused_by",
        inverseType: "resulted_in",
        srcPath: "a.md",
        dstPath: "b.md",
        lineStart: 0,
        lineEnd: 0,
        rawTarget: "b",
      },
      {
        srcId: "a",
        dstId: "c",
        rawType: "impacts",
        canonicalType: "impacts",
        inverseType: "impacted_by",
        srcPath: "a.md",
        dstPath: "c.md",
        lineStart: 1,
        lineEnd: 1,
        rawTarget: "c",
      },
    ];
    const index = buildIndex(edges);
    expect(index.outgoing.get("a")?.length).toBe(2);
    // Nodes that are only targets may have no entry or an empty list.
    const bOutgoing = index.outgoing.get("b");
    expect(bOutgoing === undefined || bOutgoing.length === 0).toBe(true);
    const cOutgoing = index.outgoing.get("c");
    expect(cOutgoing === undefined || cOutgoing.length === 0).toBe(true);
  });

  it("builds incoming map correctly", () => {
    const edges: EdgeRecord[] = [
      {
        srcId: "a",
        dstId: "b",
        rawType: "caused_by",
        canonicalType: "caused_by",
        inverseType: "resulted_in",
        srcPath: "a.md",
        dstPath: "b.md",
        lineStart: 0,
        lineEnd: 0,
        rawTarget: "b",
      },
      {
        srcId: "c",
        dstId: "b",
        rawType: "impacts",
        canonicalType: "impacts",
        inverseType: "impacted_by",
        srcPath: "c.md",
        dstPath: "b.md",
        lineStart: 0,
        lineEnd: 0,
        rawTarget: "b",
      },
    ];
    const index = buildIndex(edges);
    const aIncoming = index.incoming.get("a");
    expect(aIncoming === undefined || aIncoming.length === 0).toBe(true);
    expect(index.incoming.get("b")?.length).toBe(2);
    const cIncoming = index.incoming.get("c");
    expect(cIncoming === undefined || cIncoming.length === 0).toBe(true);
  });

  it("sorts edges deterministically", () => {
    // Insertion order is z-before-b; the index must order by dstId.
    const edges: EdgeRecord[] = [
      {
        srcId: "a",
        dstId: "z",
        rawType: "caused_by",
        canonicalType: "caused_by",
        inverseType: null,
        srcPath: "a.md",
        dstPath: "z.md",
        lineStart: 0,
        lineEnd: 0,
        rawTarget: "z",
      },
      {
        srcId: "a",
        dstId: "b",
        rawType: "caused_by",
        canonicalType: "caused_by",
        inverseType: null,
        srcPath: "a.md",
        dstPath: "b.md",
        lineStart: 0,
        lineEnd: 0,
        rawTarget: "b",
      },
    ];
    const index = buildIndex(edges);
    const outgoing = index.outgoing.get("a");
    if (!outgoing) throw new Error("Expected outgoing edges");
    expect(outgoing.length).toBe(2);
    expect(outgoing[0]?.dstId).toBe("b");
    expect(outgoing[1]?.dstId).toBe("z");
  });
});

View File

@ -0,0 +1,29 @@
import { describe, it, expect } from "vitest";
import { normalizeTargetToBasename } from "../../graph/resolveTarget";
// Tests for wiki-link target normalization: strips "#heading" and "|alias"
// suffixes (in either combination) and trims whitespace to yield a basename.
describe("normalizeTargetToBasename", () => {
  it("returns basename for simple target", () => {
    expect(normalizeTargetToBasename("foo")).toBe("foo");
  });

  it("removes alias separator", () => {
    expect(normalizeTargetToBasename("foo|bar")).toBe("foo");
  });

  it("removes heading separator", () => {
    expect(normalizeTargetToBasename("foo#sec")).toBe("foo");
  });

  it("removes heading separator when alias is present", () => {
    expect(normalizeTargetToBasename("foo#sec|bar")).toBe("foo");
  });

  it("removes alias separator when heading is in alias", () => {
    expect(normalizeTargetToBasename("foo|bar#sec")).toBe("foo");
  });

  it("handles whitespace", () => {
    expect(normalizeTargetToBasename("  foo  ")).toBe("foo");
    expect(normalizeTargetToBasename("foo | bar")).toBe("foo");
  });
});

View File

@ -0,0 +1,181 @@
import { describe, it, expect } from "vitest";
import type { EdgeRecord } from "../../graph/types";
import { buildIndex } from "../../graph/GraphIndex";
import { traverseForward, traverseBackward } from "../../graph/traverse";
/**
 * Build a minimal EdgeRecord fixture for traversal tests.
 * Paths are derived from the ids ("<id>.md"), line positions are zeroed,
 * the inverse type is left unresolved, and rawTarget mirrors dstId.
 */
function createEdge(
  srcId: string,
  dstId: string,
  rawType: string,
  canonicalType: string | null
): EdgeRecord {
  const suffix = ".md";
  return {
    srcId,
    dstId,
    rawType,
    canonicalType,
    inverseType: null,
    srcPath: `${srcId}${suffix}`,
    dstPath: `${dstId}${suffix}`,
    lineStart: 0,
    lineEnd: 0,
    rawTarget: dstId,
  };
}
// Forward traversal: follows outgoing edges from a start node, bounded by
// maxHops and maxPaths, skipping cycles, null-canonical edges, and (when
// given) edge types outside the allowedCanonicals set.
describe("traverseForward", () => {
  it("traverses simple chain", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("b", "c", "caused_by", "caused_by"),
    ];
    const index = buildIndex(edges);
    const paths = traverseForward(index, "a", 3, 200);
    expect(paths.length).toBeGreaterThan(0);
    const path = paths[0];
    if (!path) throw new Error("Expected path");
    expect(path.nodes).toContain("a");
    expect(path.nodes).toContain("b");
    expect(path.nodes).toContain("c");
  });

  it("respects maxHops", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("b", "c", "caused_by", "caused_by"),
      createEdge("c", "d", "caused_by", "caused_by"),
      createEdge("d", "e", "caused_by", "caused_by"),
    ];
    const index = buildIndex(edges);
    const paths = traverseForward(index, "a", 2, 200);
    for (const path of paths) {
      expect(path.nodes.length).toBeLessThanOrEqual(3); // maxHops + 1
      expect(path.edges.length).toBeLessThanOrEqual(2); // maxHops
    }
  });

  it("avoids cycles", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("b", "a", "caused_by", "caused_by"), // cycle
    ];
    const index = buildIndex(edges);
    const paths = traverseForward(index, "a", 5, 200);
    // Should not have paths longer than 2 nodes (a -> b, but not back to a)
    for (const path of paths) {
      const nodeCounts = new Map<string, number>();
      for (const nodeId of path.nodes) {
        nodeCounts.set(nodeId, (nodeCounts.get(nodeId) || 0) + 1);
      }
      // Each node should appear at most once
      for (const count of nodeCounts.values()) {
        expect(count).toBe(1);
      }
    }
  });

  it("ignores edges with null canonicalType", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "unknown", null),
      createEdge("a", "c", "caused_by", "caused_by"),
    ];
    const index = buildIndex(edges);
    const paths = traverseForward(index, "a", 3, 200);
    // Should only find path to c, not b
    const hasPathToB = paths.some(p => p.nodes.includes("b"));
    const hasPathToC = paths.some(p => p.nodes.includes("c"));
    expect(hasPathToB).toBe(false);
    expect(hasPathToC).toBe(true);
  });

  it("filters by allowedCanonicals", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("a", "c", "impacts", "impacts"),
    ];
    const index = buildIndex(edges);
    const allowed = new Set<string>(["caused_by"]);
    const paths = traverseForward(index, "a", 3, 200, allowed);
    // Should only find path to b, not c
    const hasPathToB = paths.some(p => p.nodes.includes("b"));
    const hasPathToC = paths.some(p => p.nodes.includes("c"));
    expect(hasPathToB).toBe(true);
    expect(hasPathToC).toBe(false);
  });

  it("respects maxPaths limit", () => {
    // Create a star graph with many outgoing edges
    const edges: EdgeRecord[] = [];
    for (let i = 0; i < 100; i++) {
      edges.push(createEdge("a", `b${i}`, "caused_by", "caused_by"));
    }
    const index = buildIndex(edges);
    const paths = traverseForward(index, "a", 3, 50);
    expect(paths.length).toBeLessThanOrEqual(50);
  });
});
// Backward traversal mirrors traverseForward but walks incoming edges
// toward the given node; the same hop bound and cycle rules apply.
describe("traverseBackward", () => {
  it("traverses backward chain", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("c", "b", "caused_by", "caused_by"),
    ];
    const index = buildIndex(edges);
    const paths = traverseBackward(index, "b", 3, 200);
    expect(paths.length).toBeGreaterThan(0);
    const hasPathFromA = paths.some(p => p.nodes.includes("a"));
    const hasPathFromC = paths.some(p => p.nodes.includes("c"));
    expect(hasPathFromA).toBe(true);
    expect(hasPathFromC).toBe(true);
  });

  it("respects maxHops in backward traversal", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("c", "a", "caused_by", "caused_by"),
      createEdge("d", "c", "caused_by", "caused_by"),
    ];
    const index = buildIndex(edges);
    const paths = traverseBackward(index, "b", 2, 200);
    for (const path of paths) {
      expect(path.nodes.length).toBeLessThanOrEqual(3); // maxHops + 1
    }
  });

  it("avoids cycles in backward traversal", () => {
    const edges: EdgeRecord[] = [
      createEdge("a", "b", "caused_by", "caused_by"),
      createEdge("b", "a", "caused_by", "caused_by"), // cycle
    ];
    const index = buildIndex(edges);
    const paths = traverseBackward(index, "a", 5, 200);
    for (const path of paths) {
      const nodeCounts = new Map<string, number>();
      for (const nodeId of path.nodes) {
        nodeCounts.set(nodeId, (nodeCounts.get(nodeId) || 0) + 1);
      }
      // Each node appears at most once per path (no revisits).
      for (const count of nodeCounts.values()) {
        expect(count).toBe(1);
      }
    }
  });
});

View File

@ -1,10 +1,10 @@
import { describe, it, expect } from "vitest";
import type { ParsedEdge } from "../../parser/types";
import { lintEdges } from "../../lint/LintEngine";
import { lintParsedEdges } from "../../lint/LintEngine";
import { Vocabulary } from "../../vocab/Vocabulary";
import { parseEdgeVocabulary } from "../../vocab/parseEdgeVocabulary";
describe("lintEdges", () => {
describe("lintParsedEdges", () => {
// Create a minimal vocabulary for testing
const vocabMd = `
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
@ -25,53 +25,33 @@ describe("lintEdges", () => {
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(1);
const finding = findings[0];
if (!finding) throw new Error("Expected finding");
expect(finding.ruleId).toBe("unknown_edge_type");
expect(findings.length).toBe(2); // unknown_edge_type + edge_without_target
const finding = findings.find(f => f.ruleId === "unknown_edge_type");
if (!finding) throw new Error("Expected unknown_edge_type finding");
expect(finding.severity).toBe("ERROR");
expect(finding.message).toContain("unknown_type");
});
it("reports alias not normalized as WARN", () => {
it("does not report known aliases as errors or warnings", () => {
const edges: ParsedEdge[] = [
{
rawType: "wegen",
targets: [],
rawType: "wegen", // alias, not canonical
targets: ["SomeNote"],
lineStart: 5,
lineEnd: 5,
},
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
const existingFiles = new Set<string>(["SomeNote.md"]);
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(1);
const finding = findings[0];
if (!finding) throw new Error("Expected finding");
expect(finding.ruleId).toBe("alias_not_normalized");
expect(finding.severity).toBe("WARN");
expect(finding.message).toContain("wegen");
expect(finding.message).toContain("caused_by");
expect(finding.lineStart).toBe(5);
});
it("does not report normalized canonical types", () => {
const edges: ParsedEdge[] = [
{
rawType: "caused_by",
targets: [],
lineStart: 0,
lineEnd: 0,
},
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(0);
// Should not have unknown_edge_type or alias_not_normalized
const unknownFinding = findings.find(f => f.ruleId === "unknown_edge_type");
expect(unknownFinding).toBeUndefined();
const aliasFinding = findings.find(f => f.ruleId === "alias_not_normalized");
expect(aliasFinding).toBeUndefined();
});
it("reports missing target notes as WARN", () => {
@ -85,17 +65,54 @@ describe("lintEdges", () => {
];
const existingFiles = new Set<string>(["ExistingNote.md", "ExistingNote"]);
const findings = lintEdges(edges, vocabulary, existingFiles);
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(1);
const finding = findings[0];
if (!finding) throw new Error("Expected finding");
expect(finding.ruleId).toBe("missing_target_note");
const finding = findings.find(f => f.ruleId === "missing_target_note");
if (!finding) throw new Error("Expected missing_target_note finding");
expect(finding.severity).toBe("WARN");
expect(finding.message).toContain("MissingNote");
expect(finding.evidence).toBe("MissingNote");
});
it("reports missing targets even for unknown edge types", () => {
const edges: ParsedEdge[] = [
{
rawType: "unknown_type",
targets: ["MissingNote"],
lineStart: 0,
lineEnd: 0,
},
];
const existingFiles = new Set<string>();
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
const unknownFinding = findings.find(f => f.ruleId === "unknown_edge_type");
expect(unknownFinding).toBeDefined();
const missingFinding = findings.find(f => f.ruleId === "missing_target_note");
expect(missingFinding).toBeDefined();
});
it("reports edge without targets as WARN", () => {
const edges: ParsedEdge[] = [
{
rawType: "caused_by",
targets: [],
lineStart: 0,
lineEnd: 0,
},
];
const existingFiles = new Set<string>();
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
const finding = findings.find(f => f.ruleId === "edge_without_target");
if (!finding) throw new Error("Expected edge_without_target finding");
expect(finding.severity).toBe("WARN");
expect(finding.message).toContain("caused_by");
expect(finding.message).toContain("no target notes");
});
it("handles target notes with headings", () => {
const edges: ParsedEdge[] = [
{
@ -107,9 +124,10 @@ describe("lintEdges", () => {
];
const existingFiles = new Set<string>(["Note.md"]);
const findings = lintEdges(edges, vocabulary, existingFiles);
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(0);
const missingFinding = findings.find(f => f.ruleId === "missing_target_note");
expect(missingFinding).toBeUndefined();
});
it("handles target notes without .md extension", () => {
@ -123,67 +141,47 @@ describe("lintEdges", () => {
];
const existingFiles = new Set<string>(["NoteName.md"]);
const findings = lintEdges(edges, vocabulary, existingFiles);
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(0);
const missingFinding = findings.find(f => f.ruleId === "missing_target_note");
expect(missingFinding).toBeUndefined();
});
it("handles multiple issues in one edge", () => {
it("shows canonical hints when option enabled", () => {
const edges: ParsedEdge[] = [
{
rawType: "wegen", // alias not normalized
targets: ["MissingNote"], // missing target
lineStart: 10,
lineEnd: 12,
},
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
expect(findings.length).toBe(2);
const finding0 = findings[0];
const finding1 = findings[1];
if (!finding0 || !finding1) throw new Error("Expected findings");
expect(finding0.ruleId).toBe("alias_not_normalized");
expect(finding1.ruleId).toBe("missing_target_note");
});
it("handles case-insensitive alias normalization", () => {
const edges: ParsedEdge[] = [
{
rawType: "WEGEN", // uppercase alias
targets: [],
rawType: "wegen", // alias
targets: ["SomeNote"],
lineStart: 0,
lineEnd: 0,
},
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
const existingFiles = new Set<string>(["SomeNote.md"]);
const findings = lintParsedEdges(edges, vocabulary, existingFiles, { showCanonicalHints: true });
expect(findings.length).toBe(1);
const finding = findings[0];
if (!finding) throw new Error("Expected finding");
expect(finding.ruleId).toBe("alias_not_normalized");
expect(finding.message).toContain("WEGEN");
expect(finding.message).toContain("caused_by");
const hintFinding = findings.find(f => f.ruleId === "canonical_hint");
if (!hintFinding) throw new Error("Expected canonical_hint finding");
expect(hintFinding.severity).toBe("INFO");
expect(hintFinding.message).toContain("wegen");
expect(hintFinding.message).toContain("caused_by");
});
it("handles empty targets array", () => {
it("does not show canonical hints when option disabled", () => {
const edges: ParsedEdge[] = [
{
rawType: "caused_by",
targets: [],
rawType: "wegen",
targets: ["SomeNote"],
lineStart: 0,
lineEnd: 0,
},
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
const existingFiles = new Set<string>(["SomeNote.md"]);
const findings = lintParsedEdges(edges, vocabulary, existingFiles, { showCanonicalHints: false });
expect(findings.length).toBe(0);
const hintFinding = findings.find(f => f.ruleId === "canonical_hint");
expect(hintFinding).toBeUndefined();
});
it("preserves line numbers in findings", () => {
@ -197,9 +195,9 @@ describe("lintEdges", () => {
];
const existingFiles = new Set<string>();
const findings = lintEdges(edges, vocabulary, existingFiles);
const findings = lintParsedEdges(edges, vocabulary, existingFiles);
const finding = findings[0];
const finding = findings.find(f => f.ruleId === "unknown_edge_type");
if (!finding) throw new Error("Expected finding");
expect(finding.lineStart).toBe(42);
expect(finding.lineEnd).toBe(45);

View File

@ -0,0 +1,95 @@
import { describe, it, expect } from "vitest";
import { extractFrontmatterId } from "../../parser/parseFrontmatter";
// Tests for the tolerant frontmatter `id` extraction: only the block between
// the first two `---` lines is parsed; values are stringified and unquoted.
describe("extractFrontmatterId", () => {
  it("extracts id from frontmatter", () => {
    const md = `---
id: test-id
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe("test-id");
  });

  it("returns null when no frontmatter", () => {
    const md = `Content without frontmatter.`;
    expect(extractFrontmatterId(md)).toBe(null);
  });

  it("returns null when no id field", () => {
    const md = `---
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe(null);
  });

  it("converts numeric id to string", () => {
    const md = `---
id: 123
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe("123");
  });

  it("handles quoted id", () => {
    const md = `---
id: "quoted-id"
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe("quoted-id");
  });

  it("handles single-quoted id", () => {
    const md = `---
id: 'single-quoted'
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe("single-quoted");
  });

  it("handles id with colon in value", () => {
    // Quoting keeps the colons inside the value intact.
    const md = `---
id: "id:with:colons"
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe("id:with:colons");
  });

  it("returns null when frontmatter not closed", () => {
    const md = `---
id: test-id
title: Test
Content here.
`;
    expect(extractFrontmatterId(md)).toBe(null);
  });

  it("handles id without space after colon", () => {
    const md = `---
id:test-id
title: Test
---
Content here.
`;
    expect(extractFrontmatterId(md)).toBe("test-id");
  });
});

View File

@ -95,5 +95,36 @@ export class MindnetSettingTab extends PluginSettingTab {
}
})
);
// Show canonical hints toggle
// Persists settings.showCanonicalHints; per its description, lint results
// then include INFO findings showing canonical edge type resolution.
new Setting(containerEl)
  .setName("Show canonical hints")
  .setDesc("Show INFO findings with canonical edge type resolution in lint results")
  .addToggle((toggle) =>
    toggle
      .setValue(this.plugin.settings.showCanonicalHints)
      .onChange(async (value) => {
        this.plugin.settings.showCanonicalHints = value;
        await this.plugin.saveSettings();
      })
  );

// Chain direction dropdown
// The guard narrows the dropdown's raw string to the settings union type
// before persisting; any other value is silently ignored (not saved).
new Setting(containerEl)
  .setName("Chain direction")
  .setDesc("Direction for chain traversal: forward, backward, or both")
  .addDropdown((dropdown) =>
    dropdown
      .addOption("forward", "Forward")
      .addOption("backward", "Backward")
      .addOption("both", "Both")
      .setValue(this.plugin.settings.chainDirection)
      .onChange(async (value) => {
        if (value === "forward" || value === "backward" || value === "both") {
          this.plugin.settings.chainDirection = value;
          await this.plugin.saveSettings();
        }
      })
  );
}
}