Refactor plugin to Mindnet Causal Assistant with enhanced vocabulary management and linting features. Updated manifest and package files to reflect new plugin details and dependencies. Added commands for reloading vocabulary and validating notes.
This commit is contained in:
parent
dc2fa22c4d
commit
9b8550c387
|
|
@ -1,11 +1,11 @@
|
|||
{
|
||||
"id": "sample-plugin",
|
||||
"name": "Sample Plugin",
|
||||
"id": "mindnet-causal-assistant",
|
||||
"name": "Mindnet Causal Assistant",
|
||||
"version": "1.0.0",
|
||||
"minAppVersion": "0.15.0",
|
||||
"description": "Demonstrates some of the capabilities of the Obsidian API.",
|
||||
"author": "Obsidian",
|
||||
"description": "Linting, chain explorer and guided authoring for Mindnet causal graphs.",
|
||||
"author": "Lars Stommer",
|
||||
"authorUrl": "https://obsidian.md",
|
||||
"fundingUrl": "https://obsidian.md/pricing",
|
||||
"isDesktopOnly": false
|
||||
"isDesktopOnly": true
|
||||
}
|
||||
|
|
|
|||
1973
package-lock.json
generated
1973
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
|
|
@ -8,7 +8,10 @@
|
|||
"dev": "node esbuild.config.mjs",
|
||||
"build": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
|
||||
"version": "node version-bump.mjs && git add manifest.json versions.json",
|
||||
"lint": "eslint ."
|
||||
"deploy:local": "powershell -ExecutionPolicy Bypass -File scripts/deploy-local.ps1",
|
||||
"build:deploy": "npm run build && npm run deploy:local",
|
||||
"lint": "eslint .",
|
||||
"test": "vitest run"
|
||||
},
|
||||
"keywords": [],
|
||||
"license": "0-BSD",
|
||||
|
|
@ -21,7 +24,8 @@
|
|||
"typescript": "^5.8.3",
|
||||
"typescript-eslint": "8.35.1",
|
||||
"@eslint/js": "9.30.1",
|
||||
"jiti": "2.6.1"
|
||||
"jiti": "2.6.1",
|
||||
"vitest": "^1.6.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"obsidian": "latest"
|
||||
|
|
|
|||
16
scripts/deploy-local.ps1
Normal file
16
scripts/deploy-local.ps1
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
# Deploy the built plugin into a local Obsidian vault for manual testing.
# Copies manifest.json, main.js and (if present) styles.css into the
# vault's .obsidian/plugins/<PluginId> folder.
param(
    # Root path of the target vault (defaults to the dev vault share).
    [string]$VaultPath = "\\nashome\mindnet\vault\mindnet_dev",
    # Plugin folder name under .obsidian/plugins.
    [string]$PluginId = "mindnet-causal-assistant"
)

# Ensure the plugin directory exists (-Force: no error if it is already there).
$pluginDir = Join-Path $VaultPath ".obsidian\plugins\$PluginId"
New-Item -ItemType Directory -Force -Path $pluginDir | Out-Null

# Copy the build artifacts; both are required for Obsidian to load the plugin.
Copy-Item -Path ".\manifest.json" -Destination $pluginDir -Force
Copy-Item -Path ".\main.js" -Destination $pluginDir -Force

# styles.css is optional -- only deploy it when the build produced one.
if (Test-Path ".\styles.css") {
    Copy-Item -Path ".\styles.css" -Destination $pluginDir -Force
}

Write-Host "Deployed to $pluginDir"
|
||||
25
src/__mocks__/obsidian.ts
Normal file
25
src/__mocks__/obsidian.ts
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
// Mock for obsidian API in tests.
// Provides just enough surface (Notice, App, Vault, Workspace, TFile) for
// unit tests to compile and run without the real Obsidian runtime.

/** Stand-in for Obsidian's Notice popup; records the message instead of showing UI. */
export class Notice {
	constructor(public message: string) {}
}

/** Minimal App shape: only the members the plugin code touches. */
export interface App {
	vault: Vault;
	workspace: Workspace;
}

/** Subset of Obsidian's Vault API used by the plugin. */
export interface Vault {
	read(file: TFile): Promise<string>;
	modify(file: TFile, content: string): Promise<void>;
	getMarkdownFiles(): TFile[];
}

/** Subset of Obsidian's Workspace API used by the plugin. */
export interface Workspace {
	getActiveFile(): TFile | null;
}

/** Minimal file descriptor, mirroring the Obsidian TFile fields used here. */
export interface TFile {
	path: string;
	name: string;
	extension: string;
}
|
||||
0
src/graph/GraphIndex.ts
Normal file
0
src/graph/GraphIndex.ts
Normal file
0
src/graph/traverse.ts
Normal file
0
src/graph/traverse.ts
Normal file
229
src/lint/LintEngine.ts
Normal file
229
src/lint/LintEngine.ts
Normal file
|
|
@ -0,0 +1,229 @@
|
|||
import type { App, TFile } from "obsidian";
|
||||
import type { ParsedEdge } from "../parser/types";
|
||||
import type { Vocabulary } from "../vocab/Vocabulary";
|
||||
import type { Finding, QuickFix } from "./types";
|
||||
import { parseEdgesFromCallouts } from "../parser/parseEdgesFromCallouts";
|
||||
|
||||
const EDGE_HEADER_RE = /^\s*(>+)\s*\[!edge\]\s*(.+?)\s*$/i;
|
||||
|
||||
/**
|
||||
* Pure function to lint parsed edges against vocabulary and file existence.
|
||||
* This can be tested independently.
|
||||
*/
|
||||
export function lintEdges(
|
||||
parsedEdges: ParsedEdge[],
|
||||
vocabulary: Vocabulary,
|
||||
existingFilesSet: Set<string>
|
||||
): Finding[] {
|
||||
const findings: Finding[] = [];
|
||||
|
||||
for (const edge of parsedEdges) {
|
||||
const normalized = vocabulary.normalize(edge.rawType);
|
||||
|
||||
// Check for unknown edge type
|
||||
if (normalized.canonical === null) {
|
||||
findings.push({
|
||||
ruleId: "unknown_edge_type",
|
||||
severity: "ERROR",
|
||||
message: `Unknown edge type: "${edge.rawType}"`,
|
||||
filePath: "", // Will be set by caller
|
||||
lineStart: edge.lineStart,
|
||||
lineEnd: edge.lineEnd,
|
||||
evidence: edge.rawType,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for alias not normalized
|
||||
const rawLower = edge.rawType.trim().toLowerCase();
|
||||
const canonicalLower = normalized.canonical.toLowerCase();
|
||||
if (rawLower !== canonicalLower) {
|
||||
findings.push({
|
||||
ruleId: "alias_not_normalized",
|
||||
severity: "WARN",
|
||||
message: `Edge type "${edge.rawType}" should be normalized to "${normalized.canonical}"`,
|
||||
filePath: "", // Will be set by caller
|
||||
lineStart: edge.lineStart,
|
||||
lineEnd: edge.lineStart,
|
||||
evidence: edge.rawType,
|
||||
quickFixes: [], // Will be populated by caller with file context
|
||||
});
|
||||
}
|
||||
|
||||
// Check for missing target notes
|
||||
for (const target of edge.targets) {
|
||||
if (!target) continue;
|
||||
|
||||
// Check if file exists (exact match to markdown file name)
|
||||
// Target might be "Note Name" or "Note Name#heading" - we check the base name
|
||||
const parts = target.split("#");
|
||||
const firstPart = parts[0];
|
||||
if (!firstPart) continue;
|
||||
|
||||
const baseName = firstPart.trim();
|
||||
if (!baseName) continue;
|
||||
|
||||
const markdownFileName = baseName.endsWith(".md") ? baseName : `${baseName}.md`;
|
||||
|
||||
if (!existingFilesSet.has(markdownFileName) && !existingFilesSet.has(baseName)) {
|
||||
findings.push({
|
||||
ruleId: "missing_target_note",
|
||||
severity: "WARN",
|
||||
message: `Target note "${target}" not found in vault`,
|
||||
filePath: "", // Will be set by caller
|
||||
lineStart: edge.lineStart,
|
||||
lineEnd: edge.lineEnd,
|
||||
evidence: target,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return findings;
|
||||
}
|
||||
|
||||
/**
 * LintEngine for validating markdown notes containing edge definitions.
 */
export class LintEngine {
	/**
	 * Lint the currently active markdown note.
	 *
	 * Reads the active file, parses its [!edge] callouts, runs the pure
	 * lintEdges() checks against the set of vault files, then fills in
	 * filePath on each finding and attaches quick-fixes where possible.
	 *
	 * @param app Obsidian app (vault + workspace access).
	 * @param vocabulary Loaded edge vocabulary used for normalization.
	 * @returns All findings for the active note.
	 * @throws Error when no file is active or the active file is not markdown.
	 */
	static async lintCurrentNote(
		app: App,
		vocabulary: Vocabulary
	): Promise<Finding[]> {
		const activeFile = app.workspace.getActiveFile();

		if (!activeFile) {
			throw new Error("No active file");
		}

		if (activeFile.extension !== "md") {
			throw new Error("Active file is not a markdown file");
		}

		// Read file content
		const content = await app.vault.read(activeFile);

		// Parse edges
		const parsedEdges = parseEdgesFromCallouts(content);

		// Build set of existing markdown files in vault
		const existingFilesSet = new Set<string>();
		const markdownFiles = app.vault.getMarkdownFiles();
		for (const file of markdownFiles) {
			existingFilesSet.add(file.name);
			// Also add without .md extension for matching
			if (file.name.endsWith(".md")) {
				const baseName = file.name.slice(0, -3);
				existingFilesSet.add(baseName);
			}
		}

		// Run pure linting logic
		const findings = lintEdges(parsedEdges, vocabulary, existingFilesSet);

		// Set filePath and add quickfixes
		const filePath = activeFile.path;
		const lines = content.split(/\r?\n/);

		for (const finding of findings) {
			finding.filePath = filePath;

			// Add quickfix for alias_not_normalized
			// (only findings that carry a line number can be fixed in place)
			if (finding.ruleId === "alias_not_normalized" && finding.lineStart !== undefined) {
				const lineIndex = finding.lineStart;
				const line = lines[lineIndex];

				if (line) {
					// Re-derive the canonical form; the quickfix rewrites the
					// alias token on this line to it.
					const normalized = vocabulary.normalize(finding.evidence || "");
					if (normalized.canonical) {
						finding.quickFixes = [
							createNormalizeQuickFix(
								app,
								activeFile,
								content,
								lineIndex,
								finding.evidence || "",
								normalized.canonical
							),
						];
					}
				}
			}
		}

		return findings;
	}
}
|
||||
|
||||
/**
|
||||
* Create a quickfix that normalizes an edge type in the file.
|
||||
*/
|
||||
function createNormalizeQuickFix(
|
||||
app: App,
|
||||
file: TFile,
|
||||
currentContent: string,
|
||||
lineIndex: number,
|
||||
rawType: string,
|
||||
canonical: string
|
||||
): QuickFix {
|
||||
return {
|
||||
id: "normalize_edge_type",
|
||||
title: `Normalize to "${canonical}"`,
|
||||
apply: async () => {
|
||||
const { Notice } = await import("obsidian");
|
||||
const lines = currentContent.split(/\r?\n/);
|
||||
const line = lines[lineIndex];
|
||||
|
||||
if (!line) {
|
||||
new Notice("Line not found");
|
||||
return;
|
||||
}
|
||||
|
||||
// Match the edge header pattern
|
||||
const match = line.match(EDGE_HEADER_RE);
|
||||
if (!match || !match[2]) {
|
||||
new Notice("Edge header pattern not found on line");
|
||||
return;
|
||||
}
|
||||
|
||||
// Find the position of the raw type in the line
|
||||
// match[2] is the captured type, but we need to find where it appears in the original line
|
||||
const edgeMarker = "[!edge]";
|
||||
const edgeIndex = line.indexOf(edgeMarker);
|
||||
if (edgeIndex === -1) {
|
||||
new Notice("Edge marker not found on line");
|
||||
return;
|
||||
}
|
||||
|
||||
// Find the type after [!edge]
|
||||
const afterEdge = line.substring(edgeIndex + edgeMarker.length);
|
||||
const typeMatch = afterEdge.match(/^\s+(\S+)/);
|
||||
if (!typeMatch || typeMatch[1] !== rawType.trim()) {
|
||||
new Notice("Type token not found at expected position");
|
||||
return;
|
||||
}
|
||||
|
||||
// Replace the raw type with canonical
|
||||
const beforeType = line.substring(0, edgeIndex + edgeMarker.length + typeMatch[0].indexOf(typeMatch[1]));
|
||||
const afterType = line.substring(beforeType.length + typeMatch[1].length);
|
||||
const newLine = beforeType + canonical + afterType;
|
||||
|
||||
// Safety check: verify the new line still matches the pattern
|
||||
const verifyMatch = newLine.match(EDGE_HEADER_RE);
|
||||
if (!verifyMatch) {
|
||||
new Notice("Quickfix would produce invalid line - skipping");
|
||||
return;
|
||||
}
|
||||
|
||||
// Update the line
|
||||
lines[lineIndex] = newLine;
|
||||
const newContent = lines.join("\n");
|
||||
|
||||
// Write back to file
|
||||
await app.vault.modify(file, newContent);
|
||||
},
|
||||
};
|
||||
}
|
||||
0
src/lint/index.ts
Normal file
0
src/lint/index.ts
Normal file
0
src/lint/rules/index.ts
Normal file
0
src/lint/rules/index.ts
Normal file
0
src/lint/rules/rule_hub_has_causality.ts
Normal file
0
src/lint/rules/rule_hub_has_causality.ts
Normal file
0
src/lint/rules/rule_missing_target.ts
Normal file
0
src/lint/rules/rule_missing_target.ts
Normal file
0
src/lint/rules/rule_unkown_edge.ts
Normal file
0
src/lint/rules/rule_unkown_edge.ts
Normal file
18
src/lint/types.ts
Normal file
18
src/lint/types.ts
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
/** Severity level of a lint finding, from blocking to informational. */
export type Severity = "ERROR" | "WARN" | "INFO";

/** An executable remediation that a finding can offer to the user. */
export interface QuickFix {
	// Stable identifier of the fix kind (e.g. "normalize_edge_type").
	id: string;
	// Human-readable label shown in the UI.
	title: string;
	// Performs the fix; may write to the vault.
	apply: () => Promise<void>;
}

/** A single problem reported by the lint engine. */
export interface Finding {
	// Identifier of the rule that produced this finding.
	ruleId: string;
	severity: Severity;
	// Human-readable description of the problem.
	message: string;
	// Vault path of the affected file (filled in by the caller of lintEdges).
	filePath: string;
	// 0-based line range of the offending construct, when known.
	lineStart?: number;
	lineEnd?: number;
	// The offending text (e.g. the raw edge type or the target name).
	evidence?: string;
	// Optional automated fixes for this finding.
	quickFixes?: QuickFix[];
}
|
||||
245
src/main.ts
245
src/main.ts
|
|
@ -1,99 +1,176 @@
|
|||
import {App, Editor, MarkdownView, Modal, Notice, Plugin} from 'obsidian';
|
||||
import {DEFAULT_SETTINGS, MyPluginSettings, SampleSettingTab} from "./settings";
|
||||
import { Notice, Plugin, TFile } from "obsidian";
|
||||
import { DEFAULT_SETTINGS, type MindnetSettings, normalizeVaultPath } from "./settings";
|
||||
import { VocabularyLoader } from "./vocab/VocabularyLoader";
|
||||
import { parseEdgeVocabulary } from "./vocab/parseEdgeVocabulary";
|
||||
import { Vocabulary } from "./vocab/Vocabulary";
|
||||
import { LintEngine } from "./lint/LintEngine";
|
||||
import { MindnetSettingTab } from "./ui/MindnetSettingTab";
|
||||
|
||||
// Remember to rename these classes and interfaces!
|
||||
export default class MindnetCausalAssistantPlugin extends Plugin {
|
||||
settings: MindnetSettings;
|
||||
private vocabulary: Vocabulary | null = null;
|
||||
private reloadDebounceTimer: number | null = null;
|
||||
|
||||
export default class MyPlugin extends Plugin {
|
||||
settings: MyPluginSettings;
|
||||
async onload(): Promise<void> {
|
||||
await this.loadSettings();
|
||||
|
||||
async onload() {
|
||||
await this.loadSettings();
|
||||
// Add settings tab
|
||||
this.addSettingTab(new MindnetSettingTab(this.app, this));
|
||||
|
||||
// This creates an icon in the left ribbon.
|
||||
this.addRibbonIcon('dice', 'Sample', (evt: MouseEvent) => {
|
||||
// Called when the user clicks the icon.
|
||||
new Notice('This is a notice!');
|
||||
});
|
||||
// Register live reload for edge vocabulary file
|
||||
this.registerEvent(
|
||||
this.app.vault.on("modify", async (file: TFile) => {
|
||||
const normalizedFilePath = normalizeVaultPath(file.path);
|
||||
const normalizedVocabPath = normalizeVaultPath(this.settings.edgeVocabularyPath);
|
||||
|
||||
// Check if modified file matches vocabulary path (exact match or ends with)
|
||||
if (normalizedFilePath === normalizedVocabPath ||
|
||||
normalizedFilePath === `/${normalizedVocabPath}` ||
|
||||
normalizedFilePath.endsWith(`/${normalizedVocabPath}`)) {
|
||||
// Debounce reload to avoid multiple rapid reloads
|
||||
if (this.reloadDebounceTimer !== null) {
|
||||
window.clearTimeout(this.reloadDebounceTimer);
|
||||
}
|
||||
|
||||
this.reloadDebounceTimer = window.setTimeout(async () => {
|
||||
await this.reloadVocabulary();
|
||||
this.reloadDebounceTimer = null;
|
||||
}, 200);
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// This adds a status bar item to the bottom of the app. Does not work on mobile apps.
|
||||
const statusBarItemEl = this.addStatusBarItem();
|
||||
statusBarItemEl.setText('Status bar text');
|
||||
this.addCommand({
|
||||
id: "mindnet-reload-edge-vocabulary",
|
||||
name: "Mindnet: Reload edge vocabulary",
|
||||
callback: async () => {
|
||||
await this.reloadVocabulary();
|
||||
},
|
||||
});
|
||||
|
||||
// This adds a simple command that can be triggered anywhere
|
||||
this.addCommand({
|
||||
id: 'open-modal-simple',
|
||||
name: 'Open modal (simple)',
|
||||
callback: () => {
|
||||
new SampleModal(this.app).open();
|
||||
}
|
||||
});
|
||||
// This adds an editor command that can perform some operation on the current editor instance
|
||||
this.addCommand({
|
||||
id: 'replace-selected',
|
||||
name: 'Replace selected content',
|
||||
editorCallback: (editor: Editor, view: MarkdownView) => {
|
||||
editor.replaceSelection('Sample editor command');
|
||||
}
|
||||
});
|
||||
// This adds a complex command that can check whether the current state of the app allows execution of the command
|
||||
this.addCommand({
|
||||
id: 'open-modal-complex',
|
||||
name: 'Open modal (complex)',
|
||||
checkCallback: (checking: boolean) => {
|
||||
// Conditions to check
|
||||
const markdownView = this.app.workspace.getActiveViewOfType(MarkdownView);
|
||||
if (markdownView) {
|
||||
// If checking is true, we're simply "checking" if the command can be run.
|
||||
// If checking is false, then we want to actually perform the operation.
|
||||
if (!checking) {
|
||||
new SampleModal(this.app).open();
|
||||
}
|
||||
this.addCommand({
|
||||
id: "mindnet-validate-current-note",
|
||||
name: "Mindnet: Validate current note",
|
||||
callback: async () => {
|
||||
try {
|
||||
const vocabulary = await this.ensureVocabularyLoaded();
|
||||
if (!vocabulary) {
|
||||
return;
|
||||
}
|
||||
|
||||
// This command will only show up in Command Palette when the check function returns true
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
});
|
||||
const findings = await LintEngine.lintCurrentNote(this.app, vocabulary);
|
||||
|
||||
// This adds a settings tab so the user can configure various aspects of the plugin
|
||||
this.addSettingTab(new SampleSettingTab(this.app, this));
|
||||
// Count findings by severity
|
||||
const errorCount = findings.filter(f => f.severity === "ERROR").length;
|
||||
const warnCount = findings.filter(f => f.severity === "WARN").length;
|
||||
const infoCount = findings.filter(f => f.severity === "INFO").length;
|
||||
|
||||
// If the plugin hooks up any global DOM events (on parts of the app that doesn't belong to this plugin)
|
||||
// Using this function will automatically remove the event listener when this plugin is disabled.
|
||||
this.registerDomEvent(document, 'click', (evt: MouseEvent) => {
|
||||
new Notice("Click");
|
||||
});
|
||||
// Show summary notice
|
||||
new Notice(`Lint: ${errorCount} errors, ${warnCount} warnings, ${infoCount} info`);
|
||||
|
||||
// When registering intervals, this function will automatically clear the interval when the plugin is disabled.
|
||||
this.registerInterval(window.setInterval(() => console.log('setInterval'), 5 * 60 * 1000));
|
||||
// Log findings to console
|
||||
console.log("=== Lint Findings ===");
|
||||
for (const finding of findings) {
|
||||
const quickfixInfo = finding.quickFixes && finding.quickFixes.length > 0
|
||||
? ` [QuickFix: ${finding.quickFixes.map(qf => qf.title).join(", ")}]`
|
||||
: "";
|
||||
console.log(`[${finding.severity}] ${finding.ruleId}: ${finding.message} (${finding.filePath}:${finding.lineStart}${quickfixInfo})`);
|
||||
}
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : String(e);
|
||||
new Notice(`Failed to validate note: ${msg}`);
|
||||
console.error(e);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
onunload(): void {
|
||||
// nothing yet
|
||||
}
|
||||
|
||||
onunload() {
|
||||
}
|
||||
private async loadSettings(): Promise<void> {
|
||||
this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
|
||||
}
|
||||
|
||||
async loadSettings() {
|
||||
this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData() as Partial<MyPluginSettings>);
|
||||
}
|
||||
async saveSettings(): Promise<void> {
|
||||
await this.saveData(this.settings);
|
||||
}
|
||||
|
||||
async saveSettings() {
|
||||
await this.saveData(this.settings);
|
||||
}
|
||||
}
|
||||
|
||||
class SampleModal extends Modal {
|
||||
constructor(app: App) {
|
||||
super(app);
|
||||
}
|
||||
|
||||
onOpen() {
|
||||
let {contentEl} = this;
|
||||
contentEl.setText('Woah!');
|
||||
}
|
||||
|
||||
onClose() {
|
||||
const {contentEl} = this;
|
||||
contentEl.empty();
|
||||
}
|
||||
/**
 * Ensure vocabulary is loaded. Auto-loads if not present.
 * Returns Vocabulary instance or null on failure.
 *
 * On failure a Notice is shown and the error is logged; callers should
 * treat a null result as "abort the current command".
 */
private async ensureVocabularyLoaded(): Promise<Vocabulary | null> {
	// Reuse the cached instance; live reload replaces it on file change.
	if (this.vocabulary) {
		return this.vocabulary;
	}

	try {
		// Load the raw vocabulary markdown from the configured vault path.
		const text = await VocabularyLoader.loadText(
			this.app,
			this.settings.edgeVocabularyPath
		);

		const parsed = parseEdgeVocabulary(text);
		this.vocabulary = new Vocabulary(parsed);

		const stats = this.vocabulary.getStats();
		console.log("Vocabulary auto-loaded", stats);
		return this.vocabulary;
	} catch (e) {
		const msg = e instanceof Error ? e.message : String(e);
		// Distinguish "file missing" (likely a settings problem) from other errors.
		if (msg.includes("not found") || msg.includes("Vocabulary file not found")) {
			new Notice("edge_vocabulary.md not found. Check the path in plugin settings.");
		} else {
			new Notice(`Failed to load vocabulary: ${msg}. Check plugin settings.`);
		}
		console.error("Failed to load vocabulary:", e);
		return null;
	}
}
|
||||
|
||||
/**
 * Reload vocabulary from file. Used by manual command and live reload.
 *
 * Always re-reads and re-parses the vocabulary file, replacing the cached
 * instance. Success and failure are both reported via Notice; errors are
 * additionally logged to the console.
 */
private async reloadVocabulary(): Promise<void> {
	try {
		const text = await VocabularyLoader.loadText(
			this.app,
			this.settings.edgeVocabularyPath
		);

		const parsed = parseEdgeVocabulary(text);
		this.vocabulary = new Vocabulary(parsed);

		const stats = this.vocabulary.getStats();
		console.log("Vocabulary loaded", stats);
		new Notice(`Edge vocabulary reloaded: ${stats.canonicalCount} canonical, ${stats.aliasCount} aliases`);

		// Log normalization examples
		// (developer aid: shows one canonical and one alias round-trip)
		if (stats.canonicalCount > 0) {
			const firstCanonical = Array.from(parsed.byCanonical.keys())[0];
			if (firstCanonical) {
				const canonicalNorm = this.vocabulary.normalize(firstCanonical);
				console.log(`Normalization example (canonical): "${firstCanonical}" -> canonical: ${canonicalNorm.canonical}, inverse: ${canonicalNorm.inverse}`);
			}

			if (stats.aliasCount > 0) {
				const firstAlias = Array.from(parsed.aliasToCanonical.keys())[0];
				if (firstAlias) {
					const aliasNorm = this.vocabulary.normalize(firstAlias);
					console.log(`Normalization example (alias): "${firstAlias}" -> canonical: ${aliasNorm.canonical}, inverse: ${aliasNorm.inverse}`);
				}
			}
		}
	} catch (e) {
		const msg = e instanceof Error ? e.message : String(e);
		// Distinguish "file missing" (likely a settings problem) from other errors.
		if (msg.includes("not found") || msg.includes("Vocabulary file not found")) {
			new Notice("edge_vocabulary.md not found. Configure path in plugin settings.");
		} else {
			new Notice(`Failed to reload vocabulary: ${msg}`);
		}
		console.error(e);
	}
}
|
||||
}
|
||||
|
|
|
|||
2
src/parser/index.ts
Normal file
2
src/parser/index.ts
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export * from "./types";
|
||||
export * from "./parseEdgesFromCallouts";
|
||||
78
src/parser/parseEdgesFromCallouts.ts
Normal file
78
src/parser/parseEdgesFromCallouts.ts
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
import type { ParsedEdge } from "./types";
|
||||
|
||||
const EDGE_HEADER_RE = /^\s*(>+)\s*\[!edge\]\s*(.+?)\s*$/i;
|
||||
const TARGET_LINK_RE = /\[\[([^\]]+?)\]\]/g;
|
||||
|
||||
/**
|
||||
* Extract edges from any callout nesting:
|
||||
* - Edge starts with: > [!edge] <type> (any number of '>' allowed)
|
||||
* - Collect targets from subsequent lines while quoteLevel >= edgeLevel
|
||||
* - Stop when:
|
||||
* a) next [!edge] header appears, OR
|
||||
* b) quoteLevel drops below edgeLevel (block ends), ignoring blank lines
|
||||
*/
|
||||
export function parseEdgesFromCallouts(markdown: string): ParsedEdge[] {
|
||||
const lines = markdown.split(/\r?\n/);
|
||||
const edges: ParsedEdge[] = [];
|
||||
|
||||
let current: ParsedEdge | null = null;
|
||||
let currentEdgeLevel = 0;
|
||||
|
||||
const getQuoteLevel = (line: string): number => {
|
||||
const m = line.match(/^\s*(>+)/);
|
||||
return m && m[1] ? m[1].length : 0;
|
||||
};
|
||||
|
||||
const flush = (endLine: number) => {
|
||||
if (!current) return;
|
||||
current.lineEnd = endLine;
|
||||
if (current.targets.length > 0) edges.push(current);
|
||||
current = null;
|
||||
currentEdgeLevel = 0;
|
||||
};
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line === undefined) continue;
|
||||
|
||||
// Start of a new edge block
|
||||
const edgeMatch = line.match(EDGE_HEADER_RE);
|
||||
if (edgeMatch && edgeMatch[1] && edgeMatch[2]) {
|
||||
flush(i - 1);
|
||||
|
||||
currentEdgeLevel = edgeMatch[1].length;
|
||||
current = {
|
||||
rawType: edgeMatch[2].trim(),
|
||||
targets: [],
|
||||
lineStart: i,
|
||||
lineEnd: i,
|
||||
};
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!current) continue;
|
||||
|
||||
const trimmed = line.trim();
|
||||
const ql = getQuoteLevel(line);
|
||||
|
||||
// End of the current edge block if quote level drops below the edge header level
|
||||
// (ignore blank lines)
|
||||
if (trimmed !== "" && ql < currentEdgeLevel) {
|
||||
flush(i - 1);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Collect targets (multiple per line allowed)
|
||||
TARGET_LINK_RE.lastIndex = 0;
|
||||
let m: RegExpExecArray | null;
|
||||
while ((m = TARGET_LINK_RE.exec(line)) !== null) {
|
||||
if (m[1]) {
|
||||
const t = m[1].trim();
|
||||
if (t) current.targets.push(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
flush(lines.length - 1);
|
||||
return edges;
|
||||
}
|
||||
0
src/parser/parseFrontmatter.ts
Normal file
0
src/parser/parseFrontmatter.ts
Normal file
15
src/parser/types.ts
Normal file
15
src/parser/types.ts
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
/** Frontmatter metadata of a note, as far as the parser extracts it. */
export interface NodeMeta {
	id?: string;
	title?: string;
	type?: string;
	status?: string;
	tags?: string[];
	date?: string;
}

/** One `[!edge]` callout block extracted from a markdown note. */
export interface ParsedEdge {
	// Edge type exactly as written after [!edge] (may be a non-canonical alias).
	rawType: string;
	targets: string[]; // link texts without [[ ]]
	lineStart: number; // 0-based
	lineEnd: number; // 0-based
}
|
||||
|
|
@ -1,36 +1,21 @@
|
|||
import {App, PluginSettingTab, Setting} from "obsidian";
|
||||
import MyPlugin from "./main";
|
||||
|
||||
export interface MyPluginSettings {
|
||||
mySetting: string;
|
||||
}
|
||||
|
||||
export const DEFAULT_SETTINGS: MyPluginSettings = {
|
||||
mySetting: 'default'
|
||||
}
|
||||
|
||||
export class SampleSettingTab extends PluginSettingTab {
|
||||
plugin: MyPlugin;
|
||||
|
||||
constructor(app: App, plugin: MyPlugin) {
|
||||
super(app, plugin);
|
||||
this.plugin = plugin;
|
||||
}
|
||||
|
||||
display(): void {
|
||||
const {containerEl} = this;
|
||||
|
||||
containerEl.empty();
|
||||
|
||||
new Setting(containerEl)
|
||||
.setName('Settings #1')
|
||||
.setDesc('It\'s a secret')
|
||||
.addText(text => text
|
||||
.setPlaceholder('Enter your secret')
|
||||
.setValue(this.plugin.settings.mySetting)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.mySetting = value;
|
||||
await this.plugin.saveSettings();
|
||||
}));
|
||||
}
|
||||
}
|
||||
/** Persisted plugin settings. */
export interface MindnetSettings {
	edgeVocabularyPath: string; // vault-relative
	graphSchemaPath: string; // vault-relative (for later)
	// Maximum traversal depth — presumably for chain exploration; TODO confirm once used.
	maxHops: number;
	// NOTE(review): not yet read anywhere visible in this change — confirm intended use.
	strictMode: boolean;
}

/** Defaults applied when no saved settings exist yet. */
export const DEFAULT_SETTINGS: MindnetSettings = {
	edgeVocabularyPath: "_system/dictionary/edge_vocabulary.md",
	graphSchemaPath: "_system/dictionary/graph_schema.md",
	maxHops: 3,
	strictMode: false,
};
||||
|
||||
/**
|
||||
* Optional helper: normalize to Obsidian vault paths (forward slashes).
|
||||
*/
|
||||
export function normalizeVaultPath(p: string): string {
|
||||
return (p || "").trim().replace(/\\/g, "/");
|
||||
}
|
||||
|
||||
207
src/tests/lint/LintEngine.test.ts
Normal file
207
src/tests/lint/LintEngine.test.ts
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
import { describe, it, expect } from "vitest";
|
||||
import type { ParsedEdge } from "../../parser/types";
|
||||
import { lintEdges } from "../../lint/LintEngine";
|
||||
import { Vocabulary } from "../../vocab/Vocabulary";
|
||||
import { parseEdgeVocabulary } from "../../vocab/parseEdgeVocabulary";
|
||||
|
||||
describe("lintEdges", () => {
|
||||
// Create a minimal vocabulary for testing
|
||||
const vocabMd = `
|
||||
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
|
||||
| :--- | :--- | :--- | :--- |
|
||||
| \`caused_by\` | \`resulted_in\` | \`wegen\`, \`ausgelöst_durch\` | Test |
|
||||
| \`impacts\` | \`impacted_by\` | *(Kein Alias)* | Test |
|
||||
`;
|
||||
const vocabulary = new Vocabulary(parseEdgeVocabulary(vocabMd));
|
||||
|
||||
it("reports unknown edge type as ERROR", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "unknown_type",
|
||||
targets: [],
|
||||
lineStart: 0,
|
||||
lineEnd: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(1);
|
||||
const finding = findings[0];
|
||||
if (!finding) throw new Error("Expected finding");
|
||||
expect(finding.ruleId).toBe("unknown_edge_type");
|
||||
expect(finding.severity).toBe("ERROR");
|
||||
expect(finding.message).toContain("unknown_type");
|
||||
});
|
||||
|
||||
it("reports alias not normalized as WARN", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "wegen",
|
||||
targets: [],
|
||||
lineStart: 5,
|
||||
lineEnd: 5,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(1);
|
||||
const finding = findings[0];
|
||||
if (!finding) throw new Error("Expected finding");
|
||||
expect(finding.ruleId).toBe("alias_not_normalized");
|
||||
expect(finding.severity).toBe("WARN");
|
||||
expect(finding.message).toContain("wegen");
|
||||
expect(finding.message).toContain("caused_by");
|
||||
expect(finding.lineStart).toBe(5);
|
||||
});
|
||||
|
||||
it("does not report normalized canonical types", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "caused_by",
|
||||
targets: [],
|
||||
lineStart: 0,
|
||||
lineEnd: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(0);
|
||||
});
|
||||
|
||||
it("reports missing target notes as WARN", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "caused_by",
|
||||
targets: ["MissingNote", "ExistingNote"],
|
||||
lineStart: 0,
|
||||
lineEnd: 2,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>(["ExistingNote.md", "ExistingNote"]);
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(1);
|
||||
const finding = findings[0];
|
||||
if (!finding) throw new Error("Expected finding");
|
||||
expect(finding.ruleId).toBe("missing_target_note");
|
||||
expect(finding.severity).toBe("WARN");
|
||||
expect(finding.message).toContain("MissingNote");
|
||||
expect(finding.evidence).toBe("MissingNote");
|
||||
});
|
||||
|
||||
it("handles target notes with headings", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "caused_by",
|
||||
targets: ["Note#heading"],
|
||||
lineStart: 0,
|
||||
lineEnd: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>(["Note.md"]);
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(0);
|
||||
});
|
||||
|
||||
it("handles target notes without .md extension", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "caused_by",
|
||||
targets: ["NoteName"],
|
||||
lineStart: 0,
|
||||
lineEnd: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>(["NoteName.md"]);
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(0);
|
||||
});
|
||||
|
||||
it("handles multiple issues in one edge", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "wegen", // alias not normalized
|
||||
targets: ["MissingNote"], // missing target
|
||||
lineStart: 10,
|
||||
lineEnd: 12,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(2);
|
||||
const finding0 = findings[0];
|
||||
const finding1 = findings[1];
|
||||
if (!finding0 || !finding1) throw new Error("Expected findings");
|
||||
expect(finding0.ruleId).toBe("alias_not_normalized");
|
||||
expect(finding1.ruleId).toBe("missing_target_note");
|
||||
});
|
||||
|
||||
it("handles case-insensitive alias normalization", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "WEGEN", // uppercase alias
|
||||
targets: [],
|
||||
lineStart: 0,
|
||||
lineEnd: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(1);
|
||||
const finding = findings[0];
|
||||
if (!finding) throw new Error("Expected finding");
|
||||
expect(finding.ruleId).toBe("alias_not_normalized");
|
||||
expect(finding.message).toContain("WEGEN");
|
||||
expect(finding.message).toContain("caused_by");
|
||||
});
|
||||
|
||||
it("handles empty targets array", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "caused_by",
|
||||
targets: [],
|
||||
lineStart: 0,
|
||||
lineEnd: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
expect(findings.length).toBe(0);
|
||||
});
|
||||
|
||||
it("preserves line numbers in findings", () => {
|
||||
const edges: ParsedEdge[] = [
|
||||
{
|
||||
rawType: "unknown",
|
||||
targets: [],
|
||||
lineStart: 42,
|
||||
lineEnd: 45,
|
||||
},
|
||||
];
|
||||
|
||||
const existingFiles = new Set<string>();
|
||||
const findings = lintEdges(edges, vocabulary, existingFiles);
|
||||
|
||||
const finding = findings[0];
|
||||
if (!finding) throw new Error("Expected finding");
|
||||
expect(finding.lineStart).toBe(42);
|
||||
expect(finding.lineEnd).toBe(45);
|
||||
});
|
||||
});
|
||||
73
src/tests/parser/parseEdgesFromCallouts.test.ts
Normal file
73
src/tests/parser/parseEdgesFromCallouts.test.ts
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
import { describe, it, expect } from "vitest";
|
||||
import { parseEdgesFromCallouts } from "../../parser/parseEdgesFromCallouts";
|
||||
|
||||
describe("parseEdgesFromCallouts", () => {
|
||||
it("parses edge blocks embedded inside other callouts", () => {
|
||||
const md = `
|
||||
> [!abstract]- 🧩 Some Block
|
||||
>> Some text
|
||||
>> [!edge] caused_by
|
||||
>> [[A]]
|
||||
>> [[B]]
|
||||
>> More text
|
||||
|
||||
Outside.
|
||||
`.trim();
|
||||
|
||||
const edges = parseEdgesFromCallouts(md);
|
||||
expect(edges.length).toBe(1);
|
||||
const edge = edges[0];
|
||||
if (!edge) throw new Error("Expected edge to exist");
|
||||
expect(edge.rawType).toBe("caused_by");
|
||||
expect(edge.targets).toEqual(["A", "B"]);
|
||||
});
|
||||
|
||||
it("stops when quote level drops below edge level", () => {
|
||||
const md = `
|
||||
> [!abstract]
|
||||
>> [!edge] resulted_in
|
||||
>> [[X]]
|
||||
Normal text ends quote.
|
||||
`.trim();
|
||||
|
||||
const edges = parseEdgesFromCallouts(md);
|
||||
expect(edges.length).toBe(1);
|
||||
const edge = edges[0];
|
||||
if (!edge) throw new Error("Expected edge to exist");
|
||||
expect(edge.rawType).toBe("resulted_in");
|
||||
expect(edge.targets).toEqual(["X"]);
|
||||
});
|
||||
|
||||
it("starts a new edge when another edge header appears", () => {
|
||||
const md = `
|
||||
> [!edge] caused_by
|
||||
> [[A]]
|
||||
> [!edge] resulted_in
|
||||
> [[B]]
|
||||
`.trim();
|
||||
|
||||
const edges = parseEdgesFromCallouts(md);
|
||||
expect(edges.length).toBe(2);
|
||||
const edge0 = edges[0];
|
||||
const edge1 = edges[1];
|
||||
if (!edge0 || !edge1) throw new Error("Expected edges to exist");
|
||||
expect(edge0.rawType).toBe("caused_by");
|
||||
expect(edge0.targets).toEqual(["A"]);
|
||||
expect(edge1.rawType).toBe("resulted_in");
|
||||
expect(edge1.targets).toEqual(["B"]);
|
||||
});
|
||||
|
||||
it("allows multiple links in one line", () => {
|
||||
const md = `
|
||||
>> [!edge] impacts
|
||||
>> [[Y]] [[Z]]
|
||||
`.trim();
|
||||
|
||||
const edges = parseEdgesFromCallouts(md);
|
||||
expect(edges.length).toBe(1);
|
||||
const edge = edges[0];
|
||||
if (!edge) throw new Error("Expected edge to exist");
|
||||
expect(edge.rawType).toBe("impacts");
|
||||
expect(edge.targets).toEqual(["Y", "Z"]);
|
||||
});
|
||||
});
|
||||
169
src/tests/vocab/parseEdgeVocabulary.test.ts
Normal file
169
src/tests/vocab/parseEdgeVocabulary.test.ts
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
import { describe, it, expect } from "vitest";
|
||||
import { parseEdgeVocabulary } from "../../vocab/parseEdgeVocabulary";
|
||||
import { Vocabulary } from "../../vocab/Vocabulary";
|
||||
|
||||
describe("parseEdgeVocabulary", () => {
|
||||
const fixtureMd = `
|
||||
# Edge Vocabulary
|
||||
|
||||
## Causal Relations
|
||||
|
||||
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
|
||||
| :--- | :--- | :--- | :--- |
|
||||
| **\`caused_by\`** | \`resulted_in\` | \`ausgelöst_durch\`, \`wegen\`, \`verursacht_durch\` | Describes causation |
|
||||
| \`impacts\` | \`impacted_by\` | *(Kein Alias)* | Impact relationship |
|
||||
| \`influences\` | \`influenced_by\` | \`beeinflusst\`, \`wirkt_auf\` | Influence relation |
|
||||
|
||||
## Other Relations
|
||||
|
||||
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
|
||||
| :--- | :--- | :--- | :--- |
|
||||
| \`relates_to\` | \`related_from\` | \`verwandt_mit\`, \`bezogen_auf\` | General relation |
|
||||
`;
|
||||
|
||||
it("parses canonical types correctly", () => {
|
||||
const vocab = parseEdgeVocabulary(fixtureMd);
|
||||
|
||||
expect(vocab.byCanonical.has("caused_by")).toBe(true);
|
||||
expect(vocab.byCanonical.has("impacts")).toBe(true);
|
||||
expect(vocab.byCanonical.has("influences")).toBe(true);
|
||||
expect(vocab.byCanonical.has("relates_to")).toBe(true);
|
||||
});
|
||||
|
||||
it("parses inverse types correctly", () => {
|
||||
const vocab = parseEdgeVocabulary(fixtureMd);
|
||||
|
||||
const causedBy = vocab.byCanonical.get("caused_by");
|
||||
if (!causedBy) throw new Error("Expected caused_by entry");
|
||||
expect(causedBy.inverse).toBe("resulted_in");
|
||||
|
||||
const impacts = vocab.byCanonical.get("impacts");
|
||||
if (!impacts) throw new Error("Expected impacts entry");
|
||||
expect(impacts.inverse).toBe("impacted_by");
|
||||
});
|
||||
|
||||
it("handles (Kein Alias) correctly", () => {
|
||||
const vocab = parseEdgeVocabulary(fixtureMd);
|
||||
|
||||
const impacts = vocab.byCanonical.get("impacts");
|
||||
if (!impacts) throw new Error("Expected impacts entry");
|
||||
expect(impacts.aliases).toEqual([]);
|
||||
});
|
||||
|
||||
it("parses multiple aliases correctly", () => {
|
||||
const vocab = parseEdgeVocabulary(fixtureMd);
|
||||
|
||||
const causedBy = vocab.byCanonical.get("caused_by");
|
||||
if (!causedBy) throw new Error("Expected caused_by entry");
|
||||
expect(causedBy.aliases).toEqual(["ausgelöst_durch", "wegen", "verursacht_durch"]);
|
||||
|
||||
const influences = vocab.byCanonical.get("influences");
|
||||
if (!influences) throw new Error("Expected influences entry");
|
||||
expect(influences.aliases).toEqual(["beeinflusst", "wirkt_auf"]);
|
||||
});
|
||||
|
||||
it("builds alias-to-canonical mapping", () => {
|
||||
const vocab = parseEdgeVocabulary(fixtureMd);
|
||||
|
||||
expect(vocab.aliasToCanonical.get("ausgelöst_durch")).toBe("caused_by");
|
||||
expect(vocab.aliasToCanonical.get("wegen")).toBe("caused_by");
|
||||
expect(vocab.aliasToCanonical.get("verursacht_durch")).toBe("caused_by");
|
||||
expect(vocab.aliasToCanonical.get("beeinflusst")).toBe("influences");
|
||||
expect(vocab.aliasToCanonical.get("wirkt_auf")).toBe("influences");
|
||||
});
|
||||
|
||||
it("parses multiple tables correctly", () => {
|
||||
const vocab = parseEdgeVocabulary(fixtureMd);
|
||||
|
||||
expect(vocab.byCanonical.size).toBe(4);
|
||||
expect(vocab.byCanonical.has("caused_by")).toBe(true);
|
||||
expect(vocab.byCanonical.has("impacts")).toBe(true);
|
||||
expect(vocab.byCanonical.has("influences")).toBe(true);
|
||||
expect(vocab.byCanonical.has("relates_to")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Vocabulary wrapper", () => {
|
||||
const fixtureMd = `
|
||||
| System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung |
|
||||
| :--- | :--- | :--- | :--- |
|
||||
| **\`caused_by\`** | \`resulted_in\` | \`ausgelöst_durch\`, \`wegen\` | Test |
|
||||
| \`impacts\` | \`impacted_by\` | *(Kein Alias)* | Test |
|
||||
`;
|
||||
|
||||
it("getCanonical returns canonical for canonical input", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
expect(vocab.getCanonical("caused_by")).toBe("caused_by");
|
||||
expect(vocab.getCanonical("impacts")).toBe("impacts");
|
||||
});
|
||||
|
||||
it("getCanonical returns canonical for alias input (case-insensitive)", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
expect(vocab.getCanonical("wegen")).toBe("caused_by");
|
||||
expect(vocab.getCanonical("WEGEN")).toBe("caused_by");
|
||||
expect(vocab.getCanonical("ausgelöst_durch")).toBe("caused_by");
|
||||
expect(vocab.getCanonical("AUSGELÖST_DURCH")).toBe("caused_by");
|
||||
});
|
||||
|
||||
it("getCanonical returns null for unknown input", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
expect(vocab.getCanonical("unknown")).toBe(null);
|
||||
expect(vocab.getCanonical("")).toBe(null);
|
||||
});
|
||||
|
||||
it("getInverse returns inverse for canonical", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
expect(vocab.getInverse("caused_by")).toBe("resulted_in");
|
||||
expect(vocab.getInverse("impacts")).toBe("impacted_by");
|
||||
});
|
||||
|
||||
it("getInverse returns null for unknown canonical", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
expect(vocab.getInverse("unknown")).toBe(null);
|
||||
});
|
||||
|
||||
it("normalize returns correct result for canonical", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
const result = vocab.normalize("caused_by");
|
||||
expect(result.raw).toBe("caused_by");
|
||||
expect(result.canonical).toBe("caused_by");
|
||||
expect(result.inverse).toBe("resulted_in");
|
||||
});
|
||||
|
||||
it("normalize returns correct result for alias (case-insensitive)", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
const result = vocab.normalize("wegen");
|
||||
expect(result.raw).toBe("wegen");
|
||||
expect(result.canonical).toBe("caused_by");
|
||||
expect(result.inverse).toBe("resulted_in");
|
||||
|
||||
const resultUpper = vocab.normalize("WEGEN");
|
||||
expect(resultUpper.raw).toBe("WEGEN");
|
||||
expect(resultUpper.canonical).toBe("caused_by");
|
||||
expect(resultUpper.inverse).toBe("resulted_in");
|
||||
});
|
||||
|
||||
it("normalize returns null canonical for unknown input", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
const result = vocab.normalize("unknown");
|
||||
expect(result.raw).toBe("unknown");
|
||||
expect(result.canonical).toBe(null);
|
||||
expect(result.inverse).toBe(null);
|
||||
});
|
||||
|
||||
it("getStats returns correct counts", () => {
|
||||
const vocab = new Vocabulary(parseEdgeVocabulary(fixtureMd));
|
||||
|
||||
const stats = vocab.getStats();
|
||||
expect(stats.canonicalCount).toBe(2);
|
||||
expect(stats.aliasCount).toBe(2); // ausgelöst_durch, wegen
|
||||
});
|
||||
});
|
||||
99
src/ui/MindnetSettingTab.ts
Normal file
99
src/ui/MindnetSettingTab.ts
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
import { App, Notice, PluginSettingTab, Setting } from "obsidian";
|
||||
import type MindnetCausalAssistantPlugin from "../main";
|
||||
import { VocabularyLoader } from "../vocab/VocabularyLoader";
|
||||
|
||||
export class MindnetSettingTab extends PluginSettingTab {
|
||||
plugin: MindnetCausalAssistantPlugin;
|
||||
|
||||
constructor(app: App, plugin: MindnetCausalAssistantPlugin) {
|
||||
super(app, plugin);
|
||||
this.plugin = plugin;
|
||||
}
|
||||
|
||||
display(): void {
|
||||
const { containerEl } = this;
|
||||
|
||||
containerEl.empty();
|
||||
|
||||
containerEl.createEl("h2", { text: "Mindnet Settings" });
|
||||
|
||||
// Edge vocabulary path
|
||||
new Setting(containerEl)
|
||||
.setName("Edge vocabulary path")
|
||||
.setDesc("Vault-relative path to the edge vocabulary markdown file")
|
||||
.addText((text) =>
|
||||
text
|
||||
.setPlaceholder("_system/dictionary/edge_vocabulary.md")
|
||||
.setValue(this.plugin.settings.edgeVocabularyPath)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.edgeVocabularyPath = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
)
|
||||
.addButton((button) =>
|
||||
button
|
||||
.setButtonText("Validate")
|
||||
.setCta()
|
||||
.onClick(async () => {
|
||||
try {
|
||||
const text = await VocabularyLoader.loadText(
|
||||
this.app,
|
||||
this.plugin.settings.edgeVocabularyPath
|
||||
);
|
||||
new Notice(
|
||||
`Edge vocabulary file found (${text.length} characters)`
|
||||
);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : String(e);
|
||||
new Notice(
|
||||
`Failed to load edge vocabulary: ${msg}`
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// Graph schema path
|
||||
new Setting(containerEl)
|
||||
.setName("Graph schema path")
|
||||
.setDesc("Vault-relative path to the graph schema markdown file")
|
||||
.addText((text) =>
|
||||
text
|
||||
.setPlaceholder("_system/dictionary/graph_schema.md")
|
||||
.setValue(this.plugin.settings.graphSchemaPath)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.graphSchemaPath = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
|
||||
// Strict mode toggle
|
||||
new Setting(containerEl)
|
||||
.setName("Strict mode")
|
||||
.setDesc("Enable strict validation mode")
|
||||
.addToggle((toggle) =>
|
||||
toggle
|
||||
.setValue(this.plugin.settings.strictMode)
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.strictMode = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
|
||||
// Max hops number input
|
||||
new Setting(containerEl)
|
||||
.setName("Max hops")
|
||||
.setDesc("Maximum number of hops for graph traversal")
|
||||
.addText((text) =>
|
||||
text
|
||||
.setPlaceholder("3")
|
||||
.setValue(String(this.plugin.settings.maxHops))
|
||||
.onChange(async (value) => {
|
||||
const numValue = parseInt(value, 10);
|
||||
if (!isNaN(numValue) && numValue > 0) {
|
||||
this.plugin.settings.maxHops = numValue;
|
||||
await this.plugin.saveSettings();
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
0
src/ui/MindnetView.ts
Normal file
0
src/ui/MindnetView.ts
Normal file
64
src/vocab/Vocabulary.ts
Normal file
64
src/vocab/Vocabulary.ts
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
import type { EdgeVocabulary, NormalizeResult } from "./types";
|
||||
|
||||
/**
|
||||
* Wrapper class for edge vocabulary that provides convenient lookup methods.
|
||||
*/
|
||||
export class Vocabulary {
|
||||
private vocab: EdgeVocabulary;
|
||||
|
||||
constructor(vocab: EdgeVocabulary) {
|
||||
this.vocab = vocab;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get canonical edge type from raw input (case-insensitive).
|
||||
* Returns null if not found.
|
||||
*/
|
||||
getCanonical(raw: string): string | null {
|
||||
const lowerRaw = raw.toLowerCase();
|
||||
|
||||
// Check if it's already a canonical
|
||||
for (const [canonical] of this.vocab.byCanonical) {
|
||||
if (canonical.toLowerCase() === lowerRaw) {
|
||||
return canonical;
|
||||
}
|
||||
}
|
||||
|
||||
// Check aliases
|
||||
const canonical = this.vocab.aliasToCanonical.get(lowerRaw);
|
||||
return canonical ?? null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get inverse edge type for a canonical type.
|
||||
* Returns null if not found or no inverse defined.
|
||||
*/
|
||||
getInverse(canonical: string): string | null {
|
||||
const entry = this.vocab.byCanonical.get(canonical);
|
||||
return entry?.inverse ?? null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize a raw edge type string to canonical and inverse.
|
||||
*/
|
||||
normalize(raw: string): NormalizeResult {
|
||||
const canonical = this.getCanonical(raw);
|
||||
const inverse = canonical ? this.getInverse(canonical) : null;
|
||||
|
||||
return {
|
||||
raw,
|
||||
canonical,
|
||||
inverse,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get statistics about the vocabulary.
|
||||
*/
|
||||
getStats(): { canonicalCount: number; aliasCount: number } {
|
||||
return {
|
||||
canonicalCount: this.vocab.byCanonical.size,
|
||||
aliasCount: this.vocab.aliasToCanonical.size,
|
||||
};
|
||||
}
|
||||
}
|
||||
27
src/vocab/VocabularyLoader.ts
Normal file
27
src/vocab/VocabularyLoader.ts
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
import type { App, TFile } from "obsidian";
|
||||
import { normalizeVaultPath } from "../settings";
|
||||
|
||||
export class VocabularyLoader {
|
||||
/**
|
||||
* Loads a text file from the currently opened Obsidian vault.
|
||||
* @param app Obsidian App
|
||||
* @param vaultRelativePath e.g. "_system/dictionary/edge_vocabulary.md"
|
||||
*/
|
||||
static async loadText(app: App, vaultRelativePath: string): Promise<string> {
|
||||
const p = normalizeVaultPath(vaultRelativePath);
|
||||
const abstract = app.vault.getAbstractFileByPath(p);
|
||||
|
||||
if (!abstract) {
|
||||
throw new Error(`Vocabulary file not found in vault: "${p}"`);
|
||||
}
|
||||
|
||||
// Guard: Only files can be read
|
||||
const file = abstract as TFile;
|
||||
// TFile has 'extension' and 'path' properties; if it isn't a file this will usually fail at runtime.
|
||||
if (!(file && typeof file.path === "string")) {
|
||||
throw new Error(`Path is not a file: "${p}"`);
|
||||
}
|
||||
|
||||
return await app.vault.read(file);
|
||||
}
|
||||
}
|
||||
0
src/vocab/index.ts
Normal file
0
src/vocab/index.ts
Normal file
105
src/vocab/parseEdgeVocabulary.ts
Normal file
105
src/vocab/parseEdgeVocabulary.ts
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
import type { EdgeVocabulary } from "./types";
|
||||
|
||||
const BACKTICK_RE = /`([^`]+)`/g;
|
||||
|
||||
/**
|
||||
* Parses markdown tables containing edge vocabulary definitions.
|
||||
*
|
||||
* Expected format:
|
||||
* | System-Typ (Canonical) | Inverser Typ | Erlaubte Aliasse (User) | Beschreibung ... |
|
||||
* | **`caused_by`** | `resulted_in` | `ausgelöst_durch`, `wegen`, ... | ... |
|
||||
*
|
||||
* Rules:
|
||||
* - Extract all backticked tokens from each row
|
||||
* - First token = canonical (may be wrapped in ** **)
|
||||
* - Second token = inverse (optional)
|
||||
* - Remaining tokens = aliases (skip if cell contains "(Kein Alias)")
|
||||
* - Skip rows with less than 1 token (with warning counter)
|
||||
* - Store canonical as-is, but use lowercase for lookup keys
|
||||
*/
|
||||
export function parseEdgeVocabulary(md: string): EdgeVocabulary {
|
||||
const lines = md.split(/\r?\n/);
|
||||
const byCanonical = new Map<string, { canonical: string; inverse?: string; aliases: string[] }>();
|
||||
const aliasToCanonical = new Map<string, string>();
|
||||
|
||||
let skippedRows = 0;
|
||||
|
||||
for (const line of lines) {
|
||||
// Skip header separator rows (e.g., "| :--- | :--- |")
|
||||
if (/^\s*\|[\s:|-]+\|\s*$/.test(line)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Only process table rows
|
||||
if (!line.trim().startsWith("|")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Extract all backticked tokens
|
||||
const tokens: string[] = [];
|
||||
let match: RegExpExecArray | null;
|
||||
BACKTICK_RE.lastIndex = 0;
|
||||
while ((match = BACKTICK_RE.exec(line)) !== null) {
|
||||
if (match[1]) {
|
||||
const token = match[1].trim();
|
||||
if (token) {
|
||||
tokens.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Skip rows with no tokens or only one token (need at least canonical)
|
||||
if (tokens.length < 1) {
|
||||
skippedRows++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if aliases cell contains "(Kein Alias)"
|
||||
const hasNoAliases = /\(Kein Alias\)/i.test(line);
|
||||
|
||||
const canonical = tokens[0];
|
||||
if (!canonical) {
|
||||
skippedRows++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const inverse = tokens.length >= 2 && tokens[1] ? tokens[1] : undefined;
|
||||
|
||||
// Extract aliases: all tokens after the first two, but only if not "(Kein Alias)"
|
||||
const aliases: string[] = [];
|
||||
if (!hasNoAliases && tokens.length > 2) {
|
||||
for (let i = 2; i < tokens.length; i++) {
|
||||
const alias = tokens[i];
|
||||
if (alias) {
|
||||
aliases.push(alias);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Store canonical entry
|
||||
byCanonical.set(canonical, {
|
||||
canonical,
|
||||
inverse,
|
||||
aliases,
|
||||
});
|
||||
|
||||
// Build alias-to-canonical mapping (case-insensitive keys)
|
||||
for (const alias of aliases) {
|
||||
if (alias) {
|
||||
const lowerAlias = alias.toLowerCase();
|
||||
if (!aliasToCanonical.has(lowerAlias)) {
|
||||
aliasToCanonical.set(lowerAlias, canonical);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (skippedRows > 0) {
|
||||
console.warn(`parseEdgeVocabulary: Skipped ${skippedRows} rows with insufficient tokens`);
|
||||
}
|
||||
|
||||
return {
|
||||
byCanonical,
|
||||
aliasToCanonical,
|
||||
};
|
||||
}
|
||||
20
src/vocab/types.ts
Normal file
20
src/vocab/types.ts
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
/** A canonical (system-level) edge type name, e.g. "caused_by". */
export type CanonicalEdgeType = string;

/** One vocabulary row: a canonical edge type with its inverse and aliases. */
export interface EdgeTypeEntry {
  /** Canonical name, kept in its original casing. */
  canonical: CanonicalEdgeType;
  /** Canonical name of the inverse relation, if one is defined. */
  inverse?: CanonicalEdgeType;
  /** User-facing alias spellings (may be empty). */
  aliases: string[];
}

/** Lookup structure produced by parseEdgeVocabulary. */
export interface EdgeVocabulary {
  // key: canonical edge type
  byCanonical: Map<CanonicalEdgeType, EdgeTypeEntry>;
  // key: alias (lowercased) -> canonical
  aliasToCanonical: Map<string, CanonicalEdgeType>;
}

/** Result of normalizing a raw edge type via Vocabulary.normalize. */
export interface NormalizeResult {
  /** The input exactly as the caller supplied it. */
  raw: string;
  /** Resolved canonical type, or null if the input is unknown. */
  canonical: CanonicalEdgeType | null;
  /** Inverse of the resolved canonical, or null if unknown/undefined. */
  inverse: CanonicalEdgeType | null;
}
|
||||
13
vitest.config.ts
Normal file
13
vitest.config.ts
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
import { defineConfig } from "vitest/config";
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
include: ["src/**/*.test.ts"],
|
||||
globals: true,
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
obsidian: new URL("./src/__mocks__/obsidian.ts", import.meta.url).pathname,
|
||||
},
|
||||
},
|
||||
});
|
||||
Loading…
Reference in New Issue
Block a user