fix: show debug info on errors + prompt export function
- Frontend: debug viewer now shows even when a test fails
- Frontend: export button to download the complete prompt config as JSON
- Backend: attach debug info to JSON validation errors
- Backend: include raw output and its length in error details

Users can now debug failed prompts and export configs for analysis.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
7f2ba4fbad
commit
84dad07e15
|
|
@ -60,28 +60,37 @@ def resolve_placeholders(template: str, variables: Dict[str, Any], debug_info: O
|
|||
return result
|
||||
|
||||
|
||||
def validate_json_output(output: str, schema: Optional[Dict] = None, debug_info: Optional[Dict] = None) -> Dict:
    """
    Parse ``output`` as JSON, raising a structured HTTP error on failure.

    Args:
        output: Raw model output expected to contain a JSON document.
        schema: Optional JSON schema to validate against (TODO: jsonschema library)
        debug_info: Optional dict to attach to error for debugging

    Returns:
        The parsed JSON value.

    Raises:
        HTTPException: 500 with a detail dict carrying the parse error, a
            truncated echo of the raw output, its length, and (when provided)
            the caller's debug info.
    """
    try:
        # TODO: Add jsonschema validation if schema provided
        return json.loads(output)
    except json.JSONDecodeError as e:
        # Echo at most 500 chars of the offending output so huge responses
        # don't bloat the error payload.
        preview = output[:500]
        if len(output) > 500:
            preview += '...'
        detail = {
            "error": f"AI returned invalid JSON: {str(e)}",
            "raw_output": preview,
            "output_length": len(output)
        }
        if debug_info:
            detail["debug"] = debug_info

        raise HTTPException(
            status_code=500,
            detail=detail
        )
|
||||
|
||||
|
||||
|
|
@ -169,7 +178,7 @@ async def execute_base_prompt(
|
|||
# Validate JSON if required
|
||||
output_format = prompt.get('output_format', 'text')
|
||||
if output_format == 'json':
|
||||
output = validate_json_output(response, prompt.get('output_schema'))
|
||||
output = validate_json_output(response, prompt.get('output_schema'), debug_info if enable_debug else None)
|
||||
else:
|
||||
output = response
|
||||
|
||||
|
|
@ -273,7 +282,7 @@ async def execute_pipeline_prompt(
|
|||
|
||||
# Validate JSON if required
|
||||
if output_format == 'json':
|
||||
output = validate_json_output(response, prompt_def.get('output_schema'))
|
||||
output = validate_json_output(response, prompt_def.get('output_schema'), prompt_debug if enable_debug else None)
|
||||
else:
|
||||
output = response
|
||||
|
||||
|
|
|
|||
|
|
@ -236,6 +236,33 @@ export default function UnifiedPromptModal({ prompt, onSave, onClose }) {
|
|||
}
|
||||
}
|
||||
|
||||
const handleExport = () => {
  // Snapshot the full prompt configuration from component state.
  const snapshot = {
    name,
    slug,
    display_name: displayName,
    description,
    type,
    category,
    active,
    sort_order: sortOrder,
    output_format: outputFormat,
    template: type === 'base' ? template : null,
    stages: type === 'pipeline' ? stages : null
  }

  // Serve the snapshot as a downloadable JSON file via a temporary link.
  const payload = JSON.stringify(snapshot, null, 2)
  const fileBlob = new Blob([payload], { type: 'application/json' })
  const objectUrl = URL.createObjectURL(fileBlob)
  const datestamp = new Date().toISOString().split('T')[0]

  const link = document.createElement('a')
  link.href = objectUrl
  link.download = `prompt-${slug || 'new'}-${datestamp}.json`
  document.body.appendChild(link)
  link.click()
  document.body.removeChild(link)
  URL.revokeObjectURL(objectUrl)
}
|
||||
|
||||
const handleTest = async () => {
|
||||
// Can only test existing prompts (need slug in database)
|
||||
if (!prompt?.slug) {
|
||||
|
|
@ -252,7 +279,30 @@ export default function UnifiedPromptModal({ prompt, onSave, onClose }) {
|
|||
setTestResult(result)
|
||||
setShowDebug(true)
|
||||
} catch (e) {
|
||||
setError('Test-Fehler: ' + e.message)
|
||||
// Show error AND try to extract debug info from error
|
||||
const errorMsg = e.message
|
||||
let debugData = null
|
||||
|
||||
// Try to parse error message for embedded debug info
|
||||
try {
|
||||
const parsed = JSON.parse(errorMsg)
|
||||
if (parsed.detail) {
|
||||
setError('Test-Fehler: ' + parsed.detail)
|
||||
debugData = parsed
|
||||
} else {
|
||||
setError('Test-Fehler: ' + errorMsg)
|
||||
}
|
||||
} catch {
|
||||
setError('Test-Fehler: ' + errorMsg)
|
||||
}
|
||||
|
||||
// Set result with error info so debug viewer shows it
|
||||
setTestResult({
|
||||
error: true,
|
||||
error_message: errorMsg,
|
||||
debug: debugData || { error: errorMsg }
|
||||
})
|
||||
setShowDebug(true) // ALWAYS show debug on test, even on error
|
||||
} finally {
|
||||
setTesting(false)
|
||||
}
|
||||
|
|
@ -640,7 +690,7 @@ export default function UnifiedPromptModal({ prompt, onSave, onClose }) {
|
|||
display: 'flex', gap: 12, justifyContent: 'space-between',
|
||||
paddingTop: 16, borderTop: '1px solid var(--border)'
|
||||
}}>
|
||||
<div>
|
||||
<div style={{ display: 'flex', gap: 8 }}>
|
||||
{prompt?.slug && (
|
||||
<button
|
||||
className="btn"
|
||||
|
|
@ -654,6 +704,14 @@ export default function UnifiedPromptModal({ prompt, onSave, onClose }) {
|
|||
{testing ? '🔬 Teste...' : '🔬 Test ausführen'}
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
className="btn"
|
||||
onClick={handleExport}
|
||||
disabled={loading}
|
||||
title="Exportiere Prompt-Konfiguration als JSON"
|
||||
>
|
||||
📥 Export
|
||||
</button>
|
||||
</div>
|
||||
<div style={{ display: 'flex', gap: 12 }}>
|
||||
<button className="btn" onClick={onClose}>
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user