WP25b #21
|
|
@ -101,23 +101,26 @@ class LLMService:
|
||||||
|
|
||||||
def get_prompt(self, key: str, model_id: str = None, provider: str = None) -> str:
    # NOTE(review): `str = None` is an implicit-Optional annotation (disallowed by
    # PEP 484); should be `Optional[str]` / `str | None` once the file's typing
    # imports / Python version are confirmed. Signature left unchanged here.
    """
    WP-25b: High-precision prompt lookup with detailed trace logging.

    Resolution hierarchy (most to least specific):
    exact model ID -> provider name -> global default.

    Args:
        key: Name of the prompt entry in ``self.prompts``.
        model_id: Exact model identifier (e.g. 'google/gemini-2.0-flash-exp:free').
        provider: Provider name (e.g. 'ollama' or 'openrouter').

    Returns:
        The resolved prompt text as a string; '' if *key* is unknown and no
        fallback entry exists.
    """
    data = self.prompts.get(key, "")

    # A non-dict entry has no per-model/per-provider variants; return it as-is.
    if not isinstance(data, dict):
        return str(data)

    # 1. Most specific match: exact model ID.
    if model_id and model_id in data:
        # Lazy %-style args: the message is only formatted if INFO is enabled.
        logger.info("🎯 [PROMPT-TRACE] Level 1 Match: Model-specific ('%s') for key '%s'", model_id, key)
        return str(data[model_id])

    # 2. Middle tier: provider-level fallback.
    if provider and provider in data:
        logger.info("📡 [PROMPT-TRACE] Level 2 Match: Provider-fallback ('%s') for key '%s'", provider, key)
        return str(data[provider])

    # 3. Global fallback: 'default', then the legacy 'gemini'/'ollama' keys.
    default_val = data.get("default", data.get("gemini", data.get("ollama", "")))
    logger.info("⚓ [PROMPT-TRACE] Level 3 Match: Global Default for key '%s'", key)
    return str(default_val)
|
||||||
|
|
||||||
async def generate_raw_response(
|
async def generate_raw_response(
|
||||||
self,
|
self,
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue
Block a user