Catch-all handling for llama-swap errors implemented + webhook notifier

2026-01-18 01:30:26 +02:00
parent d1e6b21508
commit 50e4f7a5f2
3 changed files with 308 additions and 6 deletions

bot/utils/error_handler.py Normal file
View File

@@ -0,0 +1,267 @@
# utils/error_handler.py
import aiohttp
import traceback
import datetime
import re

from utils.logger import get_logger

logger = get_logger('error_handler')

# Webhook URL for error notifications
ERROR_WEBHOOK_URL = "https://discord.com/api/webhooks/1462216811293708522/4kdGenpxZFsP0z3VBgebYENODKmcRrmEzoIwCN81jCirnAxuU2YvxGgwGCNBb6TInA9Z"

# User-friendly error message that Miku will say
MIKU_ERROR_MESSAGE = "Someone tell Koko-nii there is a problem with my AI."


def is_error_response(response_text: str) -> bool:
    """
    Detect if a response text is an error message.

    Args:
        response_text: The response text to check

    Returns:
        bool: True if the response appears to be an error message
    """
    if not response_text or not isinstance(response_text, str):
        return False

    response_lower = response_text.lower().strip()

    # Common error patterns
    error_patterns = [
        r'^error:?\s*\d{3}',  # "Error: 502" or "Error 502"
        r'^error:?\s+',  # "Error: " or "Error "
        r'^\d{3}\s+error',  # "502 Error"
        r'^sorry,?\s+(there\s+was\s+)?an?\s+error',  # "Sorry, an error" or "Sorry, there was an error"
        r'^sorry,?\s+the\s+response\s+took\s+too\s+long',  # Timeout error
        r'connection\s+(refused|failed|error|timeout)',
        r'timed?\s*out',
        r'failed\s+to\s+(connect|respond|process)',
        r'service\s+unavailable',
        r'internal\s+server\s+error',
        r'bad\s+gateway',
        r'gateway\s+timeout',
    ]

    # Check if response matches any error pattern
    for pattern in error_patterns:
        if re.search(pattern, response_lower):
            return True

    # Check for HTTP status codes indicating errors
    if re.match(r'^\d{3}$', response_text.strip()):
        status_code = int(response_text.strip())
        if status_code >= 400:  # HTTP error codes
            return True

    return False


async def send_error_webhook(error_message: str, context: dict = None):
    """
    Send error notification to the webhook.

    Args:
        error_message: The error message or exception details
        context: Optional dictionary with additional context (user, channel, etc.)
    """
    try:
        def truncate_field(text: str, max_length: int = 1000) -> str:
            """Truncate text to fit Discord's field value limit (1024 chars)."""
            if not text:
                return "N/A"
            text = str(text)
            if len(text) > max_length:
                return text[:max_length - 20] + "\n...(truncated)"
            return text

        # Build embed for webhook
        embed = {
            "title": "🚨 Miku Bot Error",
            "color": 0xFF0000,  # Red color
            "timestamp": datetime.datetime.utcnow().isoformat(),
            "fields": []
        }

        # Add error message (limit to 1000 chars to leave room for code blocks)
        error_value = f"```\n{truncate_field(error_message, 900)}\n```"
        embed["fields"].append({
            "name": "Error Message",
            "value": error_value,
            "inline": False
        })

        # Add context if provided
        if context:
            if 'user' in context and context['user']:
                embed["fields"].append({
                    "name": "User",
                    "value": truncate_field(context['user'], 200),
                    "inline": True
                })
            if 'channel' in context and context['channel']:
                embed["fields"].append({
                    "name": "Channel",
                    "value": truncate_field(context['channel'], 200),
                    "inline": True
                })
            if 'guild' in context and context['guild']:
                embed["fields"].append({
                    "name": "Server",
                    "value": truncate_field(context['guild'], 200),
                    "inline": True
                })
            if 'prompt' in context and context['prompt']:
                prompt_value = f"```\n{truncate_field(context['prompt'], 400)}\n```"
                embed["fields"].append({
                    "name": "User Prompt",
                    "value": prompt_value,
                    "inline": False
                })
            if 'exception_type' in context and context['exception_type']:
                embed["fields"].append({
                    "name": "Exception Type",
                    "value": f"`{truncate_field(context['exception_type'], 200)}`",
                    "inline": True
                })
            if 'traceback' in context and context['traceback']:
                tb_value = f"```python\n{truncate_field(context['traceback'], 800)}\n```"
                embed["fields"].append({
                    "name": "Traceback",
                    "value": tb_value,
                    "inline": False
                })

        # Ensure we have at least one field (Discord requirement)
        if not embed["fields"]:
            embed["fields"].append({
                "name": "Status",
                "value": "Error occurred with no additional context",
                "inline": False
            })

        # Send webhook
        payload = {
            "content": "<@344584170839236608>",  # Mention Koko-nii
            "embeds": [embed]
        }

        async with aiohttp.ClientSession() as session:
            async with session.post(ERROR_WEBHOOK_URL, json=payload) as response:
                if response.status in [200, 204]:
                    logger.info(f"✅ Error webhook sent successfully")
                else:
                    error_text = await response.text()
                    logger.error(f"❌ Failed to send error webhook: {response.status} - {error_text}")
    except Exception as e:
        logger.error(f"❌ Exception while sending error webhook: {e}")
        logger.error(traceback.format_exc())


async def handle_llm_error(
    error: Exception,
    user_prompt: str = None,
    user_id: str = None,
    guild_id: str = None,
    author_name: str = None
) -> str:
    """
    Handle LLM errors by logging them and sending webhook notification.

    Args:
        error: The exception that occurred
        user_prompt: The user's prompt (if available)
        user_id: The user ID (if available)
        guild_id: The guild ID (if available)
        author_name: The user's display name (if available)

    Returns:
        str: User-friendly error message for Miku to say
    """
    logger.error(f"🚨 LLM Error occurred: {type(error).__name__}: {str(error)}")

    # Build context
    context = {
        "exception_type": type(error).__name__,
        "traceback": traceback.format_exc()
    }
    if user_prompt:
        context["prompt"] = user_prompt
    if author_name:
        context["user"] = author_name
    elif user_id:
        context["user"] = f"User ID: {user_id}"
    if guild_id:
        context["guild"] = f"Guild ID: {guild_id}"

    # Get full error message
    error_message = f"{type(error).__name__}: {str(error)}"

    # Send webhook notification
    await send_error_webhook(error_message, context)

    return MIKU_ERROR_MESSAGE


async def handle_response_error(
    response_text: str,
    user_prompt: str = None,
    user_id: str = None,
    guild_id: str = None,
    author_name: str = None,
    channel_name: str = None
) -> str:
    """
    Handle error responses from the LLM by checking if the response is an error message.

    Args:
        response_text: The response text from the LLM
        user_prompt: The user's prompt (if available)
        user_id: The user ID (if available)
        guild_id: The guild ID (if available)
        author_name: The user's display name (if available)
        channel_name: The channel name (if available)

    Returns:
        str: Either the original response (if not an error) or user-friendly error message
    """
    if not is_error_response(response_text):
        return response_text

    logger.error(f"🚨 Error response detected: {response_text}")

    # Build context
    context = {}
    if user_prompt:
        context["prompt"] = user_prompt
    if author_name:
        context["user"] = author_name
    elif user_id:
        context["user"] = f"User ID: {user_id}"
    if channel_name:
        context["channel"] = channel_name
    elif guild_id:
        context["channel"] = f"Guild ID: {guild_id}"
    if guild_id:
        context["guild"] = f"Guild ID: {guild_id}"

    # Send webhook notification
    await send_error_webhook(f"LLM returned error response: {response_text}", context)

    return MIKU_ERROR_MESSAGE
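
Editorial aside (not part of this commit): a quick sanity check of the detection heuristics in is_error_response(), assuming the module is importable as utils.error_handler when run from the bot/ directory. The inputs mirror the strings that query_llama's old error paths used to return:

from utils.error_handler import is_error_response

assert is_error_response("Error: 502") is True      # matches r'^error:?\s*\d{3}'
assert is_error_response("502") is True             # bare HTTP status code >= 400
assert is_error_response("200") is False            # bare status code < 400 is not treated as an error
assert is_error_response("Sorry, the response took too long. Please try again.") is True
assert is_error_response("Hi! What would you like to talk about?") is False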

View File

@@ -11,6 +11,7 @@ from utils.context_manager import get_context_for_response_type, get_complete_co
 from utils.moods import load_mood_description
 from utils.conversation_history import conversation_history
 from utils.logger import get_logger
+from utils.error_handler import handle_llm_error, handle_response_error

 logger = get_logger('llm')
@@ -281,8 +282,18 @@ Please respond in a way that reflects this emotional tone.{pfp_context}"""
                 # Escape asterisks for actions (e.g., *adjusts hair* becomes \*adjusts hair\*)
                 reply = _escape_markdown_actions(reply)
+
+                # Check if the reply is an error response and handle it
+                reply = await handle_response_error(
+                    reply,
+                    user_prompt=user_prompt,
+                    user_id=str(user_id),
+                    guild_id=str(guild_id) if guild_id else None,
+                    author_name=author_name
+                )
+
                 # Save to conversation history (only if both prompt and reply are non-empty)
-                if user_prompt and user_prompt.strip() and reply and reply.strip():
+                # Don't save error messages to history
+                if user_prompt and user_prompt.strip() and reply and reply.strip() and reply != "Someone tell Koko-nii there is a problem with my AI.":
                     # Add user message to history
                     conversation_history.add_message(
                         channel_id=channel_id,
@@ -298,21 +309,44 @@ Please respond in a way that reflects this emotional tone.{pfp_context}"""
                         is_bot=True
                     )

-                # Also save to legacy globals for backward compatibility
-                if user_prompt and user_prompt.strip() and reply and reply.strip():
+                # Also save to legacy globals for backward compatibility (skip error messages)
+                if user_prompt and user_prompt.strip() and reply and reply.strip() and reply != "Someone tell Koko-nii there is a problem with my AI.":
                     globals.conversation_history[user_id].append((user_prompt, reply))

                 return reply
             else:
                 error_text = await response.text()
                 logger.error(f"Error from llama-swap: {response.status} - {error_text}")
+
+                # Send webhook notification for HTTP errors
+                await handle_response_error(
+                    f"Error: {response.status}",
+                    user_prompt=user_prompt,
+                    user_id=str(user_id),
+                    guild_id=str(guild_id) if guild_id else None,
+                    author_name=author_name
+                )
+
                 # Don't save error responses to conversation history
-                return f"Error: {response.status}"
+                return "Someone tell Koko-nii there is a problem with my AI."
     except asyncio.TimeoutError:
-        return "Sorry, the response took too long. Please try again."
+        logger.error("Timeout error in query_llama")
+        return await handle_llm_error(
+            asyncio.TimeoutError("Request timed out after 300 seconds"),
+            user_prompt=user_prompt,
+            user_id=str(user_id),
+            guild_id=str(guild_id) if guild_id else None,
+            author_name=author_name
+        )
     except Exception as e:
         logger.error(f"Error in query_llama: {e}")
-        return f"Sorry, there was an error: {str(e)}"
+        return await handle_llm_error(
+            e,
+            user_prompt=user_prompt,
+            user_id=str(user_id),
+            guild_id=str(guild_id) if guild_id else None,
+            author_name=author_name
+        )

 # Backward compatibility alias for existing code
 query_ollama = query_llama
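
Editorial aside (not part of this commit): with these changes every failure path in query_llama resolves to the same friendly string, and the webhook has already been sent by the time the caller sees it. A hypothetical call site might look like the sketch below; the parameter and variable names are assumptions inferred from the variables used inside query_llama, not confirmed by this diff:

# Hypothetical call site — argument and variable names here are assumptions.
reply = await query_llama(
    user_prompt,
    user_id=user_id,
    guild_id=guild_id,
    author_name=author_name,
    channel_id=channel_id,
)
# No branching on raw error text is needed: errors arrive as the single
# fallback line, and the error webhook has already fired inside query_llama.
await message.channel.send(reply)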

View File

@@ -62,6 +62,7 @@ COMPONENTS = {
     'voice_manager': 'Voice channel session management',
     'voice_commands': 'Voice channel commands',
     'voice_audio': 'Voice audio streaming and TTS',
+    'error_handler': 'Error detection and webhook notifications',
 }

 # Global configuration
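
Editorial aside (an assumption about wiring this diff doesn't show): COMPONENTS appears to be the registry the shared logger uses to label per-component output, which is why error_handler.py's get_logger('error_handler') call needs the new entry:

from utils.logger import get_logger

# 'error_handler' matches the new COMPONENTS entry above; exactly how
# COMPONENTS is consumed by get_logger() is not shown in this commit.
logger = get_logger('error_handler')
logger.info("error handler component loaded")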