llama-swap error catch-all implemented + webhook notifier
@@ -11,6 +11,7 @@ from utils.context_manager import get_context_for_response_type, get_complete_co
 from utils.moods import load_mood_description
 from utils.conversation_history import conversation_history
 from utils.logger import get_logger
+from utils.error_handler import handle_llm_error, handle_response_error

 logger = get_logger('llm')

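Note: the diff imports handle_llm_error and handle_response_error from utils.error_handler, but that module is not part of this commit. Below is a minimal sketch of the interface the call sites assume; the function names and keyword arguments come from the diff, while the error-detection heuristic, the FALLBACK_REPLY text, and the _notify_webhook helper are illustrative assumptions.

# utils/error_handler.py -- illustrative sketch only; the real module is not shown in this diff.
# Function names and keyword arguments match the call sites below; everything else is assumed.
import logging
from typing import Optional

logger = logging.getLogger('llm')

FALLBACK_REPLY = "Someone tell Koko-nii there is a problem with my AI."


async def _notify_webhook(summary: str, *, user_prompt: Optional[str], user_id: Optional[str],
                          guild_id: Optional[str], author_name: Optional[str]) -> None:
    """Placeholder for the webhook notifier; see the separate sketch at the end of the diff."""
    logger.info("webhook notification: %s (user=%s, guild=%s)", summary, user_id, guild_id)


async def handle_response_error(reply: str, *, user_prompt: Optional[str] = None,
                                user_id: Optional[str] = None, guild_id: Optional[str] = None,
                                author_name: Optional[str] = None) -> str:
    """If the reply looks like an error payload, notify the webhook and return the fallback
    text; otherwise return the reply unchanged (catch-all for error-shaped completions)."""
    if reply and reply.strip().lower().startswith(("error:", '{"error"')):
        await _notify_webhook(f"error-looking reply: {reply[:200]}",
                              user_prompt=user_prompt, user_id=user_id,
                              guild_id=guild_id, author_name=author_name)
        return FALLBACK_REPLY
    return reply


async def handle_llm_error(exc: BaseException, *, user_prompt: Optional[str] = None,
                           user_id: Optional[str] = None, guild_id: Optional[str] = None,
                           author_name: Optional[str] = None) -> str:
    """Log the exception, notify the webhook, and return a user-facing fallback string."""
    logger.error("LLM error: %r", exc)
    await _notify_webhook(f"exception: {exc!r}",
                          user_prompt=user_prompt, user_id=user_id,
                          guild_id=guild_id, author_name=author_name)
    return FALLBACK_REPLY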
@@ -281,8 +282,18 @@ Please respond in a way that reflects this emotional tone.{pfp_context}"""
                 # Escape asterisks for actions (e.g., *adjusts hair* becomes \*adjusts hair\*)
                 reply = _escape_markdown_actions(reply)

+                # Check if the reply is an error response and handle it
+                reply = await handle_response_error(
+                    reply,
+                    user_prompt=user_prompt,
+                    user_id=str(user_id),
+                    guild_id=str(guild_id) if guild_id else None,
+                    author_name=author_name
+                )
+
                 # Save to conversation history (only if both prompt and reply are non-empty)
-                if user_prompt and user_prompt.strip() and reply and reply.strip():
+                # Don't save error messages to history
+                if user_prompt and user_prompt.strip() and reply and reply.strip() and reply != "Someone tell Koko-nii there is a problem with my AI.":
                     # Add user message to history
                     conversation_history.add_message(
                         channel_id=channel_id,
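Note: the history-gating condition added above, and the matching one for the legacy globals in the next hunk, repeat the canned fallback string literally; the check they implement reduces to a small predicate like the one below. The helper name and module-level constant are illustrative, not part of the commit.

# Illustrative only: mirrors the condition added in the diff above.
FALLBACK_REPLY = "Someone tell Koko-nii there is a problem with my AI."

def should_save_to_history(user_prompt: str, reply: str) -> bool:
    # Save only non-empty prompt/reply pairs, and never the canned error reply.
    return bool(user_prompt and user_prompt.strip()
                and reply and reply.strip()
                and reply != FALLBACK_REPLY)

assert should_save_to_history("hi", "hello there") is True
assert should_save_to_history("hi", FALLBACK_REPLY) is False  # error replies are not persisted
assert should_save_to_history("", "hello there") is False     # empty prompts are skipped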
@@ -298,21 +309,44 @@ Please respond in a way that reflects this emotional tone.{pfp_context}"""
                         is_bot=True
                     )

-                # Also save to legacy globals for backward compatibility
-                if user_prompt and user_prompt.strip() and reply and reply.strip():
+                # Also save to legacy globals for backward compatibility (skip error messages)
+                if user_prompt and user_prompt.strip() and reply and reply.strip() and reply != "Someone tell Koko-nii there is a problem with my AI.":
                     globals.conversation_history[user_id].append((user_prompt, reply))

                 return reply
             else:
                 error_text = await response.text()
                 logger.error(f"Error from llama-swap: {response.status} - {error_text}")
+
+                # Send webhook notification for HTTP errors
+                await handle_response_error(
+                    f"Error: {response.status}",
+                    user_prompt=user_prompt,
+                    user_id=str(user_id),
+                    guild_id=str(guild_id) if guild_id else None,
+                    author_name=author_name
+                )
+
+                # Don't save error responses to conversation history
+                return f"Error: {response.status}"
-                return "Someone tell Koko-nii there is a problem with my AI."
     except asyncio.TimeoutError:
-        return "Sorry, the response took too long. Please try again."
         logger.error("Timeout error in query_llama")
+        return await handle_llm_error(
+            asyncio.TimeoutError("Request timed out after 300 seconds"),
+            user_prompt=user_prompt,
+            user_id=str(user_id),
+            guild_id=str(guild_id) if guild_id else None,
+            author_name=author_name
+        )
     except Exception as e:
         logger.error(f"Error in query_llama: {e}")
-        return f"Sorry, there was an error: {str(e)}"
+        return await handle_llm_error(
+            e,
+            user_prompt=user_prompt,
+            user_id=str(user_id),
+            guild_id=str(guild_id) if guild_id else None,
+            author_name=author_name
+        )

 # Backward compatibility alias for existing code
 query_ollama = query_llama
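Note: the webhook notifier referenced in the commit title and in the "# Send webhook notification for HTTP errors" comment is not shown in this diff. Below is a hypothetical sketch of such a notifier, assuming a Discord-style webhook posted with aiohttp; the ERROR_WEBHOOK_URL environment variable, the function name, and the payload shape are assumptions, not part of the commit.

# Hypothetical webhook notifier sketch (not shown in this diff).
import os
from typing import Optional

import aiohttp

WEBHOOK_URL = os.environ.get("ERROR_WEBHOOK_URL", "")


async def send_error_webhook(summary: str, *, user_id: Optional[str] = None,
                             guild_id: Optional[str] = None,
                             author_name: Optional[str] = None) -> None:
    """POST a short error report to the configured webhook; silently skip if unset."""
    if not WEBHOOK_URL:
        return
    payload = {
        "content": f"LLM error for {author_name or 'unknown'} "
                   f"(user={user_id}, guild={guild_id}): {summary[:1500]}",
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(WEBHOOK_URL, json=payload,
                                timeout=aiohttp.ClientTimeout(total=10)) as resp:
            # Discord webhooks return 204 No Content on success.
            resp.raise_for_status()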