feat: Restore mood system and implement comprehensive memory editor UI
MOOD SYSTEM FIX:
- Mount bot/moods directory in docker-compose.yml for Cat container access
- Update miku_personality plugin to load mood descriptions from .txt files
- Add Cat logger for debugging mood loading (replaces print statements)
- Moods now dynamically loaded from working_memory instead of hardcoded neutral
This commit is contained in:
@@ -381,6 +381,147 @@ class CatAdapter:
|
||||
logger.error(f"Error deleting memory point: {e}")
|
||||
return False
|
||||
|
||||
async def update_memory_point(self, collection: str, point_id: str, content: str, metadata: dict = None) -> bool:
    """Update an existing memory point's content and/or metadata.

    Fetches the point from Qdrant to recover its stored vector, re-embeds
    the content via Cat's /embedder endpoint only when the text actually
    changed, then upserts the point back with the updated payload.

    Args:
        collection: Qdrant collection name (e.g. "episodic", "declarative").
        point_id: ID of the point to update.
        content: New page content for the point.
        metadata: Replacement metadata dict; when None, the point's
            existing metadata is preserved.

    Returns:
        True on success, False on any failure (point missing, HTTP error,
        or unexpected exception — all failures are logged, never raised).
    """
    try:
        # Swap the Cat API host for the Qdrant service host so we can talk
        # to the vector store directly on the same docker network.
        qdrant_host = self._base_url.replace("http://cheshire-cat:80", "http://cheshire-cat-vector-memory:6333")

        async with aiohttp.ClientSession() as session:
            # Retrieve the existing point (vector + payload) by ID.
            async with session.post(
                f"{qdrant_host}/collections/{collection}/points",
                json={"ids": [point_id], "with_vector": True, "with_payload": True},
                timeout=aiohttp.ClientTimeout(total=15)
            ) as response:
                if response.status != 200:
                    logger.error(f"Failed to fetch point {point_id}: {response.status}")
                    return False

                data = await response.json()
                points = data.get("result", [])
                if not points:
                    logger.error(f"Point {point_id} not found")
                    return False

                existing_point = points[0]
                existing_vector = existing_point.get("vector")
                existing_payload = existing_point.get("payload", {})

            # Re-embed only when the text changed; otherwise keep the stored
            # vector and skip the embedder round trip entirely.
            new_vector = existing_vector
            if content != existing_payload.get("page_content"):
                # FIX: use "async with" so the embedder response is released
                # back to the connection pool (original awaited session.post
                # directly and never released the response).
                async with session.post(
                    f"{self._base_url}/embedder",
                    json={"text": content},
                    headers=self._get_headers(),
                    timeout=aiohttp.ClientTimeout(total=30)
                ) as embed_response:
                    if embed_response.status == 200:
                        embed_data = await embed_response.json()
                        # FIX: if a 200 response carries no "embedding" key,
                        # fall back to the old vector instead of writing None
                        # into Qdrant (which would corrupt the point).
                        new_vector = embed_data.get("embedding") or existing_vector
                    else:
                        logger.warning("Failed to re-embed content, keeping old vector")

            # Build the replacement payload; None metadata means "keep existing".
            updated_payload = {
                "page_content": content,
                "metadata": metadata if metadata is not None else existing_payload.get("metadata", {})
            }

            # Upsert the point back into Qdrant with the (possibly new) vector.
            async with session.put(
                f"{qdrant_host}/collections/{collection}/points",
                json={
                    "points": [{
                        "id": point_id,
                        "vector": new_vector,
                        "payload": updated_payload
                    }]
                },
                timeout=aiohttp.ClientTimeout(total=15)
            ) as update_response:
                if update_response.status == 200:
                    logger.info(f"✏️ Updated memory point {point_id} in {collection}")
                    return True
                logger.error(f"Failed to update point: {update_response.status}")
                return False

    except Exception as e:
        # Best-effort API: surface failure via return value, not exception.
        logger.error(f"Error updating memory point: {e}")
        return False
|
||||
|
||||
async def create_memory_point(self, collection: str, content: str, user_id: str, source: str, metadata: dict = None) -> Optional[str]:
    """Create a new memory point manually.

    Embeds the content via Cat's /embedder endpoint, then inserts a new
    point with a fresh UUID directly into the Qdrant collection.

    Args:
        collection: Target collection ("episodic" or "declarative").
        content: Text content of the memory.
        user_id: Owner of the memory; stored in metadata["user_id"] for
            declarative memories, or as metadata["source"] for episodic.
        source: Origin tag stored in metadata["source"] (overridden by
            user_id for episodic memories).
        metadata: Optional extra metadata; copied, never mutated.

    Returns:
        The new point's ID string on success, None on any failure
        (failures are logged, never raised).
    """
    try:
        import uuid
        import time

        # Generate a unique ID for the new point.
        point_id = str(uuid.uuid4())

        async with aiohttp.ClientSession() as session:
            # Get the vector embedding for the content from Cat.
            async with session.post(
                f"{self._base_url}/embedder",
                json={"text": content},
                headers=self._get_headers(),
                timeout=aiohttp.ClientTimeout(total=30)
            ) as response:
                if response.status != 200:
                    logger.error(f"Failed to embed content: {response.status}")
                    return None

                data = await response.json()
                vector = data.get("embedding")
                if not vector:
                    logger.error("No embedding returned from Cat")
                    return None

            # FIX: copy the caller's metadata dict before mutating it —
            # the original wrote "source"/"when"/"user_id" keys straight
            # into the argument, a shared-mutable-argument bug.
            payload = {
                "page_content": content,
                "metadata": dict(metadata) if metadata else {}
            }
            payload["metadata"]["source"] = source
            payload["metadata"]["when"] = time.time()

            # For declarative memories the owner lives in metadata["user_id"];
            # for episodic it lives in the "source" field instead.
            if collection == "declarative":
                payload["metadata"]["user_id"] = user_id
            elif collection == "episodic":
                payload["metadata"]["source"] = user_id

            # Swap the Cat API host for the Qdrant service host so we can
            # insert into the vector store directly.
            qdrant_host = self._base_url.replace("http://cheshire-cat:80", "http://cheshire-cat-vector-memory:6333")

            async with session.put(
                f"{qdrant_host}/collections/{collection}/points",
                json={
                    "points": [{
                        "id": point_id,
                        "vector": vector,
                        "payload": payload
                    }]
                },
                timeout=aiohttp.ClientTimeout(total=15)
            ) as insert_response:
                if insert_response.status == 200:
                    logger.info(f"✨ Created new {collection} memory point: {point_id}")
                    return point_id
                logger.error(f"Failed to insert point: {insert_response.status}")
                return None

    except Exception as e:
        # Best-effort API: surface failure via return value, not exception.
        logger.error(f"Error creating memory point: {e}")
        return None
|
||||
|
||||
async def wipe_all_memories(self) -> bool:
|
||||
"""
|
||||
Delete ALL memory collections (episodic + declarative).
|
||||
|
||||
Reference in New Issue
Block a user