feat(memory): integrate Mem0 for enhanced conversational memory
memory_module/memory_integration.py (new file, 158 lines)
@@ -0,0 +1,158 @@
import os
from typing import List, Dict, Any, Optional
from datetime import datetime
import openai
from mem0 import Memory


class Mem0Integration:
    """Mem0 integration for memory retrieval and storage in RAG pipeline."""

    def __init__(self, config: Dict[str, Any]):
        """Initialize Mem0 with configuration."""
        self.config = config
        self.memory = Memory.from_config(config)

        # Initialize OpenAI client for chat completion
        self.openai_client = openai.OpenAI(
            api_key=config["llm"]["config"]["api_key"],
            base_url=config["llm"]["config"].get("openai_base_url")
        )
        self.llm_model = config["llm"]["config"]["model"]

        # Memory prompt template
        self.memory_template = """Based on the following memories about the user:
{memories}

Please respond to the user's query: {query}

In your response, consider the memories above to provide a personalized answer."""

    def search_memories(self, query: str, user_id: str, limit: int = 5) -> List[Dict[str, Any]]:
        """Search for relevant memories about the user."""
        try:
            results = self.memory.search(
                query=query,
                user_id=user_id,
                limit=limit
            )
            return results
        except Exception as e:
            print(f"[ERROR] Failed to search memories: {e}")
            return []

    def add_memory(self, messages: List[Dict[str, str]], user_id: str, metadata: Optional[Dict] = None) -> Dict[str, Any]:
        """Add a memory for the user."""
        try:
            result = self.memory.add(
                messages=messages,
                user_id=user_id,
                metadata=metadata or {}
            )
            return result
        except Exception as e:
            print(f"[ERROR] Failed to add memory: {e}")
            return {}

    def format_memories_for_prompt(self, memories: List[Dict[str, Any]]) -> str:
        """Format memories into a string for the prompt."""
        if not memories:
            return "No previous memories about this user."

        formatted = []
        for i, memory in enumerate(memories, 1):
            memory_text = memory.get("memory", "")
            created_at = memory.get("created_at", "")
            if created_at:
                try:
                    # Format the date if it's available
                    created_date = datetime.fromisoformat(created_at.replace('Z', '+00:00'))
                    created_str = created_date.strftime("%Y-%m-%d %H:%M")
                except (ValueError, TypeError):
                    # Fall back to the raw value if it is not a valid ISO-8601 string
                    created_str = created_at
                formatted.append(f"{i}. {memory_text} (remembered on: {created_str})")
            else:
                formatted.append(f"{i}. {memory_text}")

        return "\n".join(formatted)

    def generate_response_with_memory(self, user_input: str, user_id: str) -> Dict[str, Any]:
        """Generate a response using memories and store the interaction."""
        # Step 1: Search for relevant memories
        memories = self.search_memories(user_input, user_id)

        # Step 2: Format memories for the prompt
        formatted_memories = self.format_memories_for_prompt(memories)

        # Step 3: Create the enhanced prompt
        enhanced_prompt = self.memory_template.format(
            memories=formatted_memories,
            query=user_input
        )

        # Step 4: Generate response using OpenAI
        try:
            response = self.openai_client.chat.completions.create(
                model=self.llm_model,
                messages=[
                    {"role": "system", "content": "You are a helpful assistant with access to user memories. Use the provided memories to personalize your responses."},
                    {"role": "user", "content": enhanced_prompt}
                ],
            )

            assistant_response = response.choices[0].message.content

            # Step 5: Store the interaction as new memories
            messages = [
                {"role": "user", "content": user_input},
                {"role": "assistant", "content": assistant_response}
            ]

            # Store with metadata including timestamp
            metadata = {
                "timestamp": datetime.now().isoformat(),
                "type": "chat_interaction"
            }

            self.add_memory(messages, user_id, metadata)

            return {
                "success": True,
                "response": assistant_response,
                "user_id": user_id
            }

        except Exception as e:
            print(f"[ERROR] Failed to generate response: {e}")
            return {
                "success": False,
                "error": str(e),
                "user_id": user_id
            }

    def get_all_memories(self, user_id: str) -> List[Dict[str, Any]]:
        """Get all memories for a user."""
        try:
            memories = self.memory.get_all(user_id=user_id)
            return memories
        except Exception as e:
            print(f"[ERROR] Failed to get all memories: {e}")
            return []

    def delete_memory(self, memory_id: str) -> bool:
        """Delete a specific memory."""
        try:
            self.memory.delete(memory_id)
            return True
        except Exception as e:
            print(f"[ERROR] Failed to delete memory: {e}")
            return False

    def delete_all_memories(self, user_id: str) -> bool:
        """Delete all memories for a user."""
        try:
            self.memory.delete_all(user_id=user_id)
            return True
        except Exception as e:
            print(f"[ERROR] Failed to delete all memories: {e}")
            return False
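
Usage note (not part of the committed file): a minimal sketch of how this class might be wired up. The config shape below is an assumption inferred from how __init__ reads config["llm"]["config"]; the exact keys, and any embedder or vector-store sections, depend on your Mem0 setup, and "gpt-4o-mini" plus the user_id "alice" are placeholder values.

    import os
    from memory_module.memory_integration import Mem0Integration

    # Hypothetical Mem0 config; adjust keys to match your mem0 version and providers.
    config = {
        "llm": {
            "provider": "openai",
            "config": {
                "api_key": os.environ["OPENAI_API_KEY"],
                "model": "gpt-4o-mini",       # placeholder model name
                "openai_base_url": None,      # or a self-hosted endpoint
            },
        },
        # Embedder / vector-store sections would typically go here as well.
    }

    mem = Mem0Integration(config)
    result = mem.generate_response_with_memory("I prefer vegetarian recipes.", user_id="alice")
    if result["success"]:
        print(result["response"])
    else:
        print(result["error"])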