chore(i18n): translate memory prompt and system message to Chinese
Some checks failed
Build and Push Docker / build-and-push (push) Failing after 2m29s
@@ -25,12 +25,10 @@ class Mem0Integration:
         self.llm_model = config["llm"]["config"]["model"]
 
         # Memory prompt template
-        self.memory_template = """Based on the following memories about the user:
+        self.memory_template = """根据以下关于用户的记忆:
 
 {memories}
 
-Please respond to the user's query: {query}
-
-In your response, consider the memories above to provide a personalized answer."""
+请回应用户的询问:{query}
+在你的回复中,请参考上述记忆以提供个性化的回答。"""
 
     def search_memories(self, query: str, user_id: str, limit: int = 5) -> List[Any]:
         """Search for relevant memories about the user."""
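
The template keeps the {memories} and {query} placeholders, so only the surrounding English text changes. Below is a minimal sketch of how the translated template could be rendered before it is sent to the model; the str.format() call, the bullet formatting, and the example memories and query are assumptions for illustration, not part of this commit.

# Sketch only: rendering the translated memory template.
memory_template = """根据以下关于用户的记忆:

{memories}

请回应用户的询问:{query}
在你的回复中,请参考上述记忆以提供个性化的回答。"""

example_memories = ["用户喜欢喝咖啡", "用户对坚果过敏"]  # hypothetical search results
enhanced_prompt = memory_template.format(
    memories="\n".join(f"- {m}" for m in example_memories),  # one bullet per memory
    query="帮我推荐一份早餐",  # hypothetical user query
)
print(enhanced_prompt)
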
@@ -122,7 +120,7 @@ In your response, consider the memories above to provide a personalized answer."
         response = self.openai_client.chat.completions.create(
             model=self.llm_model,
             messages=[
-                {"role": "system", "content": "You are a helpful assistant with access to user memories. Use the provided memories to personalize your responses."},
+                {"role": "system", "content": "你是一个乐于助人的助手,可以访问用户记忆。请使用提供的记忆来个性化你的回复。"},
                 {"role": "user", "content": enhanced_prompt}
             ],
         )
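
Only the system message changes in this hunk; the shape of the chat.completions.create call is untouched. For reference, a self-contained sketch of an equivalent call with the new Chinese system message, using the openai v1 Python client. The client setup, model name, and example user prompt are assumptions, since the integration takes them from its config and from enhanced_prompt.

# Sketch only: standalone call with the translated system message.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

response = client.chat.completions.create(
    model="gpt-4o-mini",  # placeholder; the integration uses self.llm_model from config
    messages=[
        {"role": "system", "content": "你是一个乐于助人的助手,可以访问用户记忆。请使用提供的记忆来个性化你的回复。"},
        {"role": "user", "content": "根据以下关于用户的记忆:\n\n- 用户喜欢喝咖啡\n\n请回应用户的询问:帮我推荐一份早餐"},
    ],
)
print(response.choices[0].message.content)  # the model's reply, in Chinese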