| 12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182 |
- from mem0 import Memory
- from .config import COLLECTION_CONVERSATIONAL, COLLECTION_KNOWLEDGE
- from .prompts import PROMPTS
- NOOP_WHERE = {"$and": [{"user_id": {"$ne": ""}}, {"user_id": {"$ne": ""}}]}
def make_config(
    collection_name: str,
    prompt_key: str,
    *,
    llm_model: str = "meta-llama/llama-4-scout-17b-16e-instruct",
    chroma_host: str = "192.168.0.200",
    chroma_port: int = 8001,
    ollama_base_url: str = "http://192.168.0.200:11434",
    prompts=None,
):
    """Build a mem0 config with selected prompts and shared model/vector settings.

    Args:
        collection_name: Chroma collection the vector store should use.
        prompt_key: Key into ``PROMPTS`` selecting the fact-extraction and
            update-memory prompt pair (ignored when ``prompts`` is given).
        llm_model: Groq model identifier for the LLM provider.
        chroma_host: Hostname/IP of the Chroma server.
        chroma_port: Port of the Chroma server.
        ollama_base_url: Base URL of the Ollama embedding server.
        prompts: Optional explicit prompt mapping with ``"fact_extraction"``
            and ``"update_memory"`` keys; overrides the ``PROMPTS`` lookup.

    Returns:
        A dict in the shape expected by ``Memory.from_config``.

    Raises:
        KeyError: If ``prompt_key`` is not present in ``PROMPTS`` (when no
            explicit ``prompts`` mapping is supplied).
    """
    # Defer the PROMPTS lookup so callers may inject prompts directly
    # (e.g. in tests) without touching the project-level registry.
    if prompts is None:
        prompts = PROMPTS[prompt_key]
    return {
        "llm": {
            "provider": "groq",
            "config": {
                "model": llm_model,
                # Near-zero temperature: fact extraction should be deterministic.
                "temperature": 0.025,
                "max_tokens": 1500,
            },
        },
        "vector_store": {
            "provider": "chroma",
            "config": {
                "host": chroma_host,
                "port": chroma_port,
                "collection_name": collection_name,
            },
        },
        "embedder": {
            "provider": "ollama",
            "config": {
                "model": "nomic-embed-text",
                "ollama_base_url": ollama_base_url,
            },
        },
        "custom_fact_extraction_prompt": prompts["fact_extraction"],
        "custom_update_memory_prompt": prompts["update_memory"],
    }
def is_effectively_empty(filters) -> bool:
    """Return True when *filters* requests no actual filtering.

    Falsy values (None, {}, etc.) and the literal empty boolean wrappers
    ``{"AND": []}`` / ``{"OR": []}`` are all treated as "no filters".
    """
    empty_shapes = ({"AND": []}, {"OR": []})
    return not filters or filters in empty_shapes
def make_safe_search(mem_instance: Memory):
    """Build a drop-in replacement for mem0's vector-store search.

    Chroma rejects empty/malformed ``where`` clauses; the returned callable
    detects that case up front (and as a fallback on error) and queries the
    collection directly with the always-true ``NOOP_WHERE`` clause instead.
    """
    wrapped = mem_instance.vector_store.search

    def _noop_query(vectors, limit):
        # Direct Chroma query with the harmless NOOP filter.
        # NOTE(review): this returns Chroma's raw response, whereas `wrapped`
        # returns mem0's own search result type — confirm callers accept both.
        return mem_instance.vector_store.collection.query(
            query_embeddings=vectors,
            n_results=limit,
            where=NOOP_WHERE,
        )

    def safe_search(query, vectors, limit=10, filters=None):
        if is_effectively_empty(filters):
            return _noop_query(vectors, limit)
        try:
            return wrapped(query=query, vectors=vectors, limit=limit, filters=filters)
        except Exception as exc:
            # Chroma's "Expected where ..." errors are retried with the NOOP
            # filter; anything else propagates unchanged.
            if "Expected where" not in str(exc):
                raise
            return _noop_query(vectors, limit)

    return safe_search
def build_memories() -> tuple[Memory, Memory]:
    """Create the conversational and knowledge Memory instances.

    Each instance gets the crash-tolerant search wrapper installed on its
    vector store before being returned as ``(conversational, knowledge)``.
    """
    instances = []
    for collection, prompt_key in (
        (COLLECTION_CONVERSATIONAL, "conversational"),
        (COLLECTION_KNOWLEDGE, "knowledge"),
    ):
        mem = Memory.from_config(make_config(collection, prompt_key))
        mem.vector_store.search = make_safe_search(mem)
        instances.append(mem)
    conversational, knowledge = instances
    return conversational, knowledge
|