# mem0server.py
import json
import math
import os
import sqlite3
from collections import Counter

import httpx
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse, HTMLResponse
from mem0 import Memory
  9. # =============================================================================
  10. # ENVIRONMENT
  11. # =============================================================================
# Groq API key is mandatory — it is the LLM provider for both Memory instances.
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
if not GROQ_API_KEY:
    raise RuntimeError("GROQ_API_KEY environment variable is not set.")
# Local reranker endpoint; search degrades to raw mem0 order if unreachable.
RERANKER_URL = os.environ.get("RERANKER_URL", "http://192.168.0.200:5200/rerank")
# mem0's SQLite history DB — must be cleaned together with Chroma on deletes,
# or deleted entries reappear after a restart.
SQLITE_PATH = os.path.expanduser("~/.mem0/history.db")
  17. # =============================================================================
  18. # SAFE JSON RESPONSE
  19. # Chroma and the reranker can emit Infinity/NaN which is invalid JSON.
  20. # Sanitize them to None before serializing.
  21. # =============================================================================
  22. def _sanitize(obj):
  23. if isinstance(obj, float):
  24. if math.isnan(obj) or math.isinf(obj):
  25. return None
  26. if isinstance(obj, dict):
  27. return {k: _sanitize(v) for k, v in obj.items()}
  28. if isinstance(obj, list):
  29. return [_sanitize(i) for i in obj]
  30. return obj
class SafeJSONResponse(JSONResponse):
    """JSONResponse that sanitizes NaN/Infinity to null before encoding."""

    def render(self, content) -> bytes:
        # ensure_ascii=False keeps non-ASCII text readable in the payload.
        return json.dumps(_sanitize(content), ensure_ascii=False).encode("utf-8")
  34. # =============================================================================
  35. # METADATA SANITIZER
  36. # Chroma MetadataValue only accepts str, int, float, bool.
  37. # Drop None values; coerce anything else (lists, dicts) to str.
  38. # =============================================================================
  39. def sanitize_metadata(meta: dict) -> dict:
  40. clean = {}
  41. for k, v in meta.items():
  42. if v is None:
  43. continue
  44. if isinstance(v, (str, int, float, bool)):
  45. clean[k] = v
  46. else:
  47. clean[k] = str(v)
  48. return clean
  49. # =============================================================================
  50. # PROMPTS
  51. # Mapped to MemoryConfig.custom_fact_extraction_prompt /
  52. # MemoryConfig.custom_update_memory_prompt (top-level fields).
  53. #
  54. # conversational — active, used by /memories on every add
  55. # knowledge — defined for future use; currently bypassed because
  56. # /knowledge always stores verbatim (infer=False)
  57. # =============================================================================
# NOTE: the prompt strings below are runtime data fed to the LLM — wording
# changes alter extraction behavior; treat them as configuration, not prose.
PROMPTS = {
    "conversational": {
        "fact_extraction": """
You are an intelligent system that extracts useful long-term memory
from a conversation.
Your goal is to identify information that could help future interactions.
Extract facts that describe:
1. User preferences
2. Important decisions
3. Ongoing projects
4. Tools or technologies being used
5. Goals or plans
6. Constraints or requirements
7. Discoveries or conclusions
8. Important context about tasks
Ignore:
- greetings
- casual conversation
- general world knowledge
- temporary statements
Return JSON:
{
"facts": [
"fact 1",
"fact 2"
]
}
Only include information that may be useful later.
If nothing important is present return:
{"facts": []}
""".strip(),
        "update_memory": """
You manage a long-term memory database.
You receive:
1. existing stored memories
2. new extracted facts
For each fact decide whether to:
ADD
Create a new memory if it contains useful new information.
UPDATE
Modify an existing memory if the new fact refines or corrects it.
DELETE
Remove a memory if it is clearly outdated or incorrect.
NONE
Ignore the fact if it is redundant or trivial.
Guidelines:
- Prefer updating over adding duplicates
- Keep memories concise
- Avoid storing repeated information
- Preserve important context
Return JSON list:
[
{ "event": "ADD", "text": "..." },
{ "event": "UPDATE", "id": "...", "text": "..." }
]
""".strip(),
    },
    "knowledge": {
        # Not active during ingest (infer=False bypasses extraction).
        # Kept here so it can be enabled if infer=True is ever needed.
        "fact_extraction": """
You are a knowledge extraction system that reads source material and produces
a list of objective, encyclopedic facts. Write each fact as a precise,
self-contained sentence. Do NOT reframe facts as user preferences or interests.
Preserve names, terminology, and relationships exactly as they appear.
Examples:
- "Silvio Gesell proposed demurrage as a mechanism to discourage hoarding of currency."
- "The MIDI standard uses a 7-bit checksum for SysEx message validation."
Only extract verifiable facts. Ignore meta-commentary and transitional prose.
Return JSON: {"facts": ["fact 1", "fact 2"]}
""".strip(),
        "update_memory": """
You manage a knowledge base that stores objective facts extracted from books,
documents, and reference material. You receive existing facts and new
information. Update, merge, or add facts as needed. Keep each fact as a
precise, self-contained sentence. Remove duplicates and outdated entries.
Return JSON list: [{ "event": "ADD"|"UPDATE"|"DELETE"|"NONE", "text": "..." }]
""".strip(),
    },
}
  138. # =============================================================================
  139. # MEM0 CONFIG FACTORY
  140. # Prompts are top-level MemoryConfig fields — not nested inside llm.config.
  141. # =============================================================================
  142. def make_config(collection_name: str, prompt_key: str) -> dict:
  143. prompts = PROMPTS[prompt_key]
  144. return {
  145. "llm": {
  146. "provider": "groq",
  147. "config": {
  148. "model": "meta-llama/llama-4-scout-17b-16e-instruct",
  149. "temperature": 0.025,
  150. "max_tokens": 1500,
  151. },
  152. },
  153. "vector_store": {
  154. "provider": "chroma",
  155. "config": {
  156. "host": "192.168.0.200",
  157. "port": 8001,
  158. "collection_name": collection_name,
  159. },
  160. },
  161. "embedder": {
  162. "provider": "ollama",
  163. "config": {
  164. "model": "nomic-embed-text",
  165. "ollama_base_url": "http://192.168.0.200:11434",
  166. },
  167. },
  168. # Top-level MemoryConfig fields — confirmed from MemoryConfig source
  169. "custom_fact_extraction_prompt": prompts["fact_extraction"],
  170. "custom_update_memory_prompt": prompts["update_memory"],
  171. }
  172. # =============================================================================
  173. # MEMORY INSTANCES
  174. # =============================================================================
# Two isolated Memory instances — one per Chroma collection / prompt set.
memory_conv = Memory.from_config(make_config("openclaw_mem", "conversational"))
memory_know = Memory.from_config(make_config("knowledge_mem", "knowledge"))
  177. # =============================================================================
  178. # CHROMA EMPTY-FILTER PATCH
  179. # mem0 sometimes passes an empty filter dict to Chroma which raises an error.
  180. # Replace with a harmless always-true filter as fallback.
  181. # =============================================================================
  182. NOOP_WHERE = {"$and": [
  183. {"user_id": {"$ne": ""}},
  184. {"user_id": {"$ne": ""}},
  185. ]}
  186. def is_effectively_empty(filters) -> bool:
  187. if not filters:
  188. return True
  189. if filters in ({"AND": []}, {"OR": []}):
  190. return True
  191. return False
def make_safe_search(mem_instance: Memory):
    """Wrap a Memory's vector_store.search with an empty-filter fallback.

    mem0 sometimes passes an empty filter dict to Chroma, which raises.
    The wrapper detects that case up-front (via is_effectively_empty) or
    after the fact (Chroma's "Expected where" error message) and instead
    queries the raw collection with the always-true NOOP_WHERE filter.
    """
    # Capture the original bound method BEFORE the attribute is
    # monkey-patched below, so the wrapper can still delegate to it.
    orig = mem_instance.vector_store.search
    def safe_search(query, vectors, limit=10, filters=None):
        if is_effectively_empty(filters):
            return mem_instance.vector_store.collection.query(
                query_embeddings=vectors,
                n_results=limit,
                where=NOOP_WHERE,
            )
        try:
            return orig(query=query, vectors=vectors, limit=limit, filters=filters)
        except Exception as e:
            # Chroma signals an invalid where-clause with this fragment;
            # anything else is a real error and is re-raised.
            if "Expected where" in str(e):
                return mem_instance.vector_store.collection.query(
                    query_embeddings=vectors,
                    n_results=limit,
                    where=NOOP_WHERE,
                )
            raise
    return safe_search
# Monkey-patch both instances so every search path gets the fallback.
memory_conv.vector_store.search = make_safe_search(memory_conv)
memory_know.vector_store.search = make_safe_search(memory_know)
  214. # =============================================================================
  215. # RERANKER
  216. # Calls local reranker to re-order search results by relevance.
  217. # Falls back to raw mem0 order if unreachable.
  218. # =============================================================================
  219. def rerank_results(query: str, items: list, top_k: int) -> list:
  220. if not items:
  221. return items
  222. documents = [r.get("memory", "") for r in items]
  223. try:
  224. resp = httpx.post(
  225. RERANKER_URL,
  226. json={"query": query, "documents": documents, "top_k": top_k},
  227. timeout=5.0,
  228. )
  229. resp.raise_for_status()
  230. reranked = resp.json()["results"]
  231. except Exception as exc:
  232. print(f"[reranker] unavailable, skipping: {exc}")
  233. return items[:top_k]
  234. # Re-attach original mem0 metadata by matching text
  235. text_to_meta = {r.get("memory", ""): r for r in items}
  236. merged = []
  237. for r in reranked:
  238. meta = text_to_meta.get(r["text"])
  239. if meta:
  240. merged.append({**meta, "rerank_score": r["score"]})
  241. return merged
  242. # =============================================================================
  243. # SQLITE HELPER
  244. # mem0 maintains a local SQLite history alongside Chroma.
  245. # Both must be cleaned together or deleted entries reappear after restart.
  246. # =============================================================================
  247. def sqlite_delete_ids(memory_ids: list[str]) -> int:
  248. """Delete rows by memory_id. Returns count deleted."""
  249. if not memory_ids:
  250. return 0
  251. try:
  252. conn = sqlite3.connect(SQLITE_PATH)
  253. cur = conn.cursor()
  254. placeholders = ",".join("?" * len(memory_ids))
  255. cur.execute(
  256. f"DELETE FROM history WHERE memory_id IN ({placeholders})",
  257. memory_ids
  258. )
  259. deleted = cur.rowcount
  260. conn.commit()
  261. conn.close()
  262. return deleted
  263. except Exception as e:
  264. print(f"[sqlite] warning: {e}")
  265. return 0
  266. # =============================================================================
  267. # CHROMA PAGINATION HELPER
  268. # mem0's get_all() is capped at 100 entries. This pages Chroma directly
  269. # in batches of 500 to retrieve the full collection without limits.
  270. # =============================================================================
  271. def chroma_get_all(collection, user_id: str, include: list = None) -> list[dict]:
  272. if include is None:
  273. include = ["metadatas"]
  274. results = []
  275. batch = 500
  276. offset = 0
  277. while True:
  278. page = collection.get(
  279. where={"user_id": {"$eq": user_id}},
  280. limit=batch,
  281. offset=offset,
  282. include=include,
  283. )
  284. ids = page.get("ids", [])
  285. if not ids:
  286. break
  287. for i, id_ in enumerate(ids):
  288. row = {"id": id_}
  289. for field in include:
  290. values = page.get(field, [])
  291. row[field[:-1]] = values[i] if i < len(values) else None
  292. results.append(row)
  293. offset += len(ids)
  294. if len(ids) < batch:
  295. break
  296. return results
  297. # =============================================================================
  298. # SHARED HANDLERS
  299. # =============================================================================
  300. def extract_user_id(data: dict) -> str:
  301. return data.get("userId") or data.get("user_id") or "default"
  302. async def handle_add(req: Request, mem: Memory, verbatim_allowed: bool = False):
  303. """
  304. Shared add handler for /memories and /knowledge.
  305. /knowledge (verbatim_allowed=True) — always infer=False. The ingestor
  306. already summarised; skip the second
  307. LLM extraction pass.
  308. /memories (verbatim_allowed=False) — always LLM extraction using the
  309. conversational prompts above.
  310. Accepts: text | messages, user_id, metadata.
  311. Metadata is sanitized — Chroma rejects None and complex types.
  312. """
  313. data = await req.json()
  314. user_id = extract_user_id(data)
  315. # metadata = sanitize_metadata(data.get("metadata") or {})
  316. raw_meta = data.get("metadata")
  317. metadata = sanitize_metadata(raw_meta) if raw_meta else None
  318. messages = data.get("messages")
  319. text = data.get("text")
  320. if not messages and not text:
  321. return SafeJSONResponse(
  322. content={"error": "Provide 'text' or 'messages'"}, status_code=400
  323. )
  324. if verbatim_allowed:
  325. # /knowledge — store verbatim, ingestor already did the summarisation
  326. content = text or " ".join(
  327. m["content"] for m in messages if m.get("role") == "user"
  328. )
  329. result = mem.add(content, user_id=user_id, metadata=metadata, infer=False)
  330. print(f"[add verbatim] user={user_id} chars={len(content)} meta={metadata}")
  331. return SafeJSONResponse(content=result)
  332. # in the /memories path
  333. kwargs = {"user_id": user_id}
  334. if metadata:
  335. kwargs["metadata"] = metadata
  336. result = mem.add(messages or text, **kwargs)
  337. # # /memories — LLM extracts and deduplicates facts from conversation
  338. # if messages:
  339. # result = mem.add(messages, user_id=user_id)
  340. # else:
  341. # result = mem.add(text, user_id=user_id)
  342. print(f"[add conversational] user={user_id} meta={metadata}")
  343. return SafeJSONResponse(content=result)
  344. async def handle_search(req: Request, mem: Memory):
  345. """Semantic search with reranking. Fetches limit×3 candidates then reranks."""
  346. data = await req.json()
  347. query = (data.get("query") or "").strip()
  348. user_id = extract_user_id(data)
  349. limit = int(data.get("limit", 5))
  350. if not query:
  351. return SafeJSONResponse(content={"results": []})
  352. fetch_k = max(limit * 3, 15)
  353. try:
  354. result = mem.search(query, user_id=user_id, limit=fetch_k)
  355. except Exception:
  356. # Fallback: get_all + simple text filter
  357. all_res = mem.get_all(user_id=user_id)
  358. items = (
  359. all_res.get("results", [])
  360. if isinstance(all_res, dict)
  361. else (all_res if isinstance(all_res, list) else [])
  362. )
  363. q = query.lower()
  364. items = [r for r in items if q in r.get("memory", "").lower()]
  365. result = {"results": items}
  366. items = result.get("results", [])
  367. items = rerank_results(query, items, top_k=limit)
  368. print(f"[search] user={user_id} query={query!r} hits={len(items)}")
  369. return SafeJSONResponse(content={"results": items})
  370. async def handle_recent(req: Request, mem: Memory):
  371. """Return most recently created memories, sorted by created_at desc."""
  372. data = await req.json()
  373. user_id = extract_user_id(data)
  374. if not user_id:
  375. return SafeJSONResponse(content={"error": "Missing userId"}, status_code=400)
  376. limit = int(data.get("limit", 5))
  377. try:
  378. results = mem.get_all(user_id=user_id)
  379. except Exception:
  380. results = mem.search(query="recent", user_id=user_id)
  381. items = results.get("results", [])
  382. items = sorted(items, key=lambda r: r.get("created_at", ""), reverse=True)
  383. return SafeJSONResponse(content={"results": items[:limit]})
  384. # =============================================================================
  385. # APP
  386. # =============================================================================
  387. app = FastAPI(title="mem0 server")
  388. # ---------------------------------------------------------------------------
  389. # DASHBOARD — served from file mounted via docker-compose volume
  390. # ---------------------------------------------------------------------------
  391. DASHBOARD_HTML = open("dashboard.html").read()
  392. @app.get("/dashboard")
  393. async def dashboard():
  394. return HTMLResponse(content=DASHBOARD_HTML)
  395. # ---------------------------------------------------------------------------
  396. # HEALTH
  397. # ---------------------------------------------------------------------------
  398. @app.get("/health")
  399. async def health():
  400. return SafeJSONResponse(content={
  401. "status": "ok",
  402. "reranker_url": RERANKER_URL,
  403. "collections": {
  404. "conversational": "openclaw_mem",
  405. "knowledge": "knowledge_mem",
  406. },
  407. # Show first 80 chars of each prompt for quick verification
  408. "prompts": {
  409. k: {pk: pv[:80] + "…" for pk, pv in pv_dict.items()}
  410. for k, pv_dict in PROMPTS.items()
  411. },
  412. })
  413. # ---------------------------------------------------------------------------
  414. # /memories — conversational collection (OpenClaw)
  415. # ---------------------------------------------------------------------------
@app.post("/memories")
async def add_memory(req: Request):
    # Conversational add: LLM fact extraction via the shared handler.
    return await handle_add(req, memory_conv, verbatim_allowed=False)


@app.post("/memories/search")
async def search_memories(req: Request):
    # Semantic search + rerank over the conversational collection.
    return await handle_search(req, memory_conv)


@app.post("/memories/recent")
async def recent_memories(req: Request):
    # Most recently created conversational memories.
    return await handle_recent(req, memory_conv)


@app.delete("/memories")
async def delete_memory(req: Request):
    # Bulk delete by mem0 filter dict (body: {"filter": {...}}).
    data = await req.json()
    return SafeJSONResponse(content=memory_conv.delete(data.get("filter", {})))
  429. # ---------------------------------------------------------------------------
  430. # /knowledge — objective facts collection (book-ingestor)
  431. # ---------------------------------------------------------------------------
@app.post("/knowledge")
async def add_knowledge(req: Request):
    # Verbatim add (infer=False): the book-ingestor already summarised.
    return await handle_add(req, memory_know, verbatim_allowed=True)


@app.post("/knowledge/search")
async def search_knowledge(req: Request):
    # Semantic search + rerank over the knowledge collection.
    return await handle_search(req, memory_know)


@app.post("/knowledge/recent")
async def recent_knowledge(req: Request):
    # Most recently created knowledge entries.
    return await handle_recent(req, memory_know)


@app.delete("/knowledge")
async def delete_knowledge(req: Request):
    # Bulk delete by mem0 filter dict (body: {"filter": {...}}).
    data = await req.json()
    return SafeJSONResponse(content=memory_know.delete(data.get("filter", {})))
  445. @app.post("/knowledge/sources")
  446. async def knowledge_sources(req: Request):
  447. """
  448. Return distinct source_file values with entry counts.
  449. Pages Chroma directly — bypasses mem0's 100-entry get_all cap.
  450. """
  451. data = await req.json()
  452. user_id = extract_user_id(data) or "knowledge_base"
  453. rows = chroma_get_all(memory_know.vector_store.collection, user_id)
  454. counts = {}
  455. for row in rows:
  456. src = (row.get("metadata") or {}).get("source_file", "(no source)")
  457. counts[src] = counts.get(src, 0) + 1
  458. sources = [
  459. {"source_file": k, "count": v}
  460. for k, v in sorted(counts.items(), key=lambda x: -x[1])
  461. ]
  462. print(f"[sources] user={user_id} total={len(rows)} books={len(sources)}")
  463. return SafeJSONResponse(content={"sources": sources, "total": len(rows)})
@app.delete("/knowledge/by-source")
async def delete_knowledge_by_source(req: Request):
    """
    Delete all entries for a given source_file from both Chroma and SQLite.
    Pages Chroma directly to avoid the 100-entry cap on get_all.
    """
    data = await req.json()
    source_file = data.get("source_file")
    # NOTE(review): extract_user_id never returns a falsy value, so the
    # "knowledge_base" fallback is unreachable — confirm intended.
    user_id = extract_user_id(data) or "knowledge_base"
    if not source_file:
        return SafeJSONResponse(
            content={"error": "Missing source_file"}, status_code=400
        )
    # Collect all IDs matching source_file across all pages.
    rows = chroma_get_all(memory_know.vector_store.collection, user_id)
    to_delete = [
        row["id"] for row in rows
        if (row.get("metadata") or {}).get("source_file") == source_file
    ]
    if not to_delete:
        return SafeJSONResponse(
            content={"deleted": 0, "message": "no entries found for that source"}
        )
    # Delete from Chroma in one bulk call.
    try:
        memory_know.vector_store.collection.delete(ids=to_delete)
    except Exception as e:
        return SafeJSONResponse(
            content={"error": f"chroma delete failed: {e}"}, status_code=500
        )
    # Clean SQLite so entries don't reappear after server restart
    # (best-effort: sqlite_delete_ids logs and returns 0 on failure).
    sqlite_deleted = sqlite_delete_ids(to_delete)
    print(f"[delete by-source] source={source_file} "
          f"chroma={len(to_delete)} sqlite={sqlite_deleted}")
    return SafeJSONResponse(content={
        "deleted": len(to_delete),
        "sqlite_deleted": sqlite_deleted,
        "source_file": source_file,
    })
  503. # ---------------------------------------------------------------------------
  504. # /memory/{id} — single entry delete for dashboard per-row buttons
  505. # ---------------------------------------------------------------------------
@app.delete("/memory/{memory_id}")
async def delete_single_memory(memory_id: str, req: Request):
    """
    Delete one memory by ID from either collection.
    Body: { "collection": "knowledge" | "conversational" }
    Cleans both Chroma and SQLite.
    """
    data = await req.json()
    # Any value other than "knowledge" selects the conversational collection.
    collection = data.get("collection", "knowledge")
    mem = memory_know if collection == "knowledge" else memory_conv
    try:
        mem.vector_store.collection.delete(ids=[memory_id])
    except Exception as e:
        return SafeJSONResponse(
            content={"error": f"chroma delete failed: {e}"}, status_code=500
        )
    # Keep SQLite history in sync; best-effort (failures only log).
    sqlite_delete_ids([memory_id])
    print(f"[delete single] id={memory_id} collection={collection}")
    return SafeJSONResponse(content={"deleted": memory_id})
  525. # ---------------------------------------------------------------------------
  526. # /search — merged results from both collections (OpenClaw autorecall)
  527. # ---------------------------------------------------------------------------
  528. @app.post("/search")
  529. async def search_all(req: Request):
  530. """
  531. Query both collections simultaneously, tag results with _source,
  532. then run a single rerank pass over the merged pool.
  533. """
  534. data = await req.json()
  535. query = (data.get("query") or "").strip()
  536. user_id = extract_user_id(data)
  537. limit = int(data.get("limit", 5))
  538. if not query:
  539. return SafeJSONResponse(content={"results": []})
  540. fetch_k = max(limit * 3, 15)
  541. def fetch(mem: Memory, tag: str):
  542. try:
  543. r = mem.search(query, user_id=user_id, limit=fetch_k)
  544. items = r.get("results", [])
  545. except Exception:
  546. items = []
  547. for item in items:
  548. item["_source"] = tag
  549. return items
  550. conv_items = fetch(memory_conv, "conversational")
  551. know_items = fetch(memory_know, "knowledge")
  552. merged = rerank_results(query, conv_items + know_items, top_k=limit)
  553. print(
  554. f"[search/all] user={user_id} query={query!r} "
  555. f"conv={len(conv_items)} know={len(know_items)} merged={len(merged)}"
  556. )
  557. return SafeJSONResponse(content={"results": merged})
  558. @app.post("/memories/all")
  559. async def memories_all(req: Request):
  560. """
  561. Return all memories for a user, paging Chroma directly.
  562. Bypasses mem0's 100-entry get_all cap.
  563. """
  564. data = await req.json()
  565. user_id = extract_user_id(data) or "main"
  566. rows = chroma_get_all(
  567. memory_conv.vector_store.collection,
  568. user_id,
  569. include=["metadatas", "documents"]
  570. )
  571. items = []
  572. for row in rows:
  573. meta = row.get("metadata") or {}
  574. items.append({
  575. "id": row["id"],
  576. "memory": row.get("document") or meta.get("data", ""),
  577. "created_at": meta.get("created_at"),
  578. "metadata": meta,
  579. "user_id": user_id,
  580. })
  581. items.sort(key=lambda r: r.get("created_at") or "", reverse=True)
  582. print(f"[memories/all] user={user_id} total={len(items)}")
  583. return SafeJSONResponse(content={"results": items})