# sqlite_store.py
from __future__ import annotations

import json
import sqlite3
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any
from urllib.parse import urlparse

from news_mcp.entity_normalize import normalize_entities
from news_mcp.trends_resolution import resolve_entity_via_trends
@dataclass
class ClusterRow:
    """In-memory record mirroring the first four columns of the ``clusters`` table.

    NOTE(review): not referenced elsewhere in this file — presumably used by
    callers that read rows back out of the store; confirm before removing.
    """

    # Primary key (``clusters.cluster_id``).
    cluster_id: str
    # Topic bucket the cluster was stored under (``clusters.topic``).
    topic: str
    # Cluster payload; stored as JSON text in the ``payload`` column.
    payload: dict[str, Any]
    # Last upsert time (``clusters.updated_at``, ISO-8601 text in SQLite).
    updated_at: datetime
# meta-table key under which prune_clusters records its last run time
# (UTC ISO-8601 string).
META_LAST_PRUNE_AT = "last_prune_at"
  18. def _article_key(article: dict[str, Any]) -> str:
  19. url = str(article.get("url") or "").strip()
  20. if not url:
  21. return str(article.get("title") or "")
  22. try:
  23. parsed = urlparse(url)
  24. parts = [p for p in parsed.path.split("/") if p]
  25. if parts:
  26. return parts[-1]
  27. except Exception:
  28. pass
  29. return url
  30. def _dedup_articles(articles: list[dict[str, Any]]) -> list[dict[str, Any]]:
  31. seen: set[str] = set()
  32. out: list[dict[str, Any]] = []
  33. for article in articles:
  34. key = _article_key(article)
  35. if key in seen:
  36. continue
  37. seen.add(key)
  38. out.append(article)
  39. return out
  40. def _has_valid_entity_resolutions(resolutions: Any, entities: list[str]) -> bool:
  41. if not isinstance(resolutions, list):
  42. return False
  43. if len(resolutions) != len(entities):
  44. return False
  45. for res in resolutions:
  46. if not isinstance(res, dict):
  47. return False
  48. if not res.get("normalized") or not res.get("canonical_label"):
  49. return False
  50. return True
  51. def sanitize_cluster_payload(cluster: dict[str, Any], *, include_resolutions: bool = True) -> dict[str, Any]:
  52. """Normalize cluster payload so every stored payload is internally consistent."""
  53. out = dict(cluster)
  54. raw_articles = out.get("articles", []) or []
  55. articles = [a for a in raw_articles if isinstance(a, dict)]
  56. out["articles"] = _dedup_articles(articles)
  57. raw_entities = out.get("entities", []) or []
  58. entities = normalize_entities(raw_entities)
  59. out["entities"] = entities
  60. if not include_resolutions:
  61. return out
  62. resolutions = out.get("entityResolutions", None)
  63. if entities:
  64. if not _has_valid_entity_resolutions(resolutions, entities):
  65. out["entityResolutions"] = [resolve_entity_via_trends(e) for e in entities]
  66. else:
  67. # Keep the empty case explicit and stable.
  68. out["entityResolutions"] = []
  69. return out
  70. class SQLiteClusterStore:
  71. def __init__(self, db_path: str | Path):
  72. self.db_path = str(db_path)
  73. self._init_db()
  74. def _conn(self) -> sqlite3.Connection:
  75. return sqlite3.connect(self.db_path)
  76. def _init_db(self) -> None:
  77. Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)
  78. with self._conn() as conn:
  79. conn.execute("PRAGMA journal_mode=WAL")
  80. conn.execute("PRAGMA synchronous=NORMAL")
  81. conn.execute("PRAGMA busy_timeout=5000")
  82. conn.execute(
  83. """
  84. CREATE TABLE IF NOT EXISTS clusters (
  85. cluster_id TEXT PRIMARY KEY,
  86. topic TEXT NOT NULL,
  87. payload TEXT NOT NULL,
  88. updated_at TEXT NOT NULL,
  89. summary_payload TEXT,
  90. summary_updated_at TEXT
  91. )
  92. """
  93. )
  94. # If the table already exists without the summary columns,
  95. # add them (SQLite-friendly incremental migrations).
  96. for col_def in [
  97. "summary_payload TEXT",
  98. "summary_updated_at TEXT",
  99. ]:
  100. col = col_def.split()[0]
  101. try:
  102. conn.execute(f"ALTER TABLE clusters ADD COLUMN {col_def}")
  103. except sqlite3.OperationalError:
  104. pass
  105. conn.execute(
  106. "CREATE INDEX IF NOT EXISTS idx_clusters_topic ON clusters(topic)"
  107. )
  108. conn.execute(
  109. "CREATE INDEX IF NOT EXISTS idx_clusters_updated_at ON clusters(updated_at)"
  110. )
  111. conn.execute(
  112. """
  113. CREATE TABLE IF NOT EXISTS feed_state (
  114. feed_key TEXT PRIMARY KEY,
  115. last_hash TEXT NOT NULL,
  116. updated_at TEXT NOT NULL
  117. )
  118. """
  119. )
  120. conn.execute(
  121. """
  122. CREATE TABLE IF NOT EXISTS meta (
  123. key TEXT PRIMARY KEY,
  124. value TEXT NOT NULL
  125. )
  126. """
  127. )
  128. def upsert_clusters(self, clusters: list[dict], topic: str) -> None:
  129. now = datetime.now(timezone.utc)
  130. with self._conn() as conn:
  131. for c in clusters:
  132. c = sanitize_cluster_payload(c)
  133. cluster_id = c["cluster_id"]
  134. payload = json.dumps(c, ensure_ascii=False)
  135. conn.execute(
  136. "INSERT INTO clusters(cluster_id, topic, payload, updated_at) VALUES(?,?,?,?) "
  137. "ON CONFLICT(cluster_id) DO UPDATE SET topic=excluded.topic, payload=excluded.payload, updated_at=excluded.updated_at",
  138. (cluster_id, topic, payload, now.isoformat()),
  139. )
  140. def upsert_cluster_summary(
  141. self,
  142. cluster_id: str,
  143. summary_payload: dict,
  144. ) -> None:
  145. now = datetime.now(timezone.utc).isoformat()
  146. with self._conn() as conn:
  147. conn.execute(
  148. "INSERT INTO clusters(cluster_id, topic, payload, updated_at, summary_payload, summary_updated_at) "
  149. "VALUES(?,?,?,?,?,?) "
  150. "ON CONFLICT(cluster_id) DO UPDATE SET "
  151. "summary_payload=excluded.summary_payload, summary_updated_at=excluded.summary_updated_at",
  152. (
  153. cluster_id,
  154. "", # topic not used for update
  155. json.dumps({}, ensure_ascii=False),
  156. now,
  157. json.dumps(summary_payload, ensure_ascii=False),
  158. now,
  159. ),
  160. )
  161. def get_cluster_summary(self, cluster_id: str, ttl_hours: float) -> dict | None:
  162. cutoff = datetime.now(timezone.utc) - timedelta(hours=ttl_hours)
  163. cutoff_iso = cutoff.isoformat()
  164. with self._conn() as conn:
  165. cur = conn.execute(
  166. "SELECT summary_payload, summary_updated_at FROM clusters "
  167. "WHERE cluster_id=? AND summary_updated_at >= ?",
  168. (cluster_id, cutoff_iso),
  169. )
  170. row = cur.fetchone()
  171. if not row or not row[0]:
  172. return None
  173. return json.loads(row[0])
  174. def get_latest_clusters(self, topic: str, ttl_hours: float, limit: int) -> list[dict]:
  175. cutoff = datetime.now(timezone.utc) - timedelta(hours=ttl_hours)
  176. cutoff_iso = cutoff.isoformat()
  177. with self._conn() as conn:
  178. cur = conn.execute(
  179. "SELECT payload FROM clusters WHERE topic=? AND updated_at >= ? ORDER BY updated_at DESC LIMIT ?",
  180. (topic, cutoff_iso, int(limit)),
  181. )
  182. rows = [json.loads(r[0]) for r in cur.fetchall()]
  183. return rows
  184. def get_latest_clusters_all_topics(self, ttl_hours: float, limit: int) -> list[dict]:
  185. cutoff = datetime.now(timezone.utc) - timedelta(hours=ttl_hours)
  186. cutoff_iso = cutoff.isoformat()
  187. with self._conn() as conn:
  188. cur = conn.execute(
  189. "SELECT payload FROM clusters WHERE updated_at >= ? ORDER BY updated_at DESC LIMIT ?",
  190. (cutoff_iso, int(limit)),
  191. )
  192. return [json.loads(r[0]) for r in cur.fetchall()]
  193. def get_cluster_by_id(self, cluster_id: str) -> dict | None:
  194. with self._conn() as conn:
  195. cur = conn.execute(
  196. "SELECT payload FROM clusters WHERE cluster_id=?",
  197. (cluster_id,),
  198. )
  199. row = cur.fetchone()
  200. return json.loads(row[0]) if row else None
  201. def get_feed_hash(self, feed_key: str) -> str | None:
  202. with self._conn() as conn:
  203. cur = conn.execute(
  204. "SELECT last_hash FROM feed_state WHERE feed_key=?",
  205. (feed_key,),
  206. )
  207. row = cur.fetchone()
  208. return row[0] if row else None
  209. def set_feed_hash(self, feed_key: str, last_hash: str) -> None:
  210. now = datetime.now(timezone.utc).isoformat()
  211. with self._conn() as conn:
  212. conn.execute(
  213. "INSERT INTO feed_state(feed_key, last_hash, updated_at) VALUES(?,?,?) "
  214. "ON CONFLICT(feed_key) DO UPDATE SET last_hash=excluded.last_hash, updated_at=excluded.updated_at",
  215. (feed_key, last_hash, now),
  216. )
  217. def get_feed_state(self, feed_key: str) -> dict | None:
  218. with self._conn() as conn:
  219. cur = conn.execute(
  220. "SELECT last_hash, updated_at FROM feed_state WHERE feed_key=?",
  221. (feed_key,),
  222. )
  223. row = cur.fetchone()
  224. if not row:
  225. return None
  226. return {"last_hash": row[0], "updated_at": row[1]}
  227. def get_meta(self, key: str) -> str | None:
  228. with self._conn() as conn:
  229. cur = conn.execute("SELECT value FROM meta WHERE key=?", (key,))
  230. row = cur.fetchone()
  231. return row[0] if row else None
  232. def set_meta(self, key: str, value: str) -> None:
  233. with self._conn() as conn:
  234. conn.execute(
  235. "INSERT INTO meta(key, value) VALUES(?, ?) "
  236. "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
  237. (key, value),
  238. )
  239. def prune_clusters(self, retention_days: float) -> int:
  240. retention_days = float(retention_days)
  241. if retention_days <= 0:
  242. return 0
  243. cutoff = datetime.now(timezone.utc) - timedelta(days=retention_days)
  244. cutoff_iso = cutoff.isoformat()
  245. pruned_at = datetime.now(timezone.utc).isoformat()
  246. with self._conn() as conn:
  247. cur = conn.execute("DELETE FROM clusters WHERE updated_at < ?", (cutoff_iso,))
  248. deleted = int(cur.rowcount or 0)
  249. conn.execute(
  250. "INSERT INTO meta(key, value) VALUES(?, ?) "
  251. "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
  252. (META_LAST_PRUNE_AT, pruned_at),
  253. )
  254. return deleted
  255. def prune_if_due(self, pruning_enabled: bool, retention_days: float, interval_hours: float = 24.0) -> dict[str, Any]:
  256. retention_days = float(retention_days)
  257. interval_hours = float(interval_hours)
  258. if (not pruning_enabled) or retention_days <= 0:
  259. return {
  260. "enabled": bool(pruning_enabled),
  261. "deleted": 0,
  262. "due": False,
  263. "retention_days": retention_days,
  264. "interval_hours": interval_hours,
  265. "last_prune_at": self.get_meta(META_LAST_PRUNE_AT),
  266. }
  267. last_prune_at = self.get_meta(META_LAST_PRUNE_AT)
  268. now = datetime.now(timezone.utc)
  269. due = True
  270. if last_prune_at:
  271. try:
  272. last_dt = datetime.fromisoformat(last_prune_at)
  273. due = now - last_dt >= timedelta(hours=max(1.0, interval_hours))
  274. except Exception:
  275. due = True
  276. if not due:
  277. return {
  278. "enabled": True,
  279. "deleted": 0,
  280. "due": False,
  281. "retention_days": retention_days,
  282. "interval_hours": interval_hours,
  283. "last_prune_at": last_prune_at,
  284. }
  285. deleted = self.prune_clusters(retention_days)
  286. last_prune_at = self.get_meta(META_LAST_PRUNE_AT)
  287. return {
  288. "enabled": True,
  289. "deleted": deleted,
  290. "due": True,
  291. "retention_days": retention_days,
  292. "interval_hours": interval_hours,
  293. "last_prune_at": last_prune_at,
  294. }
  295. def get_prune_state(self, pruning_enabled: bool, retention_days: float, interval_hours: float = 24.0) -> dict[str, Any]:
  296. return {
  297. "enabled": bool(pruning_enabled),
  298. "retention_days": float(retention_days),
  299. "interval_hours": float(interval_hours),
  300. "last_prune_at": self.get_meta(META_LAST_PRUNE_AT),
  301. }