Total Recall
Author: @bobbyllm
Type: Tool
Name: Total Recall
Created: 22 days ago
Last Updated: 19 days ago
Downloads: 84+
Saves: 0+
Description: Fast, system-wide memory in 3 tiny text files, to complement RAG.
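The "3 tiny text files" are the ones the Storage engine below creates inside a total_recall/ folder (the base directory comes from the storage_dir valve, else the DATA_DIR environment variable, else the working directory):

    <base>/total_recall/
        facts.json      active memories, keyed records of {value, created_at}
        tags.json       tag -> list of fact keys
        activity.log    plaintext audit trail of every operation
        done/YYYY/MM/   archived "done" entries, one JSON file per fact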
Tool Code
""" Total Recall — Persistent JSON Memory Engine Version: 1.2.1 (Wrapper-Enhanced, Safe Init, Full Wipe) Author: BobbyLLM License: MIT Features: • Substring, regex, and tag-based matching • Manual tagging system (sets in-memory, lists on disk — zero corruption) • Combined active + done listing • Plaintext audit log for all operations • Regex deletion (explicit) with automatic tag orphan cleanup • Fast exact-match dedupe • Collision-safe import merging • Timestamping for all adds/imports • Whitespace-only normalization • DONE entries sorted newest → oldest • Path-based import (because OWUI is a RAGGY little bitch) • delete_all_facts(), delete_all_done(), nuke_memory() • Full wipe now clears tags.json too • Guaranteed folder regeneration (cannot crash if directory deleted) • Deterministic, LLM-safe design • Strict wrapper methods for 100% reliable OWUI tool invocation """ from __future__ import annotations import os import re import time import shutil import datetime as dt from typing import Any, Dict, List from pydantic import BaseModel, Field # ------------------------------- # JSON Backend # ------------------------------- try: import orjson as _json def _dumps(obj): return _json.dumps(obj).decode("utf-8") def _loads(s): return _json.loads(s) except Exception: import json as _json def _dumps(obj): return _json.dumps(obj, ensure_ascii=False) def _loads(s): return _json.loads(s) # ------------------------------- # Timestamp helper # ------------------------------- def _ts(): return dt.datetime.now().astimezone().strftime("%B %d, %Y at %H:%M:%S %Z") # ------------------------------- # Helpers # ------------------------------- def _ensure_dir(path: str): os.makedirs(path, exist_ok=True) return path def _normalize_value(v: str) -> str: return v.strip() def _unique_key(base: str, existing: Dict[str, Any]) -> str: key = base i = 2 while key in existing: key = f"{base}_{i}" i += 1 return key def _safe_key(s: str) -> str: s = s.strip().lower() s = re.sub(r"[^a-z0-9_]+", "_", s) return s or f"key_{int(time.time())}" # ------------------------------- # Storage Engine # ------------------------------- class Storage: def __init__(self, base_dir: str, debug: bool = False): self.base = _ensure_dir(os.path.join(base_dir, "total_recall")) self.facts_file = os.path.join(self.base, "facts.json") self.done_dir = _ensure_dir(os.path.join(self.base, "done")) self.tags_file = os.path.join(self.base, "tags.json") self.log_file = os.path.join(self.base, "activity.log") # Ensure core files exist if not os.path.exists(self.facts_file): self._atomic_write(self.facts_file, {}) if not os.path.exists(self.tags_file): self._atomic_write(self.tags_file, {}) # atomic JSON write def _atomic_write(self, path: str, obj: Any): tmp = path + ".tmp" with open(tmp, "w", encoding="utf-8") as f: f.write(_dumps(obj)) os.replace(tmp, path) # audit log def log(self, msg: str): with open(self.log_file, "a", encoding="utf-8") as f: f.write(f"{_ts()} — {msg}\n") # facts def load_facts(self): with open(self.facts_file, "r", encoding="utf-8") as f: return _loads(f.read()) def save_facts(self, data): self._atomic_write(self.facts_file, data) # tags — FIXED: restore sets on load def load_tags(self): with open(self.tags_file, "r", encoding="utf-8") as f: raw = _loads(f.read()) return {t: set(v) for t, v in raw.items()} # FIXED: convert sets→lists when saving def save_tags(self, data): fixed = {t: list(v) for t, v in data.items()} self._atomic_write(self.tags_file, fixed) # DONE entries def write_done(self, entry: Dict[str, Any]): ts = 
dt.datetime.now().astimezone() folder = _ensure_dir( os.path.join(self.done_dir, str(ts.year), f"{ts.month:02d}") ) jid = entry["id"] path = os.path.join(folder, f"{jid:08d}.json") self._atomic_write(path, entry) # FIXED: log load errors def load_done(self): out = [] for root, _, files in os.walk(self.done_dir): for f in files: if f.endswith(".json"): try: with open(os.path.join(root, f), "r", encoding="utf-8") as fh: out.append(_loads(fh.read())) except Exception as e: try: self.log(f"ERROR loading done file {f}: {e}") except: pass out.sort(key=lambda x: x.get("created_at", ""), reverse=True) return out def delete_done(self, jid: int): name = f"{jid:08d}.json" for root, _, files in os.walk(self.done_dir): if name in files: os.remove(os.path.join(root, name)) break # ------------------------------- # Tools Interface # ------------------------------- class Tools: id = "total_recall" name = "Total Recall" version = "1.2.3" class Valves(BaseModel): storage_dir: str = "" debug: bool = False def __init__(self): self.valves = self.Valves() self._storage = None # cached instance # ============================================================ # SAFE STORAGE INITIALIZATION (NEW IN 1.2.1) # Ensures folders/files ALWAYS exist, even if the user deletes # the entire memory directory while OWUI is running. # ============================================================ def _store(self) -> Storage: base = self.valves.storage_dir or os.getenv("DATA_DIR") or os.getcwd() root = os.path.join(base, "total_recall") # ALWAYS regenerate required structure _ensure_dir(root) _ensure_dir(os.path.join(root, "done")) # Create Storage if needed if self._storage is None: self._storage = Storage(base, debug=self.valves.debug) # Ensure core files always exist even after external deletion if not os.path.exists(self._storage.facts_file): self._storage._atomic_write(self._storage.facts_file, {}) if not os.path.exists(self._storage.tags_file): self._storage._atomic_write(self._storage.tags_file, {}) return self._storage # ============================================================ # CORE MEMORY ENGINE # ============================================================ # Add fact def add_facts(self, text: str, __user__=None): S = self._store() data = S.load_facts() value = _normalize_value(text) key = _unique_key(_safe_key(f"fact_{int(time.time())}"), data) data[key] = {"value": value, "created_at": _ts()} S.save_facts(data) S.log(f"ADD — {value}") return {"ok": True, "added": value} # List facts (no timestamps) def list_facts(self, __user__=None): S = self._store() data = S.load_facts() out = [ rec["value"] for _, rec in sorted(data.items(), key=lambda kv: kv[1]["created_at"]) ] return {"facts": out, "count": len(out)} # List with timestamps def list_facts_with_timestamps(self, __user__=None): S = self._store() data = S.load_facts() items = sorted(data.items(), key=lambda kv: kv[1]["created_at"]) out = [{"value": r["value"], "created_at": r["created_at"]} for _, r in items] return {"facts": out, "count": len(out)} # Delete by substring (with tag cleanup) def delete_fact_substring(self, query: str, __user__=None): S = self._store() data = S.load_facts() tags = S.load_tags() removed = [] for k in list(data.keys()): if query.lower() in data[k]["value"].lower(): removed.append(data[k]["value"]) S.log(f"DELETE — {data[k]['value']}") del data[k] for tag in tags: tags[tag].discard(k) S.save_facts(data) S.save_tags(tags) return {"ok": True, "removed": removed} # Regex delete (with tag cleanup) def delete_fact_regex(self, pattern: str, 
__user__=None): try: regex = re.compile(pattern) except: return {"ok": False, "error": "Invalid regex"} S = self._store() data = S.load_facts() tags = S.load_tags() removed = [] for k in list(data.keys()): if regex.search(data[k]["value"]): removed.append(data[k]["value"]) S.log(f"DELETE(REGEX) — {data[k]['value']}") del data[k] for tag in tags: tags[tag].discard(k) S.save_facts(data) S.save_tags(tags) return {"ok": True, "removed": removed} # Tagging system def add_tag(self, fact_substring: str, tag: str, __user__=None): S = self._store() data = S.load_facts() tags = S.load_tags() updated = [] for k, rec in data.items(): if fact_substring.lower() in rec["value"].lower(): tags.setdefault(tag.lower(), set()) tags[tag.lower()].add(k) updated.append(rec["value"]) S.save_tags(tags) S.log(f"TAG-ADD — {tag}") return {"ok": True, "tagged": updated} def list_tags(self, __user__=None): return self._store().load_tags() def search_by_tag(self, tag: str, __user__=None): S = self._store() data = S.load_facts() tags = S.load_tags() keys = tags.get(tag.lower(), set()) out = [data[k]["value"] for k in keys if k in data] return {"tag": tag, "results": out} # Move fact → DONE def move_fact_to_done(self, substring: str, __user__=None): S = self._store() data = S.load_facts() for k, rec in list(data.items()): if substring.lower() in rec["value"].lower(): jid = int(time.time()) entry = { "id": jid, "value": rec["value"], "created_at": rec["created_at"], "status": "done", } S.write_done(entry) S.log(f"DONE — {rec['value']}") del data[k] S.save_facts(data) return {"ok": True, "done": rec["value"]} return {"ok": False, "error": "Not found"} # List DONE def list_done(self, __user__=None): return self._store().load_done() # Import JSON def import_facts_from_path(self, path: str, __user__=None): S = self._store() if not os.path.exists(path): return {"ok": False, "error": "File not found"} try: with open(path, "r", encoding="utf-8") as f: raw = _loads(f.read()) except Exception as e: return {"ok": False, "error": f"Invalid JSON: {e}"} data = S.load_facts() added = [] for k, v in raw.items(): val = _normalize_value(v.strip() if isinstance(v, str) else str(v)) newkey = _unique_key(_safe_key(k), data) data[newkey] = {"value": val, "created_at": _ts()} added.append(val) S.save_facts(data) S.log(f"IMPORT — {len(added)} entries from {path}") return {"ok": True, "added": added, "count": len(added)} # Delete ALL active memories def delete_all_facts(self, __user__=None): S = self._store() S.save_facts({}) S.log("NUKE-ACTIVE") return { "ok": True, "msg": "All active memories have been deleted. 
To also remove done or archived memories, say 'delete done memories'.", } # Delete ALL DONE memories def delete_all_done(self, __user__=None): S = self._store() for root, _, files in os.walk(S.done_dir): for f in files: os.remove(os.path.join(root, f)) S.log("NUKE-DONE") return {"ok": True, "msg": "All done memories deleted."} # Full wipe (NEW: tags.json cleared too) def nuke_memory(self, __user__=None): S = self._store() # wipe facts S.save_facts({}) # wipe tags S.save_tags({}) # wipe done entries for root, _, files in os.walk(S.done_dir): for f in files: os.remove(os.path.join(root, f)) S.log("NUKE-ALL") return {"ok": True, "msg": "All memories (active + tags + done) wiped."} # ============================================================ # STRICT WRAPPERS (increase reliablity in OWUI calls) # ============================================================ def recall_add_fact(self, text: str, __user__=None): return self.add_facts(text) def recall_list_facts(self, __user__=None): return self.list_facts() def recall_list_facts_with_timestamps(self, __user__=None): return self.list_facts_with_timestamps() def recall_add_tag(self, fact_substring: str, tag: str, __user__=None): return self.add_tag(fact_substring, tag) def recall_list_tags(self, __user__=None): return self.list_tags() def recall_search_tag(self, tag: str, __user__=None): return self.search_by_tag(tag) def recall_delete_substring(self, query: str, __user__=None): return self.delete_fact_substring(query) def recall_delete_regex(self, pattern: str, __user__=None): return self.delete_fact_regex(pattern) def recall_move_done(self, substring: str, __user__=None): return self.move_fact_to_done(substring) def recall_list_done(self, __user__=None): return self.list_done() def recall_import(self, path: str, __user__=None): return self.import_facts_from_path(path) def recall_delete_all(self, __user__=None): return self.delete_all_facts() def recall_delete_done(self, __user__=None): return self.delete_all_done() def recall_nuke(self, __user__=None): return self.nuke_memory()
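A minimal usage sketch, assuming the code above is saved as total_recall.py and driven directly from Python (inside OWUI the model invokes the recall_* wrappers as tool calls instead; the storage path below is illustrative):

# Hypothetical stand-alone smoke test for the Tools class.
from total_recall import Tools

tools = Tools()
tools.valves.storage_dir = "/tmp/owui-data"  # illustrative path, not required

tools.recall_add_fact("User prefers dark mode")
tools.recall_add_tag("dark mode", "preferences")

print(tools.recall_list_facts())
# -> {'facts': ['User prefers dark mode'], 'count': 1}
print(tools.recall_search_tag("preferences"))
# -> {'tag': 'preferences', 'results': ['User prefers dark mode']}

tools.recall_move_done("dark mode")  # archives the fact under done/YYYY/MM/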