class EverMemAgentOS:
    """Persistent agent "memory OS": a short-term turn buffer plus a FAISS-indexed
    long-term memory store backed by SQLite, with periodic consolidation.

    The constructor wires up the working directory, the SQLite database, the
    sentence-embedding model, and a seq2seq generator model.
    """

    def __init__(
        self,
        workdir: str = "/content/evermem_agent_os",
        db_name: str = "evermem.sqlite",
        embedding_model: str = "sentence-transformers/all-MiniLM-L6-v2",
        gen_model: str = "google/flan-t5-small",
        stm_max_turns: int = 10,
        ltm_topk: int = 6,
        consolidate_every: int = 8,
        consolidate_trigger_tokens: int = 1400,
        compress_target_chars: int = 420,
        seed: int = 7,
    ):
        # Local import: torch is only needed here to pick a device, and the
        # transformers models used below already require it transitively.
        import torch

        self.workdir = workdir
        _ensure_dir(self.workdir)
        self.db_path = os.path.join(self.workdir, db_name)
        # Embedder produces the vectors indexed by FAISS (see _embed/_init_faiss).
        self.embedder = SentenceTransformer(embedding_model)
        self.embed_dim = self.embedder.get_sentence_embedding_dimension()
        self.tokenizer = AutoTokenizer.from_pretrained(gen_model)
        self.model = AutoModelForSeq2SeqLM.from_pretrained(gen_model)
        # NOTE(review): the corrupted source used self.device without defining it;
        # reconstructed as the conventional CUDA-if-available choice — confirm upstream.
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.model.to(self.device)
        self.model.eval()
        self.stm_max_turns = stm_max_turns
        self.ltm_topk = ltm_topk
        self.consolidate_every = consolidate_every
        self.consolidate_trigger_tokens = consolidate_trigger_tokens
        self.compress_target_chars = compress_target_chars
        np.random.seed(seed)
        self._init_db()
        self._init_faiss()
        # Short-term memory: list of {"role": ..., "text": ...} style dicts.
        self.stm: List[Dict[str, str]] = []
        self.turns = 0
def _init_db(self):
conn = sqlite3.join(self.db_path)
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE IF NOT EXISTS reminiscences (
mid TEXT PRIMARY KEY,
function TEXT,
textual content TEXT,
created_ts INTEGER,
significance REAL,
tokens_est INTEGER,
meta_json TEXT
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS kv_store (
okay TEXT PRIMARY KEY,
v_json TEXT,
updated_ts INTEGER
)
"""
)
cur.execute(
"""
CREATE TABLE IF NOT EXISTS consolidations (
cid TEXT PRIMARY KEY,
created_ts INTEGER,
abstract TEXT,
source_mids_json TEXT
)
"""
)
conn.commit()
conn.shut()
def _init_faiss(self):
    """Load the FAISS index and id map from disk if both exist, else create fresh.

    Sets self.index, self.id_map (faiss int id -> memory id string) and
    self.next_faiss_id, and persists a newly created empty index.
    """
    self.faiss_index_path = os.path.join(self.workdir, "faiss.index")
    self.faiss_map_path = os.path.join(self.workdir, "faiss_map.json")
    if os.path.exists(self.faiss_index_path) and os.path.exists(self.faiss_map_path):
        self.index = faiss.read_index(self.faiss_index_path)
        with open(self.faiss_map_path, "r", encoding="utf-8") as f:
            self.id_map = json.load(f)
        # JSON object keys are strings; restore the integer FAISS ids.
        self.id_map = {int(k): v for k, v in self.id_map.items()}
        self.next_faiss_id = (max(self.id_map.keys()) + 1) if self.id_map else 0
        return
    # Inner-product index: with normalized embeddings (see _embed) this is cosine similarity.
    self.index = faiss.IndexFlatIP(self.embed_dim)
    self.id_map: Dict[int, str] = {}
    self.next_faiss_id = 0
    self._persist_faiss()
def _persist_faiss(self):
    """Write the FAISS index and its id map to disk (keys stringified for JSON)."""
    faiss.write_index(self.index, self.faiss_index_path)
    with open(self.faiss_map_path, "w", encoding="utf-8") as f:
        json.dump({str(k): v for k, v in self.id_map.items()}, f)
def _embed(self, texts: Checklist[str]) -> np.ndarray:
vecs = self.embedder.encode(texts, convert_to_numpy=True, normalize_embeddings=True)
if vecs.ndim == 1:
vecs = vecs.reshape(1, -1)
return vecs.astype("float32")
def _tokens_est(self, textual content: str) -> int:
textual content = textual content or ""
return max(1, int(len(textual content.cut up()) * 1.25))
def _importance_score(self, function: str, textual content: str, meta: Dict[str, Any]) -> float:
base = 0.35
length_bonus = min(0.45, math.log1p(len(textual content)) / 20.0)
role_bonus = 0.08 if function == "user" else 0.03
pin = 0.35 if meta.get("pinned") else 0.0
sign = meta.get("signal", "")
signal_bonus = 0.18 if sign in {"decision", "preference", "fact", "task"} else 0.0
q_bonus = 0.06 if "?" in textual content else 0.0
number_bonus = 0.05 if any(ch.isdigit() for ch in textual content) else 0.0
return float(min(1.0, base + length_bonus + role_bonus + pin + signal_bonus + q_bonus + number_bonus))
def upsert_kv(self, k: str, v: Any):
    """Insert or update key ``k`` in kv_store with the JSON-serialized value ``v``.

    Uses SQLite UPSERT (ON CONFLICT ... DO UPDATE) so the updated_ts is
    refreshed on every write.
    """
    conn = sqlite3.connect(self.db_path)
    cur = conn.cursor()
    cur.execute(
        "INSERT INTO kv_store (k, v_json, updated_ts) VALUES (?, ?, ?) ON CONFLICT(k) DO UPDATE SET v_json=excluded.v_json, updated_ts=excluded.updated_ts",
        (k, json.dumps(v, ensure_ascii=False), _now_ts()),
    )
    conn.commit()
    conn.close()
def get_kv(self, okay: str, default=None):
conn = sqlite3.join(self.db_path)
cur = conn.cursor()
cur.execute("SELECT v_json FROM kv_store WHERE k=?", (okay,))
row = cur.fetchone()
conn.shut()
if not row:
return default
strive:
return json.hundreds(row[0])
besides Exception:
return default
def add_memory(self, role: str, text: str, meta: Optional[Dict[str, Any]] = None) -> str:
    """Store one memory: row in the ``memories`` table plus a FAISS embedding.

    Returns the memory id (``mid``). If meta supplies ``mid`` it is reused
    (INSERT OR REPLACE makes the write idempotent per mid); otherwise a fresh
    id is derived from time, role, a text prefix, and a random salt. An
    explicit ``meta["importance"]`` overrides the heuristic score.
    """
    meta = meta or {}
    text = (text or "").strip()
    mid = meta.get("mid") or f"m:{_sha(f'{_now_ts()}::{role}::{text[:80]}::{np.random.randint(0, 10**9)}')}"
    created_ts = _now_ts()
    tokens_est = self._tokens_est(text)
    importance = float(meta.get("importance")) if meta.get("importance") is not None else self._importance_score(role, text, meta)
    conn = sqlite3.connect(self.db_path)
    cur = conn.cursor()
    cur.execute(
        "INSERT OR REPLACE INTO memories (mid, role, text, created_ts, importance, tokens_est, meta_json) VALUES (?, ?, ?, ?, ?, ?, ?)",
        (mid, role, text, created_ts, importance, tokens_est, json.dumps(meta, ensure_ascii=False)),
    )
    conn.commit()
    conn.close()
    # Index the embedding and map its FAISS row id back to the memory id,
    # then persist so the index survives restarts.
    vec = self._embed([text])
    fid = self.next_faiss_id
    self.next_faiss_id += 1
    self.index.add(vec)
    self.id_map[fid] = mid
    self._persist_faiss()
    return mid
Get the latest tech insights from TechnologiesDigest.com on AI, innovation, and the future of digital technology.
Trending
- Bitcoin Bulls Strike Again However $78K Might Stay Resistance
- 14 previous software program bugs that took manner too lengthy to squash
- Construct an EverMem-Model Persistent AI Agent OS with Hierarchical Reminiscence, FAISS Vector Retrieval, SQLite Storage, and Automated Reminiscence Consolidation
- A $1.5T DoD finances in a break up Congress would demand prime political maneuvers
- The Provide Chain Classes 2026 Can’t Ignore and AI’s Defining Function
- YuanLab AI Releases Yuan 3.0 Extremely: A Flagship Multimodal MoE Basis Mannequin, Constructed for Stronger Intelligence and Unmatched Effectivity
- Scaling organizational construction with Meshery’s increasing ecosystem
- TCA Members Affirm Acceleration of International eSIM Development in 2025



