Reusable Snippets
Injecting memory context into the system prompt (works with any LLM)
def build_system_prompt(brain: CognitiveBrain, query: str, max_facts: int = 8) -> str:
    """Return a system prompt augmented with memory relevant to *query*.

    Args:
        brain: Memory client; its ``search`` result supports ``format``.
        query: Text used to look up relevant stored facts.
        max_facts: Upper bound on facts included in the prompt. Defaults
            to 8, matching the previously hard-coded limit, so existing
            callers see identical behavior.

    Returns:
        The base assistant prompt; when the memory search produced any
        context, that context is appended after a blank line.
    """
    memory = brain.search(query)
    context = memory.format(max_facts=max_facts)
    base = "You are a helpful assistant with access to long-term memory."
    # An empty/falsy context means no relevant memories — return the bare prompt.
    if context:
        return f"{base}\n\n{context}"
    return base
Memory-augmented chat loop
from atlas_memory import CognitiveBrain
from openai import OpenAI
# Module-level clients shared by chat() below; constructed once at import time.
# NOTE(review): "atlas_..." is a placeholder key — substitute a real credential.
brain = CognitiveBrain(api_key="atlas_...", user_id="user-123")
client = OpenAI()  # presumably picks up OPENAI_API_KEY from the environment — TODO confirm
# Running conversation transcript of {"role", "content"} dicts; grows without bound.
history: list[dict[str, str]] = []
def chat(user_message: str) -> str:
    """Answer *user_message* using memory-augmented context.

    Retrieves relevant facts from the module-level ``brain``, prepends them
    to the system message, sends the full transcript to the model, records
    the user message back into memory, and appends the exchange to the
    module-level ``history`` before returning the assistant's reply.
    """
    # Pull memory relevant to this message and fold it into the system prompt.
    recall = brain.search(user_message)
    prompt = [
        {"role": "system", "content": f"Assistant with memory.\n\n{recall.format()}"}
    ]
    prompt.extend(history)
    prompt.append({"role": "user", "content": user_message})

    # Ask the model for a completion over the assembled transcript.
    completion = client.chat.completions.create(model="gpt-4o", messages=prompt)
    reply = completion.choices[0].message.content

    # Persist the user's message to long-term memory, then update the
    # in-process transcript with both sides of the exchange.
    brain.add(f"User said: {user_message}")
    history.append({"role": "user", "content": user_message})
    history.append({"role": "assistant", "content": reply})
    return reply
Health check + cold start wait
import time
import requests
def wait_for_atlas(base_url: str, timeout: int = 120) -> bool:
    """Block until Atlas reports ready, polling every 5 seconds.

    Polls ``{base_url}/brain/health`` until the JSON response has truthy
    ``ready`` and ``models_loaded`` flags (cold starts load models lazily).

    Args:
        base_url: Root URL of the Atlas service (no trailing slash).
        timeout: Maximum seconds to keep polling before giving up.

    Returns:
        True once the service reports ready.

    Raises:
        TimeoutError: If the service never becomes ready within *timeout*.
    """
    # monotonic() is immune to wall-clock adjustments, unlike time.time().
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            r = requests.get(f"{base_url}/brain/health", timeout=5)
            data = r.json()
            if data.get("ready") and data.get("models_loaded"):
                print("Atlas is ready.")
                return True
        # Narrowed from `except Exception`: only the failures this probe can
        # legitimately produce — connection/timeout errors, non-JSON bodies
        # (ValueError per requests' .json()), or a non-dict payload
        # (AttributeError on .get). Anything else is a real bug and propagates.
        except (requests.RequestException, ValueError, AttributeError):
            pass
        print("Waiting for Atlas to load models...")
        time.sleep(5)
    raise TimeoutError("Atlas did not become ready in time.")
Batch ingest from a list of facts
def batch_ingest(brain: CognitiveBrain, facts: list[str]) -> int:
    """Store each fact via ``brain.add`` and return the total facts ingested.

    Each call to ``brain.add`` reports how many facts it actually stored
    (``facts_ingested``); those per-call counts are summed for the return
    value, so the result may differ from ``len(facts)``.
    """
    return sum(brain.add(entry).facts_ingested for entry in facts)
# Usage: seed the module-level `brain` with a handful of project facts
# and report how many were stored. Runs at import time (performs network I/O).
facts = [
    "The backend API is deployed on AWS ECS.",
    "Alice leads the backend team.",
    "The frontend is built with Next.js.",
    "Deployments run via GitHub Actions.",
]
print(f"Stored {batch_ingest(brain, facts)} facts.")