Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 9 additions & 10 deletions braindrain/observer.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import json
import sqlite3
from dataclasses import asdict, dataclass, field
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any

Expand Down Expand Up @@ -75,7 +75,6 @@ def _init_schema(self) -> None:
)

def record_event(self, event: BrainEvent) -> dict[str, Any]:
payload = asdict(event)
with self._connect() as conn:
cursor = conn.execute(
"""
Expand All @@ -91,14 +90,14 @@ def record_event(self, event: BrainEvent) -> dict[str, Any]:
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""",
(
payload["timestamp"],
payload["session_id"],
payload["event_type"],
payload["tool_name"],
json.dumps(payload["files_touched"]),
payload["token_cost"],
payload["duration_ms"],
json.dumps(payload["metadata"]),
event.timestamp,
event.session_id,
event.event_type,
event.tool_name,
json.dumps(event.files_touched),
event.token_cost,
event.duration_ms,
json.dumps(event.metadata),
),
)
pruned = self._prune_oldest(conn)
Expand Down
60 changes: 29 additions & 31 deletions braindrain/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import sqlite3
import time
import uuid
from dataclasses import asdict, dataclass, field
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any

Expand Down Expand Up @@ -144,7 +144,6 @@ def touch_session(
return existing

def upsert_session(self, summary: SessionSummary) -> None:
payload = asdict(summary)
with self._connect() as conn:
conn.execute(
"""
Expand Down Expand Up @@ -172,16 +171,16 @@ def upsert_session(self, summary: SessionSummary) -> None:
updated_at = excluded.updated_at
""",
(
payload["session_id"],
payload["start_time"],
payload["end_time"],
payload["events_count"],
json.dumps(payload["tools_used"]),
json.dumps(payload["files_modified"]),
json.dumps(payload["key_decisions"]),
json.dumps(payload["errors"]),
payload["token_total"],
payload["updated_at"],
summary.session_id,
summary.start_time,
summary.end_time,
summary.events_count,
json.dumps(summary.tools_used),
json.dumps(summary.files_modified),
json.dumps(summary.key_decisions),
json.dumps(summary.errors),
summary.token_total,
summary.updated_at,
),
)

Expand Down Expand Up @@ -237,11 +236,10 @@ def should_dream(self, *, quiet_minutes: int | None = None, now: float | None =
return (current - latest) >= quiet * 60

def record_episode(self, episode: EpisodeRecord) -> dict[str, Any]:
payload = asdict(episode)
if not payload["episode_id"]:
payload["episode_id"] = str(uuid.uuid4())
if not payload["created_at"]:
payload["created_at"] = time.time()
if not episode.episode_id:
episode.episode_id = str(uuid.uuid4())
if not episode.created_at:
episode.created_at = time.time()
with self._connect() as conn:
conn.execute(
"""
Expand All @@ -262,22 +260,22 @@ def record_episode(self, episode: EpisodeRecord) -> dict[str, Any]:
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
payload["episode_id"],
payload["session_id"],
payload["problem"],
payload["context"],
payload["action"],
payload["outcome"],
json.dumps(payload["evidence_refs"]),
payload["local_critique"],
payload["global_reflection"],
payload["confidence"],
json.dumps(payload["tags"]),
payload["created_at"],
payload["promoted_lesson_id"],
episode.episode_id,
episode.session_id,
episode.problem,
episode.context,
episode.action,
episode.outcome,
json.dumps(episode.evidence_refs),
episode.local_critique,
episode.global_reflection,
episode.confidence,
json.dumps(episode.tags),
episode.created_at,
episode.promoted_lesson_id,
),
)
return {"episode_id": payload["episode_id"]}
return {"episode_id": episode.episode_id}

def mark_episode_promoted(self, episode_id: str, lesson_id: str) -> None:
with self._connect() as conn:
Expand Down
108 changes: 64 additions & 44 deletions braindrain/wiki_brain.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import time
import uuid
from collections import defaultdict
from dataclasses import asdict, dataclass, field
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any

Expand All @@ -34,6 +34,27 @@ class BrainRecord:
last_accessed: float = 0.0
access_count: int = 0

def to_dict(self) -> dict[str, Any]:
    """Return a plain-dict snapshot of this record's fields.

    Keys appear in declaration order. Values are the live attribute
    objects (lists/dicts are NOT copied), so mutating a returned
    container mutates the record itself.
    """
    keys = (
        "record_id",
        "record_class",
        "title",
        "content",
        "source",
        "category",
        "status",
        "importance",
        "confidence",
        "tags",
        "evidence_refs",
        "metadata",
        "supersedes_id",
        "created_at",
        "updated_at",
        "last_accessed",
        "access_count",
    )
    return {key: getattr(self, key) for key in keys}


class WikiBrain:
"""SQLite durable memory with FTS-backed retrieval and metrics."""
Expand Down Expand Up @@ -138,22 +159,21 @@ def _init_schema(self) -> None:
self._fts_available = False

def store_record(self, record: BrainRecord) -> dict[str, Any]:
payload = asdict(record)
now = time.time()
if not payload["record_id"]:
payload["record_id"] = str(uuid.uuid4())
if not payload["created_at"]:
payload["created_at"] = now
payload["updated_at"] = now
if not record.record_id:
record.record_id = str(uuid.uuid4())
if not record.created_at:
record.created_at = now
record.updated_at = now

contradiction = self.detect_contradiction(
content=payload["content"],
title=payload["title"],
record_class=payload["record_class"],
exclude_record_id=payload["record_id"],
content=record.content,
title=record.title,
record_class=record.record_class,
exclude_record_id=record.record_id,
)
if contradiction:
payload["supersedes_id"] = contradiction["record_id"]
record.supersedes_id = contradiction["record_id"]

with self._connect() as conn:
conn.execute(
Expand All @@ -179,33 +199,33 @@ def store_record(self, record: BrainRecord) -> dict[str, Any]:
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
payload["record_id"],
payload["record_class"],
payload["title"],
payload["content"],
payload["source"],
payload["category"],
payload["status"],
payload["importance"],
payload["confidence"],
json.dumps(payload["tags"]),
json.dumps(payload["evidence_refs"]),
json.dumps(payload["metadata"]),
payload["supersedes_id"],
payload["created_at"],
payload["updated_at"],
payload["last_accessed"],
payload["access_count"],
record.record_id,
record.record_class,
record.title,
record.content,
record.source,
record.category,
record.status,
record.importance,
record.confidence,
json.dumps(record.tags),
json.dumps(record.evidence_refs),
json.dumps(record.metadata),
record.supersedes_id,
record.created_at,
record.updated_at,
record.last_accessed,
record.access_count,
),
)
if payload["supersedes_id"]:
if record.supersedes_id:
conn.execute(
"""
UPDATE brain_records
SET status = 'superseded', updated_at = ?
WHERE record_id = ?
""",
(now, payload["supersedes_id"]),
(now, record.supersedes_id),
)
if self._fts_available:
conn.execute(
Expand All @@ -226,21 +246,21 @@ def store_record(self, record: BrainRecord) -> dict[str, Any]:
)
""",
(
payload["record_id"],
payload["record_id"],
payload["title"],
payload["content"],
" ".join(payload["tags"]),
payload["record_class"],
payload["category"],
payload["status"],
record.record_id,
record.record_id,
record.title,
record.content,
" ".join(record.tags),
record.record_class,
record.category,
record.status,
),
)

return {
"record_id": payload["record_id"],
"status": payload["status"],
"supersedes_id": payload["supersedes_id"],
"record_id": record.record_id,
"status": record.status,
"supersedes_id": record.supersedes_id,
}

def store_fact(
Expand Down Expand Up @@ -344,7 +364,7 @@ def cognitive_recall(
)
ranked.append(
{
"record": asdict(record),
"record": record.to_dict(),
"score": round(score, 6),
"signal_breakdown": {
"similarity": round(similarity, 6),
Expand All @@ -361,7 +381,7 @@ def cognitive_recall(

def review_playbook(self, *, query: str = "", limit: int = 10) -> list[dict[str, Any]]:
    """Return up to *limit* lesson records matching *query*, serialized as dicts.

    Thin wrapper over ``query_records`` with ``record_class`` pinned to
    ``"lesson"``; each hit is converted via its ``to_dict`` method.
    """
    return [
        lesson.to_dict()
        for lesson in self.query_records(query=query, record_class="lesson", limit=limit)
    ]

def detect_contradiction(
self,
Expand Down