Initial commit: OpenClaw Memory System MVP

This commit is contained in:
daotong
2026-04-04 11:15:57 +00:00
commit f18f4daea5
14 changed files with 1389 additions and 0 deletions

14
.env.example Normal file
View File

@@ -0,0 +1,14 @@
# Database Configuration
DB_HOST=postgres
DB_PORT=5432
DB_NAME=openclaw
DB_USER=postgres
DB_PASSWORD=postgres
# API Configuration
PORT=3000
NODE_ENV=production
# Embedding Service (optional, for semantic search)
# EMBEDDING_API_URL=http://embedding-service:8080
# EMBEDDING_API_KEY=your-key

43
Dockerfile Normal file
View File

@@ -0,0 +1,43 @@
# Build stage
FROM node:20-alpine AS builder
WORKDIR /app
# Copy package manifests first so the dependency layer is cached
COPY package*.json ./
RUN npm ci
# Copy source code
COPY . .
# Build TypeScript
RUN npm run build

# Production stage
FROM node:20-alpine
WORKDIR /app
ENV NODE_ENV=production
# Copy package files and install production dependencies only.
# --omit=dev replaces the deprecated --production flag (npm 9+).
COPY package*.json ./
RUN npm ci --omit=dev
# Copy built files from builder
COPY --from=builder /app/dist ./dist
# Create non-root user
RUN addgroup -g 1001 -S nodejs && \
    adduser -S memory -u 1001 && \
    chown -R memory:nodejs /app
USER memory
# Expose port
EXPOSE 3000
# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
    CMD node -e "require('http').get('http://localhost:3000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"
# Start server
CMD ["node", "dist/index.js"]

269
README.md Normal file
View File

@@ -0,0 +1,269 @@
# OpenClaw Memory System
A structured memory system for OpenClaw agents with semantic search, automatic evolution, and multi-container access.
## Features
- 🧠 **Structured Storage** - PostgreSQL-backed with rich metadata
- 🔍 **Semantic Search** - Full-text search with flexible filtering
- 📊 **Analytics** - Memory statistics and usage patterns
- 🔄 **Evolution** - Automatic merging, archiving, and forgetting
- 🌐 **Multi-Container Access** - REST API for all OpenClaw containers
- 📝 **Audit Trail** - Complete mutation and access logging
## Quick Start
### Docker Compose (Recommended)
```bash
# Create .env file
cp .env.example .env
# Edit .env with your database credentials
# nano .env
# Start the service
docker-compose up -d
# Check health
curl http://localhost:3000/health
```
### Manual Docker Build
```bash
# Build image
docker build -t openclaw-memory:latest .
# Run container
docker run -d \
--name openclaw-memory \
-p 3000:3000 \
-e DB_HOST=postgres \
-e DB_PORT=5432 \
-e DB_NAME=openclaw \
-e DB_USER=postgres \
-e DB_PASSWORD=your_password \
openclaw-memory:latest
```
### Local Development
```bash
# Install dependencies
npm install
# Run database migration
npm run migrate
# Start development server
npm run dev
```
## Database Setup
The service requires PostgreSQL with the `pgvector` extension. If using an existing database:
```sql
CREATE EXTENSION IF NOT EXISTS vector;
```
Then run the migration:
```bash
npm run migrate
```
## API Endpoints
### Create Memory
```bash
curl -X POST http://localhost:3000/api/memories \
-H "Content-Type: application/json" \
-d '{
"content": "The user prefers concise responses without filler words",
"type": "preference",
"category": "personal",
"priority": 2,
"tags": ["communication", "style"],
"source_session": "session-123"
}'
```
### Get Memory
```bash
curl http://localhost:3000/api/memories/{id}
```
### Update Memory
```bash
curl -X PUT http://localhost:3000/api/memories/{id} \
-H "Content-Type: application/json" \
-d '{
"priority": 1,
"tags": ["communication", "style", "important"]
}'
```
### Search Memories
```bash
curl "http://localhost:3000/api/memories/search?query=preference&type=preference&limit=10"
```
### Get Context for Session
```bash
curl "http://localhost:3000/api/memories/context?session=session-123&limit=5"
```
### Get Related Memories
```bash
curl "http://localhost:3000/api/memories/{id}/related?limit=10"
```
### Merge Memories
```bash
curl -X POST http://localhost:3000/api/memories/{id}/merge \
-H "Content-Type: application/json" \
-d '{
"target_id": "target-memory-id",
"reason": "Similar content about user preferences"
}'
```
### Archive Memory
```bash
curl -X POST http://localhost:3000/api/memories/{id}/archive \
-H "Content-Type: application/json" \
-d '{
"reason": "Outdated information"
}'
```
### Get Analytics
```bash
curl http://localhost:3000/api/memories/analytics
```
## Memory Types
| Type | Description |
|------|-------------|
| `event` | Something that happened |
| `insight` | Learning or discovery |
| `pattern` | Recurring behavior or theme |
| `preference` | User preference |
| `decision` | Decision made |
## Memory Categories
| Category | Description |
|----------|-------------|
| `work` | Work-related |
| `personal` | Personal |
| `technical` | Technical |
| `social` | Social |
## Priority Levels
| Priority | Description |
|----------|-------------|
| 1 | Critical (highest) |
| 2 | High |
| 3 | Medium (default) |
| 4 | Low |
| 5 | Trivial (lowest) |
## Integration with OpenClaw
### Environment Variables
Set these in your OpenClaw containers:
```bash
MEMORY_API_URL=http://openclaw-memory:3000
MEMORY_API_ENABLED=true
```
### Example: Create Memory from Agent
```typescript
// In your agent code
const response = await fetch(`${process.env.MEMORY_API_URL}/api/memories`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
content: "User corrected my response about X",
type: "insight",
category: "work",
priority: 2,
source_session: session.id
})
});
```
### Example: Get Context Before Session
```typescript
// Load context memories before starting a new session
const response = await fetch(
`${process.env.MEMORY_API_URL}/api/memories/context?session=${session.id}&limit=5`
);
const { data } = await response.json();
// Inject into system prompt
const systemPrompt = `
## Relevant Memories
${data.map(m => `- ${m.summary || m.content}`).join('\n')}
`;
```
## Schema
See `src/db/schema.sql` for the complete database schema including:
- `memories` - Main memories table
- `memory_accesses` - Access log
- `memory_mutations` - Mutation log
## Migration from Files
The memory system can import data from:
- `MEMORY.md` - Long-term memory
- `memory/YYYY-MM-DD.md` - Daily memory files
- `.learnings/*.md` - Learning records
A migration script will be provided in future versions.
## Development
```bash
# Install dependencies
npm install
# Run in development mode
npm run dev
# Build for production
npm run build
# Run database migration
npm run migrate
```
## License
MIT
## Author
daotong

32
docker-compose.yml Normal file
View File

@@ -0,0 +1,32 @@
# The obsolete top-level "version" key is omitted: Compose v2 ignores it
# and only emits a warning.
services:
  memory:
    build: .
    container_name: openclaw-memory
    ports:
      - "3000:3000"
    environment:
      - DB_HOST=postgres
      - DB_PORT=5432
      - DB_NAME=openclaw
      - DB_USER=postgres
      - DB_PASSWORD=${DB_PASSWORD:-postgres}
      - PORT=3000
      - NODE_ENV=production
    # NOTE(review): no 'postgres' service is defined in this file, and
    # depends_on can only reference services in the same Compose file —
    # either define postgres here or remove this block. Verify against
    # the deployment that owns the external network below.
    depends_on:
      postgres:
        condition: service_healthy
    networks:
      - openclaw-network
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "node", "-e", "require('http').get('http://localhost:3000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"]
      interval: 30s
      timeout: 3s
      retries: 3
      start_period: 5s

networks:
  openclaw-network:
    external: true

31
package.json Normal file
View File

@@ -0,0 +1,31 @@
{
"name": "openclaw-memory",
"version": "1.0.0",
"description": "Structured memory system for OpenClaw agents",
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"dev": "ts-node src/index.ts",
"migrate": "ts-node src/db/migrate.ts"
},
"keywords": ["openclaw", "memory", "semantic-search"],
"author": "daotong",
"license": "MIT",
"dependencies": {
"express": "^4.18.2",
"pg": "^8.11.3",
"dotenv": "^16.3.1",
"cors": "^2.8.5",
"uuid": "^9.0.1"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/node": "^20.10.0",
"@types/pg": "^8.10.9",
"@types/cors": "^2.8.17",
"@types/uuid": "^9.0.7",
"typescript": "^5.3.3",
"ts-node": "^10.9.2"
}
}

57
src/app.ts Normal file
View File

@@ -0,0 +1,57 @@
import express from 'express';
import cors from 'cors';
import dotenv from 'dotenv';
dotenv.config();
import {
  createMemory,
  getMemory,
  updateMemory,
  deleteMemory,
  searchMemories,
  getContext,
  getRelated,
  mergeMemories,
  archiveMemory,
  forgetMemory,
  getAnalytics
} from './routes/memories';

const app = express();

// Middleware
app.use(cors());
app.use(express.json());

// Health check
app.get('/health', (req, res) => {
  res.json({ status: 'ok', service: 'openclaw-memory', timestamp: new Date().toISOString() });
});

// Routes.
// IMPORTANT: Express matches routes in registration order, so the static
// paths (/search, /context, /analytics) MUST come before '/api/memories/:id'.
// Registering ':id' first would capture "search"/"context"/"analytics" as an
// id value and make those endpoints unreachable (getMemory would be invoked
// and fail casting e.g. 'search' to a UUID).
app.get('/api/memories/search', searchMemories);
app.get('/api/memories/context', getContext);
app.get('/api/memories/analytics', getAnalytics);
app.post('/api/memories', createMemory);
app.get('/api/memories/:id', getMemory);
app.put('/api/memories/:id', updateMemory);
app.delete('/api/memories/:id', deleteMemory);
app.get('/api/memories/:id/related', getRelated);
app.post('/api/memories/:id/merge', mergeMemories);
app.post('/api/memories/:id/archive', archiveMemory);
app.post('/api/memories/:id/forget', forgetMemory);

// 404 handler
app.use((req, res) => {
  res.status(404).json({ success: false, error: 'Not found' });
});

// Error handler (must have 4 params so Express treats it as error middleware)
app.use((err: any, req: express.Request, res: express.Response, next: express.NextFunction) => {
  console.error('Error:', err);
  res.status(500).json({ success: false, error: err.message || 'Internal server error' });
});

export default app;

43
src/db/migrate.ts Normal file
View File

@@ -0,0 +1,43 @@
import pg from 'pg';
import fs from 'fs';
import path from 'path';
import dotenv from 'dotenv';
dotenv.config();

const { Pool } = pg;

/**
 * Apply src/db/schema.sql against the database configured via DB_* env vars.
 * Exits the process with code 1 on failure; always closes the pool.
 */
async function migrate(): Promise<void> {
  const pool = new Pool({
    host: process.env.DB_HOST || 'localhost',
    port: parseInt(process.env.DB_PORT || '5432'),
    database: process.env.DB_NAME || 'openclaw',
    user: process.env.DB_USER || 'postgres',
    password: process.env.DB_PASSWORD || 'postgres',
  });
  try {
    console.log('Connecting to database...');
    // Verify connectivity, then release the client back to the pool.
    // The previous version never released this client, which could make
    // pool.end() in the finally block wait indefinitely.
    const client = await pool.connect();
    client.release();
    console.log('Reading schema...');
    const schemaPath = path.join(__dirname, 'schema.sql');
    const schema = fs.readFileSync(schemaPath, 'utf-8');
    console.log('Applying schema...');
    await pool.query(schema);
    console.log('✅ Schema applied successfully!');
  } catch (error) {
    console.error('❌ Migration failed:', error);
    process.exit(1);
  } finally {
    await pool.end();
  }
}

if (require.main === module) {
  migrate();
}

export default migrate;

27
src/db/pool.ts Normal file
View File

@@ -0,0 +1,27 @@
import pg from 'pg';
import dotenv from 'dotenv';
dotenv.config();
const { Pool } = pg;
/**
 * Shared PostgreSQL connection pool for the whole service.
 * Configured from DB_* environment variables with local-dev defaults.
 */
export const pool = new Pool({
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT || '5432'),
  database: process.env.DB_NAME || 'openclaw',
  user: process.env.DB_USER || 'postgres',
  password: process.env.DB_PASSWORD || 'postgres',
  max: 20, // upper bound on simultaneous clients
  idleTimeoutMillis: 30000, // close idle clients after 30s
  connectionTimeoutMillis: 2000, // fail fast if the DB is unreachable
});
// Logged once per new physical connection the pool opens (not per query).
pool.on('connect', () => {
  console.log('✅ Connected to PostgreSQL');
});
// An error on an idle client is unrecoverable for that client; this
// handler terminates the whole process so the container gets restarted.
pool.on('error', (err) => {
  console.error('❌ PostgreSQL connection error:', err);
  process.exit(-1);
});

122
src/db/schema.sql Normal file
View File

@@ -0,0 +1,122 @@
-- Memory System Database Schema for PostgreSQL + pgvector
-- Enable pgvector extension (requires pgvector to be installed in PostgreSQL)
CREATE EXTENSION IF NOT EXISTS vector;

-- Memories table
CREATE TABLE IF NOT EXISTS memories (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Content fields
    content TEXT NOT NULL,
    summary TEXT,
    embedding vector(1536), -- For semantic search (1536 is OpenAI's dimension)
    -- Classification
    type VARCHAR(50) NOT NULL DEFAULT 'event',
    category VARCHAR(50) NOT NULL DEFAULT 'work',
    priority INTEGER NOT NULL DEFAULT 3 CHECK (priority >= 1 AND priority <= 5),
    -- Relationships
    parent_id UUID REFERENCES memories(id) ON DELETE SET NULL,
    related_ids UUID[] DEFAULT '{}',
    source_session VARCHAR(100),
    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    last_accessed TIMESTAMPTZ NOT NULL DEFAULT now(),
    access_count INTEGER NOT NULL DEFAULT 0,
    -- Evolution
    status VARCHAR(20) NOT NULL DEFAULT 'active',
    confidence FLOAT CHECK (confidence >= 0 AND confidence <= 1),
    decay_rate FLOAT NOT NULL DEFAULT 0.01,
    -- Indexing
    tags TEXT[] DEFAULT '{}',
    keywords TEXT[] DEFAULT '{}',
    -- Constraints
    CONSTRAINT valid_type CHECK (type IN ('event', 'insight', 'pattern', 'preference', 'decision')),
    CONSTRAINT valid_category CHECK (category IN ('work', 'personal', 'technical', 'social')),
    CONSTRAINT valid_status CHECK (status IN ('active', 'merged', 'archived', 'forgotten'))
);

-- Indexes use IF NOT EXISTS so the migration can be re-run safely,
-- consistent with the IF NOT EXISTS on the tables above.
-- Index for semantic search
CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories USING ivfflat (embedding vector_cosine_ops) WITH (lists = 100);
-- Indexes for common filters
CREATE INDEX IF NOT EXISTS idx_memories_type ON memories(type);
CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category);
CREATE INDEX IF NOT EXISTS idx_memories_priority ON memories(priority);
CREATE INDEX IF NOT EXISTS idx_memories_status ON memories(status);
CREATE INDEX IF NOT EXISTS idx_memories_created_at ON memories(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_memories_last_accessed ON memories(last_accessed DESC);
-- GIN indexes for array fields
CREATE INDEX IF NOT EXISTS idx_memories_tags ON memories USING GIN(tags);
CREATE INDEX IF NOT EXISTS idx_memories_keywords ON memories USING GIN(keywords);
CREATE INDEX IF NOT EXISTS idx_memories_related_ids ON memories USING GIN(related_ids);
-- Full-text search index (as fallback)
CREATE INDEX IF NOT EXISTS idx_memories_content_search ON memories USING GIN(to_tsvector('english', content));

-- Memory access log
CREATE TABLE IF NOT EXISTS memory_accesses (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    memory_id UUID NOT NULL REFERENCES memories(id) ON DELETE CASCADE,
    context TEXT,
    session VARCHAR(100),
    accessed_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_memory_accesses_memory_id ON memory_accesses(memory_id);
CREATE INDEX IF NOT EXISTS idx_memory_accesses_accessed_at ON memory_accesses(accessed_at DESC);

-- Memory mutations log
CREATE TABLE IF NOT EXISTS memory_mutations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    memory_id UUID NOT NULL REFERENCES memories(id) ON DELETE CASCADE,
    mutation_type VARCHAR(20) NOT NULL,
    old_content TEXT,
    new_content TEXT,
    reason TEXT,
    mutated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    CONSTRAINT valid_mutation_type CHECK (mutation_type IN ('create', 'update', 'merge', 'forget', 'archive'))
);
CREATE INDEX IF NOT EXISTS idx_memory_mutations_memory_id ON memory_mutations(memory_id);
CREATE INDEX IF NOT EXISTS idx_memory_mutations_mutated_at ON memory_mutations(mutated_at DESC);

-- Function to update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = now();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to auto-update updated_at (dropped first so re-runs don't fail)
DROP TRIGGER IF EXISTS update_memories_updated_at ON memories;
CREATE TRIGGER update_memories_updated_at
BEFORE UPDATE ON memories
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Function to update access stats on the parent memory.
-- FIX: the previous version assigned NEW.last_accessed / NEW.access_count,
-- but in this trigger NEW is a memory_accesses row, which has neither
-- column — every INSERT INTO memory_accesses raised an error. We must
-- UPDATE the referenced memories row instead (an AFTER trigger's return
-- value is ignored anyway).
CREATE OR REPLACE FUNCTION increment_access_count()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE memories
    SET last_accessed = now(),
        access_count = access_count + 1
    WHERE id = NEW.memory_id;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger on memory_accesses insert
DROP TRIGGER IF EXISTS update_memory_access_stats ON memory_accesses;
CREATE TRIGGER update_memory_access_stats
AFTER INSERT ON memory_accesses
FOR EACH ROW
EXECUTE FUNCTION increment_access_count();

10
src/index.ts Normal file
View File

@@ -0,0 +1,10 @@
import app from './app';

// Port to bind; falls back to 3000 when PORT is not set.
const PORT = process.env.PORT || 3000;

// Start the HTTP server and announce the endpoints it exposes.
app.listen(PORT, () => {
  const base = `http://0.0.0.0:${PORT}`;
  console.log(`🧠 OpenClaw Memory System`);
  console.log(`📡 Server running on ${base}`);
  console.log(`🔍 Health check: ${base}/health`);
  console.log(`📚 API endpoint: ${base}/api/memories`);
});

View File

@@ -0,0 +1,90 @@
/** Kind of information a memory captures. */
export type MemoryType = 'event' | 'insight' | 'pattern' | 'preference' | 'decision';
/** Broad area of life/work the memory belongs to. */
export type MemoryCategory = 'work' | 'personal' | 'technical' | 'social';
/** Lifecycle state; reads and search default to 'active' rows only. */
export type MemoryStatus = 'active' | 'merged' | 'archived' | 'forgotten';
/** Kind of change recorded in the memory_mutations audit log. */
export type MutationType = 'create' | 'update' | 'merge' | 'forget' | 'archive';

/** A row of the `memories` table (see src/db/schema.sql). */
export interface Memory {
  id: string;
  content: string;
  summary?: string;
  /** Embedding vector for semantic search (schema column: vector(1536)). */
  embedding?: number[];
  type: MemoryType;
  category: MemoryCategory;
  priority: number; // 1-5, 1=highest
  /** Memory this one was derived from, if any. */
  parent_id?: string;
  related_ids: string[];
  /** Session that produced this memory. */
  source_session?: string;
  // NOTE(review): timestamps are typed as string, but node-postgres
  // returns Date objects for TIMESTAMPTZ columns — confirm how rows are
  // serialized before relying on string methods here.
  created_at: string;
  updated_at: string;
  last_accessed: string;
  access_count: number;
  status: MemoryStatus;
  /** Confidence in [0, 1] (enforced by a schema CHECK constraint). */
  confidence?: number;
  decay_rate: number;
  tags: string[];
  keywords: string[];
}
/** Payload for POST /api/memories; omitted fields use the service/schema defaults. */
export interface CreateMemoryInput {
  content: string;
  summary?: string;
  type?: MemoryType;
  category?: MemoryCategory;
  priority?: number;
  parent_id?: string;
  related_ids?: string[];
  source_session?: string;
  tags?: string[];
  keywords?: string[];
  confidence?: number;
  decay_rate?: number;
}
/** Partial update for PUT /api/memories/:id; only provided fields are written. */
export interface UpdateMemoryInput {
  content?: string;
  summary?: string;
  type?: MemoryType;
  category?: MemoryCategory;
  priority?: number;
  status?: MemoryStatus;
  tags?: string[];
  keywords?: string[];
  related_ids?: string[];
  confidence?: number;
}
/** A row of the memory_accesses log. */
export interface MemoryAccess {
  id: string;
  memory_id: string;
  context?: string;
  session?: string;
  accessed_at: string;
}
/** A row of the memory_mutations audit log. */
export interface MemoryMutation {
  id: string;
  memory_id: string;
  mutation_type: MutationType;
  old_content?: string;
  new_content?: string;
  reason?: string;
  mutated_at: string;
}
/** One page of search results plus the total number of matches. */
export interface SearchResult {
  memories: Memory[];
  total: number;
}
/** Filters for MemoryService.search / GET /api/memories/search. */
export interface SearchOptions {
  query?: string;
  type?: MemoryType[];
  category?: MemoryCategory[];
  /** Exact priority; when set, priority_min/priority_max are ignored. */
  priority?: number;
  priority_min?: number;
  priority_max?: number;
  status?: MemoryStatus[];
  tags?: string[];
  limit?: number;
  offset?: number;
}

199
src/routes/memories.ts Normal file
View File

@@ -0,0 +1,199 @@
import { Request, Response } from 'express';
import memoryService from '../services/memory.service';
import { CreateMemoryInput, UpdateMemoryInput, SearchOptions } from '../models/memory.model';
// POST /api/memories - Create a new memory
export async function createMemory(req: Request, res: Response): Promise<void> {
  try {
    const payload: CreateMemoryInput = req.body;
    const created = await memoryService.create(payload);
    // 201 Created with the freshly inserted row.
    res.status(201).json({ success: true, data: created });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// GET /api/memories/:id - Get a memory by ID
export async function getMemory(req: Request, res: Response): Promise<void> {
  try {
    const id = req.params.id;
    const found = await memoryService.getById(id);
    if (!found) {
      res.status(404).json({ success: false, error: 'Memory not found' });
      return;
    }
    // Record the read in the access log (optional context/session come
    // from query parameters) before responding.
    const context = req.query.context as string;
    const session = req.query.session as string;
    await memoryService.logAccess(id, context, session);
    res.json({ success: true, data: found });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// PUT /api/memories/:id - Update a memory
export async function updateMemory(req: Request, res: Response): Promise<void> {
  try {
    const id = req.params.id;
    const patch: UpdateMemoryInput = req.body;
    const updated = await memoryService.update(id, patch);
    if (updated) {
      res.json({ success: true, data: updated });
    } else {
      res.status(404).json({ success: false, error: 'Memory not found' });
    }
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// DELETE /api/memories/:id - Archive a memory
// (soft delete: rows are archived, never physically removed here)
export async function deleteMemory(req: Request, res: Response): Promise<void> {
  try {
    const id = req.params.id;
    const why = req.body.reason || 'Deleted via API';
    const archived = await memoryService.archive(id, why);
    if (archived) {
      res.json({ success: true, data: archived });
    } else {
      res.status(404).json({ success: false, error: 'Memory not found' });
    }
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// GET /api/memories/search - Search memories
export async function searchMemories(req: Request, res: Response): Promise<void> {
  try {
    // Parse an integer query parameter; absent or non-numeric values
    // become undefined instead of NaN (the old parseInt calls let NaN
    // leak into the SQL layer).
    const intParam = (value: unknown): number | undefined => {
      if (typeof value !== 'string' || value === '') return undefined;
      const n = parseInt(value, 10);
      return Number.isNaN(n) ? undefined : n;
    };
    // Split a comma-separated query parameter into a list.
    const listParam = (value: unknown): string[] | undefined =>
      typeof value === 'string' ? value.split(',') : undefined;

    // The casts narrow string[] to the model's literal-union arrays; the
    // original assigned string[] directly, which does not compile under
    // strict mode. Invalid values are still rejected downstream by the
    // schema CHECK constraints.
    const options: SearchOptions = {
      query: req.query.query as string,
      type: listParam(req.query.type) as SearchOptions['type'],
      category: listParam(req.query.category) as SearchOptions['category'],
      priority: intParam(req.query.priority),
      priority_min: intParam(req.query.priority_min),
      priority_max: intParam(req.query.priority_max),
      status: (listParam(req.query.status) as SearchOptions['status']) ?? ['active'],
      tags: listParam(req.query.tags),
      limit: intParam(req.query.limit) ?? 20,
      offset: intParam(req.query.offset) ?? 0,
    };
    const result = await memoryService.search(options);
    res.json({ success: true, ...result });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// GET /api/memories/context - Get context memories for a session
export async function getContext(req: Request, res: Response): Promise<void> {
  try {
    const session = req.query.session as string;
    if (!session) {
      res.status(400).json({ success: false, error: 'Session parameter is required' });
      return;
    }
    // Default to the 5 most relevant memories.
    const limit = req.query.limit ? parseInt(req.query.limit as string) : 5;
    const contextMemories = await memoryService.getContext(session, limit);
    res.json({ success: true, data: contextMemories });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// GET /api/memories/:id/related - Get related memories
export async function getRelated(req: Request, res: Response): Promise<void> {
  try {
    const id = req.params.id;
    const limit = req.query.limit ? parseInt(req.query.limit as string) : 10;
    const related = await memoryService.getRelated(id, limit);
    res.json({ success: true, data: related });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// POST /api/memories/:id/merge - Merge memories
// Merges the memory in the URL (source) into target_id from the body.
export async function mergeMemories(req: Request, res: Response): Promise<void> {
  try {
    const sourceId = req.params.id;
    const { target_id, reason } = req.body;
    if (!target_id || !reason) {
      res.status(400).json({ success: false, error: 'target_id and reason are required' });
      return;
    }
    const merged = await memoryService.merge(sourceId, target_id, reason);
    if (!merged) {
      res.status(404).json({ success: false, error: 'Memory not found' });
      return;
    }
    res.json({ success: true, data: merged });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// POST /api/memories/:id/archive - Archive a memory
export async function archiveMemory(req: Request, res: Response): Promise<void> {
  try {
    const id = req.params.id;
    const why = req.body.reason || 'Archived via API';
    const archived = await memoryService.archive(id, why);
    if (archived) {
      res.json({ success: true, data: archived });
    } else {
      res.status(404).json({ success: false, error: 'Memory not found' });
    }
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// POST /api/memories/:id/forget - Forget a memory
export async function forgetMemory(req: Request, res: Response): Promise<void> {
  try {
    const id = req.params.id;
    const why = req.body.reason || 'Forgotten via API';
    const forgotten = await memoryService.forget(id, why);
    if (forgotten) {
      res.json({ success: true, data: forgotten });
    } else {
      res.status(404).json({ success: false, error: 'Memory not found' });
    }
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}
// GET /api/memories/analytics - Get analytics
export async function getAnalytics(req: Request, res: Response): Promise<void> {
  try {
    const stats = await memoryService.getAnalytics();
    res.json({ success: true, data: stats });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
}

View File

@@ -0,0 +1,435 @@
import { pool } from '../db/pool';
import {
Memory,
CreateMemoryInput,
UpdateMemoryInput,
SearchResult,
SearchOptions,
MemoryAccess,
MemoryMutation
} from '../models/memory.model';
/**
 * Data-access layer for the memory system. Every method issues
 * parameterized SQL through the shared pool (src/db/pool.ts); the table
 * layout and triggers live in src/db/schema.sql.
 */
export class MemoryService {
  /**
   * Insert a new memory and record a 'create' mutation.
   * Unspecified fields fall back to the same defaults the schema uses
   * (type 'event', category 'work', priority 3, decay_rate 0.01).
   */
  async create(input: CreateMemoryInput): Promise<Memory> {
    const {
      content,
      summary,
      type = 'event',
      category = 'work',
      priority = 3,
      parent_id,
      related_ids = [],
      source_session,
      tags = [],
      keywords = [],
      confidence,
      decay_rate = 0.01
    } = input;
    const query = `
      INSERT INTO memories (
        content, summary, type, category, priority,
        parent_id, related_ids, source_session,
        tags, keywords, confidence, decay_rate
      )
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
      RETURNING *
    `;
    const values = [
      content,
      summary,
      type,
      category,
      priority,
      parent_id,
      related_ids,
      source_session,
      tags,
      keywords,
      confidence,
      decay_rate
    ];
    const result = await pool.query(query, values);
    const memory = this.mapRowToMemory(result.rows[0]);
    // Audit trail: every write is mirrored into memory_mutations.
    await this.logMutation(memory.id, 'create', null, content, 'Memory created');
    return memory;
  }

  /**
   * Fetch a single memory by id. Only 'active' rows are visible; merged,
   * archived, or forgotten memories return null.
   */
  async getById(id: string): Promise<Memory | null> {
    const query = `
      SELECT * FROM memories
      WHERE id = $1 AND status = 'active'
    `;
    const result = await pool.query(query, [id]);
    if (result.rows.length === 0) {
      return null;
    }
    return this.mapRowToMemory(result.rows[0]);
  }

  /**
   * Apply a partial update, building the SET clause only from fields
   * present in the input. Returns null when the memory does not exist or
   * is not active. updated_at is maintained by a database trigger, so it
   * is not set here.
   */
  async update(id: string, input: UpdateMemoryInput): Promise<Memory | null> {
    const {
      content,
      summary,
      type,
      category,
      priority,
      status,
      tags,
      keywords,
      related_ids,
      confidence
    } = input;
    // Fetch the current row so the mutation log can record old content.
    const current = await this.getById(id);
    if (!current) {
      return null;
    }
    const updates: string[] = [];
    const values: any[] = [];
    let paramIndex = 1;
    if (content !== undefined) {
      updates.push(`content = $${paramIndex++}`);
      values.push(content);
    }
    if (summary !== undefined) {
      updates.push(`summary = $${paramIndex++}`);
      values.push(summary);
    }
    if (type !== undefined) {
      updates.push(`type = $${paramIndex++}`);
      values.push(type);
    }
    if (category !== undefined) {
      updates.push(`category = $${paramIndex++}`);
      values.push(category);
    }
    if (priority !== undefined) {
      updates.push(`priority = $${paramIndex++}`);
      values.push(priority);
    }
    if (status !== undefined) {
      updates.push(`status = $${paramIndex++}`);
      values.push(status);
    }
    if (tags !== undefined) {
      updates.push(`tags = $${paramIndex++}`);
      values.push(tags);
    }
    if (keywords !== undefined) {
      updates.push(`keywords = $${paramIndex++}`);
      values.push(keywords);
    }
    if (related_ids !== undefined) {
      updates.push(`related_ids = $${paramIndex++}`);
      values.push(related_ids);
    }
    if (confidence !== undefined) {
      updates.push(`confidence = $${paramIndex++}`);
      values.push(confidence);
    }
    // Nothing to change — return the current row untouched.
    if (updates.length === 0) {
      return current;
    }
    values.push(id);
    const query = `
      UPDATE memories
      SET ${updates.join(', ')}
      WHERE id = $${paramIndex}
      RETURNING *
    `;
    const result = await pool.query(query, values);
    const updated = this.mapRowToMemory(result.rows[0]);
    await this.logMutation(id, 'update', current.content, updated.content, 'Memory updated');
    return updated;
  }

  /**
   * Search memories with optional filters. Results are ordered by
   * priority (1 = highest first), then recency, and paginated via
   * limit/offset. Returns the page plus the total match count.
   */
  async search(options: SearchOptions = {}): Promise<SearchResult> {
    const {
      query,
      type,
      category,
      priority,
      priority_min,
      priority_max,
      status = ['active'],
      tags,
      limit = 20,
      offset = 0
    } = options;
    const conditions: string[] = [];
    const values: any[] = [];
    let paramIndex = 1;
    if (status && status.length > 0) {
      conditions.push(`status = ANY($${paramIndex++})`);
      values.push(status);
    }
    if (type && type.length > 0) {
      conditions.push(`type = ANY($${paramIndex++})`);
      values.push(type);
    }
    if (category && category.length > 0) {
      conditions.push(`category = ANY($${paramIndex++})`);
      values.push(category);
    }
    // Exact priority wins over a min/max range.
    if (priority) {
      conditions.push(`priority = $${paramIndex++}`);
      values.push(priority);
    } else {
      if (priority_min !== undefined) {
        conditions.push(`priority >= $${paramIndex++}`);
        values.push(priority_min);
      }
      if (priority_max !== undefined) {
        conditions.push(`priority <= $${paramIndex++}`);
        values.push(priority_max);
      }
    }
    if (tags && tags.length > 0) {
      // && = array overlap: match memories sharing ANY of the tags.
      conditions.push(`tags && $${paramIndex++}`);
      values.push(tags);
    }
    if (query) {
      // Full-text search fallback (when embedding is not available).
      // FIX: plainto_tsquery safely parses raw user text. The previous
      // to_tsquery + split(' ').join(' & ') crashed with a SQL syntax
      // error on punctuation, quotes, or consecutive spaces.
      conditions.push(`to_tsvector('english', content) @@ plainto_tsquery('english', $${paramIndex++})`);
      values.push(query);
    }
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
    // The count query must run before limit/offset are appended to values.
    const countQuery = `SELECT COUNT(*) FROM memories ${whereClause}`;
    const countResult = await pool.query(countQuery, values);
    const total = parseInt(countResult.rows[0].count);
    const selectQuery = `
      SELECT * FROM memories
      ${whereClause}
      ORDER BY priority ASC, created_at DESC
      LIMIT $${paramIndex++} OFFSET $${paramIndex++}
    `;
    values.push(limit, offset);
    const result = await pool.query(selectQuery, values);
    const memories = result.rows.map(row => this.mapRowToMemory(row));
    return { memories, total };
  }

  /**
   * Pick memories worth injecting at the start of a session: this
   * session's own memories first, then frequently accessed (> 2 reads)
   * or high-priority (<= 2) ones, most recently touched first.
   */
  async getContext(session: string, limit: number = 5): Promise<Memory[]> {
    const query = `
      SELECT m.* FROM memories m
      WHERE m.status = 'active'
      AND (
        m.source_session = $1
        OR m.access_count > 2
        OR m.priority <= 2
      )
      ORDER BY
        CASE WHEN m.source_session = $1 THEN 0 ELSE 1 END,
        m.priority ASC,
        m.last_accessed DESC
      LIMIT $2
    `;
    const result = await pool.query(query, [session, limit]);
    return result.rows.map(row => this.mapRowToMemory(row));
  }

  /**
   * Find memories linked to the given one: listed in its related_ids,
   * children via parent_id, or memories that list it in their own
   * related_ids. Returns [] when the memory itself is not found.
   */
  async getRelated(id: string, limit: number = 10): Promise<Memory[]> {
    const memory = await this.getById(id);
    if (!memory) {
      return [];
    }
    const query = `
      SELECT * FROM memories
      WHERE id != $1
      AND status = 'active'
      AND (
        id = ANY($2)
        OR parent_id = $1
        OR $1 = ANY(related_ids)
      )
      ORDER BY priority ASC, created_at DESC
      LIMIT $3
    `;
    const result = await pool.query(query, [id, memory.related_ids, limit]);
    return result.rows.map(row => this.mapRowToMemory(row));
  }

  /**
   * Append a row to the access log. A database trigger then bumps the
   * memory's access_count / last_accessed.
   */
  async logAccess(memoryId: string, context?: string, session?: string): Promise<void> {
    const query = `
      INSERT INTO memory_accesses (memory_id, context, session)
      VALUES ($1, $2, $3)
    `;
    await pool.query(query, [memoryId, context, session]);
  }

  /** Append a row to the mutation audit log. */
  async logMutation(
    memoryId: string,
    type: string,
    oldContent: string | null,
    newContent: string | null,
    reason: string
  ): Promise<void> {
    const query = `
      INSERT INTO memory_mutations (memory_id, mutation_type, old_content, new_content, reason)
      VALUES ($1, $2, $3, $4, $5)
    `;
    await pool.query(query, [memoryId, type, oldContent, newContent, reason]);
  }

  /**
   * Merge sourceId into targetId: concatenate content, union tags /
   * keywords / related_ids, keep the higher confidence, then mark the
   * source as 'merged' and log the merge. Returns the updated target, or
   * null if either memory is missing/inactive.
   */
  async merge(sourceId: string, targetId: string, reason: string): Promise<Memory | null> {
    const source = await this.getById(sourceId);
    const target = await this.getById(targetId);
    if (!source || !target) {
      return null;
    }
    const mergedContent = `${target.content}\n\n[Merged from ${source.id}]\n${source.content}`;
    const mergedTags = Array.from(new Set([...target.tags, ...source.tags]));
    const mergedKeywords = Array.from(new Set([...target.keywords, ...source.keywords]));
    // The source id itself is added so the merge remains traceable.
    const mergedRelatedIds = Array.from(new Set([...target.related_ids, ...source.related_ids, sourceId]));
    const updated = await this.update(targetId, {
      content: mergedContent,
      tags: mergedTags,
      keywords: mergedKeywords,
      related_ids: mergedRelatedIds,
      confidence: Math.max(target.confidence || 0, source.confidence || 0)
    });
    // Retire the source and record why.
    await this.update(sourceId, { status: 'merged' });
    await this.logMutation(sourceId, 'merge', source.content, null, `Merged into ${targetId}: ${reason}`);
    return updated;
  }

  /** Soft-delete: mark a memory 'archived' and log the reason. */
  async archive(id: string, reason: string): Promise<Memory | null> {
    const updated = await this.update(id, { status: 'archived' });
    if (updated) {
      await this.logMutation(id, 'archive', updated.content, null, reason);
    }
    return updated;
  }

  /** Mark a memory 'forgotten' and log the reason. */
  async forget(id: string, reason: string): Promise<Memory | null> {
    const updated = await this.update(id, { status: 'forgotten' });
    if (updated) {
      await this.logMutation(id, 'forget', updated.content, null, reason);
    }
    return updated;
  }

  /**
   * Aggregate statistics over active memories: total count, breakdowns
   * by type/category/priority, and access-count stats. The five queries
   * run in parallel.
   */
  async getAnalytics(): Promise<any> {
    const queries = [
      pool.query('SELECT COUNT(*) as total FROM memories WHERE status = $1', ['active']),
      pool.query('SELECT type, COUNT(*) as count FROM memories WHERE status = $1 GROUP BY type', ['active']),
      pool.query('SELECT category, COUNT(*) as count FROM memories WHERE status = $1 GROUP BY category', ['active']),
      pool.query('SELECT priority, COUNT(*) as count FROM memories WHERE status = $1 GROUP BY priority', ['active']),
      pool.query('SELECT AVG(access_count) as avg_access, MAX(access_count) as max_access FROM memories WHERE status = $1', ['active']),
    ];
    const results = await Promise.all(queries);
    return {
      total: parseInt(results[0].rows[0].total),
      by_type: results[1].rows,
      by_category: results[2].rows,
      by_priority: results[3].rows,
      access_stats: {
        avg_access: parseFloat(results[4].rows[0].avg_access) || 0,
        max_access: parseInt(results[4].rows[0].max_access) || 0
      }
    };
  }

  /**
   * Map a raw pg row to the Memory model, defaulting array columns to [].
   * NOTE(review): created_at/updated_at/last_accessed come back from pg
   * as Date objects although the model declares them string — confirm
   * the intended serialization.
   */
  private mapRowToMemory(row: any): Memory {
    return {
      id: row.id,
      content: row.content,
      summary: row.summary,
      embedding: row.embedding,
      type: row.type,
      category: row.category,
      priority: row.priority,
      parent_id: row.parent_id,
      related_ids: row.related_ids || [],
      source_session: row.source_session,
      created_at: row.created_at,
      updated_at: row.updated_at,
      last_accessed: row.last_accessed,
      access_count: row.access_count,
      status: row.status,
      confidence: row.confidence,
      decay_rate: row.decay_rate,
      tags: row.tags || [],
      keywords: row.keywords || []
    };
  }
}

export default new MemoryService();

17
tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "commonjs",
"lib": ["ES2022"],
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"moduleResolution": "node"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}