4 changes: 2 additions & 2 deletions backend/src/ai/mcp.ts
@@ -421,7 +421,7 @@ export const mcp = (app: any) => {
const srv_ready = srv
.connect(trans)
.then(() => {
- console.log("[MCP] Server started and transport connected");
+ console.error("[MCP] Server started and transport connected");
})
.catch((error) => {
console.error("[MCP] Failed to initialize transport:", error);
@@ -436,7 +436,7 @@ export const mcp = (app: any) => {
send_err(res, -32600, "Request body must be a JSON object");
return;
}
- console.log("[MCP] Incoming request:", JSON.stringify(pay));
+ console.error("[MCP] Incoming request:", JSON.stringify(pay));
set_hdrs(res);
await trans.handleRequest(req, res, pay);
} catch (error) {
10 changes: 5 additions & 5 deletions backend/src/core/db.ts
@@ -141,7 +141,7 @@ if (is_pg) {
const admin = pool("postgres");
try {
await admin.query(`CREATE DATABASE ${db_name}`);
- console.log(`[DB] Created ${db_name}`);
+ console.error(`[DB] Created ${db_name}`);
} catch (e: any) {
if (e.code !== "42P04") throw e;
} finally {
@@ -201,11 +201,11 @@
// Initialize VectorStore
if (env.vector_backend === "valkey") {
vector_store = new ValkeyVectorStore();
- console.log("[DB] Using Valkey VectorStore");
+ console.error("[DB] Using Valkey VectorStore");
} else {
const vt = process.env.OM_VECTOR_TABLE || "openmemory_vectors";
vector_store = new PostgresVectorStore({ run_async, get_async, all_async }, v.replace(/"/g, ""));
- console.log(`[DB] Using Postgres VectorStore with table: ${v}`);
+ console.error(`[DB] Using Postgres VectorStore with table: ${v}`);
}
};
init().catch((err) => {
@@ -542,10 +542,10 @@ if (is_pg) {

if (env.vector_backend === "valkey") {
vector_store = new ValkeyVectorStore();
- console.log("[DB] Using Valkey VectorStore");
+ console.error("[DB] Using Valkey VectorStore");
} else {
vector_store = new PostgresVectorStore({ run_async, get_async, all_async }, sqlite_vector_table);
- console.log(`[DB] Using SQLite VectorStore with table: ${sqlite_vector_table}`);
+ console.error(`[DB] Using SQLite VectorStore with table: ${sqlite_vector_table}`);
}

transaction = {
2 changes: 1 addition & 1 deletion backend/src/core/migrate.ts
@@ -4,7 +4,7 @@ import { Pool } from "pg";

const is_pg = env.metadata_backend === "postgres";

- const log = (msg: string) => console.log(`[MIGRATE] ${msg}`);
+ const log = (msg: string) => console.error(`[MIGRATE] ${msg}`);

interface Migration {
version: string;
2 changes: 1 addition & 1 deletion backend/src/core/models.ts
@@ -15,7 +15,7 @@ export const load_models = (): model_cfg => {
try {
const yml = readFileSync(p, "utf-8");
cfg = parse_yaml(yml);
- console.log(
+ console.error(
`[MODELS] Loaded models.yml (${Object.keys(cfg).length} sectors)`,
);
return cfg;
2 changes: 1 addition & 1 deletion backend/src/core/telemetry.ts
@@ -38,7 +38,7 @@ export const sendTelemetry = async () => {
if (!res.ok) {
console.warn(``)
} else {
- console.log(`[telemetry] sent`)
+ console.error(`[telemetry] sent`)
}
} catch {
// silently ignore telemetry errors
4 changes: 2 additions & 2 deletions backend/src/memory/decay.ts
@@ -218,12 +218,12 @@ const top_keywords = (t: string, k = 5): string[] => {

export const apply_decay = async () => {
if (active_q > 0) {
- console.log(`[decay] skipped - ${active_q} active queries`);
+ console.error(`[decay] skipped - ${active_q} active queries`);
return;
}
const now_ts = Date.now();
if (now_ts - last_decay < cooldown) {
- console.log(
+ console.error(
`[decay] skipped - cooldown active (${((cooldown - (now_ts - last_decay)) / 1000).toFixed(0)}s remaining)`,
);
return;
8 changes: 4 additions & 4 deletions backend/src/memory/embed.ts
@@ -156,7 +156,7 @@ async function get_sem_emb(t: string, s: string): Promise<number[]> {
try {
const result = await embed_with_provider(provider, t, s);
if (i > 0) {
- console.log(
+ console.error(
`[EMBED] Fallback to ${provider} succeeded for sector: ${s}`,
);
}
@@ -207,7 +207,7 @@ async function emb_batch_with_fallback(
}
}
if (i > 0) {
- console.log(
+ console.error(
`[EMBED] Fallback to ${provider} succeeded for batch`,
);
}
@@ -555,7 +555,7 @@ export async function embedMultiSector(
simp &&
(env.emb_kind === "gemini" || env.emb_kind === "openai")
) {
- console.log(
+ console.error(
`[EMBED] Simple mode (1 batch for ${secs.length} sectors)`,
);
const tb: Record<string, string> = {};
@@ -566,7 +566,7 @@ export async function embedMultiSector(
r.push({ sector: s, vector: v, dim: v.length }),
);
} else {
- console.log(`[EMBED] Advanced mode (${secs.length} calls)`);
+ console.error(`[EMBED] Advanced mode (${secs.length} calls)`);
const par = env.adv_embed_parallel && env.emb_kind !== "gemini";
if (par) {
const p = secs.map(async (s) => {
14 changes: 7 additions & 7 deletions backend/src/memory/reflect.ts
@@ -108,18 +108,18 @@ const boost = async (ids: string[]) => {
};

export const run_reflection = async () => {
- console.log("[REFLECT] Starting reflection job...");
+ console.error("[REFLECT] Starting reflection job...");
const min = env.reflect_min || 20;
const mems = await q.all_mem.all(100, 0);
- console.log(
+ console.error(
`[REFLECT] Fetched ${mems.length} memories (min required: ${min})`,
);
if (mems.length < min) {
- console.log("[REFLECT] Not enough memories, skipping");
+ console.error("[REFLECT] Not enough memories, skipping");
return { created: 0, reason: "low" };
}
const cls = cluster(mems);
- console.log(`[REFLECT] Clustered into ${cls.length} groups`);
+ console.error(`[REFLECT] Clustered into ${cls.length} groups`);
let n = 0;
for (const c of cls) {
const txt = summ(c);
@@ -131,7 +131,7 @@ export const run_reflection = async () => {
freq: c.n,
at: new Date().toISOString(),
};
- console.log(
+ console.error(
`[REFLECT] Creating reflection: ${c.n} memories, salience=${s.toFixed(3)}, sector=${c.mem[0].primary_sector}`,
);
await add_hsg_memory(txt, j(["reflect:auto"]), meta);
@@ -140,7 +140,7 @@ export const run_reflection = async () => {
n++;
}
if (n > 0) await log_maint_op("reflect", n);
- console.log(`[REFLECT] Job complete: created ${n} reflections`);
+ console.error(`[REFLECT] Job complete: created ${n} reflections`);
return { created: n, clusters: cls.length };
};

@@ -153,7 +153,7 @@ export const start_reflection = () => {
() => run_reflection().catch((e) => console.error("[REFLECT]", e)),
int,
);
- console.log(`[REFLECT] Started: every ${env.reflect_interval || 10}m`);
+ console.error(`[REFLECT] Started: every ${env.reflect_interval || 10}m`);
};

export const stop_reflection = () => {
12 changes: 6 additions & 6 deletions backend/src/migrate.ts
@@ -48,7 +48,7 @@ async function get_existing_indexes(): Promise<Set<string>> {
}

async function run_migrations() {
- console.log("[MIGRATE] Starting automatic migration...");
+ console.error("[MIGRATE] Starting automatic migration...");

const existing_tables = await get_existing_tables();
const existing_indexes = await get_existing_indexes();
@@ -58,7 +58,7 @@ async function run_migrations() {

for (const [table_name, schema] of Object.entries(SCHEMA_DEFINITIONS)) {
if (!existing_tables.has(table_name)) {
- console.log(`[MIGRATE] Creating table: ${table_name}`);
+ console.error(`[MIGRATE] Creating table: ${table_name}`);
const statements = schema.split(";").filter((s) => s.trim());
for (const stmt of statements) {
if (stmt.trim()) {
@@ -73,19 +73,19 @@ async function run_migrations() {
const match = index_sql.match(/create index if not exists (\w+)/);
const index_name = match ? match[1] : null;
if (index_name && !existing_indexes.has(index_name)) {
- console.log(`[MIGRATE] Creating index: ${index_name}`);
+ console.error(`[MIGRATE] Creating index: ${index_name}`);
await run_async(index_sql);
created_indexes++;
}
}

- console.log(
+ console.error(
`[MIGRATE] Migration complete: ${created_tables} tables, ${created_indexes} indexes created`,
);

const final_tables = await get_existing_tables();
- console.log(`[MIGRATE] Total tables: ${final_tables.size}`);
- console.log(`[MIGRATE] Tables: ${Array.from(final_tables).join(", ")}`);
+ console.error(`[MIGRATE] Total tables: ${final_tables.size}`);
+ console.error(`[MIGRATE] Tables: ${Array.from(final_tables).join(", ")}`);
}

run_migrations().catch((err) => {
2 changes: 1 addition & 1 deletion backend/src/ops/dynamics.ts
@@ -166,7 +166,7 @@ export async function applyDualPhaseDecayToAllMemories(): Promise<void> {
);
});
await Promise.all(ops);
- console.log(`[DECAY] Applied to ${mems.length} memories`);
+ console.error(`[DECAY] Applied to ${mems.length} memories`);
}

export async function buildAssociativeWaypointGraphFromMemories(): Promise<
22 changes: 11 additions & 11 deletions backend/src/ops/ingest.ts
@@ -110,7 +110,7 @@ const link = async (
try {
await q.ins_waypoint.run(rid, cid, user_id || "anonymous", 1.0, ts, ts);
await transaction.commit();
- console.log(
+ console.error(
`[INGEST] Linked: ${rid.slice(0, 8)} -> ${cid.slice(0, 8)} (section ${idx})`,
);
} catch (e) {
@@ -155,15 +155,15 @@ export async function ingestDocument(
}

const secs = split(text, sz);
- console.log(`[INGEST] Document: ${exMeta.estimated_tokens} tokens`);
- console.log(`[INGEST] Splitting into ${secs.length} sections`);
+ console.error(`[INGEST] Document: ${exMeta.estimated_tokens} tokens`);
+ console.error(`[INGEST] Splitting into ${secs.length} sections`);

let rid: string;
const cids: string[] = [];

try {
rid = await mkRoot(text, ex, meta, user_id);
- console.log(`[INGEST] Root memory created: ${rid}`);
+ console.error(`[INGEST] Root memory created: ${rid}`);
for (let i = 0; i < secs.length; i++) {
try {
const cid = await mkChild(
@@ -176,7 +176,7 @@ export async function ingestDocument(
);
cids.push(cid);
await link(rid, cid, i, user_id);
- console.log(
+ console.error(
`[INGEST] Section ${i + 1}/${secs.length} processed: ${cid}`,
);
} catch (e) {
@@ -187,7 +187,7 @@ export async function ingestDocument(
throw e;
}
}
- console.log(
+ console.error(
`[INGEST] Completed: ${cids.length} sections linked to ${rid}`,
);
return {
@@ -237,15 +237,15 @@ export async function ingestURL(
}

const secs = split(ex.text, sz);
- console.log(`[INGEST] URL: ${ex.metadata.estimated_tokens} tokens`);
- console.log(`[INGEST] Splitting into ${secs.length} sections`);
+ console.error(`[INGEST] URL: ${ex.metadata.estimated_tokens} tokens`);
+ console.error(`[INGEST] Splitting into ${secs.length} sections`);

let rid: string;
const cids: string[] = [];

try {
rid = await mkRoot(ex.text, ex, { ...meta, source_url: url }, user_id);
- console.log(`[INGEST] Root memory for URL: ${rid}`);
+ console.error(`[INGEST] Root memory for URL: ${rid}`);
for (let i = 0; i < secs.length; i++) {
try {
const cid = await mkChild(
@@ -258,7 +258,7 @@ export async function ingestURL(
);
cids.push(cid);
await link(rid, cid, i, user_id);
- console.log(
+ console.error(
`[INGEST] URL section ${i + 1}/${secs.length} processed: ${cid}`,
);
} catch (e) {
@@ -269,7 +269,7 @@ export async function ingestURL(
throw e;
}
}
- console.log(
+ console.error(
`[INGEST] URL completed: ${cids.length} sections linked to ${rid}`,
);
return {
26 changes: 13 additions & 13 deletions backend/src/server/index.ts
@@ -23,10 +23,10 @@ const ASC = ` ____ __ __

const app = server({ max_payload_size: env.max_payload_size });

- console.log(ASC);
- console.log(`[CONFIG] Vector Dimension: ${env.vec_dim}`);
- console.log(`[CONFIG] Cache Segments: ${env.cache_segments}`);
- console.log(`[CONFIG] Max Active Queries: ${env.max_active}`);
+ console.error(ASC);
+ console.error(`[CONFIG] Vector Dimension: ${env.vec_dim}`);
+ console.error(`[CONFIG] Cache Segments: ${env.cache_segments}`);
+ console.error(`[CONFIG] Max Active Queries: ${env.max_active}`);

// Warn about configuration mismatch that causes embedding incompatibility
if (env.emb_kind !== "synthetic" && (tier === "hybrid" || tier === "fast")) {
@@ -67,19 +67,19 @@ routes(app);

mcp(app);
if (env.mode === "langgraph") {
- console.log("[MODE] LangGraph integration enabled");
+ console.error("[MODE] LangGraph integration enabled");
}

const decayIntervalMs = env.decay_interval_minutes * 60 * 1000;
- console.log(
+ console.error(
`[DECAY] Interval: ${env.decay_interval_minutes} minutes (${decayIntervalMs / 1000}s)`,
);

setInterval(async () => {
- console.log("[DECAY] Running HSG decay process...");
+ console.error("[DECAY] Running HSG decay process...");
try {
const result = await run_decay_process();
- console.log(
+ console.error(
`[DECAY] Completed: ${result.decayed}/${result.processed} memories updated`,
);
} catch (error) {
@@ -88,10 +88,10 @@ setInterval(async () => {
}, decayIntervalMs);
setInterval(
async () => {
- console.log("[PRUNE] Pruning weak waypoints...");
+ console.error("[PRUNE] Pruning weak waypoints...");
try {
const pruned = await prune_weak_waypoints();
- console.log(`[PRUNE] Completed: ${pruned} waypoints removed`);
+ console.error(`[PRUNE] Completed: ${pruned} waypoints removed`);
} catch (error) {
console.error("[PRUNE] Failed:", error);
}
@@ -100,7 +100,7 @@ setInterval(
);
run_decay_process()
.then((result: any) => {
- console.log(
+ console.error(
`[INIT] Initial decay: ${result.decayed}/${result.processed} memories updated`,
);
})
@@ -109,9 +109,9 @@ run_decay_process()
start_reflection();
start_user_summary_reflection();

- console.log(`[SERVER] Starting on port ${env.port}`);
+ console.error(`[SERVER] Starting on port ${env.port}`);
app.listen(env.port, () => {
- console.log(`[SERVER] Running on http://localhost:${env.port}`);
+ console.error(`[SERVER] Running on http://localhost:${env.port}`);
sendTelemetry().catch(() => {
// ignore telemetry failures
});
2 changes: 1 addition & 1 deletion backend/src/server/middleware/auth.ts
@@ -118,7 +118,7 @@ export function authenticate_api_request(req: any, res: any, next: any) {
export function log_authenticated_request(req: any, res: any, next: any) {
const key = extract_api_key(req);
if (key)
- console.log(
+ console.error(
`[AUTH] ${req.method} ${req.path} [${crypto.createHash("sha256").update(key).digest("hex").slice(0, 8)}...]`,
);
next();
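
Every change in this diff follows the same pattern: informational logging moves from `console.log` (stdout) to `console.error` (stderr). The diff does not state the motivation, but a common reason for this kind of change is to keep stdout free of log noise when it carries protocol traffic, for example an MCP server speaking JSON-RPC over a stdio transport. Below is a minimal sketch of how that routing could be centralized behind one helper; the `log_info` name is hypothetical and not part of this diff.

```ts
// Hypothetical helper (not part of this PR): send diagnostic output to stderr
// so stdout stays reserved for protocol payloads.
const log_info = (...args: unknown[]): void => {
  // console.error writes to stderr, mirroring the per-call change made above.
  console.error(...args);
};

// Usage in the same style as the log lines touched by this diff:
log_info("[MCP] Server started and transport connected");
```

Funneling every call through a single helper would also make a later move to a structured logger a one-line change.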