From cc4ccc67ebf4c5a05dfe1c111b58815d5d4edd91 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 12:01:53 +0800 Subject: [PATCH 01/85] chore(memos-local): checkpoint memory lifecycle changes --- apps/memos-local-openclaw/plugin-impl.ts | 29 +- .../memos-local-openclaw/src/capture/index.ts | 42 ++- apps/memos-local-openclaw/src/index.ts | 5 +- .../src/ingest/task-processor.ts | 29 +- .../memos-local-openclaw/src/ingest/worker.ts | 132 +++---- .../src/storage/sqlite.ts | 33 +- .../src/tools/memory-get.ts | 7 +- .../src/tools/memory-search.ts | 6 + .../src/tools/memory-timeline.ts | 14 +- .../tests/capture.test.ts | 20 +- .../tests/integration.test.ts | 70 ++++ .../tests/plugin-impl-access.test.ts | 126 +++++++ .../tests/shutdown-lifecycle.test.ts | 116 ++++++ .../tests/task-processor.test.ts | 26 ++ .../tests/worker-lifecycle.test.ts | 116 ++++++ ...3-08-v4-hub-sharing-implementation-plan.md | 344 ++++++++++++++++++ 16 files changed, 1014 insertions(+), 101 deletions(-) create mode 100644 apps/memos-local-openclaw/tests/plugin-impl-access.test.ts create mode 100644 apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts create mode 100644 apps/memos-local-openclaw/tests/worker-lifecycle.test.ts create mode 100644 docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md diff --git a/apps/memos-local-openclaw/plugin-impl.ts b/apps/memos-local-openclaw/plugin-impl.ts index c48826cb4..462919779 100644 --- a/apps/memos-local-openclaw/plugin-impl.ts +++ b/apps/memos-local-openclaw/plugin-impl.ts @@ -13,6 +13,11 @@ import { captureMessages } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; +function ownerFilterFor(agentId: string | undefined): string[] { + const resolvedAgentId = agentId && agentId.trim().length > 0 ? 
agentId : "main"; + return [`agent:${resolvedAgentId}`, "public"]; +} + const pluginConfigSchema = { type: "object" as const, additionalProperties: true, @@ -99,7 +104,7 @@ const memosLocalPlugin = { }; const agentId = (context as any)?.agentId ?? "main"; - const ownerFilter = [`agent:${agentId}`, "public"]; + const ownerFilter = ownerFilterFor(agentId); const result = await engine.search({ query, maxResults, minScore, ownerFilter }); if (result.hits.length === 0) { @@ -157,7 +162,7 @@ const memosLocalPlugin = { seq: Type.Number({ description: "From search hit ref.seq" }), window: Type.Optional(Type.Number({ description: "Context window ±N (default 2)" })), }), - async execute(_toolCallId, params) { + async execute(_toolCallId, params, context) { const { sessionKey, chunkId, turnId, seq, window: win } = params as { sessionKey: string; chunkId: string; @@ -166,9 +171,17 @@ const memosLocalPlugin = { window?: number; }; + const agentId = (context as any)?.agentId ?? "main"; + const ownerFilter = ownerFilterFor(agentId); const w = win ?? DEFAULTS.timelineWindowDefault; - const neighbors = store.getNeighborChunks(sessionKey, turnId, seq, w); - const anchorChunk = store.getChunk(chunkId); + const anchorChunk = store.getChunkForOwners(chunkId, ownerFilter); + if (!anchorChunk) { + return { + content: [{ type: "text", text: "Timeline (0 entries):\n\n" }], + details: { entries: [], anchorRef: { sessionKey, chunkId, turnId, seq } }, + }; + } + const neighbors = store.getNeighborChunks(sessionKey, turnId, seq, w, ownerFilter); const anchorTs = anchorChunk?.createdAt ?? 
0; const entries = neighbors.map((chunk) => { @@ -212,11 +225,12 @@ const memosLocalPlugin = { Type.Number({ description: `Max chars (default ${DEFAULTS.getMaxCharsDefault}, max ${DEFAULTS.getMaxCharsMax})` }), ), }), - async execute(_toolCallId, params) { + async execute(_toolCallId, params, context) { const { chunkId, maxChars } = params as { chunkId: string; maxChars?: number }; const limit = Math.min(maxChars ?? DEFAULTS.getMaxCharsDefault, DEFAULTS.getMaxCharsMax); - const chunk = store.getChunk(chunkId); + const agentId = (context as any)?.agentId ?? "main"; + const chunk = store.getChunkForOwners(chunkId, ownerFilterFor(agentId)); if (!chunk) { return { content: [{ type: "text", text: `Chunk not found: ${chunkId}` }], @@ -515,8 +529,9 @@ const memosLocalPlugin = { api.logger.info(`memos-local: started (embedding: ${embedder.provider})`); } }, - stop: () => { + stop: async () => { viewer.stop(); + await worker.flush(); store.close(); api.logger.info("memos-local: stopped"); }, diff --git a/apps/memos-local-openclaw/src/capture/index.ts b/apps/memos-local-openclaw/src/capture/index.ts index 4dd0ec689..b5d451abd 100644 --- a/apps/memos-local-openclaw/src/capture/index.ts +++ b/apps/memos-local-openclaw/src/capture/index.ts @@ -25,9 +25,12 @@ const INBOUND_META_SENTINELS = [ ]; const SENTINEL_FAST_RE = new RegExp( - INBOUND_META_SENTINELS.map(s => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("|"), + INBOUND_META_SENTINELS.map((s) => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("|"), ); +const ENVELOPE_PREFIX_RE = + /^\s*\[(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s+\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}(?::\d{2})?\s+[A-Z]{3}[+-]\d{1,2}\]\s*/; + /** * Extract writable messages from a conversation turn. 
* @@ -39,7 +42,7 @@ export function captureMessages( messages: Array<{ role: string; content: string; toolName?: string }>, sessionKey: string, turnId: string, - _evidenceTag: string, + evidenceTag: string, log: Logger, owner?: string, ): ConversationMessage[] { @@ -59,6 +62,8 @@ export function captureMessages( let content = msg.content; if (role === "user") { content = stripInboundMetadata(content); + } else { + content = stripEvidenceWrappers(content, evidenceTag); } if (!content.trim()) continue; @@ -89,17 +94,15 @@ export function captureMessages( * Also strips the envelope timestamp prefix like "[Tue 2026-03-03 21:58 GMT+8] " */ export function stripInboundMetadata(text: string): string { - // Strip envelope timestamp prefix: "[Tue 2026-03-03 21:58 GMT+8] actual message" - let cleaned = text.replace( - /^\[(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s+\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}(?::\d{2})?\s+[A-Z]{3}[+-]\d{1,2}\]\s*/, - "", - ); + let cleaned = stripEnvelopePrefix(text); // Strip OpenClaw envelope tags: [message_id: ...], [[reply_to_current]], etc. 
cleaned = cleaned.replace(/\[message_id:\s*[a-f0-9-]+\]/gi, ""); cleaned = cleaned.replace(/\[\[reply_to_current\]\]/gi, ""); - if (!SENTINEL_FAST_RE.test(cleaned)) return cleaned.trim(); + if (!SENTINEL_FAST_RE.test(cleaned)) { + return stripEnvelopePrefix(cleaned).trim(); + } const lines = cleaned.split("\n"); const result: string[] = []; @@ -110,7 +113,7 @@ export function stripInboundMetadata(text: string): string { const line = lines[i]; const trimmed = line.trim(); - if (!inMetaBlock && INBOUND_META_SENTINELS.some(s => s === trimmed)) { + if (!inMetaBlock && INBOUND_META_SENTINELS.some((s) => s === trimmed)) { if (lines[i + 1]?.trim() === "```json") { inMetaBlock = true; inFencedJson = false; @@ -135,5 +138,24 @@ export function stripInboundMetadata(text: string): string { result.push(line); } - return result.join("\n").trim(); + return stripEnvelopePrefix(result.join("\n")).trim(); +} + +function stripEnvelopePrefix(text: string): string { + return text.replace(ENVELOPE_PREFIX_RE, ""); +} + +function stripEvidenceWrappers(text: string, evidenceTag: string): string { + const tag = evidenceTag.trim(); + if (!tag) return text; + + const escapedTag = tag.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + const wrapperRe = new RegExp(`\\[${escapedTag}\\][\\s\\S]*?\\[\\/${escapedTag}\\]`, "g"); + + return text + .replace(wrapperRe, "") + .replace(/[ \t]{2,}/g, " ") + .replace(/\s+([,.;:!?])/g, "$1") + .replace(/\n{3,}/g, "\n\n") + .trim(); } diff --git a/apps/memos-local-openclaw/src/index.ts b/apps/memos-local-openclaw/src/index.ts index 4f84b6665..dcea12d86 100644 --- a/apps/memos-local-openclaw/src/index.ts +++ b/apps/memos-local-openclaw/src/index.ts @@ -14,7 +14,7 @@ export interface MemosLocalPlugin { onConversationTurn: (messages: Array<{ role: string; content: string }>, sessionKey?: string, owner?: string) => void; /** Wait for all pending ingest operations to complete. 
*/ flush: () => Promise; - shutdown: () => void; + shutdown: () => Promise; } export interface PluginInitOptions { @@ -91,8 +91,9 @@ export function initPlugin(opts: PluginInitOptions = {}): MemosLocalPlugin { await worker.flush(); }, - shutdown(): void { + async shutdown(): Promise { ctx.log.info("Shutting down memos-local plugin..."); + await worker.flush(); store.close(); }, }; diff --git a/apps/memos-local-openclaw/src/ingest/task-processor.ts b/apps/memos-local-openclaw/src/ingest/task-processor.ts index fe7779dd5..d043d4673 100644 --- a/apps/memos-local-openclaw/src/ingest/task-processor.ts +++ b/apps/memos-local-openclaw/src/ingest/task-processor.ts @@ -30,6 +30,8 @@ const SKIP_REASONS = { export class TaskProcessor { private summarizer: Summarizer; private processing = false; + private pendingEvents: Array<{ sessionKey: string; latestTimestamp: number; owner: string }> = []; + private drainPromise: Promise | null = null; private onTaskCompletedCallback?: (task: Task) => void; constructor( @@ -48,18 +50,31 @@ export class TaskProcessor { * Determines if a new task boundary was crossed and handles transition. */ async onChunksIngested(sessionKey: string, latestTimestamp: number, owner?: string): Promise { - this.ctx.log.debug(`TaskProcessor.onChunksIngested called session=${sessionKey} ts=${latestTimestamp} owner=${owner ?? "agent:main"} processing=${this.processing}`); - if (this.processing) { - this.ctx.log.debug("TaskProcessor.onChunksIngested skipped — already processing"); - return; + const resolvedOwner = owner ?? 
"agent:main"; + this.ctx.log.debug(`TaskProcessor.onChunksIngested called session=${sessionKey} ts=${latestTimestamp} owner=${resolvedOwner} processing=${this.processing}`); + this.pendingEvents.push({ sessionKey, latestTimestamp, owner: resolvedOwner }); + + if (!this.drainPromise) { + this.drainPromise = this.drainPending(); } + + await this.drainPromise; + } + + private async drainPending(): Promise { this.processing = true; try { - await this.detectAndProcess(sessionKey, latestTimestamp, owner ?? "agent:main"); - } catch (err) { - this.ctx.log.error(`TaskProcessor error: ${err}`); + while (this.pendingEvents.length > 0) { + const next = this.pendingEvents.shift()!; + try { + await this.detectAndProcess(next.sessionKey, next.latestTimestamp, next.owner); + } catch (err) { + this.ctx.log.error(`TaskProcessor error: ${err}`); + } + } } finally { this.processing = false; + this.drainPromise = null; } } diff --git a/apps/memos-local-openclaw/src/ingest/worker.ts b/apps/memos-local-openclaw/src/ingest/worker.ts index db53ba62c..cbb8021d8 100644 --- a/apps/memos-local-openclaw/src/ingest/worker.ts +++ b/apps/memos-local-openclaw/src/ingest/worker.ts @@ -45,73 +45,79 @@ export class IngestWorker { private async processQueue(): Promise { this.processing = true; - const t0 = performance.now(); - - let lastSessionKey: string | undefined; - let lastOwner: string | undefined; - let lastTimestamp = 0; - let stored = 0; - let skipped = 0; - let merged = 0; - let duplicated = 0; - let errors = 0; - const resultLines: string[] = []; - const inputLines: string[] = []; - const totalMessages = this.queue.length; - - while (this.queue.length > 0) { - const msg = this.queue.shift()!; - inputLines.push(`[${msg.role}] ${msg.content}`); - try { - const result = await this.ingestMessage(msg); - lastSessionKey = msg.sessionKey; - lastOwner = msg.owner ?? 
"agent:main"; - lastTimestamp = Math.max(lastTimestamp, msg.timestamp); - if (result === "skipped") { - skipped++; - resultLines.push(`[${msg.role}] ⏭ exact-dup → ${msg.content}`); - } else if (result.action === "stored") { - stored++; - resultLines.push(`[${msg.role}] ✅ stored → ${result.summary ?? msg.content}`); - } else if (result.action === "duplicate") { - duplicated++; - resultLines.push(`[${msg.role}] 🔁 dedup(${result.reason ?? "similar"}) → ${msg.content}`); - } else if (result.action === "merged") { - merged++; - resultLines.push(`[${msg.role}] 🔀 merged → ${msg.content}`); + + try { + while (this.queue.length > 0) { + const t0 = performance.now(); + const batchSize = this.queue.length; + let lastSessionKey: string | undefined; + let lastOwner: string | undefined; + let lastTimestamp = 0; + let stored = 0; + let skipped = 0; + let merged = 0; + let duplicated = 0; + let errors = 0; + const resultLines: string[] = []; + const inputLines: string[] = []; + + while (this.queue.length > 0) { + const msg = this.queue.shift()!; + inputLines.push(`[${msg.role}] ${msg.content}`); + try { + const result = await this.ingestMessage(msg); + lastSessionKey = msg.sessionKey; + lastOwner = msg.owner ?? "agent:main"; + lastTimestamp = Math.max(lastTimestamp, msg.timestamp); + if (result === "skipped") { + skipped++; + resultLines.push(`[${msg.role}] ⏭ exact-dup → ${msg.content}`); + } else if (result.action === "stored") { + stored++; + resultLines.push(`[${msg.role}] ✅ stored → ${result.summary ?? msg.content}`); + } else if (result.action === "duplicate") { + duplicated++; + resultLines.push(`[${msg.role}] 🔁 dedup(${result.reason ?? 
"similar"}) → ${msg.content}`); + } else if (result.action === "merged") { + merged++; + resultLines.push(`[${msg.role}] 🔀 merged → ${msg.content}`); + } + } catch (err) { + errors++; + resultLines.push(`[${msg.role}] ❌ error → ${msg.content}`); + this.ctx.log.error(`Failed to ingest message turn=${msg.turnId}: ${err}`); + } } - } catch (err) { - errors++; - resultLines.push(`[${msg.role}] ❌ error → ${msg.content}`); - this.ctx.log.error(`Failed to ingest message turn=${msg.turnId}: ${err}`); - } - } - const dur = performance.now() - t0; - - if (stored + merged > 0 || skipped > 0 || duplicated > 0) { - this.store.recordToolCall("memory_add", dur, errors === 0); - try { - const inputInfo = { - session: lastSessionKey, - messages: totalMessages, - details: inputLines, - }; - const stats = [`stored=${stored}`, skipped > 0 ? `skipped=${skipped}` : null, duplicated > 0 ? `dedup=${duplicated}` : null, merged > 0 ? `merged=${merged}` : null, errors > 0 ? `errors=${errors}` : null].filter(Boolean).join(", "); - this.store.recordApiLog("memory_add", inputInfo, `${stats}\n${resultLines.join("\n")}`, dur, errors === 0); - } catch (_) { /* best-effort */ } - } + const dur = performance.now() - t0; + + if (stored + merged > 0 || skipped > 0 || duplicated > 0) { + this.store.recordToolCall("memory_add", dur, errors === 0); + try { + const inputInfo = { + session: lastSessionKey, + messages: batchSize, + details: inputLines, + }; + const stats = [`stored=${stored}`, skipped > 0 ? `skipped=${skipped}` : null, duplicated > 0 ? `dedup=${duplicated}` : null, merged > 0 ? `merged=${merged}` : null, errors > 0 ? 
`errors=${errors}` : null].filter(Boolean).join(", "); + this.store.recordApiLog("memory_add", inputInfo, `${stats}\n${resultLines.join("\n")}`, dur, errors === 0); + } catch (_) { /* best-effort */ } + } - if (lastSessionKey) { - this.ctx.log.debug(`Calling TaskProcessor.onChunksIngested session=${lastSessionKey} ts=${lastTimestamp} owner=${lastOwner}`); - this.taskProcessor - .onChunksIngested(lastSessionKey, lastTimestamp, lastOwner) - .catch((err) => this.ctx.log.error(`TaskProcessor post-ingest error: ${err}`)); + if (lastSessionKey) { + this.ctx.log.debug(`Calling TaskProcessor.onChunksIngested session=${lastSessionKey} ts=${lastTimestamp} owner=${lastOwner}`); + try { + await this.taskProcessor.onChunksIngested(lastSessionKey, lastTimestamp, lastOwner); + } catch (err) { + this.ctx.log.error(`TaskProcessor post-ingest error: ${err}`); + } + } + } + } finally { + this.processing = false; + for (const resolve of this.flushResolvers) resolve(); + this.flushResolvers = []; } - - this.processing = false; - for (const resolve of this.flushResolvers) resolve(); - this.flushResolvers = []; } private async ingestMessage(msg: ConversationMessage): Promise< diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index cc47cbd82..e3a4354da 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -640,16 +640,37 @@ export class SqliteStore { return row ? rowToChunk(row) : null; } - getChunksByRef(ref: ChunkRef): Chunk | null { - return this.getChunk(ref.chunkId); + getChunkForOwners(chunkId: string, ownerFilter?: string[]): Chunk | null { + if (!ownerFilter || ownerFilter.length === 0) return this.getChunk(chunkId); + + const placeholders = ownerFilter.map(() => "?").join(","); + const row = this.db.prepare( + `SELECT * FROM chunks WHERE id = ? AND owner IN (${placeholders}) LIMIT 1`, + ).get(chunkId, ...ownerFilter) as ChunkRow | undefined; + return row ? 
rowToChunk(row) : null; } - getNeighborChunks(sessionKey: string, turnId: string, seq: number, window: number): Chunk[] { - const allRows = this.db.prepare(` + getChunksByRef(ref: ChunkRef, ownerFilter?: string[]): Chunk | null { + return this.getChunkForOwners(ref.chunkId, ownerFilter); + } + + getNeighborChunks(sessionKey: string, turnId: string, seq: number, window: number, ownerFilter?: string[]): Chunk[] { + let sql = ` SELECT * FROM chunks - WHERE session_key = ? + WHERE session_key = ?`; + const params: any[] = [sessionKey]; + + if (ownerFilter && ownerFilter.length > 0) { + const placeholders = ownerFilter.map(() => "?").join(","); + sql += ` AND owner IN (${placeholders})`; + params.push(...ownerFilter); + } + + sql += ` ORDER BY created_at, seq - `).all(sessionKey) as ChunkRow[]; + `; + + const allRows = this.db.prepare(sql).all(...params) as ChunkRow[]; const targetIdx = allRows.findIndex( (r) => r.turn_id === turnId && r.seq === seq, diff --git a/apps/memos-local-openclaw/src/tools/memory-get.ts b/apps/memos-local-openclaw/src/tools/memory-get.ts index 270e6bae4..a8efb2579 100644 --- a/apps/memos-local-openclaw/src/tools/memory-get.ts +++ b/apps/memos-local-openclaw/src/tools/memory-get.ts @@ -2,6 +2,11 @@ import type { SqliteStore } from "../storage/sqlite"; import type { ToolDefinition, GetResult, ChunkRef } from "../types"; import { DEFAULTS } from "../types"; +function resolveOwnerFilter(owner: unknown): string[] { + const resolvedOwner = typeof owner === "string" && owner.trim().length > 0 ? owner : "agent:main"; + return resolvedOwner === "public" ? 
["public"] : [resolvedOwner, "public"]; +} + export function createMemoryGetTool(store: SqliteStore): ToolDefinition { return { name: "memory_get", @@ -36,7 +41,7 @@ export function createMemoryGetTool(store: SqliteStore): ToolDefinition { DEFAULTS.getMaxCharsMax, ); - const chunk = store.getChunksByRef(ref); + const chunk = store.getChunksByRef(ref, resolveOwnerFilter(input.owner)); if (!chunk) { return { error: `Chunk not found: ${ref.chunkId}` }; diff --git a/apps/memos-local-openclaw/src/tools/memory-search.ts b/apps/memos-local-openclaw/src/tools/memory-search.ts index b975d9bd9..ede0291ba 100644 --- a/apps/memos-local-openclaw/src/tools/memory-search.ts +++ b/apps/memos-local-openclaw/src/tools/memory-search.ts @@ -1,6 +1,11 @@ import type { RecallEngine } from "../recall/engine"; import type { ToolDefinition } from "../types"; +function resolveOwnerFilter(owner: unknown): string[] { + const resolvedOwner = typeof owner === "string" && owner.trim().length > 0 ? owner : "agent:main"; + return resolvedOwner === "public" ? ["public"] : [resolvedOwner, "public"]; +} + export function createMemorySearchTool(engine: RecallEngine): ToolDefinition { return { name: "memory_search", @@ -29,6 +34,7 @@ export function createMemorySearchTool(engine: RecallEngine): ToolDefinition { query: (input.query as string) ?? 
"", maxResults: input.maxResults as number | undefined, minScore: input.minScore as number | undefined, + ownerFilter: resolveOwnerFilter(input.owner), }); return result; }, diff --git a/apps/memos-local-openclaw/src/tools/memory-timeline.ts b/apps/memos-local-openclaw/src/tools/memory-timeline.ts index 5c61336d5..92d4031f4 100644 --- a/apps/memos-local-openclaw/src/tools/memory-timeline.ts +++ b/apps/memos-local-openclaw/src/tools/memory-timeline.ts @@ -2,6 +2,11 @@ import type { SqliteStore } from "../storage/sqlite"; import type { ToolDefinition, TimelineResult, TimelineEntry, ChunkRef } from "../types"; import { DEFAULTS } from "../types"; +function resolveOwnerFilter(owner: unknown): string[] { + const resolvedOwner = typeof owner === "string" && owner.trim().length > 0 ? owner : "agent:main"; + return resolvedOwner === "public" ? ["public"] : [resolvedOwner, "public"]; +} + export function createMemoryTimelineTool(store: SqliteStore): ToolDefinition { return { name: "memory_timeline", @@ -33,18 +38,25 @@ export function createMemoryTimelineTool(store: SqliteStore): ToolDefinition { const ref = input.ref as ChunkRef; const window = (input.window as number) ?? DEFAULTS.timelineWindowDefault; + const ownerFilter = resolveOwnerFilter(input.owner); + const anchorChunk = store.getChunksByRef(ref, ownerFilter); + if (!anchorChunk) { + return { entries: [], anchorRef: ref } satisfies TimelineResult; + } + const neighbors = store.getNeighborChunks( ref.sessionKey, ref.turnId, ref.seq, window, + ownerFilter, ); const entries: TimelineEntry[] = neighbors.map((chunk) => { let relation: TimelineEntry["relation"] = "before"; if (chunk.id === ref.chunkId) { relation = "current"; - } else if (chunk.createdAt > (store.getChunk(ref.chunkId)?.createdAt ?? 
0)) { + } else if (chunk.createdAt > anchorChunk.createdAt) { relation = "after"; } diff --git a/apps/memos-local-openclaw/tests/capture.test.ts b/apps/memos-local-openclaw/tests/capture.test.ts index 4a6d598d3..97ee5c5a7 100644 --- a/apps/memos-local-openclaw/tests/capture.test.ts +++ b/apps/memos-local-openclaw/tests/capture.test.ts @@ -46,7 +46,7 @@ describe("captureMessages", () => { expect(result[0].toolName).toBe("web_search"); }); - it("should preserve original content without any stripping", () => { + it("should strip explicit evidence wrapper blocks from assistant messages", () => { const msgs = [ { role: "assistant", @@ -55,9 +55,21 @@ describe("captureMessages", () => { ]; const result = captureMessages(msgs, "s1", "t1", "STORED_MEMORY", noopLog); expect(result).toHaveLength(1); - expect(result[0].content).toBe( - "Based on memory: [STORED_MEMORY]some evidence[/STORED_MEMORY] the answer is 42.", - ); + expect(result[0].content).toBe("Based on memory: the answer is 42."); + }); + + it("should not strip ordinary mentions of the evidence tag", () => { + const msgs = [ + { + role: "assistant", + content: "The literal token STORED_MEMORY appears in this docs note.", + }, + ]; + + const result = captureMessages(msgs, "s1", "t1", "STORED_MEMORY", noopLog); + + expect(result).toHaveLength(1); + expect(result[0].content).toBe("The literal token STORED_MEMORY appears in this docs note."); }); it("should skip empty messages", () => { diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 616966a22..6cd6b0b93 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -41,6 +41,21 @@ I think the path alias is wrong in the tsconfig configuration.` }, { role: "assistant", content: "The error shows a missing path alias for @/components/Chart. 
Check your tsconfig.json paths configuration - it should have: \"@/*\": [\"./src/*\"] or similar mapping." }, ], "session-frontend"); + plugin.onConversationTurn([ + { role: "user", content: "alpha private marker only alpha should see this rollout note" }, + { role: "assistant", content: "Recorded alpha private marker deployment note." }, + ], "session-alpha-private", "agent:alpha"); + + plugin.onConversationTurn([ + { role: "user", content: "beta private marker only beta should see this rollback note" }, + { role: "assistant", content: "Recorded beta private marker rollback note." }, + ], "session-beta-private", "agent:beta"); + + plugin.onConversationTurn([ + { role: "user", content: "shared public marker all agents can use this shared convention" }, + { role: "assistant", content: "Recorded shared public marker convention." }, + ], "session-public", "public"); + // Wait for all async ingest to complete await plugin.flush(); }, 120_000); @@ -156,6 +171,61 @@ describe("Integration: memory_get", () => { }); }); +describe("Integration: owner isolation for initPlugin tools", () => { + it("memory_search should respect owner on initPlugin path", async () => { + const searchTool = plugin.tools.find((t) => t.name === "memory_search")!; + + const betaSearch = (await searchTool.handler({ + query: "alpha private marker", + owner: "agent:beta", + })) as any; + + expect(betaSearch.hits).toHaveLength(0); + + const publicSearch = (await searchTool.handler({ + query: "shared public marker", + owner: "agent:beta", + })) as any; + + expect(publicSearch.hits.length).toBeGreaterThan(0); + expect(publicSearch.hits.some((hit: any) => hit.ref.sessionKey === "session-public")).toBe(true); + }); + + it("memory_timeline should not expose another owner's chunks on initPlugin path", async () => { + const searchTool = plugin.tools.find((t) => t.name === "memory_search")!; + const timelineTool = plugin.tools.find((t) => t.name === "memory_timeline")!; + + const alphaSearch = (await 
searchTool.handler({ + query: "alpha private marker", + owner: "agent:alpha", + })) as any; + + expect(alphaSearch.hits.length).toBeGreaterThan(0); + + const ref = alphaSearch.hits[0].ref; + const leaked = (await timelineTool.handler({ ref, owner: "agent:beta", window: 2 })) as any; + + expect(leaked.entries).toEqual([]); + }); + + it("memory_get should not expose another owner's chunk on initPlugin path", async () => { + const searchTool = plugin.tools.find((t) => t.name === "memory_search")!; + const getTool = plugin.tools.find((t) => t.name === "memory_get")!; + + const alphaSearch = (await searchTool.handler({ + query: "alpha private marker", + owner: "agent:alpha", + })) as any; + + expect(alphaSearch.hits.length).toBeGreaterThan(0); + + const ref = alphaSearch.hits[0].ref; + const leaked = (await getTool.handler({ ref, owner: "agent:beta" })) as any; + + expect(leaked.error).toContain(ref.chunkId); + }); +}); + describe("Integration: evidence anti-writeback", () => { it("should not store evidence wrapper blocks in memory", async () => { plugin.onConversationTurn([ diff --git a/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts b/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts new file mode 100644 index 000000000..41951a04d --- /dev/null +++ b/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts @@ -0,0 +1,126 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import plugin from "../plugin-impl"; + +function makeApi(stateDir: string) { + const tools = new Map(); + const events = new Map(); + let service: any; + + const api = { + pluginConfig: {}, + resolvePath(input: string) { + return input === "~/.openclaw" ? 
stateDir : input; + }, + logger: { + info: () => {}, + warn: () => {}, + }, + registerTool(def: any) { + tools.set(def.name, def); + }, + registerService(def: any) { + service = def; + }, + on(eventName: string, handler: Function) { + events.set(eventName, handler); + }, + } as any; + + plugin.register(api); + + return { tools, events, service }; +} + +async function waitFor(predicate: () => Promise | boolean, timeoutMs = 8000) { + const start = Date.now(); + while (Date.now() - start < timeoutMs) { + if (await predicate()) return; + await new Promise((resolve) => setTimeout(resolve, 100)); + } + throw new Error("Timed out waiting for condition"); +} + +describe("plugin-impl owner isolation", () => { + let tmpDir: string; + let tools: Map; + let events: Map; + let service: any; + + beforeEach(async () => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-plugin-impl-access-")); + ({ tools, events, service } = makeApi(tmpDir)); + + const agentEnd = events.get("agent_end")!; + + await agentEnd({ + success: true, + agentId: "alpha", + sessionKey: "alpha-session", + messages: [ + { role: "user", content: "alpha private marker deployment guide" }, + { role: "assistant", content: "alpha private marker response" }, + ], + }); + + await agentEnd({ + success: true, + agentId: "beta", + sessionKey: "beta-session", + messages: [ + { role: "user", content: "beta private marker rollback guide" }, + { role: "assistant", content: "beta private marker response" }, + ], + }); + + const publicWrite = tools.get("memory_write_public"); + await publicWrite.execute("call-public", { content: "shared public marker convention" }, { agentId: "alpha" }); + + const search = tools.get("memory_search"); + await waitFor(async () => { + const result = await search.execute("call-search", { query: "alpha private marker", maxResults: 5, minScore: 0.1 }, { agentId: "alpha" }); + return (result?.details?.hits?.length ?? 
0) > 0; + }); + }); + + afterEach(() => { + service?.stop?.(); + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it("memory_search should scope results by agentId", async () => { + const search = tools.get("memory_search"); + + const alpha = await search.execute("call-search", { query: "alpha private marker", maxResults: 5, minScore: 0.1 }, { agentId: "alpha" }); + const beta = await search.execute("call-search", { query: "alpha private marker", maxResults: 5, minScore: 0.1 }, { agentId: "beta" }); + const publicHit = await search.execute("call-search", { query: "shared public marker", maxResults: 5, minScore: 0.1 }, { agentId: "beta" }); + + expect(alpha.details.hits.length).toBeGreaterThan(0); + expect(beta.details?.hits ?? []).toEqual([]); + expect(publicHit.details.hits.length).toBeGreaterThan(0); + }); + + it("memory_timeline should not leak another agent's private neighbors", async () => { + const search = tools.get("memory_search"); + const timeline = tools.get("memory_timeline"); + + const alpha = await search.execute("call-search", { query: "alpha private marker", maxResults: 5, minScore: 0.1 }, { agentId: "alpha" }); + const ref = alpha.details.hits[0].ref; + const betaTimeline = await timeline.execute("call-timeline", ref, { agentId: "beta" }); + + expect(betaTimeline.details.entries).toEqual([]); + }); + + it("memory_get should not return another agent's private chunk", async () => { + const search = tools.get("memory_search"); + const getTool = tools.get("memory_get"); + + const alpha = await search.execute("call-search", { query: "alpha private marker", maxResults: 5, minScore: 0.1 }, { agentId: "alpha" }); + const ref = alpha.details.hits[0].ref; + const betaGet = await getTool.execute("call-get", { chunkId: ref.chunkId }, { agentId: "beta" }); + + expect(betaGet.details.error).toBe("not_found"); + }); +}); diff --git a/apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts 
b/apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts new file mode 100644 index 000000000..fd523b776 --- /dev/null +++ b/apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts @@ -0,0 +1,116 @@ +import { describe, it, expect, vi, afterEach } from "vitest"; + +const noopLog = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +afterEach(() => { + vi.resetModules(); + vi.clearAllMocks(); +}); + +describe("shutdown lifecycle", () => { + it("initPlugin.shutdown should wait for worker.flush before closing the store", async () => { + const events: string[] = []; + let release!: () => void; + const gate = new Promise((resolve) => { + release = resolve; + }); + + class MockStore { + close(): void { + events.push("close"); + } + } + + class MockWorker { + enqueue(): void {} + flush(): Promise { + events.push("flush"); + return gate; + } + } + + vi.doMock("../src/storage/sqlite", () => ({ SqliteStore: MockStore })); + vi.doMock("../src/ingest/worker", () => ({ IngestWorker: MockWorker })); + vi.doMock("../src/embedding", () => ({ Embedder: class { provider = "mock"; } })); + vi.doMock("../src/recall/engine", () => ({ RecallEngine: class {} })); + vi.doMock("../src/capture", () => ({ captureMessages: () => [] })); + vi.doMock("../src/tools", () => ({ + createMemorySearchTool: () => ({ name: "memory_search" }), + createMemoryTimelineTool: () => ({ name: "memory_timeline" }), + createMemoryGetTool: () => ({ name: "memory_get" }), + })); + + const { initPlugin } = await import("../src/index"); + const plugin = initPlugin({ stateDir: "/tmp/memos-shutdown-test", log: noopLog as any }); + + const shutdownPromise = Promise.resolve(plugin.shutdown() as any); + expect(events).toEqual(["flush"]); + + release(); + await shutdownPromise; + expect(events).toEqual(["flush", "close"]); + }); + + it("plugin service stop should wait for worker.flush before closing the store", async () => { + const events: string[] = []; + let release!: () => void; + 
const gate = new Promise((resolve) => { + release = resolve; + }); + + class MockStore { + close(): void { + events.push("close"); + } + } + + class MockWorker { + enqueue(): void {} + flush(): Promise { + events.push("flush"); + return gate; + } + } + + class MockViewer { + async start(): Promise { return "http://127.0.0.1:18799"; } + stop(): void { events.push("viewer-stop"); } + getResetToken(): string { return "token"; } + } + + let registeredService: { stop: () => Promise | void } | undefined; + + vi.doMock("../src/storage/sqlite", () => ({ SqliteStore: MockStore })); + vi.doMock("../src/ingest/worker", () => ({ IngestWorker: MockWorker })); + vi.doMock("../src/embedding", () => ({ Embedder: class { provider = "mock"; } })); + vi.doMock("../src/recall/engine", () => ({ RecallEngine: class { async search() { return { hits: [], meta: {} }; } async searchSkills() { return []; } } })); + vi.doMock("../src/capture", () => ({ captureMessages: () => [] })); + vi.doMock("../src/viewer/server", () => ({ ViewerServer: MockViewer })); + + const pluginModule = await import("../plugin-impl"); + const plugin = pluginModule.default; + plugin.register({ + pluginConfig: {}, + resolvePath: () => "/tmp/memos-service-stop", + logger: noopLog, + registerTool: () => {}, + registerService: (service: any) => { registeredService = service; }, + on: () => {}, + } as any); + + expect(registeredService).toBeDefined(); + const stopPromise = Promise.resolve(registeredService!.stop() as any); + expect(events).toContain("flush"); + expect(events).not.toContain("close"); + + release(); + await stopPromise; + expect(events).toContain("viewer-stop"); + expect(events[events.length - 1]).toBe("close"); + }); +}); diff --git a/apps/memos-local-openclaw/tests/task-processor.test.ts b/apps/memos-local-openclaw/tests/task-processor.test.ts index b23f32ec4..60e2cf4dc 100644 --- a/apps/memos-local-openclaw/tests/task-processor.test.ts +++ b/apps/memos-local-openclaw/tests/task-processor.test.ts @@ 
-74,6 +74,32 @@ afterEach(() => { }); describe("TaskProcessor", () => { + it("should drain queued onChunksIngested calls instead of dropping them while busy", async () => { + const calls: string[] = []; + let releaseFirst!: () => void; + const firstGate = new Promise((resolve) => { + releaseFirst = resolve; + }); + + const detectSpy = vi.spyOn(processor as any, "detectAndProcess").mockImplementation(async (sessionKey: string) => { + calls.push(sessionKey); + if (calls.length === 1) { + await firstGate; + } + }); + + const first = processor.onChunksIngested("s1", 1, "agent:main"); + await Promise.resolve(); + const second = processor.onChunksIngested("s2", 2, "agent:main"); + + expect(detectSpy).toHaveBeenCalledTimes(1); + + releaseFirst(); + await Promise.all([first, second]); + + expect(calls).toEqual(["s1", "s2"]); + }); + it("should create a new task when none exists", async () => { const now = Date.now(); insertTestChunk({ id: "c1", sessionKey: "s1", createdAt: now }); diff --git a/apps/memos-local-openclaw/tests/worker-lifecycle.test.ts b/apps/memos-local-openclaw/tests/worker-lifecycle.test.ts new file mode 100644 index 000000000..eb2bdde8a --- /dev/null +++ b/apps/memos-local-openclaw/tests/worker-lifecycle.test.ts @@ -0,0 +1,116 @@ +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { IngestWorker } from "../src/ingest/worker"; +import { SqliteStore } from "../src/storage/sqlite"; +import type { ConversationMessage, Logger, PluginContext } from "../src/types"; + +const noopLog: Logger = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +function makeCtx(tmpDir: string): PluginContext { + return { + stateDir: tmpDir, + workspaceDir: tmpDir, + config: { + storage: { dbPath: path.join(tmpDir, "test.db") }, + recall: { + maxResultsDefault: 6, + maxResultsMax: 20, + minScoreDefault: 0.45, + minScoreFloor: 0.35, + 
rrfK: 60, + mmrLambda: 0.7, + recencyHalfLifeDays: 14, + }, + }, + log: noopLog, + }; +} + +function makeMessage(id: string, sessionKey = "s1"): ConversationMessage { + return { + role: "user", + content: `message-${id}`, + timestamp: Date.now(), + turnId: `turn-${id}`, + sessionKey, + owner: "agent:main", + }; +} + +describe("IngestWorker lifecycle", () => { + let tmpDir: string; + let store: SqliteStore; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-worker-test-")); + store = new SqliteStore(path.join(tmpDir, "test.db"), noopLog); + }); + + afterEach(() => { + store.close(); + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it("flush should wait for task post-processing to finish", async () => { + const worker = new IngestWorker(store, { embed: vi.fn(), embedQuery: vi.fn() } as any, makeCtx(tmpDir)); + vi.spyOn(worker as any, "ingestMessage").mockResolvedValue({ action: "stored", summary: "ok" }); + + let release!: () => void; + const gate = new Promise((resolve) => { + release = resolve; + }); + + vi.spyOn(worker.getTaskProcessor(), "onChunksIngested").mockImplementation(async () => { + await gate; + }); + + worker.enqueue([makeMessage("1")]); + + let flushed = false; + const flushPromise = worker.flush().then(() => { + flushed = true; + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + expect(flushed).toBe(false); + + release(); + await flushPromise; + expect(flushed).toBe(true); + }); + + it("flush should not resolve while messages queued during task processing are still pending", async () => { + const worker = new IngestWorker(store, { embed: vi.fn(), embedQuery: vi.fn() } as any, makeCtx(tmpDir)); + const ingestSpy = vi.spyOn(worker as any, "ingestMessage").mockResolvedValue({ action: "stored", summary: "ok" }); + + let release!: () => void; + const gate = new Promise((resolve) => { + release = resolve; + }); + + let calls = 0; + vi.spyOn(worker.getTaskProcessor(), 
"onChunksIngested").mockImplementation(async () => { + calls += 1; + if (calls === 1) { + worker.enqueue([makeMessage("2")]); + await gate; + } + }); + + worker.enqueue([makeMessage("1")]); + const flushPromise = worker.flush(); + + setTimeout(() => release(), 0); + await flushPromise; + + expect(ingestSpy).toHaveBeenCalledTimes(2); + expect(calls).toBe(2); + }); +}); diff --git a/docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md b/docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md new file mode 100644 index 000000000..1f02b252b --- /dev/null +++ b/docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md @@ -0,0 +1,344 @@ +# V4 Hub Sharing Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Deliver the v4 Hub-Spoke memory and skill sharing architecture in safe, testable phases without blocking existing local memory behavior. + +**Architecture:** Keep the current local memory plugin behavior intact, then layer in a centralized Hub mode, a Client connector, Hub-side shared search/indexing, and group/public sharing flows. Treat the OpenClaw default-model fallback as a separate capability track so platform uncertainty does not block the Hub MVP. + +**Tech Stack:** TypeScript, `better-sqlite3`, existing local `RecallEngine`, HTTP server routes, FTS5, vector embeddings, Viewer UI, Vitest. 
+ +--- + +## Delivery Strategy + +- **Execution style:** 3 major workstreams in parallel where possible +- **Critical path:** `T1 → T2/T3 → T5 → T6 → T7 → T9/T11 → T13` +- **Do not block MVP on:** advanced Viewer polish, full admin ergonomics, OpenClaw default-model fallback +- **MVP definition:** Hub up, user join/approve works, group/public task sharing works, local+Hub search works, Hub skill publish/pull works, Hub outage degrades to local-only + +## Recommended Staffing + +- **Track A — Platform/Core:** config, schema, Hub server, auth, search +- **Track B — Client Flows:** connector, sync, federated search, tools +- **Track C — UX/Viewer:** onboarding, admin approval UI, scope switch, shared result views + +## Phase Schedule (Recommended) + +```mermaid +gantt + title V4 Hub-Spoke Development Schedule + dateFormat YYYY-MM-DD + axisFormat %m/%d + + section Foundation + T1 Types & config :a1, 2026-03-09, 3d + + section Parallel Base + T2 Hub schema & store :a2, after a1, 4d + T3 Hub server skeleton & auth :a3, after a1, 4d + T4 Default model fallback spike :a4, after a1, 3d + + section Core Search Path + T5 Hub search/index pipeline :a5, after a2, 5d + T6 Client connector & onboarding :a6, after a3, 4d + T7 Local + Hub search integration :a7, after a5, 4d + + section Sharing Features + T8 Task share & incremental sync :a8, after a6, 4d + T9 Search tool adaptation :a9, after a7, 3d + T10 Skill publish/pull via Hub :a10, after a6, 4d + + section Productization + T11 Tool registration & plugin wiring :a11, after a8, 3d + T12 Viewer/admin UX :a12, after a8, 5d + + section Hardening + T13 Integration tests & docs :a13, after a11, 5d +``` + +## Milestones + +| Milestone | Exit Criteria | Target | +|---|---|---| +| M1 Foundation Ready | Config resolves hub/client mode; schema and server skeleton compile | End of Week 1 | +| M2 Search Closed Loop | Hub can ingest shared task data and return filtered search results | Mid Week 2 | +| M3 Client Closed Loop | Client can 
join team, authenticate, search local + Hub, and degrade on Hub outage | End of Week 2 | +| M4 Sharing Closed Loop | Task share/unshare and skill publish/pull complete end-to-end | Mid Week 3 | +| M5 Product Ready | Viewer/admin flows, tests, docs, fallback behavior verified | End of Week 3 | + +## Task Graph + +```text +T1 Types & Config +├─ T2 Hub Schema & Store +├─ T3 Hub Server Skeleton & Auth +├─ T4 Default Model Fallback Spike +│ +├─ T5 Hub Search/Index Pipeline <- T2 + T3 +├─ T6 Client Connector & Onboarding <- T3 (+ T4 optional for fallback hookup) +├─ T7 Local + Hub Search Integration <- T5 + T6 +│ +├─ T8 Task Share & Incremental Sync <- T5 + T6 +├─ T9 Search Tool Adaptation <- T7 +├─ T10 Skill Publish/Pull via Hub <- T5 + T6 +│ +├─ T11 Tool Registration & Wiring <- T8 + T9 + T10 +├─ T12 Viewer/Admin UX <- T8 + T9 + T10 +└─ T13 Integration Tests & Docs <- T11 + T12 +``` + +## Work Breakdown + +### Task 1: Types and configuration foundation + +**Files:** +- Create: `apps/memos-local-openclaw/src/sharing/types.ts` +- Modify: `apps/memos-local-openclaw/src/types.ts` +- Modify: `apps/memos-local-openclaw/src/config.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Add Hub/Client mode config types +- Define `HubSearchHit`, `NetworkSearchResult`, `UserInfo`, `GroupInfo`, `SkillBundle` +- Define fallback capability flags instead of assuming OpenClaw APIs always exist + +**Done when:** +- Types compile +- Config parsing supports hub/client branches cleanly +- Existing local-only config remains backward compatible + +### Task 2: Hub schema and store layer + +**Files:** +- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` +- Test: `apps/memos-local-openclaw/tests/storage.test.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Add `hub_users`, `hub_groups`, `hub_group_members`, `hub_tasks`, `hub_chunks`, `hub_embeddings`, `hub_skills` +- Add uniqueness constraints for source IDs 
+- Add CRUD helpers for user approval, group membership, shared task/skill upsert, shared delete + +**Done when:** +- Repeated share requests are idempotent +- Group membership queries are fast and test-covered +- Existing local tables remain backward compatible + +### Task 3: Hub server skeleton and auth + +**Files:** +- Create: `apps/memos-local-openclaw/src/hub/server.ts` +- Create: `apps/memos-local-openclaw/src/hub/auth.ts` +- Create: `apps/memos-local-openclaw/src/hub/user-manager.ts` +- Modify: `apps/memos-local-openclaw/index.ts` +- Test: `apps/memos-local-openclaw/tests/plugin-impl-access.test.ts` + +**Deliverables:** +- Start/stop Hub HTTP server in hub mode +- Implement team-token join flow and JWT user-token verification +- Register `/hub/info`, `/hub/join`, `/hub/me`, `/hub/admin/*` skeleton routes +- Add rate limiting middleware + +**Done when:** +- Admin can bootstrap team +- Pending user can join and wait for approval +- Approved user receives valid token and blocked user is rejected + +### Task 4: Default-model fallback spike + +**Files:** +- Modify: `apps/memos-local-openclaw/src/embedding/index.ts` +- Modify: `apps/memos-local-openclaw/src/ingest/providers/index.ts` +- Modify: `apps/memos-local-openclaw/src/types.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Add an `openclaw` provider abstraction if host capabilities exist +- Detect host capability instead of assuming `api.embed()` / `api.complete()` +- Preserve current local/heuristic fallback as final safety net + +**Done when:** +- No explicit provider still works +- Missing host capability does not break startup +- Fallback chain is logged and testable + +### Task 5: Hub search and indexing pipeline + +**Files:** +- Create: `apps/memos-local-openclaw/src/hub/search.ts` +- Modify: `apps/memos-local-openclaw/src/hub/server.ts` +- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` +- Test: `apps/memos-local-openclaw/tests/recall.test.ts` +- 
Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Receive shared task/chunk payloads from clients +- Re-embed and FTS-index all shared chunks on Hub +- Filter by requester user groups and `public` +- Return `remoteHitId`-based Hub search results + +**Done when:** +- A shared task becomes searchable on Hub +- A user cannot see data from groups they do not belong to +- `memory-detail` honors permissions and hit expiry + +### Task 6: Client connector and onboarding + +**Files:** +- Create: `apps/memos-local-openclaw/src/client/connector.ts` +- Modify: `apps/memos-local-openclaw/src/viewer/server.ts` +- Modify: `apps/memos-local-openclaw/src/viewer/html.ts` +- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` +- Test: `apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts` + +**Deliverables:** +- Persist Hub connection state in `client_hub_connection` +- Implement join-team and create-team state machine +- Add connection health and reconnect handling +- Expose waiting-approved / active / rejected states to Viewer + +**Done when:** +- Fresh install can choose create-team or join-team +- Client survives Hub restart and reconnects +- Rejected client is visibly blocked from Hub actions + +### Task 7: Local + Hub search integration + +**Files:** +- Create: `apps/memos-local-openclaw/src/client/federated-search.ts` +- Modify: `apps/memos-local-openclaw/src/recall/engine.ts` +- Modify: `apps/memos-local-openclaw/src/types.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Execute local search and Hub search in parallel for `group/all` +- Return local and Hub results in separate sections +- Degrade to local-only when Hub is unavailable + +**Done when:** +- `scope=local` is unchanged +- `scope=group/all` returns stable two-section results +- Hub outage does not break the tool + +### Task 8: Task share and incremental sync + +**Files:** +- Create: `apps/memos-local-openclaw/src/client/sync.ts` 
+- Modify: `apps/memos-local-openclaw/index.ts` +- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` +- Test: `apps/memos-local-openclaw/tests/task-processor.test.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Implement `task_share` and `task_unshare` +- Push full task on first share, then incremental chunks on `agent_end` +- Track sync cursor or last-pushed chunk for idempotent uploads + +**Done when:** +- Shared task appears on Hub +- New chunks for shared task are pushed once +- Unshare removes data from Hub and stops future push + +### Task 9: Search tool adaptation + +**Files:** +- Modify: `apps/memos-local-openclaw/src/tools/memory-search.ts` +- Modify: `apps/memos-local-openclaw/index.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Add `scope: local | group | all` to `memory_search` +- Add Hub-aware formatting to `skill_search` +- Preserve current local tool UX for existing users + +**Done when:** +- Existing prompts still work unchanged +- New Hub scopes return intelligible, separable outputs + +### Task 10: Skill publish and pull via Hub + +**Files:** +- Modify: `apps/memos-local-openclaw/src/skill/installer.ts` +- Create: `apps/memos-local-openclaw/src/client/skill-sync.ts` +- Modify: `apps/memos-local-openclaw/index.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Publish full skill bundle to Hub with group/public scope +- Pull bundle from Hub with client-side safety validation +- Store provenance for pulled skills + +**Done when:** +- Group member can publish and another group member can pull +- Unauthorized user cannot pull group-restricted skill +- Malformed bundle is rejected atomically + +### Task 11: Tool registration and plugin wiring + +**Files:** +- Modify: `apps/memos-local-openclaw/index.ts` +- Test: `apps/memos-local-openclaw/tests/plugin-impl-access.test.ts` + +**Deliverables:** +- Register `task_share`, 
`task_unshare`, `network_memory_detail`, `network_skill_pull`, `network_team_info` +- Start Hub services in hub mode and connector in client mode +- Keep local-only mode intact + +**Done when:** +- Tool list changes by mode as intended +- Startup/shutdown lifecycle remains clean + +### Task 12: Viewer and admin UX + +**Files:** +- Modify: `apps/memos-local-openclaw/src/viewer/server.ts` +- Modify: `apps/memos-local-openclaw/src/viewer/html.ts` +- Test: `apps/memos-local-openclaw/tests/integration.test.ts` + +**Deliverables:** +- Hub-side admin approval and group management +- Client-side connection status and scope switch +- Shared search results with owner/group metadata +- Skill browser and pull actions + +**Done when:** +- Admin can approve users and manage groups in Viewer +- Client can see its state and accessible Hub content clearly + +### Task 13: Integration tests and docs + +**Files:** +- Modify: `apps/memos-local-openclaw/tests/integration.test.ts` +- Modify: `apps/memos-local-openclaw/tests/storage.test.ts` +- Modify: `apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts` +- Modify: `apps/memos-local-openclaw/README.md` + +**Deliverables:** +- End-to-end tests for join, approve, group isolation, task share, incremental sync, skill pull, Hub outage fallback, fallback model behavior +- README updates for hub/client setup and default model behavior + +**Done when:** +- MVP flow is test-covered end-to-end +- README is sufficient for a fresh teammate to run Hub and join as client + +## Release Recommendation + +- **MVP Cut:** T1–T11 complete, T12 basic UI only, T13 essential integration tests only +- **Post-MVP Cut:** advanced admin UX, analytics, team-token rotation UX polish, richer Hub browsing +- **Spike Before Coding:** verify whether OpenClaw host truly exposes embedding/completion APIs; if not, keep local/heuristic fallback as the documented default fallback path + +## Suggested Calendar + +| Week | Focus | Primary Owners | +|---|---|---| +| Week 
1 | Foundation + Hub base (`T1-T4`) | Core + Platform | +| Week 2 | Hub search + connector + combined search (`T5-T7`) | Core + Client | +| Week 3 | Share/pull flows + tools + basic UI (`T8-T12`) | Client + UX | +| Week 4 | Hardening, test expansion, docs (`T13`) | Whole team | + +## Critical Path Notes + +- `T4` must not block Hub MVP unless OpenClaw fallback is a release requirement +- `T12` should not block API completion; ship a minimal admin UI first +- `T13` should prioritize permission isolation and outage fallback before UI polish From 9250d8d67bca8925108fb769e8bb14e3b1649d9a Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 12:41:52 +0800 Subject: [PATCH 02/85] feat(memos-local): add v4 sharing config foundation --- apps/memos-local-openclaw/src/config.ts | 17 ++++ .../src/sharing/types.contract.ts | 40 ++++++++++ .../memos-local-openclaw/src/sharing/types.ts | 77 +++++++++++++++++++ apps/memos-local-openclaw/src/types.ts | 27 +++++++ .../tests/integration.test.ts | 71 +++++++++++++++++ 5 files changed, 232 insertions(+) create mode 100644 apps/memos-local-openclaw/src/sharing/types.contract.ts create mode 100644 apps/memos-local-openclaw/src/sharing/types.ts diff --git a/apps/memos-local-openclaw/src/config.ts b/apps/memos-local-openclaw/src/config.ts index 64acc97e5..1399628d2 100644 --- a/apps/memos-local-openclaw/src/config.ts +++ b/apps/memos-local-openclaw/src/config.ts @@ -54,6 +54,23 @@ export function resolveConfig(raw: Partial | undefined, stateD posthogApiKey: cfg.telemetry?.posthogApiKey ?? process.env.POSTHOG_API_KEY ?? "", posthogHost: cfg.telemetry?.posthogHost ?? process.env.POSTHOG_HOST ?? "", }, + sharing: { + enabled: cfg.sharing?.enabled ?? false, + role: cfg.sharing?.role ?? "client", + hub: { + port: cfg.sharing?.hub?.port ?? 18800, + teamName: cfg.sharing?.hub?.teamName ?? "", + teamToken: cfg.sharing?.hub?.teamToken ?? "", + }, + client: { + hubAddress: cfg.sharing?.client?.hubAddress ?? 
"", + userToken: cfg.sharing?.client?.userToken ?? "", + }, + capabilities: { + hostEmbedding: cfg.sharing?.capabilities?.hostEmbedding ?? false, + hostCompletion: cfg.sharing?.capabilities?.hostCompletion ?? false, + }, + }, }; } diff --git a/apps/memos-local-openclaw/src/sharing/types.contract.ts b/apps/memos-local-openclaw/src/sharing/types.contract.ts new file mode 100644 index 000000000..8abef57ed --- /dev/null +++ b/apps/memos-local-openclaw/src/sharing/types.contract.ts @@ -0,0 +1,40 @@ +import type { + ClientModeConfig as RootClientModeConfig, + HubModeConfig as RootHubModeConfig, + SharingCapabilities as RootSharingCapabilities, + SharingConfig as RootSharingConfig, + SharingRole as RootSharingRole, +} from "../types"; +import type { + ClientModeConfig as SharingClientModeConfig, + GroupInfo, + HubModeConfig as SharingHubModeConfig, + HubSearchHit, + NetworkSearchResult, + SharingCapabilities as SharingSharingCapabilities, + SharingConfig as SharingSharingConfig, + SharingRole as SharingSharingRole, + SkillBundle, + UserInfo, +} from "./types"; + +type Assert = T; +type Equal = + (() => T extends A ? 1 : 2) extends (() => T extends B ? 1 : 2) + ? ((() => T extends B ? 1 : 2) extends (() => T extends A ? 1 : 2) ? true : false) + : false; +type Extends = A extends B ? 
true : false; + +type _SharingRoleMatchesRoot = Assert>; +type _SharingCapabilitiesMatchRoot = Assert>; +type _HubModeConfigMatchesRoot = Assert>; +type _ClientModeConfigMatchesRoot = Assert>; +type _SharingConfigMatchesRoot = Assert>; + +type _GroupInfoExists = Assert>; +type _UserInfoExists = Assert>; +type _HubSearchHitExists = Assert>; +type _NetworkSearchResultExists = Assert>; +type _SkillBundleExists = Assert>; + +export {}; diff --git a/apps/memos-local-openclaw/src/sharing/types.ts b/apps/memos-local-openclaw/src/sharing/types.ts new file mode 100644 index 000000000..7c28ce45a --- /dev/null +++ b/apps/memos-local-openclaw/src/sharing/types.ts @@ -0,0 +1,77 @@ +import type { + ClientModeConfig, + HubModeConfig, + Role, + SearchResult, + SharingCapabilities, + SharingConfig, + SharingRole, + SkillGenerateOutput, +} from "../types"; + +export type HubScope = "local" | "group" | "all"; +export type SharedVisibility = "group" | "public"; +export type UserRole = "admin" | "member"; +export type UserStatus = "pending" | "active" | "blocked" | "rejected"; + +export type { ClientModeConfig, HubModeConfig, SharingCapabilities, SharingConfig, SharingRole }; + +export interface GroupInfo { + id: string; + name: string; + description?: string; +} + +export interface UserInfo { + id: string; + username: string; + deviceName?: string; + role: UserRole; + status: UserStatus; + groups: GroupInfo[]; +} + +export interface HubSearchHit { + remoteHitId: string; + summary: string; + excerpt: string; + hubRank: number; + taskTitle: string | null; + ownerName: string; + groupName: string | null; + visibility: SharedVisibility; + source: { + ts: number; + role: Role; + }; +} + +export interface HubSearchMeta { + totalCandidates: number; + searchedGroups: string[]; + includedPublic: boolean; +} + +export interface HubSearchResult { + hits: HubSearchHit[]; + meta: HubSearchMeta; +} + +export interface NetworkSearchResult { + local: SearchResult; + hub: HubSearchResult; +} + +export 
interface SkillBundleMetadata { + id: string; + name: string; + description: string; + version: number; + qualityScore: number | null; +} + +export interface SkillBundle { + metadata: SkillBundleMetadata; + bundle: SkillGenerateOutput; +} + diff --git a/apps/memos-local-openclaw/src/types.ts b/apps/memos-local-openclaw/src/types.ts index aac7f6e5f..b759707e7 100644 --- a/apps/memos-local-openclaw/src/types.ts +++ b/apps/memos-local-openclaw/src/types.ts @@ -255,6 +255,32 @@ export interface TelemetryConfig { posthogHost?: string; } +export type SharingRole = "hub" | "client"; + +export interface SharingCapabilities { + hostEmbedding?: boolean; + hostCompletion?: boolean; +} + +export interface HubModeConfig { + port?: number; + teamName?: string; + teamToken?: string; +} + +export interface ClientModeConfig { + hubAddress?: string; + userToken?: string; +} + +export interface SharingConfig { + enabled?: boolean; + role?: SharingRole; + hub?: HubModeConfig; + client?: ClientModeConfig; + capabilities?: SharingCapabilities; +} + export interface MemosLocalConfig { summarizer?: SummarizerConfig; embedding?: EmbeddingConfig; @@ -280,6 +306,7 @@ export interface MemosLocalConfig { }; skillEvolution?: SkillEvolutionConfig; telemetry?: TelemetryConfig; + sharing?: SharingConfig; } // ─── Defaults ─── diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 6cd6b0b93..d70ef9e27 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -3,6 +3,7 @@ import * as fs from "fs"; import * as path from "path"; import * as os from "os"; import { initPlugin, type MemosLocalPlugin } from "../src/index"; +import { buildContext, resolveConfig } from "../src/config"; let plugin: MemosLocalPlugin; let tmpDir: string; @@ -65,6 +66,76 @@ afterAll(() => { fs.rmSync(tmpDir, { recursive: true, force: true }); }); +describe("Integration: v4 types and config 
foundation", () => { + it("should keep local-only config backward compatible while adding sharing defaults", () => { + const stateDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-config-")); + + const resolved = resolveConfig(undefined, stateDir) as any; + + expect(resolved.storage.dbPath).toContain("memos-local"); + expect(resolved.recall.maxResultsDefault).toBe(6); + expect(resolved.sharing).toBeDefined(); + expect(resolved.sharing.enabled).toBe(false); + expect(resolved.sharing.role).toBe("client"); + expect(resolved.sharing.hub.port).toBe(18800); + expect(resolved.sharing.client.hubAddress).toBe(""); + expect(resolved.sharing.capabilities.hostEmbedding).toBe(false); + expect(resolved.sharing.capabilities.hostCompletion).toBe(false); + + fs.rmSync(stateDir, { recursive: true, force: true }); + }); + + it("should resolve sharing env vars and expose hub/client config via buildContext", () => { + const stateDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-context-")); + const prevTeamToken = process.env.MEMOS_TEAM_TOKEN; + const prevUserToken = process.env.MEMOS_USER_TOKEN; + + process.env.MEMOS_TEAM_TOKEN = "team-secret"; + process.env.MEMOS_USER_TOKEN = "user-secret"; + + try { + const ctx = buildContext( + stateDir, + process.cwd(), + { + sharing: { + enabled: true, + role: "hub", + hub: { + port: 19001, + teamName: "Core Team", + teamToken: "${MEMOS_TEAM_TOKEN}", + }, + client: { + hubAddress: "10.0.0.8:18800", + userToken: "${MEMOS_USER_TOKEN}", + }, + capabilities: { + hostEmbedding: true, + hostCompletion: true, + }, + }, + } as any, + ) as any; + + expect(ctx.config.sharing.enabled).toBe(true); + expect(ctx.config.sharing.role).toBe("hub"); + expect(ctx.config.sharing.hub.teamToken).toBe("team-secret"); + expect(ctx.config.sharing.client.userToken).toBe("user-secret"); + expect(ctx.config.sharing.capabilities.hostEmbedding).toBe(true); + expect(ctx.config.sharing.capabilities.hostCompletion).toBe(true); + } finally { + if (prevTeamToken === undefined) 
delete process.env.MEMOS_TEAM_TOKEN; + else process.env.MEMOS_TEAM_TOKEN = prevTeamToken; + + if (prevUserToken === undefined) delete process.env.MEMOS_USER_TOKEN; + else process.env.MEMOS_USER_TOKEN = prevUserToken; + + fs.rmSync(stateDir, { recursive: true, force: true }); + } + }); +}); + describe("Integration: memory_search", () => { it("should find docker deployment details", async () => { const searchTool = plugin.tools.find((t) => t.name === "memory_search")!; From bfde1810894d182ada2da1cfb29b9dcdef9e253e Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 13:50:56 +0800 Subject: [PATCH 03/85] feat(memos-local): add hub storage foundation --- .../src/storage/sqlite.ts | 593 ++++++++++++++++++ .../tests/storage.test.ts | 353 +++++++++++ 2 files changed, 946 insertions(+) diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index e3a4354da..0031faf1e 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -3,6 +3,7 @@ import { createHash } from "crypto"; import * as fs from "fs"; import * as path from "path"; import type { Chunk, ChunkRef, DedupStatus, Task, TaskStatus, Skill, SkillStatus, SkillVisibility, SkillVersion, TaskSkillLink, TaskSkillRelation, Logger } from "../types"; +import type { GroupInfo, SharedVisibility, UserInfo, UserRole, UserStatus } from "../sharing/types"; export class SqliteStore { private db: Database.Database; @@ -109,6 +110,7 @@ export class SqliteStore { this.migrateOwnerFields(); this.migrateSkillVisibility(); this.migrateSkillEmbeddingsAndFts(); + this.migrateHubTables(); this.log.debug("Database schema initialized"); } @@ -584,6 +586,160 @@ export class SqliteStore { }; } + + private migrateHubTables(): void { + this.db.exec(` + CREATE TABLE IF NOT EXISTS client_hub_connection ( + id INTEGER PRIMARY KEY CHECK (id = 1), + hub_url TEXT NOT NULL, + user_id TEXT NOT NULL, + username TEXT NOT NULL, + 
user_token TEXT NOT NULL, + role TEXT NOT NULL, + connected_at INTEGER NOT NULL + ); + + CREATE TABLE IF NOT EXISTS hub_users ( + id TEXT PRIMARY KEY, + username TEXT NOT NULL UNIQUE, + device_name TEXT NOT NULL DEFAULT '', + role TEXT NOT NULL, + status TEXT NOT NULL, + token_hash TEXT NOT NULL DEFAULT '', + created_at INTEGER NOT NULL, + approved_at INTEGER + ); + CREATE INDEX IF NOT EXISTS idx_hub_users_status ON hub_users(status); + CREATE INDEX IF NOT EXISTS idx_hub_users_role ON hub_users(role); + + CREATE TABLE IF NOT EXISTS hub_groups ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL UNIQUE, + description TEXT NOT NULL DEFAULT '', + created_at INTEGER NOT NULL + ); + + CREATE TABLE IF NOT EXISTS hub_group_members ( + group_id TEXT NOT NULL REFERENCES hub_groups(id) ON DELETE CASCADE, + user_id TEXT NOT NULL REFERENCES hub_users(id) ON DELETE CASCADE, + joined_at INTEGER NOT NULL, + PRIMARY KEY (group_id, user_id) + ); + CREATE INDEX IF NOT EXISTS idx_hub_group_members_user ON hub_group_members(user_id); + + CREATE TABLE IF NOT EXISTS hub_tasks ( + id TEXT PRIMARY KEY, + source_task_id TEXT NOT NULL, + source_user_id TEXT NOT NULL, + title TEXT NOT NULL, + summary TEXT NOT NULL DEFAULT '', + group_id TEXT, + visibility TEXT NOT NULL, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + UNIQUE(source_user_id, source_task_id) + ); + CREATE INDEX IF NOT EXISTS idx_hub_tasks_visibility ON hub_tasks(visibility); + CREATE INDEX IF NOT EXISTS idx_hub_tasks_group ON hub_tasks(group_id); + + CREATE TABLE IF NOT EXISTS hub_chunks ( + id TEXT PRIMARY KEY, + hub_task_id TEXT NOT NULL REFERENCES hub_tasks(id) ON DELETE CASCADE, + source_chunk_id TEXT NOT NULL, + source_user_id TEXT NOT NULL, + role TEXT NOT NULL, + content TEXT NOT NULL, + summary TEXT NOT NULL DEFAULT '', + kind TEXT NOT NULL DEFAULT 'paragraph', + created_at INTEGER NOT NULL, + UNIQUE(source_user_id, source_chunk_id) + ); + CREATE INDEX IF NOT EXISTS idx_hub_chunks_task ON 
hub_chunks(hub_task_id); + + CREATE TABLE IF NOT EXISTS hub_embeddings ( + chunk_id TEXT PRIMARY KEY REFERENCES hub_chunks(id) ON DELETE CASCADE, + vector BLOB NOT NULL, + dimensions INTEGER NOT NULL, + updated_at INTEGER NOT NULL + ); + + CREATE VIRTUAL TABLE IF NOT EXISTS hub_chunks_fts USING fts5( + summary, + content, + content='hub_chunks', + content_rowid='rowid', + tokenize='porter unicode61' + ); + + CREATE TRIGGER IF NOT EXISTS hub_chunks_ai AFTER INSERT ON hub_chunks BEGIN + INSERT INTO hub_chunks_fts(rowid, summary, content) + VALUES (new.rowid, new.summary, new.content); + END; + + CREATE TRIGGER IF NOT EXISTS hub_chunks_ad AFTER DELETE ON hub_chunks BEGIN + INSERT INTO hub_chunks_fts(hub_chunks_fts, rowid, summary, content) + VALUES ('delete', old.rowid, old.summary, old.content); + END; + + CREATE TRIGGER IF NOT EXISTS hub_chunks_au AFTER UPDATE ON hub_chunks BEGIN + INSERT INTO hub_chunks_fts(hub_chunks_fts, rowid, summary, content) + VALUES ('delete', old.rowid, old.summary, old.content); + INSERT INTO hub_chunks_fts(rowid, summary, content) + VALUES (new.rowid, new.summary, new.content); + END; + + CREATE TABLE IF NOT EXISTS hub_skills ( + id TEXT PRIMARY KEY, + source_skill_id TEXT NOT NULL, + source_user_id TEXT NOT NULL, + name TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + version INTEGER NOT NULL, + group_id TEXT, + visibility TEXT NOT NULL, + bundle TEXT NOT NULL, + quality_score REAL, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + UNIQUE(source_user_id, source_skill_id) + ); + CREATE INDEX IF NOT EXISTS idx_hub_skills_visibility ON hub_skills(visibility); + CREATE INDEX IF NOT EXISTS idx_hub_skills_group ON hub_skills(group_id); + + CREATE TABLE IF NOT EXISTS hub_skill_embeddings ( + skill_id TEXT PRIMARY KEY REFERENCES hub_skills(id) ON DELETE CASCADE, + vector BLOB NOT NULL, + dimensions INTEGER NOT NULL, + updated_at INTEGER NOT NULL + ); + + CREATE VIRTUAL TABLE IF NOT EXISTS hub_skills_fts USING fts5( + name, 
+ description, + content='hub_skills', + content_rowid='rowid', + tokenize='porter unicode61' + ); + + CREATE TRIGGER IF NOT EXISTS hub_skills_ai AFTER INSERT ON hub_skills BEGIN + INSERT INTO hub_skills_fts(rowid, name, description) + VALUES (new.rowid, new.name, new.description); + END; + + CREATE TRIGGER IF NOT EXISTS hub_skills_ad AFTER DELETE ON hub_skills BEGIN + INSERT INTO hub_skills_fts(hub_skills_fts, rowid, name, description) + VALUES ('delete', old.rowid, old.name, old.description); + END; + + CREATE TRIGGER IF NOT EXISTS hub_skills_au AFTER UPDATE ON hub_skills BEGIN + INSERT INTO hub_skills_fts(hub_skills_fts, rowid, name, description) + VALUES ('delete', old.rowid, old.name, old.description); + INSERT INTO hub_skills_fts(rowid, name, description) + VALUES (new.rowid, new.name, new.description); + END; + `); + } + // ─── Write ─── insertChunk(chunk: Chunk): void { @@ -1232,6 +1388,222 @@ export class SqliteStore { .map(r => r.session_key); } + // ─── Hub / Client connection ─── + + setClientHubConnection(conn: ClientHubConnection): void { + this.db.prepare(` + INSERT INTO client_hub_connection (id, hub_url, user_id, username, user_token, role, connected_at) + VALUES (1, ?, ?, ?, ?, ?, ?) + ON CONFLICT(id) DO UPDATE SET + hub_url = excluded.hub_url, + user_id = excluded.user_id, + username = excluded.username, + user_token = excluded.user_token, + role = excluded.role, + connected_at = excluded.connected_at + `).run(conn.hubUrl, conn.userId, conn.username, conn.userToken, conn.role, conn.connectedAt); + } + + getClientHubConnection(): ClientHubConnection | null { + const row = this.db.prepare('SELECT * FROM client_hub_connection WHERE id = 1').get() as ClientHubConnectionRow | undefined; + return row ? 
rowToClientHubConnection(row) : null; + } + + clearClientHubConnection(): void { + this.db.prepare('DELETE FROM client_hub_connection WHERE id = 1').run(); + } + + // ─── Hub Users / Groups ─── + + upsertHubUser(user: HubUserRecord): void { + this.db.prepare(` + INSERT INTO hub_users (id, username, device_name, role, status, token_hash, created_at, approved_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(id) DO UPDATE SET + username = excluded.username, + device_name = excluded.device_name, + role = excluded.role, + status = excluded.status, + token_hash = excluded.token_hash, + created_at = excluded.created_at, + approved_at = excluded.approved_at + `).run(user.id, user.username, user.deviceName ?? "", user.role, user.status, user.tokenHash, user.createdAt, user.approvedAt); + } + + getHubUser(userId: string): HubUserRecord | null { + const row = this.db.prepare('SELECT * FROM hub_users WHERE id = ?').get(userId) as HubUserRow | undefined; + if (!row) return null; + return this.attachGroupsToHubUser(rowToHubUser(row)); + } + + listHubUsers(status?: UserStatus): HubUserRecord[] { + const rows = status + ? this.db.prepare('SELECT * FROM hub_users WHERE status = ? ORDER BY created_at').all(status) as HubUserRow[] + : this.db.prepare('SELECT * FROM hub_users ORDER BY created_at').all() as HubUserRow[]; + return rows.map((row) => this.attachGroupsToHubUser(rowToHubUser(row))); + } + + upsertHubGroup(group: HubGroupRecord): void { + this.db.prepare(` + INSERT INTO hub_groups (id, name, description, created_at) + VALUES (?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + name = excluded.name, + description = excluded.description, + created_at = excluded.created_at + `).run(group.id, group.name, group.description, group.createdAt); + } + + listHubGroups(): HubGroupRecord[] { + const rows = this.db.prepare('SELECT * FROM hub_groups ORDER BY name').all() as HubGroupRow[]; + return rows.map(rowToHubGroup); + } + + addHubGroupMember(groupId: string, userId: string, joinedAt = Date.now()): void { + this.db.prepare(` + INSERT INTO hub_group_members (group_id, user_id, joined_at) + VALUES (?, ?, ?) + ON CONFLICT(group_id, user_id) DO UPDATE SET joined_at = excluded.joined_at + `).run(groupId, userId, joinedAt); + } + + removeHubGroupMember(groupId: string, userId: string): void { + this.db.prepare('DELETE FROM hub_group_members WHERE group_id = ? AND user_id = ?').run(groupId, userId); + } + + getGroupsForHubUser(userId: string): GroupInfo[] { + const rows = this.db.prepare(` + SELECT g.* + FROM hub_group_members gm + JOIN hub_groups g ON g.id = gm.group_id + WHERE gm.user_id = ? + ORDER BY g.name + `).all(userId) as HubGroupRow[]; + return rows.map((row) => ({ id: row.id, name: row.name, description: row.description || undefined })); + } + + // ─── Hub Shared Data ─── + + upsertHubTask(task: HubTaskRecord): void { + this.db.prepare(` + INSERT INTO hub_tasks (id, source_task_id, source_user_id, title, summary, group_id, visibility, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(source_user_id, source_task_id) DO UPDATE SET + title = excluded.title, + summary = excluded.summary, + group_id = excluded.group_id, + visibility = excluded.visibility, + created_at = excluded.created_at, + updated_at = excluded.updated_at + `).run(task.id, task.sourceTaskId, task.sourceUserId, task.title, task.summary, task.groupId, task.visibility, task.createdAt, task.updatedAt); + } + + getHubTaskBySource(sourceUserId: string, sourceTaskId: string): HubTaskRecord | null { + const row = this.db.prepare('SELECT * FROM hub_tasks WHERE source_user_id = ? AND source_task_id = ?').get(sourceUserId, sourceTaskId) as HubTaskRow | undefined; + return row ? rowToHubTask(row) : null; + } + + upsertHubChunk(chunk: HubChunkUpsertInput): void { + if (!chunk.sourceTaskId) throw new Error("sourceTaskId is required for hub chunk upserts"); + const taskId = this.resolveCanonicalHubTaskId(chunk.hubTaskId, chunk.sourceUserId, chunk.sourceTaskId); + this.db.prepare(` + INSERT INTO hub_chunks (id, hub_task_id, source_chunk_id, source_user_id, role, content, summary, kind, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(source_user_id, source_chunk_id) DO UPDATE SET + hub_task_id = excluded.hub_task_id, + role = excluded.role, + content = excluded.content, + summary = excluded.summary, + kind = excluded.kind, + created_at = excluded.created_at + `).run(chunk.id, taskId, chunk.sourceChunkId, chunk.sourceUserId, chunk.role, chunk.content, chunk.summary, chunk.kind, chunk.createdAt); + } + + getHubChunkBySource(sourceUserId: string, sourceChunkId: string): HubChunkRecord | null { + const row = this.db.prepare('SELECT * FROM hub_chunks WHERE source_user_id = ? AND source_chunk_id = ?').get(sourceUserId, sourceChunkId) as HubChunkRow | undefined; + return row ? rowToHubChunk(row) : null; + } + + deleteHubTaskBySource(sourceUserId: string, sourceTaskId: string): void { + this.db.prepare('DELETE FROM hub_tasks WHERE source_user_id = ? 
AND source_task_id = ?').run(sourceUserId, sourceTaskId); + } + + upsertHubSkill(skill: HubSkillRecord): void { + this.db.prepare(` + INSERT INTO hub_skills (id, source_skill_id, source_user_id, name, description, version, group_id, visibility, bundle, quality_score, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(source_user_id, source_skill_id) DO UPDATE SET + name = excluded.name, + description = excluded.description, + version = excluded.version, + group_id = excluded.group_id, + visibility = excluded.visibility, + bundle = excluded.bundle, + quality_score = excluded.quality_score, + created_at = excluded.created_at, + updated_at = excluded.updated_at + `).run(skill.id, skill.sourceSkillId, skill.sourceUserId, skill.name, skill.description, skill.version, skill.groupId, skill.visibility, skill.bundle, skill.qualityScore, skill.createdAt, skill.updatedAt); + } + + getHubSkillBySource(sourceUserId: string, sourceSkillId: string): HubSkillRecord | null { + const row = this.db.prepare('SELECT * FROM hub_skills WHERE source_user_id = ? AND source_skill_id = ?').get(sourceUserId, sourceSkillId) as HubSkillRow | undefined; + return row ? rowToHubSkill(row) : null; + } + + upsertHubSkillEmbedding(skillId: string, vector: number[], sourceUserId: string, sourceSkillId: string): void { + if (!sourceUserId || !sourceSkillId) throw new Error("sourceUserId and sourceSkillId are required for hub skill embedding upserts"); + const canonicalSkillId = this.resolveCanonicalHubSkillId(skillId, sourceUserId, sourceSkillId); + const buf = Buffer.allocUnsafe(vector.length * 4); + for (let i = 0; i < vector.length; i++) buf.writeFloatLE(vector[i], i * 4); + this.db.prepare(` + INSERT INTO hub_skill_embeddings (skill_id, vector, dimensions, updated_at) + VALUES (?, ?, ?, ?) 
+      ON CONFLICT(skill_id) DO UPDATE SET
+        vector = excluded.vector,
+        dimensions = excluded.dimensions,
+        updated_at = excluded.updated_at
+    `).run(canonicalSkillId, buf, vector.length, Date.now());
+  }
+
+  getHubSkillEmbedding(skillId: string): number[] | null {
+    const row = this.db.prepare('SELECT vector, dimensions FROM hub_skill_embeddings WHERE skill_id = ?').get(skillId) as { vector: Buffer; dimensions: number } | undefined;
+    if (!row) return null;
+    const out: number[] = [];
+    for (let i = 0; i < row.dimensions; i++) out.push(row.vector.readFloatLE(i * 4));
+    return out;
+  }
+
+  deleteHubSkillBySource(sourceUserId: string, sourceSkillId: string): void {
+    this.db.prepare('DELETE FROM hub_skills WHERE source_user_id = ? AND source_skill_id = ?').run(sourceUserId, sourceSkillId);
+  }
+
+  private resolveCanonicalHubTaskId(taskId: string, sourceUserId: string, sourceTaskId?: string): string {
+    if (sourceTaskId) {
+      const bySource = this.db.prepare('SELECT id FROM hub_tasks WHERE source_user_id = ? AND source_task_id = ?').get(sourceUserId, sourceTaskId) as { id: string } | undefined;
+      if (!bySource) throw new Error(`source task not found for user=${sourceUserId} sourceTaskId=${sourceTaskId}`);
+      if (bySource.id !== taskId) throw new Error(`mismatch between source task and hubTaskId: expected ${bySource.id}, got ${taskId}`);
+      return bySource.id;
+    }
+    throw new Error(`source task not found for user=${sourceUserId} taskId=${taskId}`);
+  }
+
+  private resolveCanonicalHubSkillId(skillId: string, sourceUserId?: string, sourceSkillId?: string): string {
+    if (sourceUserId && sourceSkillId) {
+      const bySource = this.db.prepare('SELECT id FROM hub_skills WHERE source_user_id = ?
AND source_skill_id = ?').get(sourceUserId, sourceSkillId) as { id: string } | undefined;
+      if (!bySource) throw new Error(`source skill not found for user=${sourceUserId} sourceSkillId=${sourceSkillId}`);
+      if (bySource.id !== skillId) throw new Error(`mismatch between source skill and skillId: expected ${bySource.id}, got ${skillId}`);
+      return bySource.id;
+    }
+    throw new Error(`source skill not found for skillId=${skillId}`);
+  }
+
+  private attachGroupsToHubUser(user: HubUserRecord): HubUserRecord {
+    return {
+      ...user,
+      groups: this.getGroupsForHubUser(user.id),
+    };
+  }
+
   close(): void {
     this.db.close();
   }
@@ -1398,6 +1770,227 @@ function rowToSkillVersion(row: SkillVersionRow): SkillVersion {
   };
 }
 
+
+interface ClientHubConnection {
+  hubUrl: string;
+  userId: string;
+  username: string;
+  userToken: string;
+  role: UserRole;
+  connectedAt: number;
+}
+
+interface ClientHubConnectionRow {
+  hub_url: string;
+  user_id: string;
+  username: string;
+  user_token: string;
+  role: string;
+  connected_at: number;
+}
+
+function rowToClientHubConnection(row: ClientHubConnectionRow): ClientHubConnection {
+  return {
+    hubUrl: row.hub_url,
+    userId: row.user_id,
+    username: row.username,
+    userToken: row.user_token,
+    role: row.role as UserRole,
+    connectedAt: row.connected_at,
+  };
+}
+
+interface HubUserRecord extends UserInfo {
+  tokenHash: string;
+  createdAt: number;
+  approvedAt: number | null;
+}
+
+interface HubUserRow {
+  id: string;
+  username: string;
+  device_name: string;
+  role: string;
+  status: string;
+  token_hash: string;
+  created_at: number;
+  approved_at: number | null;
+}
+
+function rowToHubUser(row: HubUserRow): HubUserRecord {
+  return {
+    id: row.id,
+    username: row.username,
+    deviceName: row.device_name || undefined,
+    role: row.role as UserRole,
+    status: row.status as UserStatus,
+    groups: [],
+    tokenHash: row.token_hash,
+    createdAt: row.created_at,
+    approvedAt: row.approved_at,
+  };
+}
+
+interface HubGroupRecord {
+  id: string;
+  
name: string; + description: string; + createdAt: number; +} + +interface HubGroupRow { + id: string; + name: string; + description: string; + created_at: number; +} + +function rowToHubGroup(row: HubGroupRow): HubGroupRecord { + return { + id: row.id, + name: row.name, + description: row.description, + createdAt: row.created_at, + }; +} + +interface HubTaskRecord { + id: string; + sourceTaskId: string; + sourceUserId: string; + title: string; + summary: string; + groupId: string | null; + visibility: SharedVisibility; + createdAt: number; + updatedAt: number; +} + +interface HubTaskRow { + id: string; + source_task_id: string; + source_user_id: string; + title: string; + summary: string; + group_id: string | null; + visibility: string; + created_at: number; + updated_at: number; +} + +function rowToHubTask(row: HubTaskRow): HubTaskRecord { + return { + id: row.id, + sourceTaskId: row.source_task_id, + sourceUserId: row.source_user_id, + title: row.title, + summary: row.summary, + groupId: row.group_id, + visibility: row.visibility as SharedVisibility, + createdAt: row.created_at, + updatedAt: row.updated_at, + }; +} + +interface HubChunkUpsertInput { + id: string; + hubTaskId: string; + sourceTaskId: string; + sourceChunkId: string; + sourceUserId: string; + role: Chunk["role"]; + content: string; + summary: string; + kind: Chunk["kind"]; + createdAt: number; +} + +interface HubChunkRecord { + id: string; + hubTaskId: string; + sourceChunkId: string; + sourceUserId: string; + role: Chunk["role"]; + content: string; + summary: string; + kind: Chunk["kind"]; + createdAt: number; +} + +interface HubChunkRow { + id: string; + hub_task_id: string; + source_chunk_id: string; + source_user_id: string; + role: string; + content: string; + summary: string; + kind: string; + created_at: number; +} + +function rowToHubChunk(row: HubChunkRow): HubChunkRecord { + return { + id: row.id, + hubTaskId: row.hub_task_id, + sourceChunkId: row.source_chunk_id, + sourceUserId: 
row.source_user_id, + role: row.role as Chunk["role"], + content: row.content, + summary: row.summary, + kind: row.kind as Chunk["kind"], + createdAt: row.created_at, + }; +} + +interface HubSkillRecord { + id: string; + sourceSkillId: string; + sourceUserId: string; + name: string; + description: string; + version: number; + groupId: string | null; + visibility: SharedVisibility; + bundle: string; + qualityScore: number | null; + createdAt: number; + updatedAt: number; +} + +interface HubSkillRow { + id: string; + source_skill_id: string; + source_user_id: string; + name: string; + description: string; + version: number; + group_id: string | null; + visibility: string; + bundle: string; + quality_score: number | null; + created_at: number; + updated_at: number; +} + +function rowToHubSkill(row: HubSkillRow): HubSkillRecord { + return { + id: row.id, + sourceSkillId: row.source_skill_id, + sourceUserId: row.source_user_id, + name: row.name, + description: row.description, + version: row.version, + groupId: row.group_id, + visibility: row.visibility as SharedVisibility, + bundle: row.bundle, + qualityScore: row.quality_score, + createdAt: row.created_at, + updatedAt: row.updated_at, + }; +} + + function contentHash(content: string): string { return createHash("sha256").update(content).digest("hex").slice(0, 16); } diff --git a/apps/memos-local-openclaw/tests/storage.test.ts b/apps/memos-local-openclaw/tests/storage.test.ts index fa919e0d8..f21d1d929 100644 --- a/apps/memos-local-openclaw/tests/storage.test.ts +++ b/apps/memos-local-openclaw/tests/storage.test.ts @@ -131,6 +131,359 @@ describe("SqliteStore", () => { }); }); +describe("SqliteStore hub sharing schema", () => { + it("should persist a single client hub connection record", () => { + store.setClientHubConnection({ + hubUrl: "http://127.0.0.1:18800", + userId: "user-1", + username: "alice", + userToken: "token-1", + role: "admin", + connectedAt: 123, + }); + + const saved = store.getClientHubConnection(); + 
expect(saved).toMatchObject({ + hubUrl: "http://127.0.0.1:18800", + userId: "user-1", + username: "alice", + userToken: "token-1", + role: "admin", + connectedAt: 123, + }); + + store.setClientHubConnection({ + hubUrl: "http://192.168.1.8:18800", + userId: "user-2", + username: "bob", + userToken: "token-2", + role: "member", + connectedAt: 456, + }); + + const updated = store.getClientHubConnection(); + expect(updated).toMatchObject({ + hubUrl: "http://192.168.1.8:18800", + userId: "user-2", + username: "bob", + userToken: "token-2", + role: "member", + connectedAt: 456, + }); + }); + + it("should store hub users, groups, and memberships", () => { + store.upsertHubUser({ + id: "user-1", + username: "alice", + deviceName: "Alice Mac", + role: "admin", + status: "active", + tokenHash: "hash-1", + createdAt: 100, + approvedAt: 110, + }); + store.upsertHubUser({ + id: "user-2", + username: "bob", + deviceName: "Bob Mac", + role: "member", + status: "pending", + tokenHash: "hash-2", + createdAt: 200, + approvedAt: null, + }); + store.upsertHubGroup({ + id: "group-1", + name: "Backend", + description: "backend team", + createdAt: 300, + }); + store.addHubGroupMember("group-1", "user-1", 400); + + const pending = store.listHubUsers("pending"); + expect(pending).toHaveLength(1); + expect(pending[0].username).toBe("bob"); + expect(pending[0].groups).toEqual([]); + + const groups = store.getGroupsForHubUser("user-1"); + expect(groups).toHaveLength(1); + expect(groups[0].name).toBe("Backend"); + + const alice = store.getHubUser("user-1"); + expect(alice).not.toBeNull(); + expect(alice!.groups).toHaveLength(1); + expect(alice!.groups[0].name).toBe("Backend"); + + const users = store.listHubUsers(); + const aliceFromList = users.find((user) => user.id === "user-1"); + expect(aliceFromList).toBeDefined(); + expect(aliceFromList!.groups).toHaveLength(1); + expect(aliceFromList!.groups[0].name).toBe("Backend"); + }); + + it("should allow hub users without a device name", () => { 
+ store.upsertHubUser({ + id: "user-3", + username: "carol", + role: "member", + status: "active", + tokenHash: "hash-3", + createdAt: 300, + approvedAt: 310, + groups: [], + }); + + const user = store.getHubUser("user-3"); + expect(user).not.toBeNull(); + expect(user!.username).toBe("carol"); + expect(user!.deviceName).toBeUndefined(); + }); + + it("should upsert shared hub records idempotently by source ids", () => { + store.upsertHubTask({ + id: "hub-task-1", + sourceTaskId: "task-1", + sourceUserId: "user-1", + title: "Deploy API", + summary: "deploy summary", + groupId: "group-1", + visibility: "group", + createdAt: 1000, + updatedAt: 1000, + }); + store.upsertHubChunk({ + id: "hub-chunk-1", + hubTaskId: "hub-task-1", + sourceTaskId: "task-1", + sourceChunkId: "chunk-1", + sourceUserId: "user-1", + role: "assistant", + content: "deploy content", + summary: "deploy summary", + kind: "paragraph", + createdAt: 1100, + }); + store.upsertHubTask({ + id: "hub-task-2", + sourceTaskId: "task-1", + sourceUserId: "user-1", + title: "Deploy API v2", + summary: "updated summary", + groupId: "group-1", + visibility: "group", + createdAt: 1000, + updatedAt: 2000, + }); + + const task = store.getHubTaskBySource("user-1", "task-1"); + expect(task).not.toBeNull(); + expect(task!.id).toBe("hub-task-1"); + expect(task!.title).toBe("Deploy API v2"); + + const chunk = store.getHubChunkBySource("user-1", "chunk-1"); + expect(chunk).not.toBeNull(); + expect(chunk!.hubTaskId).toBe("hub-task-1"); + + store.upsertHubChunk({ + id: "hub-chunk-2", + hubTaskId: "hub-task-1", + sourceTaskId: "task-1", + sourceChunkId: "chunk-2", + sourceUserId: "user-1", + role: "assistant", + content: "deploy content 2", + summary: "deploy summary 2", + kind: "paragraph", + createdAt: 1200, + }); + + const remappedChunk = store.getHubChunkBySource("user-1", "chunk-2"); + expect(remappedChunk).not.toBeNull(); + expect(remappedChunk!.hubTaskId).toBe("hub-task-1"); + + store.upsertHubSkill({ + id: 
"hub-skill-1", + sourceSkillId: "skill-1", + sourceUserId: "user-1", + name: "deploy-skill", + description: "first description", + version: 1, + groupId: null, + visibility: "public", + bundle: '{"skill_md":"# deploy"}', + qualityScore: 0.8, + createdAt: 100, + updatedAt: 100, + }); + store.upsertHubSkillEmbedding("hub-skill-1", [0.1, 0.2, 0.3], "user-1", "skill-1"); + store.upsertHubSkill({ + id: "hub-skill-2", + sourceSkillId: "skill-1", + sourceUserId: "user-1", + name: "deploy-skill", + description: "updated description", + version: 2, + groupId: null, + visibility: "public", + bundle: '{"skill_md":"# deploy v2"}', + qualityScore: 0.9, + createdAt: 100, + updatedAt: 200, + }); + + store.upsertHubSkillEmbedding("hub-skill-1", [0.4, 0.5, 0.6], "user-1", "skill-1"); + + const skill = store.getHubSkillBySource("user-1", "skill-1"); + expect(skill).not.toBeNull(); + expect(skill!.id).toBe("hub-skill-1"); + expect(skill!.version).toBe(2); + expect(skill!.description).toBe("updated description"); + const embedding = store.getHubSkillEmbedding("hub-skill-1"); + expect(embedding).not.toBeNull(); + expect(embedding![0]).toBeCloseTo(0.4, 5); + expect(embedding![1]).toBeCloseTo(0.5, 5); + expect(embedding![2]).toBeCloseTo(0.6, 5); + }); + + it("should reject mismatched parent ids even when the source tuple exists", () => { + store.upsertHubTask({ + id: "hub-task-alice", + sourceTaskId: "task-alice", + sourceUserId: "user-1", + title: "Alice task", + summary: "alice summary", + groupId: null, + visibility: "public", + createdAt: 1, + updatedAt: 1, + }); + store.upsertHubTask({ + id: "hub-task-bob", + sourceTaskId: "task-bob", + sourceUserId: "user-2", + title: "Bob task", + summary: "bob summary", + groupId: null, + visibility: "public", + createdAt: 2, + updatedAt: 2, + }); + + expect(() => store.upsertHubChunk({ + id: "hub-chunk-z", + hubTaskId: "hub-task-bob", + sourceTaskId: "task-alice", + sourceChunkId: "chunk-alice", + sourceUserId: "user-1", + role: "assistant", + 
content: "alice chunk", + summary: "alice chunk", + kind: "paragraph", + createdAt: 3, + })).toThrow(/mismatch/i); + + expect(store.getHubChunkBySource("user-1", "chunk-alice")).toBeNull(); + + store.upsertHubSkill({ + id: "hub-skill-alice", + sourceSkillId: "skill-alice", + sourceUserId: "user-1", + name: "alice-skill", + description: "alice", + version: 1, + groupId: null, + visibility: "public", + bundle: '{"skill_md":"# alice"}', + qualityScore: 0.7, + createdAt: 1, + updatedAt: 1, + }); + store.upsertHubSkill({ + id: "hub-skill-bob", + sourceSkillId: "skill-bob", + sourceUserId: "user-2", + name: "bob-skill", + description: "bob", + version: 1, + groupId: null, + visibility: "public", + bundle: '{"skill_md":"# bob"}', + qualityScore: 0.6, + createdAt: 1, + updatedAt: 1, + }); + + expect(() => store.upsertHubSkillEmbedding("hub-skill-bob", [0.9, 0.8], "user-1", "skill-alice")).toThrow(/mismatch/i); + expect(store.getHubSkillEmbedding("hub-skill-alice")).toBeNull(); + expect(store.getHubSkillEmbedding("hub-skill-bob")).toBeNull(); + }); + + it("should reject child writes when the source tuple does not resolve to a parent", () => { + store.upsertHubTask({ + id: "hub-task-bob-only", + sourceTaskId: "task-bob-only", + sourceUserId: "user-2", + title: "Bob only", + summary: "bob only", + groupId: null, + visibility: "public", + createdAt: 1, + updatedAt: 1, + }); + + expect(() => store.upsertHubChunk({ + id: "hub-chunk-bad", + hubTaskId: "hub-task-bob-only", + sourceTaskId: "task-alice-missing", + sourceChunkId: "chunk-missing", + sourceUserId: "user-1", + role: "assistant", + content: "bad", + summary: "bad", + kind: "paragraph", + createdAt: 2, + })).toThrow(/source task/i); + + store.upsertHubSkill({ + id: "hub-skill-bob-only", + sourceSkillId: "skill-bob-only", + sourceUserId: "user-2", + name: "bob-only", + description: "bob-only", + version: 1, + groupId: null, + visibility: "public", + bundle: '{"skill_md":"# bob only"}', + qualityScore: 0.5, + createdAt: 1, 
+ updatedAt: 1, + }); + + expect(() => store.upsertHubSkillEmbedding("hub-skill-bob-only", [0.2, 0.3], "user-1", "skill-alice-missing")).toThrow(/source skill/i); + expect(store.getHubChunkBySource("user-1", "chunk-missing")).toBeNull(); + expect(store.getHubSkillEmbedding("hub-skill-bob-only")).toBeNull(); + }); + + it("should require source identifiers for remap-sensitive child writes", () => { + expect(() => (store as any).upsertHubChunk({ + id: "hub-chunk-x", + hubTaskId: "hub-task-x", + sourceChunkId: "chunk-x", + sourceUserId: "user-1", + role: "assistant", + content: "x", + summary: "x", + kind: "paragraph", + createdAt: 1, + })).toThrow(/sourceTaskId/i); + + expect(() => (store as any).upsertHubSkillEmbedding("hub-skill-x", [0.1, 0.2])).toThrow(/sourceUserId and sourceSkillId/i); + }); + + +}); + describe("vectorSearch", () => { const noopLog: Logger = { debug: () => {}, From 2750687102096fedd8b878c6dfc4dd42eef1ff80 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 14:24:21 +0800 Subject: [PATCH 04/85] chore(git): ignore local worktrees --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 1a9c5653f..7ca5328bf 100644 --- a/.gitignore +++ b/.gitignore @@ -230,3 +230,6 @@ cython_debug/ outputs evaluation/data/temporal_locomo + +# Local git worktrees +.worktrees/ From 2e79cd70199789c281725d33365b1969fb03c527 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 15:06:26 +0800 Subject: [PATCH 05/85] feat(memos-local): add hub service skeleton --- apps/memos-local-openclaw/index.ts | 10 + apps/memos-local-openclaw/plugin-impl.ts | 10 + apps/memos-local-openclaw/src/hub/auth.ts | 78 +++++++ apps/memos-local-openclaw/src/hub/server.ts | 207 ++++++++++++++++++ .../src/hub/user-manager.ts | 113 ++++++++++ .../tests/hub-auth.test.ts | 66 ++++++ .../tests/hub-server.test.ts | 157 +++++++++++++ .../tests/plugin-impl-access.test.ts | 89 +++++++- 8 files changed, 728 insertions(+), 2 deletions(-) create mode 
100644 apps/memos-local-openclaw/src/hub/auth.ts create mode 100644 apps/memos-local-openclaw/src/hub/server.ts create mode 100644 apps/memos-local-openclaw/src/hub/user-manager.ts create mode 100644 apps/memos-local-openclaw/tests/hub-auth.test.ts create mode 100644 apps/memos-local-openclaw/tests/hub-server.test.ts diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 71db2ba71..323149df2 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -17,6 +17,7 @@ import { RecallEngine } from "./src/recall/engine"; import { captureMessages, stripInboundMetadata } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; +import { HubServer } from "./src/hub/server"; import { SkillEvolver } from "./src/skill/evolver"; import { SkillInstaller } from "./src/skill/installer"; import { Summarizer } from "./src/ingest/providers"; @@ -1202,11 +1203,19 @@ const memosLocalPlugin = { ctx, }); + const hubServer = ctx.config.sharing?.enabled && ctx.config.sharing.role === "hub" + ? 
new HubServer({ store, log: ctx.log, config: ctx.config, dataDir: stateDir }) + : null; + // ─── Service lifecycle ─── api.registerService({ id: "memos-local-openclaw-plugin", start: async () => { + if (hubServer) { + const hubUrl = await hubServer.start(); + api.logger.info(`memos-local: hub started at ${hubUrl}`); + } try { const viewerUrl = await viewer.start(); api.logger.info(`memos-local: started (embedding: ${embedder.provider})`); @@ -1228,6 +1237,7 @@ const memosLocalPlugin = { }, stop: async () => { await telemetry.shutdown(); + await hubServer?.stop(); viewer.stop(); store.close(); api.logger.info("memos-local: stopped"); diff --git a/apps/memos-local-openclaw/plugin-impl.ts b/apps/memos-local-openclaw/plugin-impl.ts index 462919779..b4f6c9ec2 100644 --- a/apps/memos-local-openclaw/plugin-impl.ts +++ b/apps/memos-local-openclaw/plugin-impl.ts @@ -12,6 +12,7 @@ import { RecallEngine } from "./src/recall/engine"; import { captureMessages } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; +import { HubServer } from "./src/hub/server"; function ownerFilterFor(agentId: string | undefined): string[] { const resolvedAgentId = agentId && agentId.trim().length > 0 ? agentId : "main"; @@ -509,11 +510,19 @@ const memosLocalPlugin = { ctx, }); + const hubServer = ctx.config.sharing?.enabled && ctx.config.sharing.role === "hub" + ? 
new HubServer({ store, log: ctx.log, config: ctx.config, dataDir: stateDir }) + : null; + // ─── Service lifecycle ─── api.registerService({ id: "memos-local-openclaw-plugin", start: async () => { + if (hubServer) { + const hubUrl = await hubServer.start(); + api.logger.info(`memos-local: hub started at ${hubUrl}`); + } try { const viewerUrl = await viewer.start(); api.logger.info(`memos-local: started (embedding: ${embedder.provider})`); @@ -530,6 +539,7 @@ const memosLocalPlugin = { } }, stop: async () => { + await hubServer?.stop(); viewer.stop(); await worker.flush(); store.close(); diff --git a/apps/memos-local-openclaw/src/hub/auth.ts b/apps/memos-local-openclaw/src/hub/auth.ts new file mode 100644 index 000000000..508fe4424 --- /dev/null +++ b/apps/memos-local-openclaw/src/hub/auth.ts @@ -0,0 +1,78 @@ +import { createHmac, randomBytes, timingSafeEqual } from "crypto"; +import type { UserRole, UserStatus } from "../sharing/types"; + +type UserTokenPayload = { + userId: string; + username: string; + role: UserRole; + status: UserStatus; + exp: number; +}; + +function base64url(input: Buffer | string): string { + return Buffer.from(input) + .toString("base64") + .replace(/\+/g, "-") + .replace(/\//g, "_") + .replace(/=+$/g, ""); +} + +function unbase64url(input: string): Buffer { + const padded = input.replace(/-/g, "+").replace(/_/g, "/") + "===".slice((input.length + 3) % 4); + return Buffer.from(padded, "base64"); +} + +function sign(value: string, secret: string): string { + return base64url(createHmac("sha256", secret).update(value).digest()); +} + +export function createTeamToken(secret: string): string { + const nonce = base64url(randomBytes(12)); + const body = `team.${nonce}`; + return `${body}.${sign(body, secret)}`; +} + +export function verifyTeamToken(token: string, secret: string): boolean { + const idx = token.lastIndexOf("."); + if (idx <= 0) return false; + const body = token.slice(0, idx); + const sig = token.slice(idx + 1); + const expected = 
sign(body, secret); + try { + return timingSafeEqual(Buffer.from(sig), Buffer.from(expected)); + } catch { + return false; + } +} + +export function issueUserToken( + payload: { userId: string; username: string; role: UserRole; status: UserStatus }, + secret: string, + ttlMs = 24 * 60 * 60 * 1000, +): string { + const full: UserTokenPayload = { ...payload, exp: Date.now() + ttlMs }; + const body = base64url(JSON.stringify(full)); + return `${body}.${sign(body, secret)}`; +} + +export function verifyUserToken(token: string, secret: string): Omit | null { + const idx = token.lastIndexOf("."); + if (idx <= 0) return null; + const body = token.slice(0, idx); + const sig = token.slice(idx + 1); + const expected = sign(body, secret); + + try { + if (!timingSafeEqual(Buffer.from(sig), Buffer.from(expected))) return null; + const parsed = JSON.parse(unbase64url(body).toString("utf8")) as UserTokenPayload; + if (parsed.exp < Date.now()) return null; + return { + userId: parsed.userId, + username: parsed.username, + role: parsed.role, + status: parsed.status, + }; + } catch { + return null; + } +} diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts new file mode 100644 index 000000000..bbe3e12ea --- /dev/null +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -0,0 +1,207 @@ +import * as fs from "fs"; +import * as http from "http"; +import * as path from "path"; +import { createHash, randomBytes, randomUUID } from "crypto"; +import type { SqliteStore } from "../storage/sqlite"; +import type { Logger, MemosLocalConfig } from "../types"; +import { issueUserToken, verifyUserToken } from "./auth"; +import { HubUserManager } from "./user-manager"; + +type HubServerOptions = { + store: SqliteStore; + log: Logger; + config: MemosLocalConfig; + dataDir: string; +}; + +type HubAuthState = { + authSecret: string; + bootstrapAdminUserId?: string; + bootstrapAdminToken?: string; +}; + +export class HubServer { + private server?: 
http.Server; + private readonly userManager: HubUserManager; + private readonly authStatePath: string; + private authState: HubAuthState; + + constructor(private opts: HubServerOptions) { + this.userManager = new HubUserManager(opts.store, opts.log); + this.authStatePath = path.join(opts.dataDir, "hub-auth.json"); + this.authState = this.loadAuthState(); + } + + async start(): Promise { + if (!this.teamToken) { + throw new Error("team token is required to start hub mode"); + } + if (this.server?.listening) { + return `http://127.0.0.1:${this.port}`; + } + + this.server = http.createServer(async (req, res) => { + try { + await this.handle(req, res); + } catch (err) { + this.opts.log.warn(`hub server error: ${String(err)}`); + res.statusCode = 500; + res.setHeader("content-type", "application/json"); + res.end(JSON.stringify({ error: "internal_error" })); + } + }); + + await new Promise((resolve, reject) => { + const onError = (err: Error) => { + this.server?.off("listening", onListening); + reject(err); + }; + const onListening = () => { + this.server?.off("error", onError); + resolve(); + }; + this.server!.once("error", onError); + this.server!.once("listening", onListening); + this.server!.listen(this.port, "0.0.0.0"); + }); + + const bootstrap = this.userManager.ensureBootstrapAdmin( + this.authSecret, + "admin", + this.authState.bootstrapAdminUserId, + this.authState.bootstrapAdminToken, + ); + if (bootstrap.token) { + this.authState.bootstrapAdminUserId = bootstrap.user.id; + this.authState.bootstrapAdminToken = bootstrap.token; + this.saveAuthState(); + this.opts.log.info(`memos-local: bootstrap admin token persisted to ${this.authStatePath}`); + } + + return `http://127.0.0.1:${this.port}`; + } + + async stop(): Promise { + if (!this.server) return; + const server = this.server; + this.server = undefined; + await new Promise((resolve) => server.close(() => resolve())); + } + + private get port(): number { + return this.opts.config.sharing?.hub?.port ?? 
18800; + } + + private get teamName(): string { + return this.opts.config.sharing?.hub?.teamName ?? ""; + } + + private get teamToken(): string { + return this.opts.config.sharing?.hub?.teamToken ?? ""; + } + + private get authSecret(): string { + return this.authState.authSecret; + } + + private loadAuthState(): HubAuthState { + try { + const raw = fs.readFileSync(this.authStatePath, "utf8"); + const parsed = JSON.parse(raw) as HubAuthState; + if (parsed.authSecret) return parsed; + } catch {} + const initial = { authSecret: randomBytes(32).toString("hex") } as HubAuthState; + fs.mkdirSync(path.dirname(this.authStatePath), { recursive: true }); + fs.writeFileSync(this.authStatePath, JSON.stringify(initial, null, 2), "utf8"); + return initial; + } + + private saveAuthState(): void { + fs.mkdirSync(path.dirname(this.authStatePath), { recursive: true }); + fs.writeFileSync(this.authStatePath, JSON.stringify(this.authState, null, 2), "utf8"); + } + + private async handle(req: http.IncomingMessage, res: http.ServerResponse): Promise { + const url = new URL(req.url || "/", `http://127.0.0.1:${this.port}`); + const path = url.pathname; + + if (req.method === "GET" && path === "/api/v1/hub/info") { + return this.json(res, 200, { + teamName: this.teamName, + version: "0.0.0", + apiVersion: "v1", + }); + } + + if (req.method === "POST" && path === "/api/v1/hub/join") { + const body = await this.readJson(req); + if (!body || body.teamToken !== this.teamToken) { + return this.json(res, 403, { error: "invalid_team_token" }); + } + const pending = this.userManager.createPendingUser({ + username: String(body.username || `user-${randomUUID().slice(0, 8)}`), + deviceName: typeof body.deviceName === "string" ? 
body.deviceName : undefined, + }); + return this.json(res, 202, { status: "pending", userId: pending.id }); + } + + if (req.method === "GET" && path === "/api/v1/hub/me") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const user = this.opts.store.getHubUser(auth.userId); + if (!user) return this.json(res, 401, { error: "unauthorized" }); + return this.json(res, 200, user); + } + + if (req.method === "GET" && path === "/api/v1/hub/admin/pending-users") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); + return this.json(res, 200, { users: this.userManager.listPendingUsers() }); + } + + if (req.method === "POST" && path === "/api/v1/hub/admin/approve-user") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); + const body = await this.readJson(req); + const token = issueUserToken({ userId: String(body.userId), username: String(body.username || ""), role: "member", status: "active" }, this.authSecret); + const approved = this.userManager.approveUser(String(body.userId), token); + if (!approved) return this.json(res, 404, { error: "not_found" }); + return this.json(res, 200, { status: "active", token }); + } + + return this.json(res, 404, { error: "not_found" }); + } + + private authenticate(req: http.IncomingMessage) { + const header = req.headers.authorization; + if (!header || !header.startsWith("Bearer ")) return null; + const token = header.slice("Bearer ".length); + const payload = verifyUserToken(token, this.authSecret); + if (!payload) return null; + const user = this.opts.store.getHubUser(payload.userId); + if (!user || user.status !== "active") return null; + const hash = createHash("sha256").update(token).digest("hex"); + 
if (user.tokenHash !== hash) return null; + return { + userId: user.id, + username: user.username, + role: user.role, + status: user.status, + }; + } + + private async readJson(req: http.IncomingMessage): Promise { + const chunks: Buffer[] = []; + for await (const chunk of req) chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)); + const raw = Buffer.concat(chunks).toString("utf8"); + return raw ? JSON.parse(raw) : {}; + } + + private json(res: http.ServerResponse, statusCode: number, body: unknown): void { + res.statusCode = statusCode; + res.setHeader("content-type", "application/json"); + res.end(JSON.stringify(body)); + } +} diff --git a/apps/memos-local-openclaw/src/hub/user-manager.ts b/apps/memos-local-openclaw/src/hub/user-manager.ts new file mode 100644 index 000000000..3165dbf2c --- /dev/null +++ b/apps/memos-local-openclaw/src/hub/user-manager.ts @@ -0,0 +1,113 @@ +import { randomUUID, createHash } from "crypto"; +import { issueUserToken } from "./auth"; +import type { Logger } from "../types"; +import type { UserInfo } from "../sharing/types"; +import type { SqliteStore } from "../storage/sqlite"; + +type ManagedHubUser = UserInfo & { tokenHash: string; createdAt: number; approvedAt: number | null }; + +export class HubUserManager { + constructor(private store: SqliteStore, private log: Logger) {} + + createPendingUser(input: { username: string; deviceName?: string }): ManagedHubUser { + const user = { + id: randomUUID(), + username: input.username, + deviceName: input.deviceName, + role: "member" as const, + status: "pending" as const, + groups: [], + tokenHash: "", + createdAt: Date.now(), + approvedAt: null, + }; + this.store.upsertHubUser(user); + return user; + } + + listPendingUsers(): ManagedHubUser[] { + return this.store.listHubUsers("pending"); + } + + approveUser(userId: string, token: string): ManagedHubUser | null { + const user = this.store.getHubUser(userId); + if (!user) return null; + const updated = { + ...user, + status: 
"active" as const, + tokenHash: createHash("sha256").update(token).digest("hex"), + approvedAt: Date.now(), + }; + this.store.upsertHubUser(updated); + return updated; + } + + ensureBootstrapAdmin(secret: string, username = "admin", bootstrapUserId?: string, bootstrapToken?: string): { user: ManagedHubUser; token: string } { + if (bootstrapUserId) { + const bootstrapUser = this.store.getHubUser(bootstrapUserId); + if (bootstrapUser && bootstrapUser.role === "admin" && bootstrapUser.status === "active") { + if (bootstrapToken && bootstrapUser.tokenHash === createHash("sha256").update(bootstrapToken).digest("hex")) { + return { user: bootstrapUser, token: bootstrapToken }; + } + const refreshedToken = issueUserToken( + { userId: bootstrapUser.id, username: bootstrapUser.username, role: bootstrapUser.role, status: bootstrapUser.status }, + secret, + 3650 * 24 * 60 * 60 * 1000, + ); + const refreshedUser: ManagedHubUser = { + ...bootstrapUser, + tokenHash: createHash("sha256").update(refreshedToken).digest("hex"), + }; + this.store.upsertHubUser(refreshedUser); + return { user: refreshedUser, token: refreshedToken }; + } + } + + const existing = this.store.listHubUsers().find((user) => user.role === "admin" && user.status === "active"); + if (existing) { + const refreshedToken = issueUserToken( + { userId: existing.id, username: existing.username, role: existing.role, status: existing.status }, + secret, + 3650 * 24 * 60 * 60 * 1000, + ); + const refreshedUser: ManagedHubUser = { + ...existing, + tokenHash: createHash("sha256").update(refreshedToken).digest("hex"), + }; + this.store.upsertHubUser(refreshedUser); + return { user: refreshedUser, token: refreshedToken }; + } + + const user: ManagedHubUser = { + id: randomUUID(), + username, + deviceName: "hub", + role: "admin", + status: "active", + groups: [], + tokenHash: "", + createdAt: Date.now(), + approvedAt: Date.now(), + }; + const token = issueUserToken( + { userId: user.id, username: user.username, role: 
user.role, status: user.status }, + secret, + 3650 * 24 * 60 * 60 * 1000, + ); + user.tokenHash = createHash("sha256").update(token).digest("hex"); + this.store.upsertHubUser(user); + return { user, token }; + } + + rejectUser(userId: string): ManagedHubUser | null { + const user = this.store.getHubUser(userId); + if (!user) return null; + const updated = { + ...user, + status: "rejected" as const, + approvedAt: Date.now(), + }; + this.store.upsertHubUser(updated); + return updated; + } +} diff --git a/apps/memos-local-openclaw/tests/hub-auth.test.ts b/apps/memos-local-openclaw/tests/hub-auth.test.ts new file mode 100644 index 000000000..e57d7bf7a --- /dev/null +++ b/apps/memos-local-openclaw/tests/hub-auth.test.ts @@ -0,0 +1,66 @@ +import { describe, expect, it } from "vitest"; +import { + createTeamToken, + issueUserToken, + verifyTeamToken, + verifyUserToken, +} from "../src/hub/auth"; + +describe("hub auth", () => { + it("should create and verify a team token", () => { + const token = createTeamToken("team-secret"); + expect(typeof token).toBe("string"); + expect(token.length).toBeGreaterThan(10); + expect(verifyTeamToken(token, "team-secret")).toBe(true); + expect(verifyTeamToken(token, "wrong-secret")).toBe(false); + }); + + it("should issue and verify a user token", () => { + const token = issueUserToken( + { + userId: "user-1", + username: "alice", + role: "admin", + status: "active", + }, + "team-secret", + 60_000, + ); + + const verified = verifyUserToken(token, "team-secret"); + expect(verified).not.toBeNull(); + expect(verified!.userId).toBe("user-1"); + expect(verified!.username).toBe("alice"); + expect(verified!.role).toBe("admin"); + expect(verified!.status).toBe("active"); + }); + + it("should reject expired or tampered user tokens", async () => { + const token = issueUserToken( + { + userId: "user-2", + username: "bob", + role: "member", + status: "active", + }, + "team-secret", + 1, + ); + + await new Promise((resolve) => setTimeout(resolve, 5)); 
+ expect(verifyUserToken(token, "team-secret")).toBeNull(); + + const valid = issueUserToken( + { + userId: "user-3", + username: "carol", + role: "member", + status: "active", + }, + "team-secret", + 60_000, + ); + const tampered = valid.replace(/.$/, valid.endsWith("a") ? "b" : "a"); + expect(verifyUserToken(tampered, "team-secret")).toBeNull(); + }); +}); diff --git a/apps/memos-local-openclaw/tests/hub-server.test.ts b/apps/memos-local-openclaw/tests/hub-server.test.ts new file mode 100644 index 000000000..7b2f8029a --- /dev/null +++ b/apps/memos-local-openclaw/tests/hub-server.test.ts @@ -0,0 +1,157 @@ +import { afterEach, describe, expect, it } from "vitest"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { SqliteStore } from "../src/storage/sqlite"; +import { HubServer } from "../src/hub/server"; + +const noopLog = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +const servers: HubServer[] = []; +const stores: SqliteStore[] = []; +const dirs: string[] = []; + +afterEach(() => { + for (const server of servers.splice(0)) server.stop(); + for (const store of stores.splice(0)) store.close(); + for (const dir of dirs.splice(0)) fs.rmSync(dir, { recursive: true, force: true }); +}); + +describe("hub server", () => { + it("should preserve bootstrap admin token across restart", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-auth-")); + dirs.push(dir); + const dbPath = path.join(dir, "test.db"); + + const store1 = new SqliteStore(dbPath, noopLog); + stores.push(store1); + const server1 = new HubServer({ + store: store1, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18912, teamName: "Persist", teamToken: "persist-secret" } } }, + dataDir: dir, + } as any); + servers.push(server1); + await server1.start(); + + const authPath = path.join(dir, "hub-auth.json"); + const firstState = JSON.parse(fs.readFileSync(authPath, "utf8")); + 
const firstToken = firstState.bootstrapAdminToken; + expect(firstToken).toBeTruthy(); + + const firstMe = await fetch("http://127.0.0.1:18912/api/v1/hub/me", { + headers: { authorization: `Bearer ${firstToken}` }, + }); + expect(firstMe.status).toBe(200); + + await server1.stop(); + servers.pop(); + store1.close(); + stores.pop(); + + const store2 = new SqliteStore(dbPath, noopLog); + stores.push(store2); + const server2 = new HubServer({ + store: store2, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18912, teamName: "Persist", teamToken: "persist-secret" } } }, + dataDir: dir, + } as any); + servers.push(server2); + await server2.start(); + + const secondMe = await fetch("http://127.0.0.1:18912/api/v1/hub/me", { + headers: { authorization: `Bearer ${firstToken}` }, + }); + expect(secondMe.status).toBe(200); + }); + + it("should recover bootstrap metadata for legacy hubs with an existing admin", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-legacy-")); + dirs.push(dir); + const dbPath = path.join(dir, "test.db"); + const authPath = path.join(dir, "hub-auth.json"); + + const store = new SqliteStore(dbPath, noopLog); + stores.push(store); + store.upsertHubUser({ + id: "legacy-admin", + username: "legacy", + deviceName: "hub", + role: "admin", + status: "active", + groups: [], + tokenHash: "", + createdAt: 1, + approvedAt: 1, + }); + fs.writeFileSync(authPath, JSON.stringify({ authSecret: "legacy-secret" }, null, 2), "utf8"); + + const server = new HubServer({ + store, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18913, teamName: "Legacy", teamToken: "legacy-team-token" } } }, + dataDir: dir, + } as any); + servers.push(server); + await server.start(); + + const state = JSON.parse(fs.readFileSync(authPath, "utf8")); + expect(state.bootstrapAdminUserId).toBe("legacy-admin"); + expect(state.bootstrapAdminToken).toBeTruthy(); + + const me = await 
fetch("http://127.0.0.1:18913/api/v1/hub/me", { + headers: { authorization: `Bearer ${state.bootstrapAdminToken}` }, + }); + expect(me.status).toBe(200); + }); + + it("should refuse to start hub mode without a team token", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-no-token-")); + dirs.push(dir); + const store = new SqliteStore(path.join(dir, "test.db"), noopLog); + stores.push(store); + + const server = new HubServer({ + store, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18914, teamName: "NoToken", teamToken: "" } } }, + dataDir: dir, + } as any); + servers.push(server); + + await expect(server.start()).rejects.toThrow(/team token/i); + }); + + it("should fail cleanly on port conflict", async () => { + const dir1 = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-1-")); + const dir2 = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-2-")); + dirs.push(dir1, dir2); + + const store1 = new SqliteStore(path.join(dir1, "test.db"), noopLog); + const store2 = new SqliteStore(path.join(dir2, "test.db"), noopLog); + stores.push(store1, store2); + + const server1 = new HubServer({ + store: store1, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18911, teamName: "A", teamToken: "secret-a" } } }, + dataDir: dir1, + } as any); + const server2 = new HubServer({ + store: store2, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18911, teamName: "B", teamToken: "secret-b" } } }, + dataDir: dir2, + } as any); + servers.push(server1, server2); + + await server1.start(); + await expect(server2.start()).rejects.toThrow(); + }); +}); diff --git a/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts b/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts index 41951a04d..db0471c9f 100644 --- a/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts +++ b/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts @@ -3,14 +3,16 @@ import * as 
fs from "fs"; import * as path from "path"; import * as os from "os"; import plugin from "../plugin-impl"; +import { SqliteStore } from "../src/storage/sqlite"; +import { issueUserToken } from "../src/hub/auth"; -function makeApi(stateDir: string) { +function makeApi(stateDir: string, pluginConfig: Record = {}) { const tools = new Map(); const events = new Map(); let service: any; const api = { - pluginConfig: {}, + pluginConfig, resolvePath(input: string) { return input === "~/.openclaw" ? stateDir : input; }, @@ -43,6 +45,89 @@ async function waitFor(predicate: () => Promise | boolean, timeoutMs = throw new Error("Timed out waiting for condition"); } +describe("plugin-impl hub service skeleton", () => { + let tmpDir: string; + let service: any; + let port: number; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-plugin-impl-hub-")); + port = 18901 + Math.floor(Math.random() * 2000); + ({ service } = makeApi(tmpDir, { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Core Team", + teamToken: "team-secret", + }, + }, + })); + }); + + afterEach(async () => { + await service?.stop?.(); + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it("should start hub service and expose info/join skeleton routes", async () => { + await service.start(); + + const dbPath = path.join(tmpDir, "memos-local", "memos.db"); + const store = new SqliteStore(dbPath, { info: () => {}, warn: () => {}, error: () => {}, debug: () => {} }); + const admins = store.listHubUsers().filter((user) => user.role === "admin" && user.status === "active"); + expect(admins.length).toBeGreaterThanOrEqual(1); + store.close(); + + const info = await fetch(`http://127.0.0.1:${port}/api/v1/hub/info`); + expect(info.status).toBe(200); + const infoJson = await info.json(); + expect(infoJson.teamName).toBe("Core Team"); + expect(infoJson.apiVersion).toBe("v1"); + + const me = await fetch(`http://127.0.0.1:${port}/api/v1/hub/me`); + 
expect(me.status).toBe(401); + + const join = await fetch(`http://127.0.0.1:${port}/api/v1/hub/join`, { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + username: "bob", + deviceName: "Bob Mac", + teamToken: "team-secret", + }), + }); + expect(join.status).toBe(202); + const joinJson = await join.json(); + expect(joinJson.status).toBe("pending"); + expect(joinJson.userId).toBeTruthy(); + }); + + it("should reject forged admin tokens derived from the team token", async () => { + await service.start(); + + const forged = issueUserToken( + { + userId: "forged-admin", + username: "mallory", + role: "admin", + status: "active", + }, + "team-secret", + 60_000, + ); + + const res = await fetch(`http://127.0.0.1:${port}/api/v1/hub/admin/pending-users`, { + headers: { + authorization: `Bearer ${forged}`, + }, + }); + + expect([401, 403]).toContain(res.status); + }); +}); + describe("plugin-impl owner isolation", () => { let tmpDir: string; let tools: Map; From 69b96c0b92d42eabee6cdaee3a26a7856208a008 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 15:18:26 +0800 Subject: [PATCH 06/85] feat(memos-local): add openclaw fallback guards --- apps/memos-local-openclaw/src/config.ts | 21 +++- .../src/embedding/index.ts | 5 + .../src/ingest/providers/index.ts | 40 +++++-- apps/memos-local-openclaw/src/types.ts | 7 +- .../memos-local-openclaw/src/viewer/server.ts | 58 ++++++---- .../tests/integration.test.ts | 101 ++++++++++++++++++ .../tests/viewer-config.test.ts | 40 +++++++ 7 files changed, 238 insertions(+), 34 deletions(-) create mode 100644 apps/memos-local-openclaw/tests/viewer-config.test.ts diff --git a/apps/memos-local-openclaw/src/config.ts b/apps/memos-local-openclaw/src/config.ts index 1399628d2..fec97a367 100644 --- a/apps/memos-local-openclaw/src/config.ts +++ b/apps/memos-local-openclaw/src/config.ts @@ -27,6 +27,10 @@ export function resolveConfig(raw: Partial | undefined, stateD const telemetryEnabled = 
cfg.telemetry?.enabled ?? (telemetryEnvVar === "false" || telemetryEnvVar === "0" ? false : true); + const sharingCapabilities = { + hostEmbedding: cfg.sharing?.capabilities?.hostEmbedding ?? false, + hostCompletion: cfg.sharing?.capabilities?.hostCompletion ?? false, + }; return { ...cfg, @@ -54,6 +58,18 @@ export function resolveConfig(raw: Partial | undefined, stateD posthogApiKey: cfg.telemetry?.posthogApiKey ?? process.env.POSTHOG_API_KEY ?? "", posthogHost: cfg.telemetry?.posthogHost ?? process.env.POSTHOG_HOST ?? "", }, + summarizer: cfg.summarizer + ? { + ...cfg.summarizer, + capabilities: sharingCapabilities, + } + : undefined, + embedding: cfg.embedding + ? { + ...cfg.embedding, + capabilities: sharingCapabilities, + } + : undefined, sharing: { enabled: cfg.sharing?.enabled ?? false, role: cfg.sharing?.role ?? "client", @@ -66,10 +82,7 @@ export function resolveConfig(raw: Partial | undefined, stateD hubAddress: cfg.sharing?.client?.hubAddress ?? "", userToken: cfg.sharing?.client?.userToken ?? "", }, - capabilities: { - hostEmbedding: cfg.sharing?.capabilities?.hostEmbedding ?? false, - hostCompletion: cfg.sharing?.capabilities?.hostCompletion ?? false, - }, + capabilities: sharingCapabilities, }, }; } diff --git a/apps/memos-local-openclaw/src/embedding/index.ts b/apps/memos-local-openclaw/src/embedding/index.ts index aa511dcb3..7fc1e55c5 100644 --- a/apps/memos-local-openclaw/src/embedding/index.ts +++ b/apps/memos-local-openclaw/src/embedding/index.ts @@ -13,6 +13,9 @@ export class Embedder { ) {} get provider(): string { + if (this.cfg?.provider === "openclaw" && this.cfg.capabilities?.hostEmbedding !== true) { + return "local"; + } return this.cfg?.provider ?? 
"local"; } @@ -61,6 +64,8 @@ export class Embedder { return await embedMistral(texts, cfg!, this.log); case "voyage": return await embedVoyage(texts, cfg!, this.log); + case "openclaw": + throw new Error("OpenClaw host embedding is not available in this sidecar build"); case "local": default: return await embedLocal(texts, this.log); diff --git a/apps/memos-local-openclaw/src/ingest/providers/index.ts b/apps/memos-local-openclaw/src/ingest/providers/index.ts index de958075a..c02c8198a 100644 --- a/apps/memos-local-openclaw/src/ingest/providers/index.ts +++ b/apps/memos-local-openclaw/src/ingest/providers/index.ts @@ -12,8 +12,18 @@ export class Summarizer { private log: Logger, ) {} - async summarize(text: string): Promise { + private get provider(): SummarizerConfig["provider"] | undefined { if (!this.cfg) { + return undefined; + } + if (this.cfg.provider === "openclaw" && this.cfg.capabilities?.hostCompletion !== true) { + return undefined; + } + return this.cfg.provider; + } + + async summarize(text: string): Promise { + if (!this.provider) { return ruleFallback(text); } @@ -26,7 +36,7 @@ export class Summarizer { } async summarizeTask(text: string): Promise { - if (!this.cfg) { + if (!this.provider) { return taskFallback(text); } @@ -40,7 +50,7 @@ export class Summarizer { private async callProvider(text: string): Promise { const cfg = this.cfg!; - switch (cfg.provider) { + switch (this.provider) { case "openai": case "openai_compatible": return summarizeOpenAI(text, cfg, this.log); @@ -52,6 +62,8 @@ export class Summarizer { return summarizeOpenAI(text, cfg, this.log); case "bedrock": return summarizeBedrock(text, cfg, this.log); + case "openclaw": + throw new Error("OpenClaw host completion is not available in this sidecar build"); default: throw new Error(`Unknown summarizer provider: ${cfg.provider}`); } @@ -63,7 +75,7 @@ export class Summarizer { * Returns null if no summarizer is configured (caller should fall back to heuristic). 
*/ async judgeNewTopic(currentContext: string, newMessage: string): Promise { - if (!this.cfg) return null; + if (!this.provider) return null; try { return await this.callTopicJudge(currentContext, newMessage); @@ -75,7 +87,7 @@ export class Summarizer { private async callTopicJudge(currentContext: string, newMessage: string): Promise { const cfg = this.cfg!; - switch (cfg.provider) { + switch (this.provider) { case "openai": case "openai_compatible": case "azure_openai": @@ -86,6 +98,8 @@ export class Summarizer { return judgeNewTopicGemini(currentContext, newMessage, cfg, this.log); case "bedrock": return judgeNewTopicBedrock(currentContext, newMessage, cfg, this.log); + case "openclaw": + throw new Error("OpenClaw host completion is not available in this sidecar build"); default: throw new Error(`Unknown summarizer provider: ${cfg.provider}`); } @@ -99,7 +113,7 @@ export class Summarizer { query: string, candidates: Array<{ index: number; summary: string; role: string }>, ): Promise { - if (!this.cfg) return null; + if (!this.provider) return null; if (candidates.length === 0) return { relevant: [], sufficient: true }; try { @@ -115,7 +129,7 @@ export class Summarizer { candidates: Array<{ index: number; summary: string; role: string }>, ): Promise { const cfg = this.cfg!; - switch (cfg.provider) { + switch (this.provider) { case "openai": case "openai_compatible": case "azure_openai": @@ -126,6 +140,8 @@ export class Summarizer { return filterRelevantGemini(query, candidates, cfg, this.log); case "bedrock": return filterRelevantBedrock(query, candidates, cfg, this.log); + case "openclaw": + throw new Error("OpenClaw host completion is not available in this sidecar build"); default: throw new Error(`Unknown summarizer provider: ${cfg.provider}`); } @@ -139,7 +155,7 @@ export class Summarizer { newSummary: string, candidates: Array<{ index: number; summary: string; chunkId: string }>, ): Promise { - if (!this.cfg) return null; + if (!this.provider) return null; 
if (candidates.length === 0) return null; try { @@ -155,7 +171,7 @@ export class Summarizer { candidates: Array<{ index: number; summary: string; chunkId: string }>, ): Promise { const cfg = this.cfg!; - switch (cfg.provider) { + switch (this.provider) { case "openai": case "openai_compatible": case "azure_openai": @@ -166,6 +182,8 @@ export class Summarizer { return judgeDedupGemini(newSummary, candidates, cfg, this.log); case "bedrock": return judgeDedupBedrock(newSummary, candidates, cfg, this.log); + case "openclaw": + throw new Error("OpenClaw host completion is not available in this sidecar build"); default: throw new Error(`Unknown summarizer provider: ${cfg.provider}`); } @@ -173,7 +191,7 @@ export class Summarizer { private async callTaskProvider(text: string): Promise { const cfg = this.cfg!; - switch (cfg.provider) { + switch (this.provider) { case "openai": case "openai_compatible": case "azure_openai": @@ -184,6 +202,8 @@ export class Summarizer { return summarizeTaskGemini(text, cfg, this.log); case "bedrock": return summarizeTaskBedrock(text, cfg, this.log); + case "openclaw": + throw new Error("OpenClaw host completion is not available in this sidecar build"); default: throw new Error(`Unknown summarizer provider: ${cfg.provider}`); } diff --git a/apps/memos-local-openclaw/src/types.ts b/apps/memos-local-openclaw/src/types.ts index b759707e7..5aac2d536 100644 --- a/apps/memos-local-openclaw/src/types.ts +++ b/apps/memos-local-openclaw/src/types.ts @@ -151,7 +151,8 @@ export type SummaryProvider = | "anthropic" | "gemini" | "azure_openai" - | "bedrock"; + | "bedrock" + | "openclaw"; export type EmbeddingProvider = | "openai" @@ -161,7 +162,8 @@ export type EmbeddingProvider = | "cohere" | "mistral" | "voyage" - | "local"; + | "local" + | "openclaw"; export interface ProviderConfig { provider: string; @@ -171,6 +173,7 @@ export interface ProviderConfig { headers?: Record; timeoutMs?: number; temperature?: number; + capabilities?: SharingCapabilities; 
} export interface SummarizerConfig extends ProviderConfig { diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index c96bf5bfe..25f3603db 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -13,7 +13,8 @@ import { vectorSearch } from "../storage/vector"; import { TaskProcessor } from "../ingest/task-processor"; import { RecallEngine } from "../recall/engine"; import { SkillEvolver } from "../skill/evolver"; -import type { Logger, Chunk, PluginContext } from "../types"; +import { resolveConfig } from "../config"; +import type { Logger, Chunk, PluginContext, MemosLocalConfig } from "../types"; import { viewerHTML } from "./html"; import { v4 as uuid } from "uuid"; @@ -832,6 +833,34 @@ export class ViewerServer { return path.join(home, ".openclaw", "openclaw.json"); } + private getPluginEntryConfig(raw: any): Record { + const entries = raw?.plugins?.entries ?? {}; + return entries["memos-local-openclaw-plugin"]?.config + ?? entries["memos-lite-openclaw-plugin"]?.config + ?? entries["memos-lite"]?.config + ?? {}; + } + + private getResolvedViewerConfig(raw?: any): MemosLocalConfig { + const pluginCfg = this.getPluginEntryConfig(raw); + const stateDir = this.ctx?.stateDir ?? 
this.getOpenClawHome(); + return resolveConfig(pluginCfg as Partial, stateDir); + } + + private hasUsableEmbeddingProvider(cfg: MemosLocalConfig): boolean { + const embedding = cfg.embedding; + if (!embedding?.provider) return false; + if (embedding.provider === "openclaw") return false; + return true; + } + + private hasUsableSummarizerProvider(cfg: MemosLocalConfig): boolean { + const summarizer = cfg.summarizer; + if (!summarizer?.provider) return false; + if (summarizer.provider === "openclaw") return false; + return true; + } + private serveConfig(res: http.ServerResponse): void { try { const cfgPath = this.getOpenClawConfigPath(); @@ -841,16 +870,16 @@ export class ViewerServer { } const raw = JSON.parse(fs.readFileSync(cfgPath, "utf-8")); const entries = raw?.plugins?.entries ?? {}; - const pluginEntry = entries["memos-local-openclaw-plugin"]?.config - ?? entries["memos-lite-openclaw-plugin"]?.config - ?? entries["memos-lite"]?.config - ?? {}; - const result: Record = { ...pluginEntry }; + const resolved = this.getResolvedViewerConfig(raw) as Record; + const result: Record = { ...resolved }; + const pluginEntry = this.getPluginEntryConfig(raw); const topEntry = entries["memos-local-openclaw-plugin"] ?? entries["memos-lite-openclaw-plugin"] ?? entries["memos-lite"] ?? {}; - if (pluginEntry.viewerPort == null && topEntry.viewerPort) { + if ((pluginEntry as any).viewerPort != null) { + result.viewerPort = (pluginEntry as any).viewerPort; + } else if (topEntry.viewerPort) { result.viewerPort = topEntry.viewerPort; } this.jsonResponse(res, result); @@ -980,13 +1009,9 @@ export class ViewerServer { if (fs.existsSync(cfgPath)) { try { const raw = JSON.parse(fs.readFileSync(cfgPath, "utf-8")); - const pluginCfg = raw?.plugins?.entries?.["memos-local-openclaw-plugin"]?.config ?? - raw?.plugins?.entries?.["memos-lite"]?.config ?? - raw?.plugins?.entries?.["memos-lite-openclaw-plugin"]?.config ?? 
{}; - const emb = pluginCfg.embedding; - hasEmbedding = !!(emb && emb.provider); - const sum = pluginCfg.summarizer; - hasSummarizer = !!(sum && sum.provider); + const resolved = this.getResolvedViewerConfig(raw); + hasEmbedding = this.hasUsableEmbeddingProvider(resolved); + hasSummarizer = this.hasUsableSummarizerProvider(resolved); } catch { /* ignore */ } } @@ -1164,10 +1189,7 @@ export class ViewerServer { let summarizerCfg: any; try { const raw = JSON.parse(fs.readFileSync(cfgPath, "utf-8")); - const pluginCfg = raw?.plugins?.entries?.["memos-local-openclaw-plugin"]?.config ?? - raw?.plugins?.entries?.["memos-lite"]?.config ?? - raw?.plugins?.entries?.["memos-lite-openclaw-plugin"]?.config ?? {}; - summarizerCfg = pluginCfg.summarizer; + summarizerCfg = this.getResolvedViewerConfig(raw).summarizer; } catch { /* no config */ } const summarizer = new Summarizer(summarizerCfg, this.log); diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index d70ef9e27..c8034d7a4 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -4,6 +4,16 @@ import * as path from "path"; import * as os from "os"; import { initPlugin, type MemosLocalPlugin } from "../src/index"; import { buildContext, resolveConfig } from "../src/config"; +import { Embedder } from "../src/embedding"; +import { Summarizer } from "../src/ingest/providers"; +import { ViewerServer } from "../src/viewer/server"; + +const testLog = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; let plugin: MemosLocalPlugin; let tmpDir: string; @@ -98,6 +108,12 @@ describe("Integration: v4 types and config foundation", () => { stateDir, process.cwd(), { + summarizer: { + provider: "openclaw", + }, + embedding: { + provider: "openclaw", + }, sharing: { enabled: true, role: "hub", @@ -124,6 +140,8 @@ describe("Integration: v4 types and config foundation", () => { 
expect(ctx.config.sharing.client.userToken).toBe("user-secret"); expect(ctx.config.sharing.capabilities.hostEmbedding).toBe(true); expect(ctx.config.sharing.capabilities.hostCompletion).toBe(true); + expect(ctx.config.embedding?.capabilities?.hostEmbedding).toBe(true); + expect(ctx.config.summarizer?.capabilities?.hostCompletion).toBe(true); } finally { if (prevTeamToken === undefined) delete process.env.MEMOS_TEAM_TOKEN; else process.env.MEMOS_TEAM_TOKEN = prevTeamToken; @@ -134,6 +152,89 @@ describe("Integration: v4 types and config foundation", () => { fs.rmSync(stateDir, { recursive: true, force: true }); } }); + + it("should fall back safely when openclaw provider is configured without host capability flags", async () => { + const embedder = new Embedder({ provider: "openclaw" } as any, testLog as any); + const summarizer = new Summarizer({ provider: "openclaw" } as any, testLog as any); + const input = "OpenClaw fallback summary line stays local and safe."; + + expect(embedder.provider).toBe("local"); + expect(embedder.dimensions).toBe(384); + await expect(summarizer.summarize(input)).resolves.toBe(input); + await expect(summarizer.summarizeTask(input)).resolves.toBe(input); + await expect(summarizer.judgeNewTopic("current topic", "new message")).resolves.toBeNull(); + }); + + it("should apply the same capability-aware resolution in viewer config consumers", () => { + const homeDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-viewer-home-")); + const stateDir = path.join(homeDir, ".openclaw"); + const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-viewer-data-")); + const cfgPath = path.join(stateDir, "openclaw.json"); + const prevHome = process.env.HOME; + const prevUserProfile = process.env.USERPROFILE; + + fs.mkdirSync(stateDir, { recursive: true }); + fs.writeFileSync(cfgPath, JSON.stringify({ + plugins: { + entries: { + "memos-local-openclaw-plugin": { + enabled: true, + config: { + embedding: { provider: "openclaw" }, + summarizer: { provider: 
"openclaw" }, + sharing: { + capabilities: { + hostEmbedding: false, + hostCompletion: false, + }, + }, + }, + }, + }, + }, + }), "utf-8"); + + process.env.HOME = homeDir; + if (prevUserProfile !== undefined) delete process.env.USERPROFILE; + + try { + const viewer = new ViewerServer({ + store: {} as any, + embedder: { provider: "local" } as any, + port: 0, + log: testLog as any, + dataDir, + ctx: buildContext(stateDir, process.cwd(), undefined, testLog as any), + }); + + const unavailable = (viewer as any).getResolvedViewerConfig(JSON.parse(fs.readFileSync(cfgPath, "utf-8"))); + expect((viewer as any).hasUsableEmbeddingProvider(unavailable)).toBe(false); + expect((viewer as any).hasUsableSummarizerProvider(unavailable)).toBe(false); + + const available = resolveConfig({ + embedding: { provider: "openclaw" }, + summarizer: { provider: "openclaw" }, + sharing: { + capabilities: { + hostEmbedding: true, + hostCompletion: true, + }, + }, + } as any, stateDir); + + expect((viewer as any).hasUsableEmbeddingProvider(available)).toBe(false); + expect((viewer as any).hasUsableSummarizerProvider(available)).toBe(false); + expect(available.summarizer?.capabilities?.hostCompletion).toBe(true); + } finally { + if (prevHome === undefined) delete process.env.HOME; + else process.env.HOME = prevHome; + + if (prevUserProfile !== undefined) process.env.USERPROFILE = prevUserProfile; + + fs.rmSync(homeDir, { recursive: true, force: true }); + fs.rmSync(dataDir, { recursive: true, force: true }); + } + }); }); describe("Integration: memory_search", () => { diff --git a/apps/memos-local-openclaw/tests/viewer-config.test.ts b/apps/memos-local-openclaw/tests/viewer-config.test.ts new file mode 100644 index 000000000..08d536262 --- /dev/null +++ b/apps/memos-local-openclaw/tests/viewer-config.test.ts @@ -0,0 +1,40 @@ +import { afterEach, describe, expect, it } from "vitest"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { SqliteStore } from 
"../src/storage/sqlite"; +import { ViewerServer } from "../src/viewer/server"; + +const noopLog = { debug: () => {}, info: () => {}, warn: () => {}, error: () => {} }; + +let tmpDir = ""; +let store: SqliteStore | null = null; + +afterEach(() => { + store?.close(); + store = null; + if (tmpDir) fs.rmSync(tmpDir, { recursive: true, force: true }); + tmpDir = ""; +}); + +describe("viewer config gating", () => { + it("should not report openclaw providers as viewer-usable in sidecar mode", () => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-viewer-config-")); + store = new SqliteStore(path.join(tmpDir, "test.db"), noopLog); + const viewer = new ViewerServer({ + store, + embedder: { provider: "local" } as any, + port: 19999, + log: noopLog, + dataDir: tmpDir, + }); + + const cfg = { + embedding: { provider: "openclaw", capabilities: { hostEmbedding: true } }, + summarizer: { provider: "openclaw", capabilities: { hostCompletion: true } }, + } as any; + + expect((viewer as any).hasUsableEmbeddingProvider(cfg)).toBe(false); + expect((viewer as any).hasUsableSummarizerProvider(cfg)).toBe(false); + }); +}); From c1d81941bfd4ecbdef95a9261a39fa0c76d10f80 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 15:37:46 +0800 Subject: [PATCH 07/85] feat(memos-local): add hub search and task share endpoints --- apps/memos-local-openclaw/src/hub/server.ts | 61 ++++++++ .../src/storage/sqlite.ts | 44 ++++++ .../tests/hub-server.test.ts | 141 ++++++++++++++++++ 3 files changed, 246 insertions(+) diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index bbe3e12ea..aa67a54ee 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -22,6 +22,7 @@ type HubAuthState = { export class HubServer { private server?: http.Server; + private remoteHitMap = new Map(); private readonly userManager: HubUserManager; private readonly authStatePath: string; private authState: 
HubAuthState; @@ -171,6 +172,66 @@ export class HubServer { return this.json(res, 200, { status: "active", token }); } + if (req.method === "POST" && path === "/api/v1/hub/tasks/share") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const body = await this.readJson(req); + if (!body?.task) return this.json(res, 400, { error: "invalid_payload" }); + const task = { ...body.task, sourceUserId: auth.userId }; + this.opts.store.upsertHubTask(task); + for (const chunk of Array.isArray(body.chunks) ? body.chunks : []) { + this.opts.store.upsertHubChunk({ ...chunk, sourceUserId: auth.userId }); + } + return this.json(res, 200, { ok: true, chunks: Array.isArray(body.chunks) ? body.chunks.length : 0 }); + } + + if (req.method === "POST" && path === "/api/v1/hub/tasks/unshare") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const body = await this.readJson(req); + this.opts.store.deleteHubTaskBySource(auth.userId, String(body.sourceTaskId)); + return this.json(res, 200, { ok: true }); + } + + if (req.method === "POST" && path === "/api/v1/hub/search") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const body = await this.readJson(req); + const hits = this.opts.store.searchHubChunks(String(body.query || ""), { userId: auth.userId, maxResults: Number(body.maxResults || 10) }) + .map(({ hit, rank }) => { + const remoteHitId = randomUUID(); + this.remoteHitMap.set(remoteHitId, { chunkId: hit.id, expiresAt: Date.now() + 10 * 60 * 1000, requesterUserId: auth.userId }); + return { + remoteHitId, + summary: hit.summary, + excerpt: hit.content.slice(0, 240), + hubRank: rank, + taskTitle: hit.task_title, + ownerName: hit.owner_name || "unknown", + groupName: hit.group_name, + visibility: hit.visibility, + source: { ts: hit.created_at, role: hit.role }, + }; + }); + return this.json(res, 200, { 
hits, meta: { totalCandidates: hits.length, searchedGroups: [], includedPublic: true } }); + } + + if (req.method === "POST" && path === "/api/v1/hub/memory-detail") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const body = await this.readJson(req); + const hit = this.remoteHitMap.get(String(body.remoteHitId)); + if (!hit || hit.expiresAt < Date.now()) return this.json(res, 404, { error: "not_found" }); + if (hit.requesterUserId !== auth.userId) return this.json(res, 403, { error: "forbidden" }); + const chunk = this.opts.store.getHubChunkById(hit.chunkId); + if (!chunk) return this.json(res, 404, { error: "not_found" }); + return this.json(res, 200, { + content: chunk.content, + summary: chunk.summary, + source: { ts: chunk.createdAt, role: chunk.role }, + }); + } + return this.json(res, 404, { error: "not_found" }); } diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index 0031faf1e..77cd0ef99 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -1573,6 +1573,36 @@ export class SqliteStore { return out; } + searchHubChunks(query: string, options?: { userId?: string; maxResults?: number }): Array<{ hit: HubSearchRow; rank: number }> { + const limit = options?.maxResults ?? 10; + const userId = options?.userId ?? ""; + const rows = this.db.prepare(` + SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, hg.name as group_name, hu.username as owner_name, + bm25(hub_chunks_fts) as rank + FROM hub_chunks_fts f + JOIN hub_chunks hc ON hc.rowid = f.rowid + JOIN hub_tasks ht ON ht.id = hc.hub_task_id + LEFT JOIN hub_groups hg ON hg.id = ht.group_id + LEFT JOIN hub_users hu ON hu.id = ht.source_user_id + WHERE hub_chunks_fts MATCH ? 
+ AND ( + ht.visibility = 'public' + OR EXISTS ( + SELECT 1 FROM hub_group_members gm + WHERE gm.group_id = ht.group_id AND gm.user_id = ? + ) + ) + ORDER BY rank + LIMIT ? + `).all(sanitizeFtsQuery(query), userId, limit) as HubSearchRow[]; + return rows.map((row, idx) => ({ hit: row, rank: idx + 1 })); + } + + getHubChunkById(chunkId: string): HubChunkRecord | null { + const row = this.db.prepare('SELECT * FROM hub_chunks WHERE id = ?').get(chunkId) as HubChunkRow | undefined; + return row ? rowToHubChunk(row) : null; + } + deleteHubSkillBySource(sourceUserId: string, sourceSkillId: string): void { this.db.prepare('DELETE FROM hub_skills WHERE source_user_id = ? AND source_skill_id = ?').run(sourceUserId, sourceSkillId); } @@ -1991,6 +2021,20 @@ function rowToHubSkill(row: HubSkillRow): HubSkillRecord { } +interface HubSearchRow { + id: string; + content: string; + summary: string; + role: string; + created_at: number; + task_title: string | null; + visibility: string; + group_name: string | null; + owner_name: string | null; + rank: number; +} + + function contentHash(content: string): string { return createHash("sha256").update(content).digest("hex").slice(0, 16); } diff --git a/apps/memos-local-openclaw/tests/hub-server.test.ts b/apps/memos-local-openclaw/tests/hub-server.test.ts index 7b2f8029a..4b72ac4b9 100644 --- a/apps/memos-local-openclaw/tests/hub-server.test.ts +++ b/apps/memos-local-openclaw/tests/hub-server.test.ts @@ -155,3 +155,144 @@ describe("hub server", () => { await expect(server2.start()).rejects.toThrow(); }); }); + +describe("hub search pipeline", () => { + it("should scope search/detail and ignore spoofed sourceUserId", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-scope-")); + dirs.push(dir); + const store = new SqliteStore(path.join(dir, "test.db"), noopLog); + stores.push(store); + + const server = new HubServer({ + store, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 
18916, teamName: "Scope", teamToken: "scope-secret" } } }, + dataDir: dir, + } as any); + servers.push(server); + await server.start(); + + const authPath = path.join(dir, "hub-auth.json"); + const adminState = JSON.parse(fs.readFileSync(authPath, "utf8")); + const adminToken = adminState.bootstrapAdminToken; + + const joinA = await fetch("http://127.0.0.1:18916/api/v1/hub/join", { method: "POST", headers: { "content-type": "application/json" }, body: JSON.stringify({ username: "alice", deviceName: "A", teamToken: "scope-secret" }) }); + const joinB = await fetch("http://127.0.0.1:18916/api/v1/hub/join", { method: "POST", headers: { "content-type": "application/json" }, body: JSON.stringify({ username: "bob", deviceName: "B", teamToken: "scope-secret" }) }); + const userA = await joinA.json(); + const userB = await joinB.json(); + + const approveA = await fetch("http://127.0.0.1:18916/api/v1/hub/admin/approve-user", { method: "POST", headers: { "content-type": "application/json", authorization: `Bearer ${adminToken}` }, body: JSON.stringify({ userId: userA.userId, username: "alice" }) }); + const approveB = await fetch("http://127.0.0.1:18916/api/v1/hub/admin/approve-user", { method: "POST", headers: { "content-type": "application/json", authorization: `Bearer ${adminToken}` }, body: JSON.stringify({ userId: userB.userId, username: "bob" }) }); + const tokenA = (await approveA.json()).token; + const tokenB = (await approveB.json()).token; + + store.upsertHubGroup({ id: "group-1", name: "Backend", description: "backend", createdAt: 1 }); + store.addHubGroupMember("group-1", userA.userId, 1); + + const shareA = await fetch("http://127.0.0.1:18916/api/v1/hub/tasks/share", { + method: "POST", + headers: { "content-type": "application/json", authorization: `Bearer ${tokenA}` }, + body: JSON.stringify({ + task: { id: "hub-task-a", sourceTaskId: "task-a", sourceUserId: "spoof-user", title: "Group Task", summary: "group summary", groupId: "group-1", visibility: "group", 
createdAt: 1, updatedAt: 1 }, + chunks: [{ id: "hub-chunk-a", hubTaskId: "hub-task-a", sourceTaskId: "task-a", sourceChunkId: "chunk-a", sourceUserId: "spoof-user", role: "assistant", content: "secret backend nginx config", summary: "secret backend nginx", kind: "paragraph", createdAt: 2 }], + }), + }); + expect(shareA.status).toBe(200); + const storedTask = store.getHubTaskBySource(userA.userId, "task-a"); + expect(storedTask).not.toBeNull(); + + const searchA = await fetch("http://127.0.0.1:18916/api/v1/hub/search", { method: "POST", headers: { "content-type": "application/json", authorization: `Bearer ${tokenA}` }, body: JSON.stringify({ query: "backend nginx", maxResults: 5 }) }); + const searchB = await fetch("http://127.0.0.1:18916/api/v1/hub/search", { method: "POST", headers: { "content-type": "application/json", authorization: `Bearer ${tokenB}` }, body: JSON.stringify({ query: "backend nginx", maxResults: 5 }) }); + const jsonA = await searchA.json(); + const jsonB = await searchB.json(); + expect(jsonA.hits.length).toBeGreaterThan(0); + expect(jsonB.hits).toEqual([]); + + const detailA = await fetch("http://127.0.0.1:18916/api/v1/hub/memory-detail", { method: "POST", headers: { "content-type": "application/json", authorization: `Bearer ${tokenA}` }, body: JSON.stringify({ remoteHitId: jsonA.hits[0].remoteHitId }) }); + expect(detailA.status).toBe(200); + const detailB = await fetch("http://127.0.0.1:18916/api/v1/hub/memory-detail", { method: "POST", headers: { "content-type": "application/json", authorization: `Bearer ${tokenB}` }, body: JSON.stringify({ remoteHitId: jsonA.hits[0].remoteHitId }) }); + expect([403, 404]).toContain(detailB.status); + }); + + it("should accept shared task content and return searchable hits with details", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-search-")); + dirs.push(dir); + const store = new SqliteStore(path.join(dir, "test.db"), noopLog); + stores.push(store); + + const server = new 
HubServer({ + store, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18915, teamName: "Search", teamToken: "search-secret" } } }, + dataDir: dir, + } as any); + servers.push(server); + await server.start(); + + const authPath = path.join(dir, "hub-auth.json"); + const state = JSON.parse(fs.readFileSync(authPath, "utf8")); + const token = state.bootstrapAdminToken; + + const shareRes = await fetch("http://127.0.0.1:18915/api/v1/hub/tasks/share", { + method: "POST", + headers: { + "content-type": "application/json", + authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ + task: { + id: "hub-task-1", + sourceTaskId: "task-1", + sourceUserId: "user-1", + title: "Deploy Nginx", + summary: "deploy nginx summary", + groupId: null, + visibility: "public", + createdAt: 1, + updatedAt: 1, + }, + chunks: [ + { + id: "hub-chunk-1", + hubTaskId: "hub-task-1", + sourceTaskId: "task-1", + sourceChunkId: "chunk-1", + sourceUserId: "user-1", + role: "assistant", + content: "Use nginx upstream and proxy_pass to port 3000", + summary: "nginx upstream to port 3000", + kind: "paragraph", + createdAt: 2, + }, + ], + }), + }); + expect(shareRes.status).toBe(200); + + const searchRes = await fetch("http://127.0.0.1:18915/api/v1/hub/search", { + method: "POST", + headers: { + "content-type": "application/json", + authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ query: "nginx upstream 3000", scope: "all", maxResults: 5 }), + }); + expect(searchRes.status).toBe(200); + const searchJson = await searchRes.json(); + expect(searchJson.hits.length).toBeGreaterThan(0); + expect(searchJson.hits[0].remoteHitId).toBeTruthy(); + expect(searchJson.hits[0].taskTitle).toBe("Deploy Nginx"); + + const detailRes = await fetch("http://127.0.0.1:18915/api/v1/hub/memory-detail", { + method: "POST", + headers: { + "content-type": "application/json", + authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ remoteHitId: 
searchJson.hits[0].remoteHitId }), + }); + expect(detailRes.status).toBe(200); + const detailJson = await detailRes.json(); + expect(detailJson.content).toContain("proxy_pass"); + }); +}); From 5be73dd4cf5ffbb16d60484bcf0a7e76c03f53de Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 15:39:09 +0800 Subject: [PATCH 08/85] feat(memos-local): add minimal task share tools --- apps/memos-local-openclaw/index.ts | 131 +++++++++++ apps/memos-local-openclaw/src/client/hub.ts | 71 ++++++ .../tests/integration.test.ts | 208 ++++++++++++++++++ 3 files changed, 410 insertions(+) create mode 100644 apps/memos-local-openclaw/src/client/hub.ts diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 323149df2..191172824 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -18,6 +18,7 @@ import { captureMessages, stripInboundMetadata } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; import { HubServer } from "./src/hub/server"; +import { hubRequestJson, resolveHubClient } from "./src/client/hub"; import { SkillEvolver } from "./src/skill/evolver"; import { SkillInstaller } from "./src/skill/installer"; import { Summarizer } from "./src/ingest/providers"; @@ -556,6 +557,136 @@ const memosLocalPlugin = { { name: "task_summary" }, ); + // ─── Tool: task_share ─── + + api.registerTool( + { + name: "task_share", + label: "Task Share", + description: + "Share one existing local task and its chunks to the configured hub. 
" + + "Minimal MVP path for validating team task sharing.", + parameters: Type.Object({ + taskId: Type.String({ description: "Local task ID to share" }), + visibility: Type.Optional(Type.String({ description: "Share visibility: 'public' (default) or 'group'" })), + groupId: Type.Optional(Type.String({ description: "Optional group ID when visibility='group'" })), + }), + execute: trackTool("task_share", async (_toolCallId: any, params: any) => { + const { taskId, visibility: rawVisibility, groupId } = params as { + taskId: string; + visibility?: string; + groupId?: string; + }; + + const task = store.getTask(taskId); + if (!task) { + return { + content: [{ type: "text", text: `Task not found: ${taskId}` }], + details: { error: "not_found", taskId }, + }; + } + + const chunks = store.getChunksByTask(taskId); + if (chunks.length === 0) { + return { + content: [{ type: "text", text: `Task ${taskId} has no chunks to share.` }], + details: { error: "no_chunks", taskId }, + }; + } + + const visibility = rawVisibility === "group" ? "group" : "public"; + const hubClient = await resolveHubClient(store, ctx); + const { v4: uuidv4 } = require("uuid"); + const hubTaskId = uuidv4(); + + const response = await hubRequestJson(hubClient.hubUrl, hubClient.userToken, "/api/v1/hub/tasks/share", { + method: "POST", + body: JSON.stringify({ + task: { + id: hubTaskId, + sourceTaskId: task.id, + sourceUserId: hubClient.userId, + title: task.title, + summary: task.summary, + groupId: visibility === "group" ? (groupId ?? 
null) : null, + visibility, + createdAt: task.startedAt, + updatedAt: task.updatedAt, + }, + chunks: chunks.map((chunk) => ({ + id: uuidv4(), + hubTaskId, + sourceTaskId: task.id, + sourceChunkId: chunk.id, + sourceUserId: hubClient.userId, + role: chunk.role, + content: chunk.content, + summary: chunk.summary, + kind: chunk.kind, + createdAt: chunk.createdAt, + })), + }), + }) as any; + + return { + content: [{ type: "text", text: `Shared task "${task.title}" with ${chunks.length} chunks to the hub.` }], + details: { + shared: true, + taskId: task.id, + visibility, + chunkCount: chunks.length, + hubUrl: hubClient.hubUrl, + response, + }, + }; + }), + }, + { name: "task_share" }, + ); + + // ─── Tool: task_unshare ─── + + api.registerTool( + { + name: "task_unshare", + label: "Task Unshare", + description: "Remove one previously shared task from the configured hub.", + parameters: Type.Object({ + taskId: Type.String({ description: "Local task ID to unshare" }), + }), + execute: trackTool("task_unshare", async (_toolCallId: any, params: any) => { + const { taskId } = params as { taskId: string }; + + const task = store.getTask(taskId); + if (!task) { + return { + content: [{ type: "text", text: `Task not found: ${taskId}` }], + details: { error: "not_found", taskId }, + }; + } + + const hubClient = await resolveHubClient(store, ctx); + await hubRequestJson(hubClient.hubUrl, hubClient.userToken, "/api/v1/hub/tasks/unshare", { + method: "POST", + body: JSON.stringify({ + sourceUserId: hubClient.userId, + sourceTaskId: task.id, + }), + }); + + return { + content: [{ type: "text", text: `Unshared task "${task.title}" from the hub.` }], + details: { + unshared: true, + taskId: task.id, + hubUrl: hubClient.hubUrl, + }, + }; + }), + }, + { name: "task_unshare" }, + ); + // ─── Tool: skill_get ─── api.registerTool( diff --git a/apps/memos-local-openclaw/src/client/hub.ts b/apps/memos-local-openclaw/src/client/hub.ts new file mode 100644 index 000000000..1157ccdc7 --- 
/dev/null +++ b/apps/memos-local-openclaw/src/client/hub.ts @@ -0,0 +1,71 @@ +import type { PluginContext } from "../types"; +import type { SqliteStore } from "../storage/sqlite"; + +export interface ResolvedHubClient { + hubUrl: string; + userToken: string; + userId: string; + username: string; + role: string; +} + +export async function resolveHubClient(store: SqliteStore, ctx: PluginContext): Promise { + const persisted = store.getClientHubConnection() as any; + if (persisted?.hubUrl && persisted?.userToken) { + return { + hubUrl: normalizeHubUrl(String(persisted.hubUrl)), + userToken: String(persisted.userToken), + userId: String(persisted.userId), + username: String(persisted.username ?? ""), + role: String(persisted.role ?? "member"), + }; + } + + const hubAddress = ctx.config.sharing?.client?.hubAddress ?? ""; + const userToken = ctx.config.sharing?.client?.userToken ?? ""; + if (!hubAddress || !userToken) { + throw new Error("hub client connection is not configured"); + } + + const hubUrl = normalizeHubUrl(hubAddress); + const me = await hubRequestJson(hubUrl, userToken, "/api/v1/hub/me", { method: "GET" }) as any; + + return { + hubUrl, + userToken, + userId: String(me.id), + username: String(me.username ?? ""), + role: String(me.role ?? "member"), + }; +} + +export async function hubRequestJson( + hubUrl: string, + userToken: string, + route: string, + init: RequestInit = {}, +): Promise { + const res = await fetch(`${normalizeHubUrl(hubUrl)}${route}`, { + ...init, + headers: { + authorization: `Bearer ${userToken}`, + ...(init.body ? { "content-type": "application/json" } : {}), + ...(init.headers ?? 
{}), + }, + }); + + if (!res.ok) { + const body = await res.text(); + throw new Error(`hub request failed (${res.status}): ${body || res.statusText}`); + } + + if (res.status === 204) return null; + return res.json(); +} + +export function normalizeHubUrl(hubAddress: string): string { + const trimmed = hubAddress.trim().replace(/\/+$/, ""); + if (!trimmed) return ""; + if (/^https?:\/\//i.test(trimmed)) return trimmed; + return `http://${trimmed}`; +} diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index d70ef9e27..2b86906a4 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -2,8 +2,68 @@ import { describe, it, expect, beforeAll, afterAll } from "vitest"; import * as fs from "fs"; import * as path from "path"; import * as os from "os"; +import memosLocalPlugin from "../index"; import { initPlugin, type MemosLocalPlugin } from "../src/index"; import { buildContext, resolveConfig } from "../src/config"; +import { HubServer } from "../src/hub/server"; +import { SqliteStore } from "../src/storage/sqlite"; + +const noopLog = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +function makePluginApi(stateDir: string, pluginConfig: Record = {}) { + const tools = new Map(); + let service: any; + + const api = { + pluginConfig, + resolvePath(input: string) { + return input === "~/.openclaw" ? 
stateDir : input; + }, + logger: noopLog, + registerTool(def: any) { + tools.set(def.name, def); + }, + registerService(def: any) { + service = def; + }, + on() {}, + } as any; + + memosLocalPlugin.register(api); + return { tools, service }; +} + +function makeTaskChunk(overrides: Record = {}) { + const now = Date.now(); + return { + id: "chunk-local-1", + sessionKey: "session-local-share", + turnId: "turn-local-share", + seq: 0, + role: "user", + content: "Share the Docker rollout checklist with the hub.", + kind: "paragraph", + summary: "Docker rollout checklist", + embedding: null, + taskId: "task-local-1", + skillId: null, + owner: "agent:main", + dedupStatus: "active", + dedupTarget: null, + dedupReason: null, + mergeCount: 0, + lastHitAt: null, + mergeHistory: "[]", + createdAt: now, + updatedAt: now, + ...overrides, + }; +} let plugin: MemosLocalPlugin; let tmpDir: string; @@ -297,6 +357,154 @@ describe("Integration: owner isolation for initPlugin tools", () => { }); }); +describe("Integration: task sharing MVP", () => { + async function setupTaskSharingHarness(opts: { + usePersistedConnection?: boolean; + fallbackHubAddress?: string; + fallbackUserToken?: string; + } = {}) { + const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-share-client-")); + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-share-hub-")); + const port = 19100 + Math.floor(Math.random() * 1000); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + log: noopLog as any, + config: { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Task Share Test", + teamToken: "task-share-secret", + }, + }, + } as any, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userToken = authState.bootstrapAdminToken as string; + const userId = 
authState.bootstrapAdminUserId as string; + + const { tools, service } = makePluginApi(clientDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: opts.fallbackHubAddress ?? `127.0.0.1:${port}`, + userToken: opts.fallbackUserToken ?? userToken, + }, + }, + telemetry: { enabled: false }, + }); + + const clientStore = new SqliteStore(path.join(clientDir, "memos-local", "memos.db"), noopLog as any); + clientStore.insertTask({ + id: "task-local-1", + sessionKey: "session-local-share", + title: "Docker rollout checklist", + summary: "Steps to share the Docker rollout checklist with the team hub", + status: "completed", + owner: "agent:main", + startedAt: 100, + endedAt: 200, + updatedAt: 200, + }); + clientStore.insertChunk(makeTaskChunk()); + clientStore.insertChunk(makeTaskChunk({ + id: "chunk-local-2", + seq: 1, + role: "assistant", + content: "Verify port 8443 and POSTGRES_PASSWORD before deploy.", + summary: "Verify port 8443 and POSTGRES_PASSWORD", + })); + + if (opts.usePersistedConnection) { + clientStore.setClientHubConnection({ + hubUrl: `http://127.0.0.1:${port}`, + userId, + username: "admin", + userToken, + role: "admin", + connectedAt: Date.now(), + }); + } + + return { + clientDir, + hubDir, + port, + userId, + userToken, + tools, + service, + clientStore, + hubStore, + hubServer, + }; + } + + async function teardownTaskSharingHarness(harness: Awaited>) { + await harness.service?.stop?.(); + harness.clientStore.close(); + await harness.hubServer.stop(); + harness.hubStore.close(); + fs.rmSync(harness.clientDir, { recursive: true, force: true }); + fs.rmSync(harness.hubDir, { recursive: true, force: true }); + } + + it("task_share and task_unshare should push and remove a local task via config fallback", async () => { + const harness = await setupTaskSharingHarness(); + + try { + const shareTool = harness.tools.get("task_share"); + const unshareTool = harness.tools.get("task_unshare"); + expect(shareTool).toBeDefined(); + 
expect(unshareTool).toBeDefined(); + + const shareResult = await shareTool.execute("call-share", { taskId: "task-local-1", visibility: "public" }, { agentId: "main" }); + expect(shareResult.details.shared).toBe(true); + expect(shareResult.details.chunkCount).toBe(2); + + const sharedTask = harness.hubStore.getHubTaskBySource(harness.userId, "task-local-1"); + expect(sharedTask).not.toBeNull(); + expect(sharedTask!.title).toBe("Docker rollout checklist"); + + const sharedChunk = harness.hubStore.getHubChunkBySource(harness.userId, "chunk-local-1"); + expect(sharedChunk).not.toBeNull(); + expect(sharedChunk!.summary).toBe("Docker rollout checklist"); + + const unshareResult = await unshareTool.execute("call-unshare", { taskId: "task-local-1" }, { agentId: "main" }); + expect(unshareResult.details.unshared).toBe(true); + expect(harness.hubStore.getHubTaskBySource(harness.userId, "task-local-1")).toBeNull(); + expect(harness.hubStore.getHubChunkBySource(harness.userId, "chunk-local-1")).toBeNull(); + } finally { + await teardownTaskSharingHarness(harness); + } + }); + + it("task_share should prefer persisted hub connection over fallback config", async () => { + const harness = await setupTaskSharingHarness({ + usePersistedConnection: true, + fallbackHubAddress: "127.0.0.1:9", + fallbackUserToken: "bad-token", + }); + + try { + const shareTool = harness.tools.get("task_share"); + const shareResult = await shareTool.execute("call-share", { taskId: "task-local-1", visibility: "public" }, { agentId: "main" }); + + expect(shareResult.details.shared).toBe(true); + expect(harness.hubStore.getHubTaskBySource(harness.userId, "task-local-1")).not.toBeNull(); + } finally { + await teardownTaskSharingHarness(harness); + } + }); +}); + describe("Integration: evidence anti-writeback", () => { it("should not store evidence wrapper blocks in memory", async () => { plugin.onConversationTurn([ From 0140adca32854cc564d3979708b0f5c7b9e92c43 Mon Sep 17 00:00:00 2001 From: jiaqian Date: 
Sun, 8 Mar 2026 15:42:55 +0800 Subject: [PATCH 09/85] feat(memos-local): add client hub connector --- .../src/client/connector.ts | 62 +++++++++++++++ .../tests/client-connector.test.ts | 78 +++++++++++++++++++ 2 files changed, 140 insertions(+) create mode 100644 apps/memos-local-openclaw/src/client/connector.ts create mode 100644 apps/memos-local-openclaw/tests/client-connector.test.ts diff --git a/apps/memos-local-openclaw/src/client/connector.ts b/apps/memos-local-openclaw/src/client/connector.ts new file mode 100644 index 000000000..18ab493e5 --- /dev/null +++ b/apps/memos-local-openclaw/src/client/connector.ts @@ -0,0 +1,62 @@ +import type { MemosLocalConfig } from "../types"; +import type { UserRole, UserStatus } from "../sharing/types"; +import type { SqliteStore } from "../storage/sqlite"; +import { hubRequestJson, normalizeHubUrl } from "./hub"; + +export interface HubSessionInfo { + hubUrl: string; + userId: string; + username: string; + userToken: string; + role: UserRole; + connectedAt: number; +} + +export interface HubStatusInfo { + connected: boolean; + user: null | { id: string; username: string; role: UserRole; status: UserStatus | string }; +} + +export async function connectToHub(store: SqliteStore, config: MemosLocalConfig): Promise { + const hubAddress = config.sharing?.client?.hubAddress ?? ""; + const userToken = config.sharing?.client?.userToken ?? ""; + if (!hubAddress || !userToken) { + throw new Error("hub client connection is not configured"); + } + + const hubUrl = normalizeHubUrl(hubAddress); + const me = await hubRequestJson(hubUrl, userToken, "/api/v1/hub/me", { method: "GET" }) as any; + store.setClientHubConnection({ + hubUrl, + userId: String(me.id), + username: String(me.username ?? ""), + userToken, + role: String(me.role ?? 
"member") as UserRole, + connectedAt: Date.now(), + }); + return store.getClientHubConnection()!; +} + +export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig): Promise { + const conn = store.getClientHubConnection(); + const hubAddress = conn?.hubUrl || config.sharing?.client?.hubAddress || ""; + const userToken = conn?.userToken || config.sharing?.client?.userToken || ""; + if (!hubAddress || !userToken) { + return { connected: false, user: null }; + } + + try { + const me = await hubRequestJson(normalizeHubUrl(hubAddress), userToken, "/api/v1/hub/me", { method: "GET" }) as any; + return { + connected: true, + user: { + id: String(me.id), + username: String(me.username ?? ""), + role: String(me.role ?? "member") as UserRole, + status: String(me.status ?? "active"), + }, + }; + } catch { + return { connected: false, user: null }; + } +} diff --git a/apps/memos-local-openclaw/tests/client-connector.test.ts b/apps/memos-local-openclaw/tests/client-connector.test.ts new file mode 100644 index 000000000..05a2e9095 --- /dev/null +++ b/apps/memos-local-openclaw/tests/client-connector.test.ts @@ -0,0 +1,78 @@ +import { afterEach, describe, expect, it } from "vitest"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { SqliteStore } from "../src/storage/sqlite"; +import { HubServer } from "../src/hub/server"; +import { connectToHub, getHubStatus } from "../src/client/connector"; + +const noopLog = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +const servers: HubServer[] = []; +const stores: SqliteStore[] = []; +const dirs: string[] = []; + +afterEach(async () => { + for (const server of servers.splice(0)) await server.stop(); + for (const store of stores.splice(0)) store.close(); + for (const dir of dirs.splice(0)) fs.rmSync(dir, { recursive: true, force: true }); +}); + +describe("client connector", () => { + it("should connect to hub and persist the resolved user 
session", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-client-connector-")); + dirs.push(dir); + const store = new SqliteStore(path.join(dir, "test.db"), noopLog); + stores.push(store); + + const server = new HubServer({ + store, + log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18917, teamName: "Connector", teamToken: "connector-secret" } } }, + dataDir: dir, + } as any); + servers.push(server); + await server.start(); + + const authState = JSON.parse(fs.readFileSync(path.join(dir, "hub-auth.json"), "utf8")); + const token = authState.bootstrapAdminToken; + + const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-client-state-")); + dirs.push(clientDir); + const clientStore = new SqliteStore(path.join(clientDir, "client.db"), noopLog); + stores.push(clientStore); + + const session = await connectToHub(clientStore, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: "127.0.0.1:18917", + userToken: token, + }, + }, + } as any); + + expect(session.userId).toBeTruthy(); + expect(session.role).toBe("admin"); + expect(clientStore.getClientHubConnection()).not.toBeNull(); + + const status = await getHubStatus(clientStore, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: "127.0.0.1:18917", + userToken: token, + }, + }, + } as any); + expect(status.connected).toBe(true); + expect(status.user?.role).toBe("admin"); + }); +}); From 46d8e31153dc6f054a4e362f517871317d7a4de1 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 15:47:34 +0800 Subject: [PATCH 10/85] feat(memos-local): add local plus hub memory search --- apps/memos-local-openclaw/src/client/hub.ts | 23 +- .../src/tools/memory-search.ts | 62 ++++- .../tests/integration.test.ts | 236 ++++++++++++++++++ 3 files changed, 312 insertions(+), 9 deletions(-) diff --git a/apps/memos-local-openclaw/src/client/hub.ts b/apps/memos-local-openclaw/src/client/hub.ts index 1157ccdc7..5a2f2b1ea 100644 --- 
a/apps/memos-local-openclaw/src/client/hub.ts +++ b/apps/memos-local-openclaw/src/client/hub.ts @@ -1,5 +1,6 @@ import type { PluginContext } from "../types"; import type { SqliteStore } from "../storage/sqlite"; +import type { HubScope, HubSearchResult } from "../sharing/types"; export interface ResolvedHubClient { hubUrl: string; @@ -9,7 +10,7 @@ export interface ResolvedHubClient { role: string; } -export async function resolveHubClient(store: SqliteStore, ctx: PluginContext): Promise { +export async function resolveHubClient(store: SqliteStore, ctx: PluginContext, overrides?: { hubAddress?: string; userToken?: string }): Promise { const persisted = store.getClientHubConnection() as any; if (persisted?.hubUrl && persisted?.userToken) { return { @@ -21,8 +22,8 @@ export async function resolveHubClient(store: SqliteStore, ctx: PluginContext): }; } - const hubAddress = ctx.config.sharing?.client?.hubAddress ?? ""; - const userToken = ctx.config.sharing?.client?.userToken ?? ""; + const hubAddress = overrides?.hubAddress ?? ctx.config.sharing?.client?.hubAddress ?? ""; + const userToken = overrides?.userToken ?? ctx.config.sharing?.client?.userToken ?? 
""; if (!hubAddress || !userToken) { throw new Error("hub client connection is not configured"); } @@ -39,6 +40,22 @@ export async function resolveHubClient(store: SqliteStore, ctx: PluginContext): }; } +export async function hubSearchMemories( + store: SqliteStore, + ctx: PluginContext, + input: { query: string; maxResults?: number; scope?: HubScope; hubAddress?: string; userToken?: string }, +): Promise { + const client = await resolveHubClient(store, ctx, { hubAddress: input.hubAddress, userToken: input.userToken }); + return hubRequestJson(client.hubUrl, client.userToken, "/api/v1/hub/search", { + method: "POST", + body: JSON.stringify({ + query: input.query, + maxResults: input.maxResults, + scope: input.scope, + }), + }) as Promise; +} + export async function hubRequestJson( hubUrl: string, userToken: string, diff --git a/apps/memos-local-openclaw/src/tools/memory-search.ts b/apps/memos-local-openclaw/src/tools/memory-search.ts index ede0291ba..3a491ccaa 100644 --- a/apps/memos-local-openclaw/src/tools/memory-search.ts +++ b/apps/memos-local-openclaw/src/tools/memory-search.ts @@ -1,3 +1,5 @@ +import { hubSearchMemories } from "../client/hub"; +import type { HubScope, HubSearchResult } from "../sharing/types"; import type { RecallEngine } from "../recall/engine"; import type { ToolDefinition } from "../types"; @@ -6,6 +8,21 @@ function resolveOwnerFilter(owner: unknown): string[] { return resolvedOwner === "public" ? ["public"] : [resolvedOwner, "public"]; } +function resolveScope(scope: unknown): HubScope { + return scope === "group" || scope === "all" ? 
scope : "local"; +} + +function emptyHubResult(scope: HubScope): HubSearchResult { + return { + hits: [], + meta: { + totalCandidates: 0, + searchedGroups: [], + includedPublic: scope === "all", + }, + }; +} + export function createMemorySearchTool(engine: RecallEngine): ToolDefinition { return { name: "memory_search", @@ -27,16 +44,49 @@ export function createMemorySearchTool(engine: RecallEngine): ToolDefinition { type: "number", description: "Minimum relevance score threshold 0-1 (default 0.45, floor 0.35).", }, + scope: { + type: "string", + description: "Search scope: local (default), group, or all. Group/all return split local and hub sections.", + }, + hubAddress: { + type: "string", + description: "Optional hub address override for client/hub integration tests or manual routing.", + }, + userToken: { + type: "string", + description: "Optional hub bearer token override for client/hub integration tests.", + }, }, }, handler: async (input) => { - const result = await engine.search({ - query: (input.query as string) ?? "", - maxResults: input.maxResults as number | undefined, - minScore: input.minScore as number | undefined, - ownerFilter: resolveOwnerFilter(input.owner), + const query = (input.query as string) ?? 
""; + const maxResults = input.maxResults as number | undefined; + const minScore = input.minScore as number | undefined; + const ownerFilter = resolveOwnerFilter(input.owner); + const scope = resolveScope(input.scope); + + const localSearch = engine.search({ + query, + maxResults, + minScore, + ownerFilter, }); - return result; + + if (scope === "local") { + return localSearch; + } + + const store = (engine as any).store; + const ctx = (engine as any).ctx; + const [local, hub] = await Promise.all([ + localSearch, + hubSearchMemories(store, ctx, { query, maxResults, scope, hubAddress: input.hubAddress as string | undefined, userToken: input.userToken as string | undefined }).catch((err) => { + ctx?.log?.warn?.(`Hub search failed, using local-only results: ${err}`); + return emptyHubResult(scope); + }), + ]); + + return { local, hub }; }, }; } diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 2b86906a4..b41b3a67c 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -65,6 +65,134 @@ function makeTaskChunk(overrides: Record = {}) { }; } +async function setupFederatedMemorySearchHarness() { + const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-federated-client-")); + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-federated-hub-")); + const port = 19200 + Math.floor(Math.random() * 1000); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + log: noopLog as any, + config: { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Federated Search Test", + teamToken: "federated-search-secret", + }, + }, + } as any, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userToken = 
authState.bootstrapAdminToken as string; + const userId = authState.bootstrapAdminUserId as string; + + hubStore.upsertHubGroup({ + id: "group-rollout", + name: "Rollout", + description: "Rollout group", + createdAt: 1, + }); + hubStore.addHubGroupMember("group-rollout", userId, 1); + + hubStore.upsertHubTask({ + id: "hub-task-group-rollout", + sourceTaskId: "group-rollout-task", + sourceUserId: userId, + title: "Group rollout checklist", + summary: "Group-only rollout checklist for the release train", + groupId: "group-rollout", + visibility: "group", + createdAt: 1, + updatedAt: 1, + }); + hubStore.upsertHubChunk({ + id: "hub-chunk-group-rollout", + hubTaskId: "hub-task-group-rollout", + sourceTaskId: "group-rollout-task", + sourceChunkId: "group-rollout-chunk", + sourceUserId: userId, + role: "assistant", + content: "Shared rollout checklist for the group hub: verify canary deploy, smoke tests, and rollback owner.", + summary: "Shared rollout checklist for the group hub", + kind: "paragraph", + createdAt: 2, + }); + + hubStore.upsertHubTask({ + id: "hub-task-public-rollout", + sourceTaskId: "public-rollout-task", + sourceUserId: userId, + title: "Public rollout checklist", + summary: "Public rollout checklist for all clients", + groupId: null, + visibility: "public", + createdAt: 3, + updatedAt: 3, + }); + hubStore.upsertHubChunk({ + id: "hub-chunk-public-rollout", + hubTaskId: "hub-task-public-rollout", + sourceTaskId: "public-rollout-task", + sourceChunkId: "public-rollout-chunk", + sourceUserId: userId, + role: "assistant", + content: "Public shared rollout checklist: announce deploy window and verify dashboards after release.", + summary: "Public shared rollout checklist", + kind: "paragraph", + createdAt: 4, + }); + + const clientPlugin = initPlugin({ + stateDir: clientDir, + config: { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: `127.0.0.1:${port}`, + userToken, + }, + }, + telemetry: { enabled: false }, + }, + }); + + 
clientPlugin.onConversationTurn([ + { + role: "user", + content: "Keep a local rollout checklist for the client deploy: verify migrations, confirm local smoke tests, and post status.", + }, + { + role: "assistant", + content: "Local rollout checklist captured with client-only smoke test details.", + }, + ], "session-federated-rollout"); + + await clientPlugin.flush(); + + return { + clientDir, + hubDir, + hubStore, + hubServer, + clientPlugin, + }; +} + +async function teardownFederatedMemorySearchHarness(harness: Awaited>) { + await harness.clientPlugin.shutdown(); + await harness.hubServer.stop(); + harness.hubStore.close(); + fs.rmSync(harness.clientDir, { recursive: true, force: true }); + fs.rmSync(harness.hubDir, { recursive: true, force: true }); +} + let plugin: MemosLocalPlugin; let tmpDir: string; @@ -196,6 +324,79 @@ describe("Integration: v4 types and config foundation", () => { }); }); +describe("Integration: memory_search hub scope", () => { + it("should return split local and hub results for scope=group", async () => { + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-search-hub-")); + const port = 19200 + Math.floor(Math.random() * 500); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + log: noopLog as any, + config: { sharing: { enabled: true, role: "hub", hub: { port, teamName: "Search Hub", teamToken: "search-hub-secret" } } }, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userId = authState.bootstrapAdminUserId as string; + const userToken = authState.bootstrapAdminToken as string; + + const shareRes = await fetch(`http://127.0.0.1:${port}/api/v1/hub/tasks/share`, { + method: "POST", + headers: { + authorization: `Bearer ${userToken}`, + "content-type": "application/json", + }, + body: JSON.stringify({ + task: { + id: 
"hub-task-search-1", + sourceTaskId: "task-hub-1", + sourceUserId: userId, + title: "Shared Nginx Notes", + summary: "nginx notes", + groupId: null, + visibility: "public", + createdAt: 1, + updatedAt: 1, + }, + chunks: [ + { + id: "hub-chunk-search-1", + hubTaskId: "hub-task-search-1", + sourceTaskId: "task-hub-1", + sourceChunkId: "chunk-hub-1", + sourceUserId: userId, + role: "assistant", + content: "Shared nginx upstream config with proxy_pass to 3000.", + summary: "shared nginx upstream config", + kind: "paragraph", + createdAt: 2, + }, + ], + }), + }); + expect(shareRes.status).toBe(200); + + const searchTool = plugin.tools.find((t) => t.name === "memory_search")!; + const result = (await searchTool.handler({ + query: "nginx upstream config", + scope: "group", + hubAddress: `127.0.0.1:${port}`, + userToken, + })) as any; + + expect(result.local).toBeDefined(); + expect(result.hub).toBeDefined(); + expect(Array.isArray(result.hub.hits)).toBe(true); + expect(result.hub.hits.length).toBeGreaterThan(0); + expect(result.hub.hits[0].taskTitle).toBe("Shared Nginx Notes"); + + await hubServer.stop(); + hubStore.close(); + fs.rmSync(hubDir, { recursive: true, force: true }); + }); +}); + describe("Integration: memory_search", () => { it("should find docker deployment details", async () => { const searchTool = plugin.tools.find((t) => t.name === "memory_search")!; @@ -247,6 +448,41 @@ describe("Integration: memory_search", () => { expect(result2.meta.note).toBeDefined(); expect(result2.meta.note).toContain("already"); }); + + it("should return local and hub sections for scope=group", async () => { + const harness = await setupFederatedMemorySearchHarness(); + + try { + const searchTool = harness.clientPlugin.tools.find((t) => t.name === "memory_search")!; + const result = (await searchTool.handler({ query: "rollout checklist", scope: "group" })) as any; + + expect(result.local.hits.length).toBeGreaterThan(0); + expect(result.local.meta.usedMaxResults).toBe(6); + 
expect(result.hub.hits.length).toBeGreaterThan(0); + expect(result.hub.hits.some((hit: any) => hit.visibility === "group")).toBe(true); + expect(result.hub.hits[0].remoteHitId).toBeTruthy(); + expect(result.hub.meta.totalCandidates).toBeGreaterThan(0); + } finally { + await teardownFederatedMemorySearchHarness(harness); + } + }); + + it("should return local and hub sections for scope=all", async () => { + const harness = await setupFederatedMemorySearchHarness(); + + try { + const searchTool = harness.clientPlugin.tools.find((t) => t.name === "memory_search")!; + const result = (await searchTool.handler({ query: "shared rollout checklist", scope: "all", maxResults: 5 })) as any; + + expect(result.local.hits.length).toBeGreaterThan(0); + expect(result.local.meta.usedMaxResults).toBe(5); + expect(result.hub.hits.length).toBeGreaterThan(0); + expect(result.hub.hits.some((hit: any) => hit.visibility === "public")).toBe(true); + expect(result.hub.meta.totalCandidates).toBeGreaterThan(0); + } finally { + await teardownFederatedMemorySearchHarness(harness); + } + }); }); describe("Integration: memory_timeline", () => { From b1e9c8b0530c8a4c5f7449a92b3365c3669299ed Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 16:01:43 +0800 Subject: [PATCH 11/85] chore(memos-local): stabilize v4 fast-track workspace --- .../src/tools/memory-search.ts | 4 +- .../tests/integration.test.ts | 4 +- .../2026-03-08-v4-fast-track-completion.md | 237 ++++++++++++++++++ 3 files changed, 241 insertions(+), 4 deletions(-) create mode 100644 docs/plans/2026-03-08-v4-fast-track-completion.md diff --git a/apps/memos-local-openclaw/src/tools/memory-search.ts b/apps/memos-local-openclaw/src/tools/memory-search.ts index 3a491ccaa..de1201bdc 100644 --- a/apps/memos-local-openclaw/src/tools/memory-search.ts +++ b/apps/memos-local-openclaw/src/tools/memory-search.ts @@ -50,11 +50,11 @@ export function createMemorySearchTool(engine: RecallEngine): ToolDefinition { }, hubAddress: { type: 
"string", - description: "Optional hub address override for client/hub integration tests or manual routing.", + description: "Optional hub address override for group/all search, integration tests, or manual routing.", }, userToken: { type: "string", - description: "Optional hub bearer token override for client/hub integration tests.", + description: "Optional hub bearer token override for group/all search or integration tests.", }, }, }, diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index b41b3a67c..4e16126b6 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -166,11 +166,11 @@ async function setupFederatedMemorySearchHarness() { clientPlugin.onConversationTurn([ { role: "user", - content: "Keep a local rollout checklist for the client deploy: verify migrations, confirm local smoke tests, and post status.", + content: "Keep a local shared rollout checklist for the client deploy: verify migrations, confirm local smoke tests, and post status.", }, { role: "assistant", - content: "Local rollout checklist captured with client-only smoke test details.", + content: "Local shared rollout checklist captured with client-only smoke test details.", }, ], "session-federated-rollout"); diff --git a/docs/plans/2026-03-08-v4-fast-track-completion.md b/docs/plans/2026-03-08-v4-fast-track-completion.md new file mode 100644 index 000000000..f05f0258e --- /dev/null +++ b/docs/plans/2026-03-08-v4-fast-track-completion.md @@ -0,0 +1,237 @@ +# V4 Hub Sharing Fast-Track Completion Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Ship a complete first usable version of v4 hub-spoke sharing as fast as possible, while preserving the already-working MVP memory-sharing path and finishing the remaining product-completeness items. 
+ +**Architecture:** Keep the current mainline focused on the Hub memory-sharing critical path, then close remaining gaps in descending product value: detail retrieval, team info/onboarding polish, skill sync, Viewer UI, and final integration tests. Use parallel work whenever tasks have disjoint write scopes and do not block the mainline. + +**Tech Stack:** TypeScript, `better-sqlite3`, Hub HTTP server, local `RecallEngine`, Viewer server/UI, Vitest, OpenClaw plugin lifecycle. + +--- + +## Current State + +### Already completed on `codex/v4-hub-sharing` +- `T1` sharing config/types foundation +- `T2` hub/client schema + store helpers +- `T3` hub service skeleton + auth bootstrap +- `T5` minimal hub search + task share endpoints +- `T6` minimal client connector +- `T7` minimal local + hub memory search +- `T8` minimal `task_share` / `task_unshare` + +### Completed on side branch/worktree +- `T4` openclaw fallback guards on `codex/t4-openclaw-fallback` + +### Immediate cleanup items before next feature work +- Main branch currently has uncommitted changes in: + - `apps/memos-local-openclaw/src/tools/memory-search.ts` + - `apps/memos-local-openclaw/tests/integration.test.ts` +- There is an untracked stray path to clean: + - `apps/memos-local-openclaw/~/` + +These should be resolved before starting the next implementation batch. 
+ +## Remaining Work (All still required) + +### Product-critical gaps +- Add `network_memory_detail` tool wired to `/api/v1/hub/memory-detail` +- Add `network_team_info` tool wired to `/api/v1/hub/me` (+ group list if available) +- Finish `T10` skill publish/pull via Hub +- Merge `T4` branch after quick sanity verification + +### Product-completeness gaps +- Minimal but usable Viewer/UI for hub/client state (`T12` MVP slice) +- Full tool registration completeness (`T11` finish) +- Final integrated tests + README update (`T13`) + +## Fastest Completion Strategy + +```text +Phase A — Stabilize current core (serial, short) + A0 Clean current working tree + A1 Land or discard leftover uncommitted T7 edits + A2 Merge T4 worktree branch into mainline + +Phase B — Finish missing user-facing MVP links (mainline + parallel) + B1 Mainline: network_memory_detail + network_team_info + B2 Parallel: T10 skill sync via Hub + +Phase C — Product completeness (parallel) + C1 Mainline: T11 tool registration completion + C2 Parallel: T12 minimal usable Viewer UX + +Phase D — Hardening (serial) + D1 T13 end-to-end smoke + focused integration suite + D2 README / setup docs / release sanity check +``` + +## Parallelization Rules + +### Must stay on the mainline critical path +- `A0` cleanup +- `A1` settle current dirty changes +- `B1` `network_memory_detail` + `network_team_info` +- `C1` final tool registration +- `D1` final smoke/integration verification + +### Should run in parallel when possible +- `T4` merge prep / sanity verification +- `T10` skill publish/pull via Hub +- `T12` Viewer/UI minimal admin & client state screens + +### Why these are parallel-safe +- `T10` mostly touches skill/Hub endpoints + client skill flows +- `T12` mostly touches `src/viewer/server.ts`, `src/viewer/html.ts` +- `B1` mostly touches tool wiring and client helper paths + +## Recommended Execution Order + +```text +Now +├─ A0 Clean worktree state +├─ A1 Decide whether dirty T7 edits are 
keep/amend/discard +├─ A2 Merge `codex/t4-openclaw-fallback` +│ +├─ B1 Mainline: add `network_memory_detail` +│ └─ then add `network_team_info` +│ +├─ B2 Parallel: T10 skill publish/pull via Hub +│ +├─ C1 Mainline: finalize tool registration +│ +├─ C2 Parallel: minimal Viewer/client/hub UI +│ +└─ D1/D2 Final test + docs + release pass +``` + +## Exact Next Task Recommendations + +### Task A0: Clean current branch state +**Why first:** Prevent accidental overwrite/confusion before more parallel work. + +**Actions:** +- Inspect current diffs in: + - `apps/memos-local-openclaw/src/tools/memory-search.ts` + - `apps/memos-local-openclaw/tests/integration.test.ts` +- Decide whether they belong to `T7` and should be committed/amended, or discarded +- Remove stray path `apps/memos-local-openclaw/~/` + +### Task A2: Merge T4 worktree branch +**Why now:** It is already implemented and does not block the current core, but the branch divergence should not grow. + +**Actions:** +- Compare `codex/t4-openclaw-fallback` against mainline +- Merge or cherry-pick: + - `69b96c0 feat(memos-local): add openclaw fallback guards` + - later viewer-gating follow-up if present in worktree +- Run targeted fallback tests + build on mainline + +### Task B1.1: Add `network_memory_detail` +**Why next:** Search already returns `remoteHitId`, so this closes the user-visible memory flow. + +**Files:** +- Modify: `apps/memos-local-openclaw/index.ts` +- Modify: `apps/memos-local-openclaw/src/client/hub.ts` +- Modify: `apps/memos-local-openclaw/tests/integration.test.ts` + +**MVP behavior:** +- Input: `remoteHitId`, optional `hubAddress`, optional `userToken` +- Resolve hub client using existing connector/helper fallback chain +- Call `/api/v1/hub/memory-detail` +- Return content/summary/source + +### Task B1.2: Add `network_team_info` +**Why immediately after:** Cheap, high-value visibility into connection/user/group context. 
+ +**Files:** +- Modify: `apps/memos-local-openclaw/index.ts` +- Modify: `apps/memos-local-openclaw/src/client/connector.ts` +- Modify: `apps/memos-local-openclaw/tests/client-connector.test.ts` + +**MVP behavior:** +- Return connected/disconnected state +- Return current user identity/role +- Return groups if available from `/me` or local persisted state + +### Task B2: Finish T10 Hub skill sync +**Why parallel:** Important for completeness, but does not block memory-sharing MVP. + +**Files:** +- Modify: `apps/memos-local-openclaw/index.ts` +- Modify: `apps/memos-local-openclaw/src/skill/installer.ts` +- Create/modify: `apps/memos-local-openclaw/src/client/skill-sync.ts` +- Add tests in `apps/memos-local-openclaw/tests/integration.test.ts` + +**Minimum completion criteria:** +- `skill_publish(scope=group|public)` to Hub +- `network_skill_pull` from Hub +- bundle validation stays enforced + +### Task C1: Finish T11 tool registration +**Why after B1/T10:** Register only once core tools and skill tools exist. + +**Required tools to expose by end:** +- `task_share` +- `task_unshare` +- `network_memory_detail` +- `network_team_info` +- `network_skill_pull` + +### Task C2: Minimal T12 Viewer UI +**Why not earlier:** UI should follow working APIs. 
+ +**MVP UI only:** +- Client: show Hub connected/disconnected + current user/role +- Search: local/group/all selector if not already present +- Hub/Admin: pending users list + approve action +- Shared result section rendering for hub hits + +### Task D1: Final T13 test pass +**Do not skip.** + +**Minimum smoke matrix:** +- Hub start/stop +- Join + approve +- Connect client +- `task_share` +- `memory_search(scope=group)` +- `network_memory_detail` +- `task_unshare` +- Hub-down fallback to local-only search +- skill publish/pull smoke if T10 lands + +## Fastest Team Split + +### Mainline owner +- A0/A1 cleanup +- A2 T4 merge +- B1 `network_memory_detail`, `network_team_info` +- C1 tool registration +- D1/D2 final smoke + docs + +### Parallel lane 1 +- T10 skill sync via Hub + +### Parallel lane 2 +- T12 minimal Viewer UI + +## “Tomorrow-ready” Definition + +A release is acceptable when all are true: +- Hub starts with a valid team token +- Admin bootstrap and approval flow work +- Client can connect and persist session +- Local task can be shared to Hub and searched back +- Hub hit can be opened with `network_memory_detail` +- Task can be unshared +- Local-only behavior still works if Hub is unavailable +- No obvious auth bypass in the happy-path MVP routes + +## Non-blocking defects to defer if time is short +- Advanced Viewer polish +- Rich group management UX +- Deep fallback/host integration beyond safe guards +- Skill version-management niceties beyond publish/pull happy path +- Exhaustive edge-case test coverage outside the smoke matrix From 72ceea08d7e978de9f19b2690801c9557e1d9508 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 16:12:32 +0800 Subject: [PATCH 12/85] feat(memos-local): add hub memory detail tool --- apps/memos-local-openclaw/index.ts | 36 ++++- apps/memos-local-openclaw/src/client/hub.ts | 26 +++- apps/memos-local-openclaw/src/index.ts | 3 +- .../memos-local-openclaw/src/sharing/types.ts | 10 ++ 
apps/memos-local-openclaw/src/tools/index.ts | 1 + .../src/tools/network-memory-detail.ts | 34 +++++ .../tests/integration.test.ts | 130 ++++++++++++++++++ 7 files changed, 237 insertions(+), 3 deletions(-) create mode 100644 apps/memos-local-openclaw/src/tools/network-memory-detail.ts diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 191172824..a6ed3a70f 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -18,7 +18,7 @@ import { captureMessages, stripInboundMetadata } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; import { HubServer } from "./src/hub/server"; -import { hubRequestJson, resolveHubClient } from "./src/client/hub"; +import { hubGetMemoryDetail, hubRequestJson, resolveHubClient } from "./src/client/hub"; import { SkillEvolver } from "./src/skill/evolver"; import { SkillInstaller } from "./src/skill/installer"; import { Summarizer } from "./src/ingest/providers"; @@ -687,6 +687,40 @@ const memosLocalPlugin = { { name: "task_unshare" }, ); + api.registerTool( + { + name: "network_memory_detail", + label: "Network Memory Detail", + description: "Fetch the full detail for a Hub search hit returned by memory_search(scope=group|all).", + parameters: Type.Object({ + remoteHitId: Type.String({ description: "The remoteHitId returned by a Hub search hit" }), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for tests or manual routing" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for tests" })), + }), + execute: trackTool("network_memory_detail", async (_toolCallId: any, params: any) => { + const { remoteHitId, hubAddress, userToken } = params as { + remoteHitId: string; + hubAddress?: string; + userToken?: string; + }; + + const detail = await hubGetMemoryDetail(store, ctx, { remoteHitId, hubAddress, userToken }); + return { + 
content: [{ + type: "text", + text: `## Shared Memory Detail + +${detail.summary} + +${detail.content}`, + }], + details: detail, + }; + }), + }, + { name: "network_memory_detail" }, + ); + // ─── Tool: skill_get ─── api.registerTool( diff --git a/apps/memos-local-openclaw/src/client/hub.ts b/apps/memos-local-openclaw/src/client/hub.ts index 5a2f2b1ea..06e076ceb 100644 --- a/apps/memos-local-openclaw/src/client/hub.ts +++ b/apps/memos-local-openclaw/src/client/hub.ts @@ -1,6 +1,6 @@ import type { PluginContext } from "../types"; import type { SqliteStore } from "../storage/sqlite"; -import type { HubScope, HubSearchResult } from "../sharing/types"; +import type { HubMemoryDetail, HubScope, HubSearchResult } from "../sharing/types"; export interface ResolvedHubClient { hubUrl: string; @@ -56,6 +56,30 @@ export async function hubSearchMemories( }) as Promise; } +export async function hubGetMemoryDetail( + store: SqliteStore, + ctx: PluginContext, + input: { remoteHitId: string; hubAddress?: string; userToken?: string }, +): Promise { + const client = await resolveHubClient(store, ctx, { hubAddress: input.hubAddress, userToken: input.userToken }); + const detail = await hubRequestJson(client.hubUrl, client.userToken, "/api/v1/hub/memory-detail", { + method: "POST", + body: JSON.stringify({ + remoteHitId: input.remoteHitId, + }), + }) as Omit; + + return { + remoteHitId: input.remoteHitId, + content: String(detail.content ?? ""), + summary: String(detail.summary ?? ""), + source: { + ts: Number(detail.source?.ts ?? 0), + role: String(detail.source?.role ?? 
"assistant") as any, + }, + }; +} + export async function hubRequestJson( hubUrl: string, userToken: string, diff --git a/apps/memos-local-openclaw/src/index.ts b/apps/memos-local-openclaw/src/index.ts index dcea12d86..de0c5f35b 100644 --- a/apps/memos-local-openclaw/src/index.ts +++ b/apps/memos-local-openclaw/src/index.ts @@ -5,7 +5,7 @@ import { Embedder } from "./embedding"; import { IngestWorker } from "./ingest/worker"; import { RecallEngine } from "./recall/engine"; import { captureMessages } from "./capture"; -import { createMemorySearchTool, createMemoryTimelineTool, createMemoryGetTool } from "./tools"; +import { createMemorySearchTool, createMemoryTimelineTool, createMemoryGetTool, createNetworkMemoryDetailTool } from "./tools"; import type { MemosLocalConfig, ToolDefinition, Logger } from "./types"; export interface MemosLocalPlugin { @@ -63,6 +63,7 @@ export function initPlugin(opts: PluginInitOptions = {}): MemosLocalPlugin { createMemorySearchTool(engine), createMemoryTimelineTool(store), createMemoryGetTool(store), + createNetworkMemoryDetailTool(store, ctx), ]; ctx.log.info(`Plugin ready. 
DB: ${ctx.config.storage!.dbPath}, Embedding: ${embedder.provider}`); diff --git a/apps/memos-local-openclaw/src/sharing/types.ts b/apps/memos-local-openclaw/src/sharing/types.ts index 7c28ce45a..b3fe3b9db 100644 --- a/apps/memos-local-openclaw/src/sharing/types.ts +++ b/apps/memos-local-openclaw/src/sharing/types.ts @@ -57,6 +57,16 @@ export interface HubSearchResult { meta: HubSearchMeta; } +export interface HubMemoryDetail { + remoteHitId: string; + content: string; + summary: string; + source: { + ts: number; + role: Role; + }; +} + export interface NetworkSearchResult { local: SearchResult; hub: HubSearchResult; diff --git a/apps/memos-local-openclaw/src/tools/index.ts b/apps/memos-local-openclaw/src/tools/index.ts index 6e57dc08b..7ce2a4912 100644 --- a/apps/memos-local-openclaw/src/tools/index.ts +++ b/apps/memos-local-openclaw/src/tools/index.ts @@ -1,3 +1,4 @@ export { createMemorySearchTool } from "./memory-search"; export { createMemoryTimelineTool } from "./memory-timeline"; export { createMemoryGetTool } from "./memory-get"; +export { createNetworkMemoryDetailTool } from "./network-memory-detail"; diff --git a/apps/memos-local-openclaw/src/tools/network-memory-detail.ts b/apps/memos-local-openclaw/src/tools/network-memory-detail.ts new file mode 100644 index 000000000..70db72578 --- /dev/null +++ b/apps/memos-local-openclaw/src/tools/network-memory-detail.ts @@ -0,0 +1,34 @@ +import { hubGetMemoryDetail } from "../client/hub"; +import type { PluginContext, ToolDefinition } from "../types"; +import type { SqliteStore } from "../storage/sqlite"; + +export function createNetworkMemoryDetailTool(store: SqliteStore, ctx: PluginContext): ToolDefinition { + return { + name: "network_memory_detail", + description: + "Fetch the full detail for one Hub search hit using its remoteHitId. 
Use this after memory_search(scope=group|all) when you need the full shared content.", + inputSchema: { + type: "object", + properties: { + remoteHitId: { + type: "string", + description: "The remoteHitId returned by memory_search hub results.", + }, + hubAddress: { + type: "string", + description: "Optional hub address override for integration tests or manual routing.", + }, + userToken: { + type: "string", + description: "Optional hub bearer token override for integration tests.", + }, + }, + required: ["remoteHitId"], + }, + handler: async (input) => hubGetMemoryDetail(store, ctx, { + remoteHitId: String(input.remoteHitId ?? ""), + hubAddress: input.hubAddress as string | undefined, + userToken: input.userToken as string | undefined, + }), + }; +} diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 2643c68c2..5181171ac 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -579,6 +579,25 @@ describe("Integration: memory_search", () => { await teardownFederatedMemorySearchHarness(harness); } }); + + it("should return memory detail for a hub search hit", async () => { + const harness = await setupFederatedMemorySearchHarness(); + + try { + const searchTool = harness.clientPlugin.tools.find((t) => t.name === "memory_search")!; + const detailTool = harness.clientPlugin.tools.find((t) => t.name === "network_memory_detail")!; + const result = (await searchTool.handler({ query: "shared rollout checklist", scope: "all", maxResults: 5 })) as any; + const targetHit = result.hub.hits.find((hit: any) => hit.visibility === "public") ?? 
result.hub.hits[0]; + + const detail = (await detailTool.handler({ remoteHitId: targetHit.remoteHitId })) as any; + + expect(detail.summary).toContain("rollout checklist"); + expect(detail.content).toContain("announce deploy window"); + expect(detail.source.role).toBe("assistant"); + } finally { + await teardownFederatedMemorySearchHarness(harness); + } + }); }); describe("Integration: memory_timeline", () => { @@ -837,6 +856,117 @@ describe("Integration: task sharing MVP", () => { }); }); +describe("Integration: network memory detail tool", () => { + async function setupNetworkMemoryDetailHarness() { + const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-network-detail-client-")); + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-network-detail-hub-")); + const port = 19300 + Math.floor(Math.random() * 1000); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + log: noopLog as any, + config: { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Memory Detail Test", + teamToken: "memory-detail-secret", + }, + }, + } as any, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userToken = authState.bootstrapAdminToken as string; + const userId = authState.bootstrapAdminUserId as string; + + hubStore.upsertHubTask({ + id: "hub-task-detail-1", + sourceTaskId: "task-detail-1", + sourceUserId: userId, + title: "Deploy Nginx", + summary: "deploy nginx summary", + groupId: null, + visibility: "public", + createdAt: 1, + updatedAt: 1, + }); + hubStore.upsertHubChunk({ + id: "hub-chunk-detail-1", + hubTaskId: "hub-task-detail-1", + sourceTaskId: "task-detail-1", + sourceChunkId: "chunk-detail-1", + sourceUserId: userId, + role: "assistant", + content: "Use nginx upstream and proxy_pass to port 3000.", + summary: "nginx upstream to port 
3000", + kind: "paragraph", + createdAt: 2, + }); + + const searchRes = await fetch(`http://127.0.0.1:${port}/api/v1/hub/search`, { + method: "POST", + headers: { + "content-type": "application/json", + authorization: `Bearer ${userToken}`, + }, + body: JSON.stringify({ query: "nginx upstream 3000", scope: "all", maxResults: 5 }), + }); + const searchJson = await searchRes.json(); + + const { tools, service } = makePluginApi(clientDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: `127.0.0.1:${port}`, + userToken, + }, + }, + telemetry: { enabled: false }, + }); + + return { + clientDir, + hubDir, + tools, + service, + hubStore, + hubServer, + remoteHitId: searchJson.hits[0].remoteHitId as string, + }; + } + + async function teardownNetworkMemoryDetailHarness(harness: Awaited>) { + await harness.service?.stop?.(); + await harness.hubServer.stop(); + harness.hubStore.close(); + fs.rmSync(harness.clientDir, { recursive: true, force: true }); + fs.rmSync(harness.hubDir, { recursive: true, force: true }); + } + + it("network_memory_detail should fetch hub detail via config fallback", async () => { + const harness = await setupNetworkMemoryDetailHarness(); + + try { + const detailTool = harness.tools.get("network_memory_detail"); + expect(detailTool).toBeDefined(); + + const result = await detailTool.execute("call-network-detail", { remoteHitId: harness.remoteHitId }, { agentId: "main" }); + expect(result.details.remoteHitId).toBe(harness.remoteHitId); + expect(result.details.content).toContain("proxy_pass"); + expect(result.details.summary).toContain("nginx upstream"); + expect(result.details.source.role).toBe("assistant"); + } finally { + await teardownNetworkMemoryDetailHarness(harness); + } + }); +}); + describe("Integration: evidence anti-writeback", () => { it("should not store evidence wrapper blocks in memory", async () => { plugin.onConversationTurn([ From 0b4f0839740b856548536e20489115622942c531 Mon Sep 17 00:00:00 2001 From: 
jiaqian Date: Sun, 8 Mar 2026 16:17:21 +0800 Subject: [PATCH 13/85] feat(memos-local): add hub team info tool --- apps/memos-local-openclaw/index.ts | 34 ++++++++ .../src/client/connector.ts | 19 ++++- .../tests/client-connector.test.ts | 11 +++ .../tests/integration.test.ts | 85 +++++++++++++++++++ 4 files changed, 147 insertions(+), 2 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index a6ed3a70f..2d4c73991 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -19,6 +19,7 @@ import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; import { HubServer } from "./src/hub/server"; import { hubGetMemoryDetail, hubRequestJson, resolveHubClient } from "./src/client/hub"; +import { getHubStatus } from "./src/client/connector"; import { SkillEvolver } from "./src/skill/evolver"; import { SkillInstaller } from "./src/skill/installer"; import { Summarizer } from "./src/ingest/providers"; @@ -721,6 +722,39 @@ ${detail.content}`, { name: "network_memory_detail" }, ); + api.registerTool( + { + name: "network_team_info", + label: "Network Team Info", + description: "Show current Hub connection status, signed-in user, role, and group memberships.", + parameters: Type.Object({}), + execute: trackTool("network_team_info", async () => { + const status = await getHubStatus(store, ctx.config); + if (!status.connected || !status.user) { + return { + content: [{ type: "text", text: "Hub is not connected." }], + details: status, + }; + } + + const groupNames = status.user.groups.map((group) => group.name); + return { + content: [{ + type: "text", + text: `## Team Connection + +User: ${status.user.username} +Role: ${status.user.role} +Hub: ${status.hubUrl ?? "(unknown)"} +Groups: ${groupNames.length > 0 ? 
groupNames.join(", ") : "(none)"}`, + }], + details: status, + }; + }), + }, + { name: "network_team_info" }, + ); + // ─── Tool: skill_get ─── api.registerTool( diff --git a/apps/memos-local-openclaw/src/client/connector.ts b/apps/memos-local-openclaw/src/client/connector.ts index 18ab493e5..90bc21ab4 100644 --- a/apps/memos-local-openclaw/src/client/connector.ts +++ b/apps/memos-local-openclaw/src/client/connector.ts @@ -1,5 +1,5 @@ import type { MemosLocalConfig } from "../types"; -import type { UserRole, UserStatus } from "../sharing/types"; +import type { GroupInfo, UserRole, UserStatus } from "../sharing/types"; import type { SqliteStore } from "../storage/sqlite"; import { hubRequestJson, normalizeHubUrl } from "./hub"; @@ -14,7 +14,14 @@ export interface HubSessionInfo { export interface HubStatusInfo { connected: boolean; - user: null | { id: string; username: string; role: UserRole; status: UserStatus | string }; + hubUrl?: string; + user: null | { + id: string; + username: string; + role: UserRole; + status: UserStatus | string; + groups: GroupInfo[]; + }; } export async function connectToHub(store: SqliteStore, config: MemosLocalConfig): Promise { @@ -49,11 +56,19 @@ export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig) const me = await hubRequestJson(normalizeHubUrl(hubAddress), userToken, "/api/v1/hub/me", { method: "GET" }) as any; return { connected: true, + hubUrl: normalizeHubUrl(hubAddress), user: { id: String(me.id), username: String(me.username ?? ""), role: String(me.role ?? "member") as UserRole, status: String(me.status ?? "active"), + groups: Array.isArray(me.groups) + ? me.groups.map((group: any) => ({ + id: String(group.id), + name: String(group.name), + description: typeof group.description === "string" ? 
group.description : undefined, + })) + : [], }, }; } catch { diff --git a/apps/memos-local-openclaw/tests/client-connector.test.ts b/apps/memos-local-openclaw/tests/client-connector.test.ts index 05a2e9095..d854818a2 100644 --- a/apps/memos-local-openclaw/tests/client-connector.test.ts +++ b/apps/memos-local-openclaw/tests/client-connector.test.ts @@ -41,6 +41,15 @@ describe("client connector", () => { const authState = JSON.parse(fs.readFileSync(path.join(dir, "hub-auth.json"), "utf8")); const token = authState.bootstrapAdminToken; + const userId = authState.bootstrapAdminUserId; + + store.upsertHubGroup({ + id: "group-backend", + name: "Backend", + description: "Backend team", + createdAt: 1, + }); + store.addHubGroupMember("group-backend", userId, 1); const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-client-state-")); dirs.push(clientDir); @@ -73,6 +82,8 @@ describe("client connector", () => { }, } as any); expect(status.connected).toBe(true); + expect(status.hubUrl).toBe("http://127.0.0.1:18917"); expect(status.user?.role).toBe("admin"); + expect(status.user?.groups.map((group: any) => group.name)).toEqual(["Backend"]); }); }); diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 5181171ac..5df56bba1 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -967,6 +967,91 @@ describe("Integration: network memory detail tool", () => { }); }); +describe("Integration: network team info tool", () => { + async function setupNetworkTeamInfoHarness() { + const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-network-team-client-")); + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-network-team-hub-")); + const port = 19400 + Math.floor(Math.random() * 1000); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + 
log: noopLog as any, + config: { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Team Info Test", + teamToken: "team-info-secret", + }, + }, + } as any, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userToken = authState.bootstrapAdminToken as string; + const userId = authState.bootstrapAdminUserId as string; + + hubStore.upsertHubGroup({ + id: "group-devops", + name: "DevOps", + description: "DevOps team", + createdAt: 1, + }); + hubStore.addHubGroupMember("group-devops", userId, 1); + + const { tools, service } = makePluginApi(clientDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: `127.0.0.1:${port}`, + userToken, + }, + }, + telemetry: { enabled: false }, + }); + + return { + clientDir, + hubDir, + tools, + service, + hubStore, + hubServer, + }; + } + + async function teardownNetworkTeamInfoHarness(harness: Awaited>) { + await harness.service?.stop?.(); + await harness.hubServer.stop(); + harness.hubStore.close(); + fs.rmSync(harness.clientDir, { recursive: true, force: true }); + fs.rmSync(harness.hubDir, { recursive: true, force: true }); + } + + it("network_team_info should report hub connection, user, and groups", async () => { + const harness = await setupNetworkTeamInfoHarness(); + + try { + const teamInfoTool = harness.tools.get("network_team_info"); + expect(teamInfoTool).toBeDefined(); + + const result = await teamInfoTool.execute("call-team-info", {}, { agentId: "main" }); + expect(result.details.connected).toBe(true); + expect(result.details.hubUrl).toContain("127.0.0.1:"); + expect(result.details.user.username).toBe("admin"); + expect(result.details.user.role).toBe("admin"); + expect(result.details.user.groups.map((group: any) => group.name)).toEqual(["DevOps"]); + } finally { + await teardownNetworkTeamInfoHarness(harness); + } + }); +}); + describe("Integration: evidence 
anti-writeback", () => { it("should not store evidence wrapper blocks in memory", async () => { plugin.onConversationTurn([ From 3d70765c2bfc3ce26cb28066f9bb64c3d5ed7937 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 16:20:44 +0800 Subject: [PATCH 14/85] feat(memos-local): add hub skill bundle export helper --- .../src/client/skill-sync.ts | 68 ++++++++++ .../tests/skill-sync.test.ts | 118 ++++++++++++++++++ 2 files changed, 186 insertions(+) create mode 100644 apps/memos-local-openclaw/src/client/skill-sync.ts create mode 100644 apps/memos-local-openclaw/tests/skill-sync.test.ts diff --git a/apps/memos-local-openclaw/src/client/skill-sync.ts b/apps/memos-local-openclaw/src/client/skill-sync.ts new file mode 100644 index 000000000..00b0448e8 --- /dev/null +++ b/apps/memos-local-openclaw/src/client/skill-sync.ts @@ -0,0 +1,68 @@ +import * as fs from "fs"; +import * as path from "path"; +import type { SqliteStore } from "../storage/sqlite"; +import type { SkillGenerateOutput } from "../types"; +import type { SkillBundle } from "../sharing/types"; + +export function buildSkillBundleForHub(store: SqliteStore, skillId: string): SkillBundle { + const skill = store.getSkill(skillId); + if (!skill) { + throw new Error(`Skill not found: ${skillId}`); + } + + const latestVersion = store.getLatestSkillVersion(skillId); + const skillMd = readSkillMarkdown(skill.dirPath, latestVersion?.content ?? 
""); + + return { + metadata: { + id: skill.id, + name: skill.name, + description: skill.description, + version: skill.version, + qualityScore: skill.qualityScore, + }, + bundle: { + skill_md: skillMd, + scripts: readCompanionFiles(path.join(skill.dirPath, "scripts")), + references: readCompanionFiles(path.join(skill.dirPath, "references")), + evals: readEvals(path.join(skill.dirPath, "evals", "evals.json")), + } satisfies SkillGenerateOutput, + }; +} + +function readSkillMarkdown(dirPath: string, fallback: string): string { + const skillMdPath = path.join(dirPath, "SKILL.md"); + if (fs.existsSync(skillMdPath)) { + return fs.readFileSync(skillMdPath, "utf8"); + } + return fallback; +} + +function readCompanionFiles(dirPath: string): Array<{ filename: string; content: string }> { + if (!fs.existsSync(dirPath)) return []; + + const out: Array<{ filename: string; content: string }> = []; + walkFiles(dirPath, dirPath, out); + return out.sort((left, right) => left.filename.localeCompare(right.filename)); +} + +function walkFiles(rootDir: string, currentDir: string, out: Array<{ filename: string; content: string }>): void { + for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) { + const fullPath = path.join(currentDir, entry.name); + if (entry.isDirectory()) { + walkFiles(rootDir, fullPath, out); + continue; + } + if (!entry.isFile()) continue; + out.push({ + filename: path.relative(rootDir, fullPath).replace(/\\/g, "/"), + content: fs.readFileSync(fullPath, "utf8"), + }); + } +} + +function readEvals(evalsPath: string): Array<{ id: number; prompt: string; expectations: string[] }> { + if (!fs.existsSync(evalsPath)) return []; + const raw = JSON.parse(fs.readFileSync(evalsPath, "utf8")) as { evals?: Array<{ id: number; prompt: string; expectations: string[] }> }; + return Array.isArray(raw.evals) ? 
raw.evals : []; +} diff --git a/apps/memos-local-openclaw/tests/skill-sync.test.ts b/apps/memos-local-openclaw/tests/skill-sync.test.ts new file mode 100644 index 000000000..d5e8639dd --- /dev/null +++ b/apps/memos-local-openclaw/tests/skill-sync.test.ts @@ -0,0 +1,118 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { SqliteStore } from "../src/storage/sqlite"; +import { buildSkillBundleForHub } from "../src/client/skill-sync"; +import type { Logger, Skill, SkillVersion } from "../src/types"; + +const noopLog: Logger = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +let tmpDir: string; +let store: SqliteStore; + +beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-skill-sync-")); + store = new SqliteStore(path.join(tmpDir, "test.db"), noopLog); +}); + +afterEach(() => { + store.close(); + fs.rmSync(tmpDir, { recursive: true, force: true }); +}); + +function makeSkill(overrides: Partial = {}): Skill { + const now = Date.now(); + return { + id: overrides.id ?? "skill-1", + name: overrides.name ?? "docker-compose-deploy", + description: overrides.description ?? "Deploy with docker compose", + version: overrides.version ?? 2, + status: overrides.status ?? "active", + tags: overrides.tags ?? JSON.stringify(["docker", "deploy"]), + sourceType: overrides.sourceType ?? "manual", + dirPath: overrides.dirPath ?? path.join(tmpDir, "skill-dir"), + installed: overrides.installed ?? 0, + owner: overrides.owner ?? "agent:main", + visibility: overrides.visibility ?? "private", + qualityScore: overrides.qualityScore ?? 0.88, + createdAt: overrides.createdAt ?? now, + updatedAt: overrides.updatedAt ?? now, + }; +} + +function makeVersion(skillId: string, overrides: Partial = {}): SkillVersion { + const now = Date.now(); + return { + id: overrides.id ?? 
"skill-version-1", + skillId, + version: overrides.version ?? 2, + content: overrides.content ?? "# Docker Compose Deploy\nUse docker compose up -d", + changelog: overrides.changelog ?? "Improve deployment flow", + changeSummary: overrides.changeSummary ?? "Added scripts and evals", + upgradeType: overrides.upgradeType ?? "refine", + sourceTaskId: overrides.sourceTaskId ?? null, + metrics: overrides.metrics ?? "{}", + qualityScore: overrides.qualityScore ?? 0.88, + createdAt: overrides.createdAt ?? now, + }; +} + +describe("buildSkillBundleForHub", () => { + it("packages SKILL.md, scripts, references, and evals from a local skill directory", () => { + const skill = makeSkill(); + fs.mkdirSync(skill.dirPath, { recursive: true }); + fs.writeFileSync(path.join(skill.dirPath, "SKILL.md"), "# Docker Compose Deploy\nSee scripts/deploy.sh", "utf8"); + fs.mkdirSync(path.join(skill.dirPath, "scripts"), { recursive: true }); + fs.writeFileSync(path.join(skill.dirPath, "scripts", "deploy.sh"), "#!/bin/bash\ndocker compose up -d\n", "utf8"); + fs.mkdirSync(path.join(skill.dirPath, "references"), { recursive: true }); + fs.writeFileSync(path.join(skill.dirPath, "references", "docker-compose.yml"), "services:\n app: {}\n", "utf8"); + fs.mkdirSync(path.join(skill.dirPath, "evals"), { recursive: true }); + fs.writeFileSync( + path.join(skill.dirPath, "evals", "evals.json"), + JSON.stringify({ + skill_name: skill.name, + evals: [{ id: 1, prompt: "deploy app", expectations: ["compose", "up -d"] }], + }), + "utf8", + ); + + store.insertSkill(skill); + store.insertSkillVersion(makeVersion(skill.id)); + + const bundle = buildSkillBundleForHub(store, skill.id); + + expect(bundle.metadata.id).toBe(skill.id); + expect(bundle.metadata.name).toBe(skill.name); + expect(bundle.metadata.version).toBe(skill.version); + expect(bundle.bundle.skill_md).toContain("Docker Compose Deploy"); + expect(bundle.bundle.scripts).toEqual([ + expect.objectContaining({ filename: "deploy.sh" }), + ]); + 
expect(bundle.bundle.references).toEqual([ + expect.objectContaining({ filename: "docker-compose.yml" }), + ]); + expect(bundle.bundle.evals).toEqual([ + expect.objectContaining({ id: 1, prompt: "deploy app" }), + ]); + }); + + it("falls back to the latest skill version content when SKILL.md is missing", () => { + const skill = makeSkill({ id: "skill-2", dirPath: path.join(tmpDir, "skill-dir-2") }); + fs.mkdirSync(skill.dirPath, { recursive: true }); + store.insertSkill(skill); + store.insertSkillVersion(makeVersion(skill.id, { content: "# Version Fallback\nRecovered from DB" })); + + const bundle = buildSkillBundleForHub(store, skill.id); + + expect(bundle.bundle.skill_md).toContain("Version Fallback"); + expect(bundle.bundle.scripts).toEqual([]); + expect(bundle.bundle.references).toEqual([]); + expect(bundle.bundle.evals).toEqual([]); + }); +}); From 0057f483ece62c65b0f4fa83f5cda05cf093bf28 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 16:30:46 +0800 Subject: [PATCH 15/85] feat(memos-local): add hub skill sync flow --- apps/memos-local-openclaw/index.ts | 73 ++++++++- apps/memos-local-openclaw/src/client/hub.ts | 17 +- .../src/client/skill-sync.ts | 134 +++++++++++++++ apps/memos-local-openclaw/src/hub/server.ts | 77 +++++++++ .../memos-local-openclaw/src/sharing/types.ts | 15 ++ .../src/storage/sqlite.ts | 59 +++++++ .../tests/hub-server.test.ts | 74 +++++++++ .../tests/integration.test.ts | 155 ++++++++++++++++++ 8 files changed, 597 insertions(+), 7 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 2d4c73991..d0297991d 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -18,8 +18,9 @@ import { captureMessages, stripInboundMetadata } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; import { HubServer } from "./src/hub/server"; -import { hubGetMemoryDetail, hubRequestJson, resolveHubClient 
} from "./src/client/hub"; +import { hubGetMemoryDetail, hubRequestJson, hubSearchSkills, resolveHubClient } from "./src/client/hub"; import { getHubStatus } from "./src/client/connector"; +import { fetchHubSkillBundle, publishSkillBundleToHub, restoreSkillBundleFromHub } from "./src/client/skill-sync"; import { SkillEvolver } from "./src/skill/evolver"; import { SkillInstaller } from "./src/skill/installer"; import { Summarizer } from "./src/ingest/providers"; @@ -966,18 +967,44 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, name: "skill_search", label: "Skill Search", description: - "Search available skills by natural language. Searches your own skills, public skills, or both. " + + "Search available skills by natural language. Searches local skills by default, or local + Hub skills when scope=group/all. " + "Use when you need a capability or guide and don't have a matching skill at hand.", parameters: Type.Object({ query: Type.String({ description: "Natural language description of the needed skill" }), - scope: Type.Optional(Type.String({ description: "Search scope: 'mix' (default, self + public), 'self' (own only), 'public' (public only)" })), + scope: Type.Optional(Type.String({ description: "Search scope: 'mix'/'self'/'public' for local search, or 'group'/'all' for local + Hub search" })), }), execute: trackTool("skill_search", async (_toolCallId: any, params: any) => { const { query: skillQuery, scope: rawScope } = params as { query: string; scope?: string }; - const scope = (rawScope === "self" || rawScope === "public") ? rawScope : "mix"; const skillAgentId = (params as any).agentId ?? 
"main"; const currentOwner = `agent:${skillAgentId}`; + if (rawScope === "group" || rawScope === "all") { + const [localHits, hub] = await Promise.all([ + engine.searchSkills(skillQuery, "mix" as any, currentOwner), + hubSearchSkills(store, ctx, { query: skillQuery, maxResults: 10 }).catch(() => ({ hits: [] })), + ]); + + if (localHits.length === 0 && hub.hits.length === 0) { + return { + content: [{ type: "text", text: `No relevant skills found for: "${skillQuery}" (scope: ${rawScope})` }], + details: { query: skillQuery, scope: rawScope, local: { hits: [] }, hub }, + }; + } + + const localText = localHits.length > 0 + ? localHits.map((h, i) => `${i + 1}. [${h.name}] ${h.description.slice(0, 150)}${h.visibility === "public" ? " (public)" : ""}`).join("\n") + : "(none)"; + const hubText = hub.hits.length > 0 + ? hub.hits.map((h, i) => `${i + 1}. [${h.name}] ${h.description.slice(0, 150)} (${h.visibility}${h.groupName ? `:${h.groupName}` : ""}, owner=${h.ownerName})`).join("\n") + : "(none)"; + + return { + content: [{ type: "text", text: `Local skills:\n${localText}\n\nHub skills:\n${hubText}` }], + details: { query: skillQuery, scope: rawScope, local: { hits: localHits }, hub }, + }; + } + + const scope = (rawScope === "self" || rawScope === "public") ? rawScope : "mix"; const hits = await engine.searchSkills(skillQuery, scope as any, currentOwner); if (hits.length === 0) { @@ -1009,17 +1036,28 @@ Groups: ${groupNames.length > 0 ? 
groupNames.join(", ") : "(none)"}`, description: "Make a skill public so other agents can discover and install it via skill_search.", parameters: Type.Object({ skillId: Type.String({ description: "The skill ID to publish" }), + scope: Type.Optional(Type.String({ description: "Publish scope: omit for local public, or use 'public' / 'group' to publish to Hub" })), + groupId: Type.Optional(Type.String({ description: "Optional group ID when scope='group'" })), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for tests or manual routing" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for tests" })), }), execute: trackTool("skill_publish", async (_toolCallId: any, params: any) => { - const { skillId: pubSkillId } = params as { skillId: string }; + const { skillId: pubSkillId, scope, groupId, hubAddress, userToken } = params as { skillId: string; scope?: string; groupId?: string; hubAddress?: string; userToken?: string }; const skill = store.getSkill(pubSkillId); if (!skill) { return { content: [{ type: "text", text: `Skill not found: ${pubSkillId}` }] }; } + if (scope === "public" || scope === "group") { + const published = await publishSkillBundleToHub(store, ctx, { skillId: pubSkillId, visibility: scope, groupId, hubAddress, userToken }); + return { + content: [{ type: "text", text: `Skill "${skill.name}" published to hub (${published.visibility}).` }], + details: { skillId: pubSkillId, name: skill.name, publishedToHub: true, hubSkillId: published.skillId, visibility: published.visibility }, + }; + } store.setSkillVisibility(pubSkillId, "public"); return { content: [{ type: "text", text: `Skill "${skill.name}" is now public.` }], - details: { skillId: pubSkillId, name: skill.name, visibility: "public" }, + details: { skillId: pubSkillId, name: skill.name, visibility: "public", publishedToHub: false }, }; }), }, @@ -1052,6 +1090,29 @@ Groups: ${groupNames.length > 0 ? 
groupNames.join(", ") : "(none)"}`, { name: "skill_unpublish" }, ); + api.registerTool( + { + name: "network_skill_pull", + label: "Network Skill Pull", + description: "Download a published Hub skill bundle and restore it into local managed skills.", + parameters: Type.Object({ + skillId: Type.String({ description: "The Hub skill ID to pull" }), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for tests or manual routing" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for tests" })), + }), + execute: trackTool("network_skill_pull", async (_toolCallId: any, params: any) => { + const { skillId, hubAddress, userToken } = params as { skillId: string; hubAddress?: string; userToken?: string }; + const payload = await fetchHubSkillBundle(store, ctx, { skillId, hubAddress, userToken }); + const restored = restoreSkillBundleFromHub(store, ctx, payload); + return { + content: [{ type: "text", text: `Pulled Hub skill "${restored.localName}" into local storage.` }], + details: { pulled: true, hubSkillId: skillId, localSkillId: restored.localSkillId, localName: restored.localName, dirPath: restored.dirPath }, + }; + }), + }, + { name: "network_skill_pull" }, + ); + // ─── Auto-recall: inject relevant memories before agent starts ─── // Track recalled chunk IDs per turn to avoid re-storing them in agent_end diff --git a/apps/memos-local-openclaw/src/client/hub.ts b/apps/memos-local-openclaw/src/client/hub.ts index 06e076ceb..a28920798 100644 --- a/apps/memos-local-openclaw/src/client/hub.ts +++ b/apps/memos-local-openclaw/src/client/hub.ts @@ -1,6 +1,6 @@ import type { PluginContext } from "../types"; import type { SqliteStore } from "../storage/sqlite"; -import type { HubMemoryDetail, HubScope, HubSearchResult } from "../sharing/types"; +import type { HubMemoryDetail, HubScope, HubSearchResult, HubSkillSearchResult } from "../sharing/types"; export interface ResolvedHubClient { hubUrl: string; 
@@ -56,6 +56,21 @@ export async function hubSearchMemories( }) as Promise; } + +export async function hubSearchSkills( + store: SqliteStore, + ctx: PluginContext, + input: { query: string; maxResults?: number; hubAddress?: string; userToken?: string }, +): Promise { + const client = await resolveHubClient(store, ctx, { hubAddress: input.hubAddress, userToken: input.userToken }); + const url = new URL(`${client.hubUrl}/api/v1/hub/skills`); + url.searchParams.set("query", input.query); + if (input.maxResults != null) url.searchParams.set("maxResults", String(input.maxResults)); + return hubRequestJson(url.origin, client.userToken, `${url.pathname}${url.search}`, { + method: "GET", + }) as Promise; +} + export async function hubGetMemoryDetail( store: SqliteStore, ctx: PluginContext, diff --git a/apps/memos-local-openclaw/src/client/skill-sync.ts b/apps/memos-local-openclaw/src/client/skill-sync.ts index 00b0448e8..e6abb952f 100644 --- a/apps/memos-local-openclaw/src/client/skill-sync.ts +++ b/apps/memos-local-openclaw/src/client/skill-sync.ts @@ -1,8 +1,11 @@ import * as fs from "fs"; import * as path from "path"; +import { randomUUID } from "crypto"; +import type { PluginContext, Skill, SkillVersion } from "../types"; import type { SqliteStore } from "../storage/sqlite"; import type { SkillGenerateOutput } from "../types"; import type { SkillBundle } from "../sharing/types"; +import { resolveHubClient, hubRequestJson } from "./hub"; export function buildSkillBundleForHub(store: SqliteStore, skillId: string): SkillBundle { const skill = store.getSkill(skillId); @@ -66,3 +69,134 @@ function readEvals(evalsPath: string): Array<{ id: number; prompt: string; expec const raw = JSON.parse(fs.readFileSync(evalsPath, "utf8")) as { evals?: Array<{ id: number; prompt: string; expectations: string[] }> }; return Array.isArray(raw.evals) ? 
raw.evals : []; } + + +export async function publishSkillBundleToHub( + store: SqliteStore, + ctx: PluginContext, + input: { skillId: string; visibility: "public" | "group"; groupId?: string; hubAddress?: string; userToken?: string }, +): Promise<{ skillId: string; visibility: "public" | "group" }> { + const bundle = buildSkillBundleForHub(store, input.skillId); + const client = await resolveHubClient(store, ctx, { hubAddress: input.hubAddress, userToken: input.userToken }); + return hubRequestJson(client.hubUrl, client.userToken, "/api/v1/hub/skills/publish", { + method: "POST", + body: JSON.stringify({ + visibility: input.visibility, + groupId: input.groupId, + metadata: bundle.metadata, + bundle: bundle.bundle, + }), + }) as Promise<{ skillId: string; visibility: "public" | "group" }>; +} + +export async function fetchHubSkillBundle( + store: SqliteStore, + ctx: PluginContext, + input: { skillId: string; hubAddress?: string; userToken?: string }, +): Promise { + const client = await resolveHubClient(store, ctx, { hubAddress: input.hubAddress, userToken: input.userToken }); + return hubRequestJson(client.hubUrl, client.userToken, `/api/v1/hub/skills/${encodeURIComponent(input.skillId)}/bundle`, { + method: "GET", + }) as Promise; +} + +export function restoreSkillBundleFromHub( + store: SqliteStore, + ctx: PluginContext, + payload: SkillBundle & { skillId?: string }, +): { localSkillId: string; localName: string; dirPath: string } { + validateBundle(payload.bundle); + + const skillsStoreDir = path.join(ctx.stateDir, "skills-store"); + fs.mkdirSync(skillsStoreDir, { recursive: true }); + + const baseName = sanitizeName(payload.metadata.name) || `hub-skill-${(payload.skillId ?? payload.metadata.id).slice(0, 8)}`; + const resolvedName = resolveLocalSkillName(store, baseName, payload.skillId ?? 
payload.metadata.id); + const dirPath = path.join(skillsStoreDir, resolvedName); + fs.mkdirSync(dirPath, { recursive: true }); + fs.writeFileSync(path.join(dirPath, "SKILL.md"), payload.bundle.skill_md, "utf8"); + + writeCompanionFiles(dirPath, "scripts", payload.bundle.scripts); + writeCompanionFiles(dirPath, "references", payload.bundle.references); + if (payload.bundle.evals.length > 0) { + const evalDir = path.join(dirPath, "evals"); + fs.mkdirSync(evalDir, { recursive: true }); + fs.writeFileSync(path.join(evalDir, "evals.json"), JSON.stringify({ skill_name: payload.metadata.name, evals: payload.bundle.evals }, null, 2), "utf8"); + } + + const now = Date.now(); + const localSkillId = randomUUID(); + const skill: Skill = { + id: localSkillId, + name: resolvedName, + description: payload.metadata.description, + version: payload.metadata.version, + status: "active", + tags: JSON.stringify(["hub-import"]), + sourceType: "manual", + dirPath, + installed: 0, + owner: "agent:main", + visibility: "private", + qualityScore: payload.metadata.qualityScore, + createdAt: now, + updatedAt: now, + }; + const version: SkillVersion = { + id: randomUUID(), + skillId: localSkillId, + version: payload.metadata.version, + content: payload.bundle.skill_md, + changelog: "Imported from hub", + changeSummary: "Imported from hub", + upgradeType: "create", + sourceTaskId: null, + metrics: "{}", + qualityScore: payload.metadata.qualityScore, + createdAt: now, + }; + + store.insertSkill(skill); + store.insertSkillVersion(version); + return { localSkillId, localName: resolvedName, dirPath }; +} + +function validateBundle(bundle: SkillGenerateOutput): void { + const allowedExtensions = new Set([".md", ".ts", ".js", ".sh", ".json", ".yaml", ".yml", ".txt"]); + const files = [...bundle.scripts, ...bundle.references]; + if (Buffer.byteLength(bundle.skill_md, "utf8") > 100 * 1024) throw new Error("SKILL.md exceeds size limit"); + if (files.length > 50) throw new Error("bundle contains too many 
files"); + + let totalBytes = Buffer.byteLength(bundle.skill_md, "utf8"); + for (const file of files) { + const name = file.filename; + if (!name || path.isAbsolute(name) || name.startsWith("/") || name.includes("..")) throw new Error(`unsafe filename: ${name}`); + if (!/^[A-Za-z0-9._/-]+$/.test(name)) throw new Error(`invalid filename: ${name}`); + const ext = path.extname(name).toLowerCase(); + if (!allowedExtensions.has(ext)) throw new Error(`unsupported file type: ${name}`); + const fileSize = Buffer.byteLength(file.content, "utf8"); + if (fileSize > 512 * 1024) throw new Error(`file exceeds size limit: ${name}`); + totalBytes += fileSize; + } + if (totalBytes > 5 * 1024 * 1024) throw new Error("bundle exceeds size limit"); +} + +function writeCompanionFiles(dirPath: string, root: "scripts" | "references", files: Array<{ filename: string; content: string }>): void { + if (files.length === 0) return; + const rootDir = path.join(dirPath, root); + fs.mkdirSync(rootDir, { recursive: true }); + for (const file of files) { + const target = path.join(rootDir, file.filename); + fs.mkdirSync(path.dirname(target), { recursive: true }); + fs.writeFileSync(target, file.content, "utf8"); + } +} + +function sanitizeName(input: string): string { + return input.trim().replace(/[^A-Za-z0-9._-]+/g, "-").replace(/^-+|-+$/g, ""); +} + +function resolveLocalSkillName(store: SqliteStore, baseName: string, sourceId: string): string { + if (!store.getSkillByName(baseName)) return baseName; + return `${baseName}-hub-${sourceId.slice(0, 8)}`; +} diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index aa67a54ee..561e10f6b 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -216,6 +216,83 @@ export class HubServer { return this.json(res, 200, { hits, meta: { totalCandidates: hits.length, searchedGroups: [], includedPublic: true } }); } + if (req.method === "GET" && path === 
"/api/v1/hub/skills") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const hits = this.opts.store.searchHubSkills(String(url.searchParams.get("query") || ""), { + userId: auth.userId, + maxResults: Number(url.searchParams.get("maxResults") || 10), + }).map(({ hit }) => ({ + skillId: hit.id, + name: hit.name, + description: hit.description, + version: hit.version, + visibility: hit.visibility, + groupName: hit.group_name, + ownerName: hit.owner_name || "unknown", + qualityScore: hit.quality_score, + })); + return this.json(res, 200, { hits }); + } + + if (req.method === "POST" && path === "/api/v1/hub/skills/publish") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const body = await this.readJson(req); + const metadata = body?.metadata ?? {}; + const sourceSkillId = String(metadata.id || ""); + if (!sourceSkillId) return this.json(res, 400, { error: "missing_skill_id" }); + const existing = this.opts.store.getHubSkillBySource(auth.userId, sourceSkillId); + const skillId = existing?.id ?? randomUUID(); + const visibility = body?.visibility === "group" ? "group" : "public"; + this.opts.store.upsertHubSkill({ + id: skillId, + sourceSkillId, + sourceUserId: auth.userId, + name: String(metadata.name || sourceSkillId), + description: String(metadata.description || ""), + version: Number(metadata.version || 1), + groupId: visibility === "group" ? String(body?.groupId || "") || null : null, + visibility, + bundle: JSON.stringify(body?.bundle ?? {}), + qualityScore: metadata.qualityScore == null ? null : Number(metadata.qualityScore), + createdAt: existing?.createdAt ?? Date.now(), + updatedAt: Date.now(), + }); + return this.json(res, 200, { ok: true, skillId, visibility }); + } + + const skillBundleMatch = req.method === "GET" ? 
path.match(/^\/api\/v1\/hub\/skills\/([^/]+)\/bundle$/) : null; + if (skillBundleMatch) { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const skill = this.opts.store.getHubSkillById(decodeURIComponent(skillBundleMatch[1])); + if (!skill) return this.json(res, 404, { error: "not_found" }); + const user = this.opts.store.getHubUser(auth.userId); + const groups = new Set((user?.groups ?? []).map((group) => group.id)); + const allowed = skill.visibility === "public" || (skill.groupId != null && groups.has(skill.groupId)); + if (!allowed) return this.json(res, 403, { error: "forbidden" }); + return this.json(res, 200, { + skillId: skill.id, + metadata: { + id: skill.sourceSkillId, + name: skill.name, + description: skill.description, + version: skill.version, + qualityScore: skill.qualityScore, + }, + bundle: JSON.parse(skill.bundle), + }); + } + + if (req.method === "POST" && path === "/api/v1/hub/skills/unpublish") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + const body = await this.readJson(req); + this.opts.store.deleteHubSkillBySource(auth.userId, String(body?.sourceSkillId || "")); + return this.json(res, 200, { ok: true }); + } + if (req.method === "POST" && path === "/api/v1/hub/memory-detail") { const auth = this.authenticate(req); if (!auth) return this.json(res, 401, { error: "unauthorized" }); diff --git a/apps/memos-local-openclaw/src/sharing/types.ts b/apps/memos-local-openclaw/src/sharing/types.ts index b3fe3b9db..6b20803ba 100644 --- a/apps/memos-local-openclaw/src/sharing/types.ts +++ b/apps/memos-local-openclaw/src/sharing/types.ts @@ -67,6 +67,21 @@ export interface HubMemoryDetail { }; } +export interface HubSkillHit { + skillId: string; + name: string; + description: string; + version: number; + visibility: SharedVisibility; + groupName: string | null; + ownerName: string; + qualityScore: number | null; +} + +export 
interface HubSkillSearchResult { + hits: HubSkillHit[]; +} + export interface NetworkSearchResult { local: SearchResult; hub: HubSearchResult; diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index 77cd0ef99..617a3b0a8 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -1550,6 +1550,11 @@ export class SqliteStore { return row ? rowToHubSkill(row) : null; } + getHubSkillById(skillId: string): HubSkillRecord | null { + const row = this.db.prepare('SELECT * FROM hub_skills WHERE id = ?').get(skillId) as HubSkillRow | undefined; + return row ? rowToHubSkill(row) : null; + } + upsertHubSkillEmbedding(skillId: string, vector: number[], sourceUserId: string, sourceSkillId: string): void { if (!sourceUserId || !sourceSkillId) throw new Error("sourceUserId and sourceSkillId are required for hub skill embedding upserts"); const canonicalSkillId = this.resolveCanonicalHubSkillId(skillId, sourceUserId, sourceSkillId); @@ -1603,6 +1608,49 @@ export class SqliteStore { return row ? rowToHubChunk(row) : null; } + searchHubSkills(query: string, options?: { userId?: string; maxResults?: number }): Array<{ hit: HubSkillSearchRow; rank: number }> { + const limit = options?.maxResults ?? 10; + const userId = options?.userId ?? ""; + const sanitized = sanitizeFtsQuery(query); + let rows: HubSkillSearchRow[]; + if (sanitized) { + rows = this.db.prepare(` + SELECT hs.id, hs.name, hs.description, hs.version, hs.visibility, hg.name AS group_name, hu.username AS owner_name, hs.quality_score, + bm25(hub_skills_fts) as rank + FROM hub_skills_fts f + JOIN hub_skills hs ON hs.rowid = f.rowid + LEFT JOIN hub_groups hg ON hg.id = hs.group_id + LEFT JOIN hub_users hu ON hu.id = hs.source_user_id + WHERE hub_skills_fts MATCH ? + AND ( + hs.visibility = 'public' + OR EXISTS ( + SELECT 1 FROM hub_group_members gm + WHERE gm.group_id = hs.group_id AND gm.user_id = ? 
+ ) + ) + ORDER BY rank + LIMIT ? + `).all(sanitized, userId, limit) as HubSkillSearchRow[]; + } else { + rows = this.db.prepare(` + SELECT hs.id, hs.name, hs.description, hs.version, hs.visibility, hg.name AS group_name, hu.username AS owner_name, hs.quality_score, + 0 as rank + FROM hub_skills hs + LEFT JOIN hub_groups hg ON hg.id = hs.group_id + LEFT JOIN hub_users hu ON hu.id = hs.source_user_id + WHERE hs.visibility = 'public' + OR EXISTS ( + SELECT 1 FROM hub_group_members gm + WHERE gm.group_id = hs.group_id AND gm.user_id = ? + ) + ORDER BY hs.updated_at DESC + LIMIT ? + `).all(userId, limit) as HubSkillSearchRow[]; + } + return rows.map((row, idx) => ({ hit: row, rank: idx + 1 })); + } + deleteHubSkillBySource(sourceUserId: string, sourceSkillId: string): void { this.db.prepare('DELETE FROM hub_skills WHERE source_user_id = ? AND source_skill_id = ?').run(sourceUserId, sourceSkillId); } @@ -2021,6 +2069,17 @@ function rowToHubSkill(row: HubSkillRow): HubSkillRecord { } +interface HubSkillSearchRow { + id: string; + name: string; + description: string; + version: number; + visibility: string; + group_name: string | null; + owner_name: string | null; + quality_score: number | null; +} + interface HubSearchRow { id: string; content: string; diff --git a/apps/memos-local-openclaw/tests/hub-server.test.ts b/apps/memos-local-openclaw/tests/hub-server.test.ts index 4b72ac4b9..6b6d463f1 100644 --- a/apps/memos-local-openclaw/tests/hub-server.test.ts +++ b/apps/memos-local-openclaw/tests/hub-server.test.ts @@ -296,3 +296,77 @@ describe("hub search pipeline", () => { expect(detailJson.content).toContain("proxy_pass"); }); }); + + +describe("hub skill pipeline", () => { + it("should publish, fetch, and unpublish skill bundles", async () => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-hub-skills-")); + dirs.push(dir); + const store = new SqliteStore(path.join(dir, "test.db"), noopLog); + stores.push(store); + + const server = new HubServer({ + store, + 
log: noopLog, + config: { sharing: { enabled: true, role: "hub", hub: { port: 18918, teamName: "Skills", teamToken: "skills-secret" } } }, + dataDir: dir, + } as any); + servers.push(server); + await server.start(); + + const authPath = path.join(dir, "hub-auth.json"); + const state = JSON.parse(fs.readFileSync(authPath, "utf8")); + const token = state.bootstrapAdminToken; + const userId = state.bootstrapAdminUserId; + + const publishRes = await fetch("http://127.0.0.1:18918/api/v1/hub/skills/publish", { + method: "POST", + headers: { + "content-type": "application/json", + authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ + visibility: "public", + metadata: { + id: "skill-source-1", + name: "docker-compose-deploy", + description: "Deploy with docker compose", + version: 2, + qualityScore: 0.88, + }, + bundle: { + skill_md: "# Docker Compose Deploy\nUse scripts/deploy.sh", + scripts: [{ filename: "deploy.sh", content: "#!/bin/bash\ndocker compose up -d\n" }], + references: [{ filename: "docker-compose.yml", content: "services:\n app: {}\n" }], + evals: [{ id: 1, prompt: "deploy app", expectations: ["compose", "up -d"] }], + }, + }), + }); + expect(publishRes.status).toBe(200); + const publishJson = await publishRes.json(); + expect(publishJson.skillId).toBeTruthy(); + + const stored = store.getHubSkillBySource(userId, "skill-source-1"); + expect(stored).not.toBeNull(); + expect(stored!.name).toBe("docker-compose-deploy"); + + const bundleRes = await fetch(`http://127.0.0.1:18918/api/v1/hub/skills/${publishJson.skillId}/bundle`, { + headers: { authorization: `Bearer ${token}` }, + }); + expect(bundleRes.status).toBe(200); + const bundleJson = await bundleRes.json(); + expect(bundleJson.metadata.name).toBe("docker-compose-deploy"); + expect(bundleJson.bundle.skill_md).toContain("Docker Compose Deploy"); + + const unpublishRes = await fetch("http://127.0.0.1:18918/api/v1/hub/skills/unpublish", { + method: "POST", + headers: { + "content-type": 
"application/json", + authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ sourceSkillId: "skill-source-1" }), + }); + expect(unpublishRes.status).toBe(200); + expect(store.getHubSkillBySource(userId, "skill-source-1")).toBeNull(); + }); +}); diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 5df56bba1..9c5b6da43 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -1052,6 +1052,161 @@ describe("Integration: network team info tool", () => { }); }); +describe("Integration: hub skill sync", () => { + async function setupSkillSyncHarness() { + const publisherDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-skill-publisher-")); + const pullerDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-skill-puller-")); + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-skill-hub-")); + const port = 19500 + Math.floor(Math.random() * 1000); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + log: noopLog as any, + config: { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Skill Sync Test", + teamToken: "skill-sync-secret", + }, + }, + } as any, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userToken = authState.bootstrapAdminToken as string; + + const skillDir = path.join(publisherDir, "skills-store", "docker-compose-deploy"); + fs.mkdirSync(path.join(skillDir, "scripts"), { recursive: true }); + fs.mkdirSync(path.join(skillDir, "references"), { recursive: true }); + fs.mkdirSync(path.join(skillDir, "evals"), { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), "# Docker Compose Deploy\nUse scripts/deploy.sh", "utf8"); + fs.writeFileSync(path.join(skillDir, "scripts", 
"deploy.sh"), "#!/bin/bash\ndocker compose up -d\n", "utf8"); + fs.writeFileSync(path.join(skillDir, "references", "docker-compose.yml"), "services:\n app: {}\n", "utf8"); + fs.writeFileSync(path.join(skillDir, "evals", "evals.json"), JSON.stringify({ + skill_name: "docker-compose-deploy", + evals: [{ id: 1, prompt: "deploy app", expectations: ["compose", "up -d"] }], + }), "utf8"); + + const publisherStore = new SqliteStore(path.join(publisherDir, "memos-local", "memos.db"), noopLog as any); + publisherStore.insertSkill({ + id: "skill-local-1", + name: "docker-compose-deploy", + description: "Deploy with docker compose", + version: 2, + status: "active", + tags: JSON.stringify(["docker", "deploy"]), + sourceType: "manual", + dirPath: skillDir, + installed: 0, + owner: "agent:main", + visibility: "private", + qualityScore: 0.88, + createdAt: 1, + updatedAt: 1, + }); + publisherStore.insertSkillVersion({ + id: "skill-version-local-1", + skillId: "skill-local-1", + version: 2, + content: "# Docker Compose Deploy\nUse scripts/deploy.sh", + changelog: "initial", + changeSummary: "initial", + upgradeType: "create", + sourceTaskId: null, + metrics: "{}", + qualityScore: 0.88, + createdAt: 1, + }); + publisherStore.close(); + + const publisher = makePluginApi(publisherDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: `127.0.0.1:${port}`, + userToken, + }, + }, + telemetry: { enabled: false }, + }); + + const puller = makePluginApi(pullerDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: `127.0.0.1:${port}`, + userToken, + }, + }, + telemetry: { enabled: false }, + }); + + const pullerStore = new SqliteStore(path.join(pullerDir, "memos-local", "memos.db"), noopLog as any); + + return { + publisherDir, + pullerDir, + hubDir, + publisher, + puller, + pullerStore, + hubStore, + hubServer, + }; + } + + async function teardownSkillSyncHarness(harness: Awaited>) { + await harness.publisher.service?.stop?.(); + await 
harness.puller.service?.stop?.(); + harness.pullerStore.close(); + await harness.hubServer.stop(); + harness.hubStore.close(); + fs.rmSync(harness.publisherDir, { recursive: true, force: true }); + fs.rmSync(harness.pullerDir, { recursive: true, force: true }); + fs.rmSync(harness.hubDir, { recursive: true, force: true }); + } + + it("skill_publish and network_skill_pull should round-trip a bundle through the hub", async () => { + const harness = await setupSkillSyncHarness(); + + try { + const publishTool = harness.publisher.tools.get("skill_publish"); + const pullTool = harness.puller.tools.get("network_skill_pull"); + expect(publishTool).toBeDefined(); + expect(pullTool).toBeDefined(); + + const publishResult = await publishTool.execute("call-skill-publish", { skillId: "skill-local-1", scope: "public" }, { agentId: "main" }); + expect(publishResult.details.publishedToHub).toBe(true); + expect(publishResult.details.hubSkillId).toBeTruthy(); + + const searchTool = harness.puller.tools.get("skill_search"); + const searchResult = await searchTool.execute("call-skill-search", { query: "docker compose deploy", scope: "all" }, { agentId: "main" }); + expect(searchResult.details.hub.hits.length).toBeGreaterThan(0); + expect(searchResult.details.hub.hits[0].name).toContain("docker-compose-deploy"); + + const pulled = await pullTool.execute("call-skill-pull", { skillId: searchResult.details.hub.hits[0].skillId }, { agentId: "main" }); + expect(pulled.details.pulled).toBe(true); + expect(pulled.details.localSkillId).toBeTruthy(); + + const localSkill = harness.pullerStore.getSkill(pulled.details.localSkillId); + expect(localSkill).not.toBeNull(); + expect(localSkill!.name).toContain("docker-compose-deploy"); + expect(fs.existsSync(path.join(localSkill!.dirPath, "SKILL.md"))).toBe(true); + expect(fs.existsSync(path.join(localSkill!.dirPath, "scripts", "deploy.sh"))).toBe(true); + expect(fs.existsSync(path.join(localSkill!.dirPath, "references", 
"docker-compose.yml"))).toBe(true); + } finally { + await teardownSkillSyncHarness(harness); + } + }); +}); + describe("Integration: evidence anti-writeback", () => { it("should not store evidence wrapper blocks in memory", async () => { plugin.onConversationTurn([ From 2f5aa15b176c353a899c88c0420a0db86ac4cf19 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 16:48:44 +0800 Subject: [PATCH 16/85] feat(memos-local): finish v4 tool registration --- apps/memos-local-openclaw/index.ts | 121 ++-- apps/memos-local-openclaw/plugin-impl.ts | 554 +----------------- .../tests/integration.test.ts | 113 ++++ .../tests/plugin-impl-access.test.ts | 25 + 4 files changed, 222 insertions(+), 591 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index d0297991d..f3fb34292 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -18,7 +18,7 @@ import { captureMessages, stripInboundMetadata } from "./src/capture"; import { DEFAULTS } from "./src/types"; import { ViewerServer } from "./src/viewer/server"; import { HubServer } from "./src/hub/server"; -import { hubGetMemoryDetail, hubRequestJson, hubSearchSkills, resolveHubClient } from "./src/client/hub"; +import { hubGetMemoryDetail, hubRequestJson, hubSearchMemories, hubSearchSkills, resolveHubClient } from "./src/client/hub"; import { getHubStatus } from "./src/client/connector"; import { fetchHubSkillBundle, publishSkillBundleToHub, restoreSkillBundleFromHub } from "./src/client/skill-sync"; import { SkillEvolver } from "./src/skill/evolver"; @@ -254,53 +254,107 @@ const memosLocalPlugin = { maxResults: Type.Optional(Type.Number({ description: "Max results (default 20, max 20)" })), minScore: Type.Optional(Type.Number({ description: "Min score 0-1 (default 0.45, floor 0.35)" })), role: Type.Optional(Type.String({ description: "Filter by role: 'user', 'assistant', or 'tool'. Use 'user' to find what the user said." 
})), + scope: Type.Optional(Type.String({ description: "Search scope: 'local' (default), 'group', or 'all'. Group/all return split local and hub results." })), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for tests or manual routing" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for tests" })), }), execute: trackTool("memory_search", async (_toolCallId: any, params: any) => { - const { query, minScore, role } = params as { + const { query, maxResults, minScore, role, scope, hubAddress, userToken } = params as { query: string; maxResults?: number; minScore?: number; role?: string; + scope?: string; + hubAddress?: string; + userToken?: string; }; + const searchScope = scope === "group" || scope === "all" ? scope : "local"; + const searchLimit = Math.min(maxResults ?? 20, 20); const agentId = (params as any).agentId ?? "main"; const ownerFilter = [`agent:${agentId}`, "public"]; - ctx.log.debug(`memory_search query="${query}" minScore=${minScore ?? 0.45} role=${role ?? "all"} owner=agent:${agentId}`); - const result = await engine.search({ query, maxResults: 20, minScore, role, ownerFilter }); + ctx.log.debug(`memory_search query="${query}" minScore=${minScore ?? 0.45} role=${role ?? "all"} owner=agent:${agentId} scope=${searchScope}`); + const result = await engine.search({ query, maxResults: searchLimit, minScore, role, ownerFilter }); ctx.log.debug(`memory_search raw candidates: ${result.hits.length}`); - if (result.hits.length === 0) { + if (result.hits.length === 0 && searchScope === "local") { return { content: [{ type: "text", text: result.meta.note ?? "No relevant memories found." 
}], details: { meta: result.meta }, }; } - // LLM relevance + sufficiency filtering let filteredHits = result.hits; let sufficient = false; - const candidates = result.hits.map((h, i) => ({ - index: i + 1, - summary: h.summary, - role: h.source.role, - })); - - const filterResult = await summarizer.filterRelevant(query, candidates); - if (filterResult !== null) { - sufficient = filterResult.sufficient; - if (filterResult.relevant.length > 0) { - const indexSet = new Set(filterResult.relevant); - filteredHits = result.hits.filter((_, i) => indexSet.has(i + 1)); - ctx.log.debug(`memory_search LLM filter: ${result.hits.length} → ${filteredHits.length} hits, sufficient=${sufficient}`); - } else { - return { - content: [{ type: "text", text: "No relevant memories found for this query." }], - details: { meta: result.meta }, - }; + if (result.hits.length > 0) { + const candidates = result.hits.map((h, i) => ({ + index: i + 1, + summary: h.summary, + role: h.source.role, + })); + + const filterResult = await summarizer.filterRelevant(query, candidates); + if (filterResult !== null) { + sufficient = filterResult.sufficient; + if (filterResult.relevant.length > 0) { + const indexSet = new Set(filterResult.relevant); + filteredHits = result.hits.filter((_, i) => indexSet.has(i + 1)); + ctx.log.debug(`memory_search LLM filter: ${result.hits.length} → ${filteredHits.length} hits, sufficient=${sufficient}`); + } else if (searchScope === "local") { + return { + content: [{ type: "text", text: "No relevant memories found for this query." 
}], + details: { meta: result.meta }, + }; + } else { + filteredHits = []; + } } } + const beforeDedup = filteredHits.length; + filteredHits = deduplicateHits(filteredHits); + ctx.log.debug(`memory_search dedup: ${beforeDedup} → ${filteredHits.length}`); + + const localDetailsHits = filteredHits.map((h) => { + let effectiveTaskId = h.taskId; + if (effectiveTaskId) { + const t = store.getTask(effectiveTaskId); + if (t && t.status === "skipped") effectiveTaskId = null; + } + return { + chunkId: h.ref.chunkId, + taskId: effectiveTaskId, + skillId: h.skillId, + role: h.source.role, + score: h.score, + }; + }); + + if (searchScope !== "local") { + const hub = await hubSearchMemories(store, ctx, { query, maxResults: searchLimit, scope: searchScope as any, hubAddress, userToken }).catch(() => ({ hits: [], meta: { totalCandidates: 0, searchedGroups: [], includedPublic: searchScope === "all" } })); + const localText = filteredHits.length > 0 + ? filteredHits.map((h, i) => { + const excerpt = h.original_excerpt.length > 220 ? h.original_excerpt.slice(0, 217) + "..." : h.original_excerpt; + return `${i + 1}. [${h.source.role}] ${excerpt}`; + }).join("\n") + : "(none)"; + const hubText = hub.hits.length > 0 + ? hub.hits.map((h, i) => `${i + 1}. [${h.ownerName}] ${h.summary}${h.groupName ? ` (${h.groupName})` : ""}`).join("\n") + : "(none)"; + + return { + content: [{ + type: "text", + text: `Local results:\n${localText}\n\nHub results:\n${hubText}`, + }], + details: { + local: { hits: localDetailsHits, meta: result.meta }, + hub, + }, + }; + } + if (filteredHits.length === 0) { return { content: [{ type: "text", text: "No relevant memories found for this query." 
}], @@ -308,10 +362,6 @@ const memosLocalPlugin = { }; } - const beforeDedup = filteredHits.length; - filteredHits = deduplicateHits(filteredHits); - ctx.log.debug(`memory_search dedup: ${beforeDedup} → ${filteredHits.length}`); - const lines = filteredHits.map((h, i) => { const excerpt = h.original_excerpt.length > 300 ? h.original_excerpt.slice(0, 297) + "..." @@ -356,20 +406,7 @@ const memosLocalPlugin = { }, ], details: { - hits: filteredHits.map((h) => { - let effectiveTaskId = h.taskId; - if (effectiveTaskId) { - const t = store.getTask(effectiveTaskId); - if (t && t.status === "skipped") effectiveTaskId = null; - } - return { - chunkId: h.ref.chunkId, - taskId: effectiveTaskId, - skillId: h.skillId, - role: h.source.role, - score: h.score, - }; - }), + hits: localDetailsHits, meta: result.meta, }, }; diff --git a/apps/memos-local-openclaw/plugin-impl.ts b/apps/memos-local-openclaw/plugin-impl.ts index b4f6c9ec2..8e5f54130 100644 --- a/apps/memos-local-openclaw/plugin-impl.ts +++ b/apps/memos-local-openclaw/plugin-impl.ts @@ -1,552 +1,8 @@ /** - * MemOS Local Plugin Implementation — loaded by index.ts after ensuring deps. + * MemOS Local Plugin Implementation. + * + * Keep this file as a thin re-export so tests and loaders that import + * `plugin-impl` always get the canonical plugin definition from `index.ts`. 
*/ -import type { OpenClawPluginApi } from "openclaw/plugin-sdk"; -import { Type } from "@sinclair/typebox"; -import { buildContext } from "./src/config"; -import { SqliteStore } from "./src/storage/sqlite"; -import { Embedder } from "./src/embedding"; -import { IngestWorker } from "./src/ingest/worker"; -import { RecallEngine } from "./src/recall/engine"; -import { captureMessages } from "./src/capture"; -import { DEFAULTS } from "./src/types"; -import { ViewerServer } from "./src/viewer/server"; -import { HubServer } from "./src/hub/server"; - -function ownerFilterFor(agentId: string | undefined): string[] { - const resolvedAgentId = agentId && agentId.trim().length > 0 ? agentId : "main"; - return [`agent:${resolvedAgentId}`, "public"]; -} - -const pluginConfigSchema = { - type: "object" as const, - additionalProperties: true, - properties: { - embedding: { - type: "object" as const, - properties: { - provider: { type: "string" as const }, - endpoint: { type: "string" as const }, - apiKey: { type: "string" as const }, - model: { type: "string" as const }, - }, - }, - summarizer: { - type: "object" as const, - properties: { - provider: { type: "string" as const }, - endpoint: { type: "string" as const }, - apiKey: { type: "string" as const }, - model: { type: "string" as const }, - temperature: { type: "number" as const }, - }, - }, - viewerPort: { type: "number" as const }, - telemetry: { - type: "object" as const, - description: "Anonymous usage analytics (opt-out). No memory content or personal data is ever sent.", - properties: { - enabled: { - type: "boolean" as const, - description: "Enable anonymous telemetry (default: true). Set to false to opt-out.", - }, - }, - }, - }, -}; - -const memosLocalPlugin = { - id: "memos-local-openclaw-plugin", - name: "MemOS Local Memory", - description: - "Full-write local conversation memory with hybrid search (RRF + MMR + recency). 
" + - "Provides memory_search, memory_timeline, memory_get for progressive recall.", - kind: "memory" as const, - configSchema: pluginConfigSchema, - - register(api: OpenClawPluginApi) { - const pluginCfg = (api.pluginConfig ?? {}) as Record; - const stateDir = api.resolvePath("~/.openclaw"); - const ctx = buildContext(stateDir, process.cwd(), pluginCfg as any, { - debug: (msg: string) => api.logger.info(`[debug] ${msg}`), - info: (msg: string) => api.logger.info(msg), - warn: (msg: string) => api.logger.warn(msg), - error: (msg: string) => api.logger.warn(`[error] ${msg}`), - }); - - const store = new SqliteStore(ctx.config.storage!.dbPath!, ctx.log); - const embedder = new Embedder(ctx.config.embedding, ctx.log); - const worker = new IngestWorker(store, embedder, ctx); - const engine = new RecallEngine(store, embedder, ctx); - const evidenceTag = ctx.config.capture?.evidenceWrapperTag ?? DEFAULTS.evidenceWrapperTag; - - api.logger.info(`memos-local: initialized (db: ${ctx.config.storage!.dbPath})`); - - // ─── Tool: memory_search ─── - - api.registerTool( - { - name: "memory_search", - label: "Memory Search", - description: - "Search stored conversation memories. Returns summary, original_excerpt (evidence), score, and ref. " + - "Default: top 6, minScore 0.45. Increase maxResults to 12/20 or lower minScore to 0.35 if needed.", - parameters: Type.Object({ - query: Type.String({ description: "Natural language search query" }), - maxResults: Type.Optional(Type.Number({ description: "Max results (default 6, max 20)" })), - minScore: Type.Optional(Type.Number({ description: "Min score 0-1 (default 0.45, floor 0.35)" })), - }), - async execute(_toolCallId, params, context) { - const { query, maxResults, minScore } = params as { - query: string; - maxResults?: number; - minScore?: number; - }; - - const agentId = (context as any)?.agentId ?? 
"main"; - const ownerFilter = ownerFilterFor(agentId); - const result = await engine.search({ query, maxResults, minScore, ownerFilter }); - - if (result.hits.length === 0) { - return { - content: [{ type: "text", text: result.meta.note ?? "No relevant memories found." }], - details: { meta: result.meta }, - }; - } - - const roleLabel = (r: string) => r === "user" ? "[USER said]" : r === "assistant" ? "[ASSISTANT replied]" : r === "tool" ? "[TOOL returned]" : `[${r.toUpperCase()}]`; - - const text = result.hits - .map( - (h, i) => - `${i + 1}. ${roleLabel(h.source.role)} [score=${h.score}] ${h.summary}\n Evidence: ${h.original_excerpt.slice(0, 200)}`, - ) - .join("\n\n"); - - return { - content: [ - { - type: "text", - text: `Found ${result.hits.length} memories (minScore=${result.meta.usedMinScore}):\n\n${text}`, - }, - ], - details: { - hits: result.hits.map((h) => ({ - role: h.source.role, - summary: h.summary, - original_excerpt: h.original_excerpt, - ref: h.ref, - score: h.score, - source: h.source, - })), - meta: result.meta, - }, - }; - }, - }, - { name: "memory_search" }, - ); - - // ─── Tool: memory_timeline ─── - - api.registerTool( - { - name: "memory_timeline", - label: "Memory Timeline", - description: - "Get neighboring context around a memory ref. 
Use after memory_search to expand context.", - parameters: Type.Object({ - sessionKey: Type.String({ description: "From search hit ref.sessionKey" }), - chunkId: Type.String({ description: "From search hit ref.chunkId" }), - turnId: Type.String({ description: "From search hit ref.turnId" }), - seq: Type.Number({ description: "From search hit ref.seq" }), - window: Type.Optional(Type.Number({ description: "Context window ±N (default 2)" })), - }), - async execute(_toolCallId, params, context) { - const { sessionKey, chunkId, turnId, seq, window: win } = params as { - sessionKey: string; - chunkId: string; - turnId: string; - seq: number; - window?: number; - }; - - const agentId = (context as any)?.agentId ?? "main"; - const ownerFilter = ownerFilterFor(agentId); - const w = win ?? DEFAULTS.timelineWindowDefault; - const anchorChunk = store.getChunkForOwners(chunkId, ownerFilter); - if (!anchorChunk) { - return { - content: [{ type: "text", text: "Timeline (0 entries):\n\n" }], - details: { entries: [], anchorRef: { sessionKey, chunkId, turnId, seq } }, - }; - } - const neighbors = store.getNeighborChunks(sessionKey, turnId, seq, w, ownerFilter); - const anchorTs = anchorChunk?.createdAt ?? 0; - - const entries = neighbors.map((chunk) => { - let relation: "before" | "current" | "after" = "before"; - if (chunk.id === chunkId) relation = "current"; - else if (chunk.createdAt > anchorTs) relation = "after"; - - return { - relation, - role: chunk.role, - excerpt: chunk.content.slice(0, DEFAULTS.excerptMaxChars), - ts: chunk.createdAt, - }; - }); - - const rl = (r: string) => r === "user" ? "USER" : r === "assistant" ? 
"ASSISTANT" : r.toUpperCase(); - const text = entries - .map((e) => `[${e.relation}] ${rl(e.role)}: ${e.excerpt.slice(0, 150)}`) - .join("\n"); - - return { - content: [{ type: "text", text: `Timeline (${entries.length} entries):\n\n${text}` }], - details: { entries, anchorRef: { sessionKey, chunkId, turnId, seq } }, - }; - }, - }, - { name: "memory_timeline" }, - ); - - // ─── Tool: memory_get ─── - - api.registerTool( - { - name: "memory_get", - label: "Memory Get", - description: - "Get full original text of a memory chunk. Use to verify exact details from a search hit.", - parameters: Type.Object({ - chunkId: Type.String({ description: "From search hit ref.chunkId" }), - maxChars: Type.Optional( - Type.Number({ description: `Max chars (default ${DEFAULTS.getMaxCharsDefault}, max ${DEFAULTS.getMaxCharsMax})` }), - ), - }), - async execute(_toolCallId, params, context) { - const { chunkId, maxChars } = params as { chunkId: string; maxChars?: number }; - const limit = Math.min(maxChars ?? DEFAULTS.getMaxCharsDefault, DEFAULTS.getMaxCharsMax); - - const agentId = (context as any)?.agentId ?? "main"; - const chunk = store.getChunkForOwners(chunkId, ownerFilterFor(agentId)); - if (!chunk) { - return { - content: [{ type: "text", text: `Chunk not found: ${chunkId}` }], - details: { error: "not_found" }, - }; - } - - const content = chunk.content.length > limit - ? chunk.content.slice(0, limit) + "…" - : chunk.content; - - const who = chunk.role === "user" ? "USER said" : chunk.role === "assistant" ? "ASSISTANT replied" : chunk.role === "tool" ? 
"TOOL returned" : chunk.role.toUpperCase(); - - return { - content: [{ type: "text", text: `[${who}] (session: ${chunk.sessionKey})\n\n${content}` }], - details: { - ref: { sessionKey: chunk.sessionKey, chunkId: chunk.id, turnId: chunk.turnId, seq: chunk.seq }, - source: { ts: chunk.createdAt, role: chunk.role, sessionKey: chunk.sessionKey }, - }, - }; - }, - }, - { name: "memory_get" }, - ); - - // ─── Tool: memory_viewer ─── - - const viewerPort = (pluginCfg as any).viewerPort ?? 18799; - - api.registerTool( - { - name: "memory_viewer", - label: "Open Memory Viewer", - description: - "Open the MemOS Memory Viewer web dashboard. Returns the URL the user can open in their browser to visually browse, search, and manage all stored memories.", - parameters: Type.Object({}), - async execute() { - const url = `http://127.0.0.1:${viewerPort}`; - return { - content: [ - { - type: "text", - text: [ - `MemOS Memory Viewer: ${url}`, - "", - "Open this URL in your browser to:", - "- Browse all stored memories with a clean timeline view", - "- Semantic search (powered by your embedding model)", - "- Create, edit, and delete memories", - "- Filter by session, role, and time range", - "", - "First visit requires setting a password to protect your data.", - ].join("\n"), - }, - ], - details: { viewerUrl: url }, - }; - }, - }, - { name: "memory_viewer" }, - ); - - // ─── Tool: memory_write_public ─── - - api.registerTool( - { - name: "memory_write_public", - label: "Write Public Memory", - description: - "Write a piece of information to public memory. Public memories are visible to all agents during memory_search. 
" + - "Use this for shared knowledge, team decisions, or cross-agent coordination information.", - parameters: Type.Object({ - content: Type.String({ description: "The content to write to public memory" }), - summary: Type.Optional(Type.String({ description: "Optional short summary of the content" })), - }), - async execute(_toolCallId, params) { - const { content, summary } = params as { content: string; summary?: string }; - if (!content || !content.trim()) { - return { content: [{ type: "text", text: "Content cannot be empty." }] }; - } - - const { v4: uuidv4 } = await import("uuid"); - const now = Date.now(); - const chunkId = uuidv4(); - const chunkSummary = summary ?? content.slice(0, 200); - - store.insertChunk({ - id: chunkId, - sessionKey: "public", - turnId: `public-${now}`, - seq: 0, - role: "assistant", - content: content.trim(), - kind: "paragraph", - summary: chunkSummary, - embedding: null, - taskId: null, - skillId: null, - owner: "public", - dedupStatus: "active", - dedupTarget: null, - dedupReason: null, - mergeCount: 0, - lastHitAt: null, - mergeHistory: "[]", - createdAt: now, - updatedAt: now, - }); - - try { - const [emb] = await embedder.embed([chunkSummary]); - if (emb) store.upsertEmbedding(chunkId, emb); - } catch (err) { - api.logger.warn(`memos-local: public memory embedding failed: ${err}`); - } - - return { - content: [{ type: "text", text: `Public memory written successfully (id: ${chunkId}).` }], - details: { chunkId, owner: "public" }, - }; - }, - }, - { name: "memory_write_public" }, - ); - - // ─── Tool: skill_search ─── - - api.registerTool( - { - name: "skill_search", - label: "Skill Search", - description: - "Search available skills by natural language. Searches your own skills, public skills, or both. 
" + - "Use when you need a capability or guide and don't have a matching skill at hand.", - parameters: Type.Object({ - query: Type.String({ description: "Natural language description of the needed skill" }), - scope: Type.Optional(Type.String({ description: "Search scope: 'mix' (default, self + public), 'self' (own only), 'public' (public only)" })), - }), - async execute(_toolCallId, params, context) { - const { query, scope: rawScope } = params as { query: string; scope?: string }; - const scope = (rawScope === "self" || rawScope === "public") ? rawScope : "mix"; - const agentId = (context as any)?.agentId ?? "main"; - const currentOwner = `agent:${agentId}`; - - const hits = await engine.searchSkills(query, scope as any, currentOwner); - - if (hits.length === 0) { - return { - content: [{ type: "text", text: `No relevant skills found for: "${query}" (scope: ${scope})` }], - details: { query, scope, hits: [] }, - }; - } - - const text = hits.map((h, i) => - `${i + 1}. [${h.name}] ${h.description.slice(0, 150)}${h.visibility === "public" ? 
" (public)" : ""}`, - ).join("\n"); - - return { - content: [{ type: "text", text: `Found ${hits.length} skills:\n\n${text}` }], - details: { query, scope, hits }, - }; - }, - }, - { name: "skill_search" }, - ); - - // ─── Tool: skill_publish ─── - - api.registerTool( - { - name: "skill_publish", - label: "Publish Skill", - description: "Make a skill public so other agents can discover and install it via skill_search.", - parameters: Type.Object({ - skillId: Type.String({ description: "The skill ID to publish" }), - }), - async execute(_toolCallId, params) { - const { skillId } = params as { skillId: string }; - const skill = store.getSkill(skillId); - if (!skill) { - return { content: [{ type: "text", text: `Skill not found: ${skillId}` }] }; - } - store.setSkillVisibility(skillId, "public"); - return { - content: [{ type: "text", text: `Skill "${skill.name}" is now public.` }], - details: { skillId, name: skill.name, visibility: "public" }, - }; - }, - }, - { name: "skill_publish" }, - ); - - // ─── Tool: skill_unpublish ─── - - api.registerTool( - { - name: "skill_unpublish", - label: "Unpublish Skill", - description: "Make a skill private. 
Other agents will no longer be able to discover it.", - parameters: Type.Object({ - skillId: Type.String({ description: "The skill ID to unpublish" }), - }), - async execute(_toolCallId, params) { - const { skillId } = params as { skillId: string }; - const skill = store.getSkill(skillId); - if (!skill) { - return { content: [{ type: "text", text: `Skill not found: ${skillId}` }] }; - } - store.setSkillVisibility(skillId, "private"); - return { - content: [{ type: "text", text: `Skill "${skill.name}" is now private.` }], - details: { skillId, name: skill.name, visibility: "private" }, - }; - }, - }, - { name: "skill_unpublish" }, - ); - - // ─── Auto-capture: write conversation to memory after each agent turn ─── - - api.on("agent_end", async (event) => { - if (!event.success || !event.messages || event.messages.length === 0) return; - - try { - const agentId = (event as any).agentId ?? "main"; - const owner = `agent:${agentId}`; - - const msgs: Array<{ role: string; content: string; toolName?: string }> = []; - for (const msg of event.messages) { - if (!msg || typeof msg !== "object") continue; - const m = msg as Record; - const role = m.role as string; - if (role !== "user" && role !== "assistant" && role !== "tool") continue; - - let text = ""; - if (typeof m.content === "string") { - text = m.content; - } else if (Array.isArray(m.content)) { - for (const block of m.content) { - if (block && typeof block === "object" && (block as any).type === "text") { - text += (block as any).text + "\n"; - } - } - } - - if (!text.trim()) continue; - - const toolName = role === "tool" - ? (m.name as string) ?? (m.toolName as string) ?? (m.tool_call_id ? "unknown" : undefined) - : undefined; - - msgs.push({ role, content: text.trim(), toolName }); - } - - if (msgs.length === 0) return; - - const sessionKey = (event as any).sessionKey ?? 
"default"; - const turnId = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; - const captured = captureMessages(msgs, sessionKey, turnId, evidenceTag, ctx.log, owner); - if (captured.length > 0) { - worker.enqueue(captured); - } - } catch (err) { - api.logger.warn(`memos-local: capture failed: ${String(err)}`); - } - }); - - // ─── Memory Viewer (web UI) ─── - - const viewer = new ViewerServer({ - store, - embedder, - port: viewerPort, - log: ctx.log, - dataDir: stateDir, - ctx, - }); - - const hubServer = ctx.config.sharing?.enabled && ctx.config.sharing.role === "hub" - ? new HubServer({ store, log: ctx.log, config: ctx.config, dataDir: stateDir }) - : null; - - // ─── Service lifecycle ─── - - api.registerService({ - id: "memos-local-openclaw-plugin", - start: async () => { - if (hubServer) { - const hubUrl = await hubServer.start(); - api.logger.info(`memos-local: hub started at ${hubUrl}`); - } - try { - const viewerUrl = await viewer.start(); - api.logger.info(`memos-local: started (embedding: ${embedder.provider})`); - api.logger.info(`╔══════════════════════════════════════════╗`); - api.logger.info(`║ MemOS Memory Viewer ║`); - api.logger.info(`║ → ${viewerUrl.padEnd(37)}║`); - api.logger.info(`║ Open in browser to manage memories ║`); - api.logger.info(`╚══════════════════════════════════════════╝`); - api.logger.info(`memos-local: password reset token: ${viewer.getResetToken()}`); - api.logger.info(`memos-local: forgot password? 
Use the reset token on the login page.`); - } catch (err) { - api.logger.warn(`memos-local: viewer failed to start: ${err}`); - api.logger.info(`memos-local: started (embedding: ${embedder.provider})`); - } - }, - stop: async () => { - await hubServer?.stop(); - viewer.stop(); - await worker.flush(); - store.close(); - api.logger.info("memos-local: stopped"); - }, - }); - }, -}; - -export default memosLocalPlugin; +export { default } from "./index"; diff --git a/apps/memos-local-openclaw/tests/integration.test.ts b/apps/memos-local-openclaw/tests/integration.test.ts index 9c5b6da43..0be904a9d 100644 --- a/apps/memos-local-openclaw/tests/integration.test.ts +++ b/apps/memos-local-openclaw/tests/integration.test.ts @@ -708,6 +708,119 @@ describe("Integration: owner isolation for initPlugin tools", () => { }); }); +describe("Integration: root plugin memory_search network scope", () => { + async function setupRootMemorySearchHarness() { + const clientDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-root-search-client-")); + const hubDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-root-search-hub-")); + const port = 19600 + Math.floor(Math.random() * 1000); + const hubStore = new SqliteStore(path.join(hubDir, "hub.db"), noopLog as any); + const hubServer = new HubServer({ + store: hubStore, + log: noopLog as any, + config: { + sharing: { + enabled: true, + role: "hub", + hub: { + port, + teamName: "Root Search Test", + teamToken: "root-search-secret", + }, + }, + } as any, + dataDir: hubDir, + } as any); + + await hubServer.start(); + const authState = JSON.parse(fs.readFileSync(path.join(hubDir, "hub-auth.json"), "utf8")); + const userToken = authState.bootstrapAdminToken as string; + const userId = authState.bootstrapAdminUserId as string; + + hubStore.upsertHubTask({ + id: "hub-task-root-search-1", + sourceTaskId: "task-root-search-1", + sourceUserId: userId, + title: "Hub rollout", + summary: "Hub rollout checklist", + groupId: null, + visibility: "public", + 
createdAt: 1, + updatedAt: 1, + }); + hubStore.upsertHubChunk({ + id: "hub-chunk-root-search-1", + hubTaskId: "hub-task-root-search-1", + sourceTaskId: "task-root-search-1", + sourceChunkId: "chunk-root-search-1", + sourceUserId: userId, + role: "assistant", + content: "Public rollout checklist from hub with nginx canary validation.", + summary: "Hub rollout checklist", + kind: "paragraph", + createdAt: 2, + }); + + const { tools, service } = makePluginApi(clientDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: `127.0.0.1:${port}`, + userToken, + }, + }, + telemetry: { enabled: false }, + }); + + const clientStore = new SqliteStore(path.join(clientDir, "memos-local", "memos.db"), noopLog as any); + clientStore.insertTask({ + id: "task-root-local-1", + sessionKey: "session-root-local", + title: "Local rollout", + summary: "Local rollout checklist", + status: "completed", + owner: "agent:main", + startedAt: 1, + endedAt: 2, + updatedAt: 2, + }); + clientStore.insertChunk(makeTaskChunk({ + id: "chunk-root-local-1", + sessionKey: "session-root-local", + turnId: "turn-root-local", + content: "Local rollout checklist with smoke tests and deploy validation.", + summary: "Local rollout checklist", + taskId: "task-root-local-1", + })); + + return { clientDir, hubDir, tools, service, clientStore, hubStore, hubServer }; + } + + async function teardownRootMemorySearchHarness(harness: Awaited>) { + await harness.service?.stop?.(); + harness.clientStore.close(); + await harness.hubServer.stop(); + harness.hubStore.close(); + fs.rmSync(harness.clientDir, { recursive: true, force: true }); + fs.rmSync(harness.hubDir, { recursive: true, force: true }); + } + + it("root memory_search should return split local and hub results for scope=all", async () => { + const harness = await setupRootMemorySearchHarness(); + try { + const searchTool = harness.tools.get("memory_search"); + expect(searchTool).toBeDefined(); + + const result = await 
searchTool.execute("call-root-search", { query: "rollout checklist", scope: "all", maxResults: 5 }, { agentId: "main" }); + expect(result.details.local.hits.length).toBeGreaterThan(0); + expect(result.details.hub.hits.length).toBeGreaterThan(0); + expect(result.details.hub.hits[0].remoteHitId).toBeTruthy(); + } finally { + await teardownRootMemorySearchHarness(harness); + } + }); +}); + describe("Integration: task sharing MVP", () => { async function setupTaskSharingHarness(opts: { usePersistedConnection?: boolean; diff --git a/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts b/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts index db0471c9f..b713393ca 100644 --- a/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts +++ b/apps/memos-local-openclaw/tests/plugin-impl-access.test.ts @@ -128,6 +128,31 @@ describe("plugin-impl hub service skeleton", () => { }); }); +describe("plugin-impl v4 tool registration", () => { + it("should register the required v4 sharing tools", () => { + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-plugin-impl-tools-")); + const { tools, service } = makeApi(tmpDir, { + sharing: { + enabled: true, + role: "client", + client: { + hubAddress: "127.0.0.1:19999", + userToken: "test-token", + }, + }, + }); + + expect(service).toBeDefined(); + expect(tools.has("task_share")).toBe(true); + expect(tools.has("task_unshare")).toBe(true); + expect(tools.has("network_memory_detail")).toBe(true); + expect(tools.has("network_team_info")).toBe(true); + expect(tools.has("network_skill_pull")).toBe(true); + + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); +}); + describe("plugin-impl owner isolation", () => { let tmpDir: string; let tools: Map; From 5b5b1a23902c414c31e981fb813f5a3f3a38bcc6 Mon Sep 17 00:00:00 2001 From: jiaqian Date: Sun, 8 Mar 2026 17:30:12 +0800 Subject: [PATCH 17/85] feat(memos-local): complete T12 sharing viewer ui --- apps/memos-local-openclaw/src/hub/server.ts | 10 + 
apps/memos-local-openclaw/src/viewer/html.ts | 549 ++++++++++++++++-- .../memos-local-openclaw/src/viewer/server.ts | 289 +++++++++ .../tests/viewer-sharing.test.ts | 211 +++++++ .../tests/viewer-ui.test.ts | 17 + 5 files changed, 1028 insertions(+), 48 deletions(-) create mode 100644 apps/memos-local-openclaw/tests/viewer-sharing.test.ts create mode 100644 apps/memos-local-openclaw/tests/viewer-ui.test.ts diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index 561e10f6b..8ac2ec101 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -172,6 +172,16 @@ export class HubServer { return this.json(res, 200, { status: "active", token }); } + if (req.method === "POST" && path === "/api/v1/hub/admin/reject-user") { + const auth = this.authenticate(req); + if (!auth) return this.json(res, 401, { error: "unauthorized" }); + if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); + const body = await this.readJson(req); + const rejected = this.userManager.rejectUser(String(body.userId)); + if (!rejected) return this.json(res, 404, { error: "not_found" }); + return this.json(res, 200, { status: "rejected" }); + } + if (req.method === "POST" && path === "/api/v1/hub/tasks/share") { const auth = this.authenticate(req); if (!auth) return this.json(res, 401, { error: "unauthorized" }); diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index ae2db1c97..d5ab9375e 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -145,6 +145,42 @@ input,textarea,select{font-family:inherit;font-size:inherit} .search-bar input:focus{border-color:var(--pri);box-shadow:0 0 0 3px var(--pri-glow)} .search-bar .search-icon{position:absolute;left:14px;top:50%;transform:translateY(-50%);color:var(--text-muted);font-size:14px;pointer-events:none} 
.search-meta{font-size:12px;color:var(--text-sec);margin-bottom:14px;padding:0 2px} +.scope-select{padding:10px 12px;border:1px solid var(--border);border-radius:10px;background:var(--bg-card);color:var(--text);font-size:13px;min-width:110px;outline:none} +.sharing-inline-meta{font-size:12px;color:var(--text-muted);margin:-8px 0 14px 2px} +.sharing-sidebar-card{margin:14px 0 18px;border:1px solid var(--border);background:var(--bg-card);border-radius:12px;padding:12px;box-shadow:var(--shadow-sm)} +.sharing-sidebar-card .title{font-size:12px;font-weight:700;color:var(--text);margin-bottom:8px;text-transform:uppercase;letter-spacing:.04em} +.sharing-sidebar-card .status{font-size:13px;color:var(--text-sec);line-height:1.5} +.sharing-sidebar-card .status strong{color:var(--text)} +.sharing-sidebar-card .hint{margin-top:8px;font-size:11px;color:var(--text-muted)} +.result-section{margin-bottom:18px;border:1px solid var(--border);border-radius:14px;background:var(--bg-card);overflow:hidden} +.result-section-header{display:flex;justify-content:space-between;align-items:center;padding:12px 14px;border-bottom:1px solid var(--border);background:rgba(255,255,255,.02)} +.result-section-title{font-size:14px;font-weight:700;color:var(--text)} +.result-section-sub{font-size:12px;color:var(--text-muted)} +.search-hit-list{padding:12px;display:flex;flex-direction:column;gap:10px} +.search-hit-card,.hub-hit-card,.hub-skill-card{border:1px solid var(--border);border-radius:12px;background:var(--bg);padding:12px;box-shadow:var(--shadow-sm)} +.search-hit-card .summary,.hub-hit-card .summary,.hub-skill-card .summary{font-size:14px;font-weight:600;color:var(--text);margin-bottom:6px} +.search-hit-card .excerpt,.hub-hit-card .excerpt,.hub-skill-card .excerpt{font-size:12px;color:var(--text-sec);line-height:1.55;white-space:pre-wrap} +.search-hit-meta,.hub-hit-meta,.hub-skill-meta{display:flex;flex-wrap:wrap;gap:8px;margin-top:8px;font-size:11px;color:var(--text-muted)} 
+.meta-chip{display:inline-flex;align-items:center;gap:5px;padding:4px 8px;border:1px solid var(--border);border-radius:999px;background:var(--bg-card)} +.hub-hit-actions,.hub-skill-actions,.task-share-actions{display:flex;flex-wrap:wrap;gap:8px;margin-top:10px} +.sharing-settings-grid{display:grid;grid-template-columns:1.1fr .9fr;gap:18px} +.sharing-panel{border:1px solid var(--border);border-radius:14px;background:var(--bg-card);padding:14px;box-shadow:var(--shadow-sm)} +.sharing-panel h4{font-size:14px;font-weight:700;color:var(--text);margin:0 0 10px 0} +.sharing-panel .line{font-size:13px;color:var(--text-sec);margin-bottom:8px;line-height:1.55} +.sharing-panel .line strong{color:var(--text)} +.pending-user-list{display:flex;flex-direction:column;gap:10px} +.pending-user-card{border:1px solid var(--border);border-radius:12px;padding:12px;background:var(--bg)} +.pending-user-name{font-size:14px;font-weight:700;color:var(--text)} +.pending-user-meta{font-size:12px;color:var(--text-sec);margin-top:4px} +.pending-user-actions{display:flex;gap:8px;margin-top:10px} +.task-detail-actions{display:flex;align-items:center;gap:8px;flex-wrap:wrap} +.shared-memory-overlay,.shared-memory-overlay.show{display:none} +.shared-memory-overlay.show{display:flex;position:fixed;inset:0;align-items:center;justify-content:center;background:rgba(0,0,0,.55);z-index:1200;padding:24px} +.shared-memory-panel{width:min(860px,95vw);max-height:85vh;overflow:auto;border:1px solid var(--border);border-radius:18px;background:var(--bg-card);box-shadow:var(--shadow-lg);padding:20px} +.shared-memory-panel h3{font-size:18px;color:var(--text);margin-bottom:10px} +.shared-memory-panel .content{font-size:13px;color:var(--text-sec);line-height:1.7;white-space:pre-wrap;background:var(--bg);border:1px solid var(--border);border-radius:12px;padding:14px;margin-top:12px} +.hub-source-badge{display:inline-flex;align-items:center;gap:6px;padding:4px 
8px;border-radius:999px;background:rgba(34,197,94,.12);color:var(--green);font-size:11px;font-weight:700;border:1px solid rgba(34,197,94,.22)} +@media (max-width: 960px){.sharing-settings-grid{grid-template-columns:1fr}.search-bar{flex-wrap:wrap}.scope-select{width:100%}.task-detail-actions{width:100%;justify-content:flex-start}} .filter-bar{display:flex;gap:8px;margin-bottom:16px;flex-wrap:wrap} .filter-chip{padding:5px 14px;border:1px solid var(--border);border-radius:6px;background:transparent;color:var(--text-sec);font-size:12px;font-weight:500;transition:all .15s} @@ -240,6 +276,7 @@ input,textarea,select{font-family:inherit;font-size:inherit} .form-group textarea{min-height:100px;resize:vertical} .modal-actions{display:flex;gap:10px;justify-content:flex-end;margin-top:28px} + /* ─── Toast ─── */ .toast-container{position:fixed;top:80px;right:24px;z-index:1000;display:flex;flex-direction:column;gap:8px} .toast{padding:14px 20px;border-radius:10px;font-size:13px;font-weight:500;box-shadow:var(--shadow-lg);animation:slideIn .3s ease;display:flex;align-items:center;gap:10px;max-width:360px;border:1px solid} @@ -741,6 +778,11 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
Days
+
Sessions
@@ -751,8 +793,14 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+
@@ -809,7 +857,10 @@ input,textarea,select{font-family:inherit;font-size:inherit}

- +
+
+ +
@@ -819,7 +870,28 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+
+
+
+

Shared Memory

+ +
+
+
+
+
+
+ +
-Total Skills
@@ -843,6 +915,10 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+
+
Hub Skills
+
No hub skills loaded.
+
@@ -1073,6 +1149,22 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+
+

Hub & Team

+
+

\u{1F517} Hub Connection

+
Loading...
+
+
+

\u{1F465} Team & Groups

+
Loading...
+
+
+

\u{1F6E1} Admin Pending Users

+
Loading...
+
+
+
\u2713 Saved @@ -1250,7 +1342,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+ + diff --git a/apps/memos-local-openclaw/docs/index.html b/apps/memos-local-openclaw/docs/index.html new file mode 100644 index 000000000..3c8d401e2 --- /dev/null +++ b/apps/memos-local-openclaw/docs/index.html @@ -0,0 +1,552 @@ + + + + + +MemOS — OpenClaw 记忆插件文档 + + + + + + + + + +
+ + +
+ + + +
+ +
+
MemOS OpenClaw 插件MemOS OpenClaw Plugin
+

MemOS

+

+ OpenClaw 提供完全本地化的持久记忆、智能任务总结、技能自动进化和多智能体协同。npm 一键安装,支持分级模型配置。 + Fully local persistent memory, smart task summarization, auto skill evolution, and multi-agent collaboration for OpenClaw. One-command install, tiered model support. +

+
+ 完全本地化:数据存于本机 SQLite,零云依赖。Viewer 仅 127.0.0.1,密码保护。 + Fully local: Data in local SQLite, zero cloud dependency. Viewer 127.0.0.1 only, password-protected. +
+ +
+
💾

全量写入Full-Write

每次对话自动捕获,语义分片后持久化。Auto-captures every conversation, chunks semantically.

+

任务总结与技能进化Tasks & Skills

碎片对话归纳为结构化任务,再提炼为可复用技能并持续升级。Conversations organized into tasks, then distilled into skills that auto-upgrade.

+
🔍

混合检索Hybrid Search

FTS5 + 向量,RRF,MMR,时间衰减。FTS5 + vector, RRF, MMR, recency decay.

+
🧠

全量可视化Visualization

记忆/任务/技能/分析/日志/导入/设置 7 个管理页。7 pages: memories, tasks, skills, analytics, logs, import, settings.

+
💰

分级模型Tiered Models

Embedding/摘要/技能可独立配置不同模型。Each pipeline configurable with different models.

+
🤝

多智能体协同Multi-Agent

记忆隔离 + 公共记忆 + 技能共享,多 Agent 协同进化。Memory isolation + public memory + skill sharing for collective evolution.

+
🦐

原生记忆导入Native Memory Import

一键迁移 OpenClaw 内置记忆,智能去重、断点续传、实时进度。One-click migration from OpenClaw built-in memories with smart dedup, resume, and real-time progress.

+
🔗

LLM 智能降级LLM Fallback Chain

技能模型 → 摘要模型 → OpenClaw 原生模型三级自动降级,零手动干预。Skill model → summarizer → OpenClaw native model, auto-fallback with zero manual intervention.

+
✏️

任务/技能 CRUDTask & Skill CRUD

列表卡片直接编辑、删除、重试技能生成、切换可见性。Edit, delete, retry skill gen, toggle visibility — all from list cards.

+
+
+ +
+

系统架构Architecture

+

四条流水线:记忆写入 → 任务总结与技能进化(异步)→ 智能检索 → 协同共享。每个 Agent 拥有独立记忆空间,通过公共记忆和技能共享实现协同进化。Four pipelines: write → task & skill evolution (async) → retrieval → collaboration. Each agent has isolated memory; public memory and skill sharing enable collective evolution.

+ +
+
OpenClawagent_end
+
Capture
+
Ingestchunk→summary→embed→dedup
+
SQLite+FTS5
+
+
+
Task Processor异步 · 话题检测 → 摘要async · topic → summary
+
Skill Evolver异步 · 评估 → 生成/升级async · eval → create/up
+
+
+
before_agent_startauto-recall
+
RecallFTS+Vector
+
LLM filter
+
Inject context
+
+
+
Agentmemory_search
+
RRF→MMR→Decay
+
LLM filter
+
excerpts+chunkId/task_id
+
task_summary / skill_get / memory_timeline
+
+ +

数据流Data Flow

+

写入Write

+
    +
  1. agent_end → Capture → Chunk → LLM Summary → Embed → Dedup → Store
  2. +
  3. 异步:任务检测 → 任务摘要 → 技能评估 → 技能生成/升级Async: task detect → summary → skill eval → create/upgrade
  4. +
+

检索Read

+
    +
  1. 每轮自动:before_agent_start 用用户消息检索 → LLM 过滤相关 → 注入 system 上下文;无结果时提示 agent 自生成 query 调 memory_searchPer turn: before_agent_start searches with user message → LLM filters relevant → inject system context; if no hits, hint agent to call memory_search with self-generated query.
  2. +
  3. memory_search → FTS5+Vector → RRF → MMR → Decay → LLM filter → excerpts + chunkId/task_id(无 summary)
  4. +
  5. task_summary / skill_get(skillId|taskId) / memory_timeline(chunkId) / skill_install
  6. +
+
+ +
+

快速开始Quick Start

+
    +
  • Node.js ≥ 18
  • +
  • OpenClaw 已安装OpenClaw installed
  • +
  • Embedding / Summarizer API 可选,不配自动用本地模型Embedding / Summarizer APIs optional, falls back to local
  • +
+ +

Step 0:安装 C++ 编译工具(macOS / Linux 推荐)Step 0: Install C++ Build Tools (macOS / Linux recommended)

+

插件依赖 better-sqlite3 原生模块。macOS / Linux 用户建议先安装编译工具,可大幅提升安装成功率。Windows 用户使用 Node.js LTS 版本时通常有预编译文件,可直接跳到 Step 1。The plugin depends on better-sqlite3, a native C/C++ module. macOS / Linux users should install build tools first. Windows users with Node.js LTS usually have prebuilt binaries and can skip to Step 1.

+
# macOS
+xcode-select --install
+
+# Linux (Ubuntu / Debian)
+sudo apt install build-essential python3
+
+# Windows: 通常无需操作。如安装失败,安装 Visual Studio Build Tools:
+# https://visualstudio.microsoft.com/visual-cpp-build-tools/bash
+ +

Step 1:安装插件 & 启动Step 1: Install Plugin & Start

+
openclaw plugins install @memtensor/memos-local-openclaw-plugin
+openclaw gateway startbash
+ +
安装失败?最常见的问题是 better-sqlite3 原生模块编译失败。请确认已执行上方 Step 0,然后手动重建:cd ~/.openclaw/extensions/memos-local-openclaw-plugin && npm rebuild better-sqlite3。更多方案请查看 安装排查指南better-sqlite3 官方文档Install failed? The most common issue is better-sqlite3 compilation failure. Ensure Step 0 is done, then manually rebuild: cd ~/.openclaw/extensions/memos-local-openclaw-plugin && npm rebuild better-sqlite3. See the troubleshooting guide or official better-sqlite3 docs for more solutions.
+ +

升级Upgrade

+
openclaw plugins update memos-local-openclaw-plugin
+openclaw gateway stop && openclaw gateway startbash
+
升级自动完成依赖安装、旧版清理和原生模块编译,无需手动操作。如果 update 命令不可用,先删除旧目录再重新安装:rm -rf ~/.openclaw/extensions/memos-local-openclaw-plugin && openclaw plugins install @memtensor/memos-local-openclaw-plugin(记忆数据不受影响)。Upgrade automatically handles dependencies, legacy cleanup, and native module compilation. If update is unavailable, delete the old directory first: rm -rf ~/.openclaw/extensions/memos-local-openclaw-plugin && openclaw plugins install @memtensor/memos-local-openclaw-plugin (memory data is stored separately and won't be affected).
+ +

配置Configuration

+

两种方式:编辑 openclaw.json 或通过 Viewer 网页面板在线修改。支持分级模型。Two methods: edit openclaw.json or via Viewer web panel. Tiered models supported.

+
{
+  "plugins": {
+    "slots": { "memory": "memos-local-openclaw-plugin" },
+    "entries": { "memos-local-openclaw-plugin": {
+      "config": {
+        "embedding": {                           // lightweight
+          "provider": "openai_compatible",
+          "model": "bge-m3",
+          "endpoint": "https://your-api-endpoint/v1",
+          "apiKey": "sk-••••••"
+        },
+        "summarizer": {                          // mid-tier
+          "provider": "openai_compatible",
+          "model": "gpt-4o-mini",
+          "endpoint": "https://your-api-endpoint/v1",
+          "apiKey": "sk-••••••"
+        },
+        "skillEvolution": {
+          "summarizer": {                        // high-quality
+            "provider": "openai_compatible",
+            "model": "claude-4.6-opus",
+            "endpoint": "https://your-api-endpoint/v1",
+            "apiKey": "sk-••••••"
+          }
+        },
+        "recall": {                               // optional
+          "vectorSearchMaxChunks": 0   // 0=search all; set 200000–300000 only if slow on huge DB
+        },
+        "viewerPort": 18799
+      }
+    }}
+  }
+}json
+
安装后每次对话自动存入记忆。访问 http://127.0.0.1:18799 使用 Viewer。Every conversation auto-stored. Visit http://127.0.0.1:18799 for Viewer.
+
+ +
+

🦐 记忆迁移 — 再续前缘🦐 Memory Migration — Reconnect

+

将 OpenClaw 原生内置的记忆数据(SQLite 存储的对话历史)无缝迁移到 MemOS 的智能记忆系统。你和 AI 共同积累的每一段对话,都值得被记住。Seamlessly migrate OpenClaw's native built-in memory data (SQLite conversation history) to MemOS's intelligent memory system. Every conversation you've built with AI deserves to be remembered.

+ +
核心特性:一键导入 · 智能去重 · 断点续传 · 任务与技能生成 · 实时进度 · 🦐 标识导入来源Key Features: One-click import · Smart dedup · Resume anytime · Task & skill gen · Real-time progress · 🦐 source tagging
+ +

操作步骤Usage

+

方式一:通过 Viewer 网页面板(推荐)Method 1: Via Viewer Web Panel (Recommended)

+
    +
  1. 访问 http://127.0.0.1:18799,切换到 Import 页面。Visit http://127.0.0.1:18799, switch to the Import page.
  2. +
  3. 点击 扫描 OpenClaw 原生记忆,系统自动扫描 ~/.openclaw/ 下的 SQLite 数据库和 JSONL 日志。Click Scan OpenClaw Native Memories — the system auto-scans SQLite databases and JSONL logs under ~/.openclaw/.
  4. +
  5. 查看扫描结果(文件数、会话数、消息数),确认后点击 开始导入Review scan results (files, sessions, messages), then click Start Import.
  6. +
  7. 实时查看导入进度条、统计数据(已导入/跳过/合并/错误)和日志。Monitor real-time progress bar, stats (stored/skipped/merged/errors), and logs.
  8. +
+ +

方式二:通过 Agent 对话Method 2: Via Agent Chat

+

在与 OpenClaw 的对话中,直接让 AI 操作:In your conversation with OpenClaw, tell the AI:

+
// Example prompts
+"请帮我导入 OpenClaw 的原生记忆"
+"Import my OpenClaw native memories"text
+ +

方式三:通过 HTTP APIMethod 3: Via HTTP API

+
# 1. 扫描
+curl http://127.0.0.1:18799/api/migrate/scan
+
+# 2. 开始导入(SSE 流式进度)
+curl http://127.0.0.1:18799/api/migrate/start
+
+# 3. 停止导入
+curl -X POST http://127.0.0.1:18799/api/migrate/stopbash
+ +

后处理:任务与技能生成Post-Processing: Task & Skill Generation

+

导入完成后,可选择对导入的记忆进行后处理:After import, optionally post-process imported memories:

+
    +
  • 任务生成:自动检测会话中的任务边界,为每个会话生成结构化摘要(目标/步骤/结果)。Task generation: Auto-detect task boundaries per session, generate structured summaries (goal/steps/result).
  • +
  • 技能进化:从已完成的任务中提炼可复用技能,生成 SKILL.md 文件并安装到工作区。Skill evolution: Distill reusable skills from completed tasks, generate SKILL.md and install to workspace.
  • +
+

后处理在同一 Agent 内串行执行,不同 Agent 之间可并行(并发度可配置 1–8)。已处理过的会话自动跳过。支持选择只生成任务、只生成技能或两者同时执行。Post-processing runs serially within each agent, with parallel processing across agents (configurable concurrency 1–8). Already processed sessions are auto-skipped. Choose task-only, skill-only, or both.

+ +

断点续传Resume & Stop

+

导入和后处理均支持随时暂停:Both import and post-processing support pause/resume:

+
    +
  • 点击 停止 按钮后,进度自动保存。Click Stop, progress auto-saved.
  • +
  • 刷新页面后自动检测未完成的导入,恢复进度条显示。On page refresh, auto-detect incomplete imports and restore progress display.
  • +
  • 再次点击开始即从上次中断处继续,已处理的记忆自动跳过。Click start again to continue from where you left off — processed memories are auto-skipped.
  • +
  • 导入和后处理在后台运行,关闭 Viewer 页面不影响执行。Import and post-processing run in the background — closing the Viewer page won't interrupt them.
  • +
+ +
🦐 来源标识:所有通过迁移导入的记忆都带有 🦐 标识,在 Viewer 的记忆列表中可一眼区分原生导入和对话生成的记忆。🦐 Source Tag: All migrated memories are tagged with 🦐, making them visually distinguishable from conversation-generated memories in the Viewer.
+
+ +
+

模块Modules

+

Capture

+

过滤 system/self-tool,剥离 OpenClaw 元数据。保留 user/assistant/tool。Filter system/self-tool, strip metadata. Keep user/assistant/tool.

+

Ingest

+

异步队列:语义分片 → LLM 摘要 → 向量化 → 智能去重(Top-5 相似 + LLM 判 DUPLICATE/UPDATE/NEW,UPDATE 合并摘要并追加内容)→ 存储;演化块记录 merge_history。Async queue: chunk → summary → embed → smart dedup (Top-5 similar + LLM DUPLICATE/UPDATE/NEW; UPDATE merges summary and appends content) → store; evolved chunks track merge_history.

+

任务总结Task Summarization

+

异步逐轮检测任务边界:分组为用户回合 → 第一条直接分配 → 后续每条由 LLM 判断话题是否切换(强偏向 SAME,避免过度分割)→ 2h 超时强制切分 → 结构化摘要(目标/步骤/结果)。支持编辑、删除、重试技能生成。Async per-turn boundary detection: group into user turns → first turn assigned directly → each subsequent turn checked by LLM topic judge (strongly biased toward SAME to avoid over-splitting) → 2h timeout forces split → structured summary (goal/steps/result). Supports edit, delete, retry skill generation.

+

技能进化Skill Evolution

+

规则过滤 → LLM 评估(可重复/有价值的任务才生成技能)→ SKILL.md 生成(步骤/警告/脚本)/ 升级 → 质量评分 → 安装。LLM 使用三级降级链(技能模型 → 摘要模型 → OpenClaw 原生模型)。支持编辑、删除、设为公开/私有。Rule filter → LLM evaluate (only repeatable/valuable tasks generate skills) → SKILL.md (steps/warnings/scripts) / upgrade → score → install. LLM uses a 3-level fallback chain (skill model → summarizer → OpenClaw native model). Supports edit, delete, toggle visibility.

+

Recall

+

FTS5+Vector → RRF(k=60) → MMR(λ=0.7) → Decay(14d) → Normalize → Filter(≥0.45) → Top-K。自动关联 Task/Skill。FTS5+Vector → RRF(k=60) → MMR(λ=0.7) → Decay(14d) → Normalize → Filter(≥0.45) → Top-K. Auto-links Task/Skill.

+

Viewer

+

7 页:记忆 CRUD/搜索/演化标识、任务(对话气泡)、技能(版本/下载)、分析、日志(工具调用输入输出)、OpenClaw 原生记忆导入、在线配置。密码保护。7 pages: memory CRUD/search/evolution badges, tasks (chat bubbles), skills (versions/download), analytics, logs (tool call I/O), OpenClaw native memory import, online config. Password-protected.

+
+ +
+

检索算法Retrieval

+

RRF

+
\[ \text{RRF}(d) = \sum_i \frac{1}{k + \text{rank}_i(d) + 1} \]
+

MMR

+
\[ \text{MMR}(d) = \lambda \cdot \text{rel}(d) - (1-\lambda) \cdot \max \text{sim}(d, d_s) \]
+

时间衰减Recency

+
\[ \text{final} = \text{score} \times \bigl(0.3 + 0.7 \times 0.5^{t/14}\bigr) \]
+
+ +
+

API

+ +

query (required), maxResults (20), minScore (0.45), role. Returns excerpts(原文片段)+ chunkId / task_id,无 summary;经 LLM 相关性过滤。excerpts + chunkId/task_id, no summary; LLM relevance filter.

+

memory_get

+

获取记忆块完整原文。Get full original text of a memory chunk. chunkId, maxChars (optional).

+

memory_timeline

+

以 chunkId 为锚点的上下文邻居。Context neighbors by chunkId. chunkId, window (2).

+

task_summary

+

任务结构化摘要。Structured task summary. taskId or query.

+

skill_get / skill_install

+

skill_get 支持 skillId 或 taskId(按任务解析技能);skill_install 安装到工作区。skill_get accepts skillId or taskId; skill_install installs to workspace.

+

memory_write_public

+

写入公共记忆(owner="public"),所有 Agent 均可检索。Write public memory (owner="public"), discoverable by all agents. content (required), summary (optional).

+ +

搜索技能:FTS5 关键词 + 向量语义双通道,RRF 融合后经 LLM 判断相关性。Search skills via FTS5 + vector, RRF fusion, then LLM relevance judgment. query (required), scope ("mix" | "self" | "public", default "mix").

+

skill_publish / skill_unpublish

+

skill_publish 将技能设为公开,其他 Agent 可通过 skill_search 发现并安装。skill_unpublish 设为私有。skill_publish makes a skill public and discoverable via skill_search. skill_unpublish sets it private. skillId (required).

+

memory_viewer

+

返回 Viewer URL。Returns Viewer URL.

+

Viewer HTTP

+ + + + + + + + + + + + + + + + + + + +
MethodPath说明Description
GET/Memory Viewer HTML
POST/api/auth/*setup / login / reset / logout
GET/api/memories记忆列表(分页、过滤)Memory list (pagination, filters)
GET/api/search混合搜索(向量 minScore 0.64 + FTS5 降级)Hybrid search (vector minScore 0.64 + FTS5 fallback)
POST/PUT/DELETE/api/memory/:id记忆 CRUDMemory CRUD
GET/api/tasks任务列表(状态过滤)Task list (status filter)
GET/PUT/DELETE/api/task/:id任务详情/编辑/删除Task detail/edit/delete
POST/api/task/:id/retry-skill重试技能生成Retry skill generation
GET/api/skills技能列表Skill list
GET/PUT/DELETE/api/skill/:id技能详情/编辑/删除Skill detail/edit/delete
PUT/api/skill/:id/visibility设置公开/私有Set public/private
GET/api/skill/:id/download技能 ZIP 下载Download as ZIP
GET/api/stats, /api/metrics统计与分析Stats & metrics
GET/api/logs工具调用日志Tool call logs
GET/PUT/api/config在线配置Online configuration
GET/POST/api/migrate/*记忆导入(扫描/开始/停止/SSE 进度)Memory import (scan/start/stop/SSE)
POST/GET/api/migrate/postprocess/*后处理(任务/技能生成)Post-process (task/skill gen)
+
+ +
+

多智能体协同Multi-Agent Collaboration

+

MemOS 原生支持多 Agent 场景。每个 Agent 的记忆和任务通过 owner 字段隔离(格式 agent:{agentId}),检索时自动过滤为当前 Agent + public。MemOS natively supports multi-agent scenarios. Each agent's memories and tasks are isolated via an owner field (agent:{agentId}); retrieval automatically filters to current agent + public.

+
    +
  • 记忆隔离:Agent A 无法检索 Agent B 的私有记忆Memory Isolation: Agent A cannot retrieve Agent B's private memories
  • +
  • 公共记忆:通过 memory_write_public 写入 owner="public" 的记忆,所有 Agent 可检索Public Memory: Use memory_write_public to write owner="public" memories discoverable by all agents
  • +
  • 技能共享:通过 skill_publish 将技能设为公开,其他 Agent 可通过 skill_search 发现并安装Skill Sharing: Use skill_publish to make skills public; other agents discover and install via skill_search
  • +
  • 技能检索skill_search 支持 scope 参数(mix/self/public),FTS + 向量双通道 + RRF 融合 + LLM 相关性判断Skill Discovery: skill_search supports scope (mix/self/public), FTS + vector dual channel + RRF fusion + LLM relevance judgment
  • +
+
+ +
+

LLM 降级链LLM Fallback Chain

+

所有 LLM 调用(摘要、话题检测、去重、技能生成/升级)均使用三级自动降级机制:All LLM calls (summary, topic detection, dedup, skill generation/upgrade) use a 3-level automatic fallback chain:

+
+
skillSummarizer技能专用模型(可选)Skill-dedicated (optional)
+
summarizer通用摘要模型General summarizer
+
OpenClaw Native从 openclaw.json 读取Auto-detected from openclaw.json
+
+
    +
  • 每一级失败后自动尝试下一级,无需手动干预Each level auto-falls back to the next on failure, zero manual intervention
  • +
  • skillSummarizer 未配置时直接跳到 summarizerIf skillSummarizer is not configured, skips directly to summarizer
  • +
  • OpenClaw 原生模型从 ~/.openclaw/openclaw.jsonagents.defaults.model.primary 自动读取OpenClaw native model auto-detected from ~/.openclaw/openclaw.jsonagents.defaults.model.primary
  • +
  • 如果所有模型均失败,回退到规则方法(无 LLM)或跳过该步骤If all models fail, falls back to rule-based methods (no LLM) or skips the step
  • +
+
+ +
+

数据库Database

+

~/.openclaw/memos-local/memos.db, WAL. Tables: chunks (owner), chunks_fts, embeddings, tasks (owner), skills (owner, visibility), skill_versions, task_skills, skill_embeddings, skills_fts.

+
+ +
+

安全Security

+

Viewer 仅 127.0.0.1;密码 SHA-256;HttpOnly+SameSite Cookie;会话 24h;数据仅本地。127.0.0.1 only; SHA-256 password; HttpOnly+SameSite; 24h session; data stays local.

+
+ +
+

默认值Defaults

+ + + + + + + + + + + + + + + +
参数Parameter默认Default说明Description
maxResults6 (max 20)默认返回数Default result count
minScore (tool)0.45memory_search 最低分memory_search minimum
minScore (viewer)0.64Viewer 搜索向量阈值Viewer search vector threshold
rrfK60RRF 融合常数RRF fusion constant
mmrLambda0.7MMR 相关性 vs 多样性MMR relevance vs diversity
recencyHalfLife14d时间衰减半衰期Recency decay half-life
vectorSearchMaxChunks0 (all)0=搜索全部;大库可设 200k-300k0=search all; set 200k-300k for large DBs
dedup threshold0.75语义去重余弦相似度Semantic dedup cosine similarity
viewerPort18799Memory Viewer
taskIdle2h任务空闲超时Task idle timeout
topicJudgeWarmup1LLM 话题判断预热(用户消息数)LLM topic judge warm-up (user turns)
skillMinChunks6技能评估最小 chunk 数Min chunks for skill evaluation
importConcurrency1 (max 8)导入 Agent 并行度Import agent parallelism
+
+ +
+

MemOSMemOS MemOS — OpenClaw Plugin · Docs

+

首页Home · 安装排查指南Troubleshooting · npm · GitHub · MIT

+
+
+ + + + + + + + diff --git a/apps/memos-local-openclaw/docs/troubleshooting.html b/apps/memos-local-openclaw/docs/troubleshooting.html new file mode 100644 index 000000000..e48df69d8 --- /dev/null +++ b/apps/memos-local-openclaw/docs/troubleshooting.html @@ -0,0 +1,438 @@ + + + + + +MemOS Local — 安装排查指南 + + + + + + + +
+ +
+

MemOS Local — 安装排查指南

+

遇到安装问题?按以下步骤逐一排查

+

📦 better-sqlite3 官方排查文档  |  GitHub Issues

+
+ + + + +

1. 快速诊断命令

+ +

在终端依次运行以下命令,快速判断问题所在:

+ +
# 1) 插件目录是否存在
+ls ~/.openclaw/extensions/memos-local-openclaw-plugin/
+
+# 2) better-sqlite3 原生模块是否可用
+cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+node -e "require('better-sqlite3'); console.log('✔ better-sqlite3 OK')"
+
+# 3) 核心依赖是否完整
+node -e "['@sinclair/typebox','uuid','posthog-node'].forEach(d=>{try{require.resolve(d);console.log('✔',d)}catch{console.log('✖',d)}})"
+
+# 4) 运行 postinstall 脚本查看完整诊断
+node scripts/postinstall.cjs
+
+# 5) 查看 gateway 日志中的插件相关信息
+grep -i "memos\|plugin.*error\|plugin.*fail" /tmp/openclaw/openclaw-$(date +%Y-%m-%d).log
+ + +

2. 运行 postinstall 脚本

+ +

postinstall 脚本会自动检测并修复常见问题。进入插件目录后运行:

+ +
cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+node scripts/postinstall.cjs
+ +

正常输出应该包含三个阶段,每个都显示

+ +
─── Phase 0: 检测核心依赖 / Check core dependencies ───
+  @sinclair/typebox 
+  uuid 
+  posthog-node 
+  @huggingface/transformers 
+ All core dependencies present.
+
+─── Phase 1: 清理旧版本插件 / Clean up legacy plugins ───
+ No legacy plugin directories found. Clean.
+
+─── Phase 2: 检查 better-sqlite3 原生模块 / Check native module ───
+ better-sqlite3 is ready.
+
+✔ Setup complete!
+ +
+ ⚠ 如果 Phase 0 失败 +

缺少依赖通常是网络问题。手动安装:

+
cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+npm install --omit=dev
+
+ +
+ ⚠ 如果 Phase 2 失败 +

better-sqlite3 编译失败,参见下一节。

+
+ + +

3. better-sqlite3 编译失败

+ +

这是最常见的安装问题。better-sqlite3 是一个需要 C/C++ 编译的原生 Node.js 模块。如果以下步骤无法解决你的问题,请参考 better-sqlite3 官方排查文档 获取更多平台特定的解决方案。

+ +

错误表现

+
Error: Could not locate the bindings file. Tried:
+ → .../node_modules/better-sqlite3/build/better_sqlite3.node
+ → .../node_modules/better-sqlite3/build/Release/better_sqlite3.node
+ ...
+ +

解决步骤

+ +
+
+ 1 +
+

安装 C/C++ 编译工具

+
+
+
+ +
# macOS
+xcode-select --install
+
+# Ubuntu / Debian
+sudo apt install build-essential python3
+
+# Windows — 通常不需要!
+# better-sqlite3 对 Windows + Node.js LTS 提供预编译二进制文件,
+# 大部分情况下可直接安装成功。
+# 如果仍然失败,安装 Visual Studio Build Tools:
+# https://visualstudio.microsoft.com/visual-cpp-build-tools/
+# 安装时勾选 "C++ build tools" 工作负载
+ +
+
+ 2 +
+

重新编译 better-sqlite3

+
+
+
+ +
cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+npm rebuild better-sqlite3
+ +
+
+ 3 +
+

验证是否成功

+
+
+
+ +
node -e "require('better-sqlite3'); console.log('✔ OK')"
+ +
+
+ 4 +
+

重启 gateway

+
+
+
+ +
openclaw gateway stop && openclaw gateway start
+ +
+ 💡 Node.js 版本说明 +

如果使用非 LTS 版本的 Node.js(如 v25.x),better-sqlite3 可能没有预编译的二进制文件,必须从源码编译。确保已安装上述编译工具。

+

推荐使用 Node.js LTS 版本(v18.x 或 v20.x),这些版本有预编译的二进制文件,通常不需要本地编译。

+
+ +
+ 💡 更多排查资源 +

如果上述方法均无法解决,请查看以下资源:

+ +
+ + +

4. Plugin ID Mismatch 警告

+ +

错误表现

+
warn plugin id mismatch (manifest uses "memos-local-openclaw-plugin",
+     entry hints "memos-lite-openclaw-plugin")
+ +

原因

+

旧版本插件(memos-lite-*)的残留目录或配置未清理。

+ +

解决方法

+
# 运行 postinstall 脚本自动清理(推荐)
+cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+node scripts/postinstall.cjs
+
+# 或手动清理旧目录
+rm -rf ~/.openclaw/extensions/memos-lite
+rm -rf ~/.openclaw/extensions/memos-lite-openclaw-plugin
+ +

然后检查配置文件中是否有旧条目:

+
cat ~/.openclaw/openclaw.json | grep -i "memos-lite"
+ +

如果有,删除对应的配置条目,或直接运行 postinstall 脚本自动迁移。

+ + +

5. 插件加载失败 (register error)

+ +

错误表现

+
error [plugins] memos-local-openclaw-plugin failed during register:
+Error: Could not locate the bindings file.
+ +

解决方法

+

这几乎都是 better-sqlite3 的问题,按照第 3 节的步骤修复即可。

+ +

插件内置了自愈机制——启动时会自动尝试 npm rebuild better-sqlite3,但如果系统没有编译工具,自愈也会失败。

+ + +

6. Memory Viewer 页面报错

+ +

Scan failed: Cannot read properties of undefined

+

通常是新安装时数据库为空或 store 未初始化。升级到最新版本即可解决:

+
openclaw plugins update memos-local-openclaw-plugin
+ +

页面显示 undefined 或数据为空

+

尝试强制刷新浏览器缓存:Ctrl+Shift+R(macOS: Cmd+Shift+R

+ + +

7. 升级问题

+ +

升级命令(推荐)

+
openclaw plugins update memos-local-openclaw-plugin
+ +

升级过程会自动运行 postinstall 脚本,处理依赖安装、旧版清理和原生模块编译。

+ +

如果 update 不可用,重新安装

+
# 必须先删除旧目录,否则 install 会报 "plugin already exists"
+rm -rf ~/.openclaw/extensions/memos-local-openclaw-plugin
+openclaw plugins install @memtensor/memos-local-openclaw-plugin
+ +
+ 💡 为什么要先删除? +

OpenClaw 的 plugins install 命令检测到目标目录已存在时会直接拒绝安装,不会运行任何脚本。这是 OpenClaw 框架的安全机制,插件自身无法绕过。

+
+ +
+ ✔ 数据安全 +

升级不会删除已有的记忆数据。数据库位于 ~/.openclaw/memos-local/memos.db,独立于插件目录。

+
+ +

升级后 gateway 未加载新版本

+
openclaw gateway stop && openclaw gateway start
+ + +

8. 查看日志

+ +

Gateway 运行日志

+
# 查看当天完整日志
+cat /tmp/openclaw/openclaw-$(date +%Y-%m-%d).log
+
+# 只看插件相关
+grep -i "memos" /tmp/openclaw/openclaw-$(date +%Y-%m-%d).log
+
+# 只看错误
+grep -i "error\|fail\|warn" /tmp/openclaw/openclaw-$(date +%Y-%m-%d).log | grep -i "memos\|plugin"
+
+# 实时追踪(debug 用)
+tail -f /tmp/openclaw/openclaw-$(date +%Y-%m-%d).log | grep -i "memos"
+ +

重新启动并捕获完整启动日志

+
openclaw gateway stop
+openclaw gateway start 2>&1 | tee /tmp/gateway-debug.log
+

然后将 /tmp/gateway-debug.log 发给开发者排查。

+ +

postinstall 诊断日志

+
cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+node scripts/postinstall.cjs 2>&1 | tee /tmp/postinstall-debug.log
+ + +

9. 完全重装

+ +

如果以上方法都无法解决,可以完全重装(不会丢失记忆数据):

+ +
# 1) 卸载
+openclaw plugins uninstall memos-local-openclaw-plugin
+
+# 2) 确认旧目录已删除
+rm -rf ~/.openclaw/extensions/memos-local-openclaw-plugin
+rm -rf ~/.openclaw/extensions/memos-lite
+rm -rf ~/.openclaw/extensions/memos-lite-openclaw-plugin
+
+# 3) 重新安装
+openclaw plugins install @memtensor/memos-local-openclaw-plugin
+
+# 4) 重启 gateway
+openclaw gateway stop && openclaw gateway start
+ +
+ ✔ 数据保留 +

记忆数据存储在 ~/.openclaw/memos-local/memos.db,不在插件目录内,重装不会影响。

+
+ + +

10. 常见问题

+ +
+

Q: 安装时一直卡在 "Installing plugin dependencies..." 不动

+

这通常是 better-sqlite3 正在编译。首次编译可能需要 30-60 秒,取决于网络和机器性能。如果超过 2 分钟,按 Ctrl+C 中断,然后手动运行:

+
cd ~/.openclaw/extensions/memos-local-openclaw-plugin
+npm install --omit=dev
+npm rebuild better-sqlite3
+
+ +
+

Q: macOS 提示 "xcrun: error: invalid active developer path"

+

需要安装 Xcode 命令行工具:

+
xcode-select --install
+

安装完成后重新运行 npm rebuild better-sqlite3

+
+ +
+

Q: 升级后 Memory Viewer 显示异常

+

浏览器缓存了旧版本页面。强制刷新:Ctrl+Shift+R(macOS: Cmd+Shift+R)。

+
+ +
+

Q: 我的数据在哪?安全吗?

+

所有记忆数据存储在 ~/.openclaw/memos-local/memos.db(SQLite 文件),独立于插件安装目录。升级、重装插件都不会影响数据。

+

建议定期备份:

+
cp ~/.openclaw/memos-local/memos.db ~/memos-backup-$(date +%Y%m%d).db
+
+ +
+

Q: 如何确认插件版本?

+
cat ~/.openclaw/extensions/memos-local-openclaw-plugin/package.json | grep version
+
+ +
+

Q: 任务摘要/技能生成/去重 LLM 调用失败

+

所有 LLM 调用使用三级自动降级链:skillSummarizersummarizer → OpenClaw 原生模型。

+
    +
  • 检查 gateway 日志中的 failedtrying next 信息
  • +
  • 确认 API Key 和 Endpoint 配置正确
  • +
  • 如果所有模型都失败,功能会降级为规则方法或跳过
  • +
  • 可通过 Viewer → Settings 在线修改模型配置,保存后立即生效
  • +
+
+ +
+

Q: 任务划分不准确(过度切分或不切分)

+

任务边界检测使用逐轮 LLM 话题判断:

+
    +
  • 确认 summarizer 模型已正确配置且可用
  • +
  • 更强的 LLM 模型(如 GPT-4、Claude)会有更好的话题判断效果
  • +
  • 如果判断效果不理想,可尝试配置 skillSummarizer 使用更强的模型
  • +
  • 查看 gateway 日志中的 Topic judge 日志确认 LLM 是否被正确调用
  • +
+
+ +
+

Q: duplicate plugin id detected 警告

+

同一个 plugin ID 被多个目录加载。检查是否有重复的插件目录:

+
ls ~/.openclaw/extensions/ | grep memos
+

只保留 memos-local-openclaw-plugin,删除其他的:

+
rm -rf ~/.openclaw/extensions/memos-local  # 如果存在
+
+ + + +
+ + diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 50a2fe808..0cadf038e 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -23,7 +23,7 @@ import { ViewerServer } from "./src/viewer/server"; import { HubServer } from "./src/hub/server"; import { hubGetMemoryDetail, hubRequestJson, hubSearchMemories, hubSearchSkills, resolveHubClient } from "./src/client/hub"; import { getHubStatus, connectToHub } from "./src/client/connector"; -import { fetchHubSkillBundle, publishSkillBundleToHub, restoreSkillBundleFromHub } from "./src/client/skill-sync"; +import { fetchHubSkillBundle, publishSkillBundleToHub, restoreSkillBundleFromHub, unpublishSkillBundleFromHub } from "./src/client/skill-sync"; import { SkillEvolver } from "./src/skill/evolver"; import { SkillInstaller } from "./src/skill/installer"; import { Summarizer } from "./src/ingest/providers"; @@ -326,6 +326,89 @@ const memosLocalPlugin = { } }; + const getCurrentOwner = () => `agent:${currentAgentId}`; + const resolveMemorySearchScope = (scope?: string): "local" | "group" | "all" => + scope === "group" || scope === "all" ? scope : "local"; + const resolveMemoryShareTarget = (target?: string): "agents" | "hub" | "both" => + target === "hub" || target === "both" ? target : "agents"; + const resolveMemoryUnshareTarget = (target?: string): "agents" | "hub" | "all" => + target === "agents" || target === "hub" ? target : "all"; + const resolveSkillPublishTarget = (target?: string, scope?: string): "agents" | "hub" => { + if (target === "hub") return "hub"; + if (target === "agents") return "agents"; + return scope === "public" || scope === "group" ? "hub" : "agents"; + }; + const resolveSkillHubVisibility = (visibility?: string, scope?: string): "public" | "group" => + visibility === "group" || scope === "group" ? 
"group" : "public"; + const resolveSkillUnpublishTarget = (target?: string): "agents" | "hub" | "all" => + target === "hub" || target === "all" ? target : "agents"; + + const shareMemoryToHub = async ( + chunkId: string, + input?: { visibility?: "public" | "group"; groupId?: string; hubAddress?: string; userToken?: string }, + ): Promise<{ memoryId: string; visibility: "public" | "group"; groupId: string | null }> => { + const chunk = store.getChunk(chunkId); + if (!chunk) { + throw new Error(`Memory not found: ${chunkId}`); + } + + const visibility = input?.visibility === "group" ? "group" : "public"; + const groupId = visibility === "group" ? (input?.groupId ?? null) : null; + const hubClient = await resolveHubClient(store, ctx, { hubAddress: input?.hubAddress, userToken: input?.userToken }); + const response = await hubRequestJson(hubClient.hubUrl, hubClient.userToken, "/api/v1/hub/memories/share", { + method: "POST", + body: JSON.stringify({ + memory: { + sourceChunkId: chunk.id, + role: chunk.role, + content: chunk.content, + summary: chunk.summary, + kind: chunk.kind, + groupId, + visibility, + }, + }), + }) as { memoryId?: string; visibility?: "public" | "group" }; + + const now = Date.now(); + const existing = store.getHubMemoryBySource(hubClient.userId, chunk.id); + store.upsertHubMemory({ + id: response?.memoryId ?? existing?.id ?? `${chunk.id}-hub`, + sourceChunkId: chunk.id, + sourceUserId: hubClient.userId, + role: chunk.role, + content: chunk.content, + summary: chunk.summary ?? "", + kind: chunk.kind, + groupId, + visibility, + createdAt: existing?.createdAt ?? now, + updatedAt: now, + }); + + return { + memoryId: response?.memoryId ?? existing?.id ?? 
`${chunk.id}-hub`, + visibility, + groupId, + }; + }; + + const unshareMemoryFromHub = async ( + chunkId: string, + input?: { hubAddress?: string; userToken?: string }, + ): Promise => { + const chunk = store.getChunk(chunkId); + if (!chunk) { + throw new Error(`Memory not found: ${chunkId}`); + } + const hubClient = await resolveHubClient(store, ctx, { hubAddress: input?.hubAddress, userToken: input?.userToken }); + await hubRequestJson(hubClient.hubUrl, hubClient.userToken, "/api/v1/hub/memories/unshare", { + method: "POST", + body: JSON.stringify({ sourceChunkId: chunk.id }), + }); + store.deleteHubMemoryBySource(hubClient.userId, chunk.id); + }; + // ─── Tool: memory_search ─── api.registerTool( @@ -334,24 +417,43 @@ const memosLocalPlugin = { label: "Memory Search", description: "Search long-term conversation memory for past conversations, user preferences, decisions, and experiences. " + - "Relevant memories are automatically injected at the start of each turn, but call this tool when you need " + - "to search with a different query or the auto-recalled context is insufficient. " + - "Pass only a short natural-language query (2-5 key words).", + "Use scope='local' for this agent plus local shared memories, or scope='group'/'all' to include Hub-shared memories. " + + "Supports optional maxResults, minScore, and role filtering when you need tighter control.", parameters: Type.Object({ query: Type.String({ description: "Short natural language search query (2-5 key words)" }), + scope: Type.Optional(Type.String({ description: "Search scope: 'local' (default), 'group', or 'all'. Use group/all to include Hub-shared memories." })), + maxResults: Type.Optional(Type.Number({ description: "Maximum results to return. Default 10, max 20." })), + minScore: Type.Optional(Type.Number({ description: "Minimum score threshold for local recall. Default 0.45, floor 0.35." 
})), + role: Type.Optional(Type.String({ description: "Optional local role filter: 'user', 'assistant', 'tool', or 'system'." })), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for group/all search." })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for group/all search." })), }), execute: trackTool("memory_search", async (_toolCallId: any, params: any) => { - const { query } = params as { query: string }; - const role = undefined; - const minScore = undefined; - const searchScope = "local"; - const searchLimit = 10; - const hubAddress: string | undefined = undefined; - const userToken: string | undefined = undefined; + const { + query, + scope: rawScope, + maxResults, + minScore: rawMinScore, + role: rawRole, + hubAddress, + userToken, + } = params as { + query: string; + scope?: string; + maxResults?: number; + minScore?: number; + role?: string; + hubAddress?: string; + userToken?: string; + }; + const role = rawRole === "user" || rawRole === "assistant" || rawRole === "tool" || rawRole === "system" ? rawRole : undefined; + const minScore = typeof rawMinScore === "number" ? Math.max(0.35, Math.min(1, rawMinScore)) : undefined; + const searchScope = resolveMemorySearchScope(rawScope); + const searchLimit = typeof maxResults === "number" ? Math.max(1, Math.min(20, Math.round(maxResults))) : 10; const agentId = currentAgentId; - const ownerFilter = [`agent:${agentId}`, "public"]; - const effectiveMaxResults = 10; + const ownerFilter = [getCurrentOwner(), "public"]; + const effectiveMaxResults = searchLimit; ctx.log.debug(`memory_search query="${query}" maxResults=${effectiveMaxResults} minScore=${minScore ?? 0.45} role=${role ?? 
"all"} owner=agent:${agentId}`); const result = await engine.search({ query, maxResults: effectiveMaxResults, minScore, role, ownerFilter }); ctx.log.debug(`memory_search raw candidates: ${result.hits.length}`); @@ -364,7 +466,7 @@ const memosLocalPlugin = { original_excerpt: (h.original_excerpt ?? "").slice(0, 200), })); - if (result.hits.length === 0) { + if (result.hits.length === 0 && searchScope === "local") { return { content: [{ type: "text", text: result.meta.note ?? "No relevant memories found." }], details: { candidates: [], meta: result.meta }, @@ -388,11 +490,13 @@ const memosLocalPlugin = { const indexSet = new Set(filterResult.relevant); filteredHits = result.hits.filter((_, i) => indexSet.has(i + 1)); ctx.log.debug(`memory_search LLM filter: ${result.hits.length} → ${filteredHits.length} hits, sufficient=${sufficient}`); - } else { + } else if (searchScope === "local") { return { content: [{ type: "text", text: "No relevant memories found for this query." }], details: { candidates: rawCandidates, filtered: [], meta: result.meta }, }; + } else { + filteredHits = []; } } @@ -868,7 +972,9 @@ ${detail.content}`, { name: "network_team_info", label: "Network Team Info", - description: "Show current Hub connection status, signed-in user, role, and group memberships.", + description: + "Show current Hub connection status, signed-in user, role, and group memberships. " + + "Use this as a preflight check before any Hub share/unshare or Hub pull operation.", parameters: Type.Object({}), execute: trackTool("network_team_info", async () => { const status = await getHubStatus(store, ctx.config); @@ -1044,12 +1150,13 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, api.registerTool( { name: "memory_write_public", - label: "Write Public Memory", + label: "Write Local Shared Memory", description: - "Write a piece of information to public memory. Public memories are visible to all agents during memory_search. 
" + - "Use this for shared knowledge, team decisions, or cross-agent coordination information.", + "Write a piece of information to local shared memory for all agents in this OpenClaw workspace. " + + "Use this when you are creating a new shared note from scratch. This does not publish to Hub. " + + "If you already have a memory chunk and want to expose it, use memory_share instead.", parameters: Type.Object({ - content: Type.String({ description: "The content to write to public memory" }), + content: Type.String({ description: "The content to write to local shared memory" }), summary: Type.Optional(Type.String({ description: "Optional short summary of the content" })), }), execute: trackTool("memory_write_public", async (_toolCallId: any, params: any) => { @@ -1094,7 +1201,7 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, } return { - content: [{ type: "text", text: `Public memory written successfully (id: ${chunkId}).` }], + content: [{ type: "text", text: `Memory shared to local agents successfully (id: ${chunkId}).` }], details: { chunkId, owner: "public" }, }; }), @@ -1102,6 +1209,164 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, { name: "memory_write_public" }, ); + api.registerTool( + { + name: "memory_share", + label: "Share Memory", + description: + "Share an existing memory either with local OpenClaw agents, to the Hub team, or to both targets. " + + "Use this only for an existing chunkId. Use target='agents' for local multi-agent sharing, target='hub' for team sharing, or target='both' for both. 
" + + "If you need to create a brand new shared memory instead of exposing an existing one, use memory_write_public.", + parameters: Type.Object({ + chunkId: Type.String({ description: "Existing local memory chunk ID to share" }), + target: Type.Optional(Type.String({ description: "Share target: 'agents' (default), 'hub', or 'both'" })), + visibility: Type.Optional(Type.String({ description: "Hub visibility when target includes hub: 'public' (default) or 'group'" })), + groupId: Type.Optional(Type.String({ description: "Optional Hub group ID when visibility='group'" })), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override" })), + }), + execute: trackTool("memory_share", async (_toolCallId: any, params: any) => { + const { + chunkId, + target: rawTarget, + visibility: rawVisibility, + groupId, + hubAddress, + userToken, + } = params as { + chunkId: string; + target?: string; + visibility?: string; + groupId?: string; + hubAddress?: string; + userToken?: string; + }; + + const chunk = store.getChunk(chunkId); + if (!chunk) { + return { content: [{ type: "text", text: `Memory not found: ${chunkId}` }], details: { error: "not_found", chunkId } }; + } + + const target = resolveMemoryShareTarget(rawTarget); + const visibility = rawVisibility === "group" ? "group" : "public"; + const details: Record = { chunkId, target }; + const messages: string[] = []; + + if (target === "agents" || target === "both") { + const local = store.markMemorySharedLocally(chunkId); + if (!local.ok) { + return { content: [{ type: "text", text: `Failed to share memory ${chunkId} to local agents.` }], details: { error: local.reason ?? "local_share_failed", chunkId, target } }; + } + details.local = { + shared: true, + owner: local.owner, + originalOwner: local.originalOwner ?? 
null, + }; + messages.push("shared to local agents"); + } + + if (target === "hub" || target === "both") { + const hub = await shareMemoryToHub(chunkId, { visibility, groupId, hubAddress, userToken }); + details.hub = { + shared: true, + memoryId: hub.memoryId, + visibility: hub.visibility, + groupId: hub.groupId, + }; + messages.push(`shared to Hub (${hub.visibility})`); + } + + return { + content: [{ type: "text", text: `Memory "${chunk.summary || chunk.id}" ${messages.join(" and ")}.` }], + details, + }; + }), + }, + { name: "memory_share" }, + ); + + api.registerTool( + { + name: "memory_unshare", + label: "Unshare Memory", + description: + "Remove sharing from an existing memory. Use target='agents' to stop local multi-agent sharing, target='hub' to remove it from Hub, or target='all' (default) to remove both. " + + "privateOwner is only needed for older public memories that were never tracked with an original owner.", + parameters: Type.Object({ + chunkId: Type.String({ description: "Existing local memory chunk ID to unshare" }), + target: Type.Optional(Type.String({ description: "Unshare target: 'agents', 'hub', or 'all' (default)" })), + privateOwner: Type.Optional(Type.String({ description: "Optional owner to restore when converting a public memory back to private and no original owner was tracked" })), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override" })), + }), + execute: trackTool("memory_unshare", async (_toolCallId: any, params: any) => { + const { + chunkId, + target: rawTarget, + privateOwner, + hubAddress, + userToken, + } = params as { + chunkId: string; + target?: string; + privateOwner?: string; + hubAddress?: string; + userToken?: string; + }; + + const chunk = store.getChunk(chunkId); + if (!chunk) { + return { content: [{ type: "text", text: `Memory not found: ${chunkId}` }], details: { error: "not_found", 
chunkId } }; + } + + const target = resolveMemoryUnshareTarget(rawTarget); + const details: Record = { chunkId, target }; + const messages: string[] = []; + + if (target === "agents" || target === "all") { + const local = store.unmarkMemorySharedLocally(chunkId, privateOwner); + if (!local.ok) { + return { + content: [{ + type: "text", + text: local.reason === "original_owner_missing" + ? `Cannot restore memory "${chunk.summary || chunk.id}" to a private owner automatically. Pass privateOwner to unshare it locally.` + : `Failed to stop local sharing for memory ${chunkId}.`, + }], + details: { error: local.reason ?? "local_unshare_failed", chunkId, target }, + }; + } + details.local = { + shared: false, + owner: local.owner, + }; + messages.push("removed from local agent sharing"); + } + + if (target === "hub" || target === "all") { + try { + await unshareMemoryFromHub(chunkId, { hubAddress, userToken }); + details.hub = { shared: false }; + messages.push("removed from Hub"); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + if (target === "all" && msg.includes("hub client connection is not configured")) { + details.hub = { shared: false, skipped: true, reason: "hub_not_configured" }; + } else { + throw err; + } + } + } + + return { + content: [{ type: "text", text: `Memory "${chunk.summary || chunk.id}" ${messages.join(" and ")}.` }], + details, + }; + }), + }, + { name: "memory_unshare" }, + ); + // ─── Tool: skill_search ─── api.registerTool( @@ -1109,16 +1374,16 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, name: "skill_search", label: "Skill Search", description: - "Search available skills by natural language. Searches local skills by default, or local + Hub skills when scope=group/all. " + - "Use when you need a capability or guide and don't have a matching skill at hand.", + "Search available skills by natural language. 
Use scope='mix' (default) for this agent plus local shared skills, 'self' for this agent only, 'public' for local shared skills only, or 'group'/'all' to include Hub skills as well. " + + "Use this when you need a capability or guide and don't have a matching skill at hand.", parameters: Type.Object({ query: Type.String({ description: "Natural language description of the needed skill" }), - scope: Type.Optional(Type.String({ description: "Search scope: 'mix'/'self'/'public' for local search, or 'group'/'all' for local + Hub search" })), + scope: Type.Optional(Type.String({ description: "Search scope: 'mix' (default), 'self', 'public', 'group', or 'all'." })), }), execute: trackTool("skill_search", async (_toolCallId: any, params: any, context?: any) => { const { query: skillQuery, scope: rawScope } = params as { query: string; scope?: string }; const scope = (rawScope === "self" || rawScope === "public") ? rawScope : "mix"; - const currentOwner = `agent:${currentAgentId}`; + const currentOwner = getCurrentOwner(); if (rawScope === "group" || rawScope === "all") { const [localHits, hub] = await Promise.all([ @@ -1134,7 +1399,7 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, } const localText = localHits.length > 0 - ? localHits.map((h, i) => `${i + 1}. [${h.name}] ${h.description.slice(0, 150)}${h.visibility === "public" ? " (public)" : ""}`).join("\n") + ? localHits.map((h, i) => `${i + 1}. [${h.name}] ${h.description.slice(0, 150)}${h.visibility === "public" ? " (shared to local agents)" : ""}`).join("\n") : "(none)"; const hubText = hub.hits.length > 0 ? hub.hits.map((h, i) => `${i + 1}. [${h.name}] ${h.description.slice(0, 150)} (${h.visibility}${h.groupName ? `:${h.groupName}` : ""}, owner=${h.ownerName})`).join("\n") @@ -1156,7 +1421,7 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, } const text = hits.map((h, i) => - `${i + 1}. [${h.name}] ${h.description}${h.visibility === "public" ? 
" (public)" : ""}`, + `${i + 1}. [${h.name}] ${h.description}${h.visibility === "public" ? " (shared to local agents)" : ""}`, ).join("\n"); return { @@ -1174,31 +1439,54 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, { name: "skill_publish", label: "Publish Skill", - description: "Make a skill public so other agents can discover and install it via skill_search.", + description: + "Share a skill with local agents or publish it to the Hub. " + + "Use target='agents' for local sharing, or target='hub' with visibility='public'/'group' for Hub publishing. " + + "The old scope parameter is still accepted for backward compatibility.", parameters: Type.Object({ skillId: Type.String({ description: "The skill ID to publish" }), - scope: Type.Optional(Type.String({ description: "Publish scope: omit for local public, or use 'public' / 'group' to publish to Hub" })), + target: Type.Optional(Type.String({ description: "Publish target: 'agents' (default) or 'hub'." })), + visibility: Type.Optional(Type.String({ description: "Hub visibility when target='hub': 'public' (default) or 'group'." })), + scope: Type.Optional(Type.String({ description: "Deprecated alias: omit for local agents, or use 'public' / 'group' to publish to Hub." 
})), groupId: Type.Optional(Type.String({ description: "Optional group ID when scope='group'" })), hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for tests or manual routing" })), userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for tests" })), }), execute: trackTool("skill_publish", async (_toolCallId: any, params: any) => { - const { skillId: pubSkillId, scope, groupId, hubAddress, userToken } = params as { skillId: string; scope?: string; groupId?: string; hubAddress?: string; userToken?: string }; + const { + skillId: pubSkillId, + target: rawTarget, + visibility: rawVisibility, + scope, + groupId, + hubAddress, + userToken, + } = params as { + skillId: string; + target?: string; + visibility?: string; + scope?: string; + groupId?: string; + hubAddress?: string; + userToken?: string; + }; const skill = store.getSkill(pubSkillId); if (!skill) { return { content: [{ type: "text", text: `Skill not found: ${pubSkillId}` }] }; } - if (scope === "public" || scope === "group") { - const published = await publishSkillBundleToHub(store, ctx, { skillId: pubSkillId, visibility: scope, groupId, hubAddress, userToken }); + const target = resolveSkillPublishTarget(rawTarget, scope); + const visibility = resolveSkillHubVisibility(rawVisibility, scope); + if (target === "hub") { + const published = await publishSkillBundleToHub(store, ctx, { skillId: pubSkillId, visibility, groupId, hubAddress, userToken }); return { - content: [{ type: "text", text: `Skill "${skill.name}" published to hub (${published.visibility}).` }], - details: { skillId: pubSkillId, name: skill.name, publishedToHub: true, hubSkillId: published.skillId, visibility: published.visibility }, + content: [{ type: "text", text: `Skill "${skill.name}" shared to Hub (${published.visibility}).` }], + details: { skillId: pubSkillId, name: skill.name, target, publishedToHub: true, hubSkillId: published.skillId, visibility: 
published.visibility }, }; } store.setSkillVisibility(pubSkillId, "public"); return { - content: [{ type: "text", text: `Skill "${skill.name}" is now public.` }], - details: { skillId: pubSkillId, name: skill.name, visibility: "public", publishedToHub: false }, + content: [{ type: "text", text: `Skill "${skill.name}" is now shared with local agents.` }], + details: { skillId: pubSkillId, name: skill.name, target, visibility: "public", publishedToHub: false }, }; }), }, @@ -1211,20 +1499,46 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, { name: "skill_unpublish", label: "Unpublish Skill", - description: "Make a skill private. Other agents will no longer be able to discover it.", + description: + "Stop sharing a skill with local agents, remove it from the Hub, or do both. " + + "Use target='agents' (default), 'hub', or 'all'.", parameters: Type.Object({ skillId: Type.String({ description: "The skill ID to unpublish" }), + target: Type.Optional(Type.String({ description: "Unpublish target: 'agents' (default), 'hub', or 'all'." 
})), + hubAddress: Type.Optional(Type.String({ description: "Optional Hub address override for tests or manual routing" })), + userToken: Type.Optional(Type.String({ description: "Optional Hub bearer token override for tests" })), }), execute: trackTool("skill_unpublish", async (_toolCallId: any, params: any) => { - const { skillId: unpubSkillId } = params as { skillId: string }; + const { skillId: unpubSkillId, target, hubAddress, userToken } = params as { skillId: string; target?: string; hubAddress?: string; userToken?: string }; const skill = store.getSkill(unpubSkillId); if (!skill) { return { content: [{ type: "text", text: `Skill not found: ${unpubSkillId}` }] }; } - store.setSkillVisibility(unpubSkillId, "private"); + const resolvedTarget = resolveSkillUnpublishTarget(target); + const messages: string[] = []; + const details: Record = { skillId: unpubSkillId, name: skill.name, target: resolvedTarget }; + if (resolvedTarget === "hub" || resolvedTarget === "all") { + try { + await unpublishSkillBundleFromHub(store, ctx, { skillId: unpubSkillId, hubAddress, userToken }); + details.hub = { unpublished: true }; + messages.push("removed from Hub sharing"); + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + if (resolvedTarget === "all" && msg.includes("hub client connection is not configured")) { + details.hub = { unpublished: false, skipped: true, reason: "hub_not_configured" }; + } else { + throw err; + } + } + } + if (resolvedTarget === "agents" || resolvedTarget === "all") { + store.setSkillVisibility(unpubSkillId, "private"); + details.local = { visibility: "private" }; + messages.push("limited to this agent"); + } return { - content: [{ type: "text", text: `Skill "${skill.name}" is now private.` }], - details: { skillId: unpubSkillId, name: skill.name, visibility: "private" }, + content: [{ type: "text", text: `Skill "${skill.name}" ${messages.join(" and ")}.` }], + details, }; }), }, diff --git a/apps/memos-local-openclaw/openclaw.plugin.json b/apps/memos-local-openclaw/openclaw.plugin.json index daba3ce51..8f74c33c8 100644 --- a/apps/memos-local-openclaw/openclaw.plugin.json +++ b/apps/memos-local-openclaw/openclaw.plugin.json @@ -3,7 +3,7 @@ "name": "MemOS Local Memory", "description": "Full-write local conversation memory with hybrid search (RRF + MMR + recency). 
Provides memory_search, memory_get, task_summary, memory_timeline, memory_viewer for layered retrieval.", "kind": "memory", - "version": "0.1.11", + "version": "0.1.12", "skills": [ "skill/memos-memory-guide" ], diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index db7b02846..f98900cf1 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,7 +1,7 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.3", - "description": "MemOS Local memory plugin for OpenClaw \u2014 full-write, hybrid-recall, progressive retrieval", + "version": "1.0.4-beta.8", + "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", "types": "dist/index.d.ts", @@ -64,4 +64,4 @@ "typescript": "^5.7.0", "vitest": "^2.1.0" } -} \ No newline at end of file +} diff --git a/apps/memos-local-openclaw/skill/memos-memory-guide/SKILL.md b/apps/memos-local-openclaw/skill/memos-memory-guide/SKILL.md index db43dafad..c7897bb49 100644 --- a/apps/memos-local-openclaw/skill/memos-memory-guide/SKILL.md +++ b/apps/memos-local-openclaw/skill/memos-memory-guide/SKILL.md @@ -1,17 +1,22 @@ --- name: memos-memory-guide -description: "Use the MemOS Local memory system to search and use the user's past conversations. Use this skill whenever the user refers to past chats, their own preferences or history, or when you need to answer from prior context. When auto-recall returns nothing (long or unclear user query), generate your own short search query and call memory_search. Available tools: memory_search, memory_get, memory_write_public, task_summary, skill_get, skill_search, skill_install, skill_publish, skill_unpublish, memory_timeline, memory_viewer." +description: "Use the MemOS Local memory system to search and use the user's past conversations. 
Use this skill whenever the user refers to past chats, their own preferences or history, or when you need to answer from prior context. When auto-recall returns nothing (long or unclear user query), generate your own short search query and call memory_search. Available tools: memory_search, memory_get, memory_write_public, memory_share, memory_unshare, task_summary, skill_get, skill_search, skill_install, skill_publish, skill_unpublish, network_memory_detail, network_skill_pull, network_team_info, memory_timeline, memory_viewer." --- # MemOS Local Memory — Agent Guide -This skill describes how to use the MemOS memory tools so you can reliably search and use the user's long-term conversation history, query Hub-shared team data, share tasks, and discover or pull reusable skills. +This skill describes how to use the MemOS memory tools so you can reliably search and use the user's long-term conversation history, query team-shared data, share tasks, and discover or pull reusable skills. + +Two sharing planes exist and must not be confused: + +- **Local agent sharing:** visible to agents in the same OpenClaw workspace only. +- **Team sharing:** visible to teammates through the configured team server. ## How memory is provided each turn - **Automatic recall (hook):** At the start of each turn, the system runs a memory search using the user's current message and injects relevant past memories into your context. You do not need to call any tool for that. - **When that is not enough:** If the user's message is very long, vague, or the automatic search returns **no memories**, you should **generate your own short, focused query** and call `memory_search` yourself. -- **Memory isolation:** Each agent can only see its own local private memories and local `public` memories. Hub-shared data only appears when you search with `scope="group"` or `scope="all"`. +- **Memory isolation:** Each agent can only see its own local private memories and local `public` memories. 
Team-shared data only appears when you search with `scope="group"` or `scope="all"`. ## Tools — what they do and when to call @@ -24,9 +29,10 @@ This skill describes how to use the MemOS memory tools so you can reliably searc - You need to search with a different angle (e.g. filter by `role='user'`). - **Parameters:** - `query` (string, **required**) — Natural language search query. - - `maxResults` (number, optional) — Max results, default 20, max 20. - - `minScore` (number, optional) — Minimum score 0–1, default 0.45, floor 0.35. - - `role` (string, optional) — Filter by role: `'user'`, `'assistant'`, or `'tool'`. Use `'user'` to find what the user said. + - `scope` (string, optional) — `'local'` (default) for current agent + local shared memories, or `'group'` / `'all'` to include team-shared memories. + - `maxResults` (number, optional) — Increase when the first search is too narrow. + - `minScore` (number, optional) — Lower slightly if recall is too strict. + - `role` (string, optional) — Filter local results by `'user'`, `'assistant'`, `'tool'`, or `'system'`. ### memory_get @@ -38,12 +44,32 @@ This skill describes how to use the MemOS memory tools so you can reliably searc ### memory_write_public -- **What it does:** Write a piece of information to public memory. Public memories are visible to all agents during `memory_search`. Use for shared knowledge, team decisions, or cross-agent coordination information. -- **When to call:** In multi-agent or collaborative scenarios, when you have persistent information useful to everyone (e.g. shared decisions, conventions, configurations, workflows). Do not write session-only or purely private content. +- **What it does:** Create a brand new local shared memory. These memories are visible to all agents in the same OpenClaw workspace during `memory_search`. This does **not** publish anything to the team server. 
+- **When to call:** In multi-agent or collaborative scenarios, when you want to create a new persistent shared note from scratch (e.g. shared decisions, conventions, configurations, workflows). Do not use it if you already have a specific memory chunk to expose. - **Parameters:** - - `content` (string, **required**) — The content to write to public memory. + - `content` (string, **required**) — The content to write to local shared memory. - `summary` (string, optional) — Short summary of the content. +### memory_share + +- **What it does:** Share an existing memory either with local OpenClaw agents, to the team, or to both. +- **When to call:** You already have a useful memory chunk and want to expose it beyond the current agent. +- **Do not use when:** You are creating a new shared note from scratch. In that case use `memory_write_public`. +- **Parameters:** + - `chunkId` (string, **required**) — Existing memory chunk ID. + - `target` (string, optional) — `'agents'` (default), `'hub'`, or `'both'`. + - `visibility` (string, optional) — Team visibility when target includes team: `'public'` (default) or `'group'`. + - `groupId` (string, optional) — Optional team group ID when `visibility='group'`. + +### memory_unshare + +- **What it does:** Remove an existing memory from local agent sharing, team sharing, or both. +- **When to call:** A memory should no longer be visible outside the current agent or should be removed from the team. +- **Parameters:** + - `chunkId` (string, **required**) — Existing memory chunk ID. + - `target` (string, optional) — `'agents'`, `'hub'`, or `'all'` (default). + - `privateOwner` (string, optional) — Rare fallback only for older public memories that have no recorded original owner. + ### task_summary - **What it does:** Get the detailed summary of a complete task: title, status, narrative summary, and related skills. Use when `memory_search` returns a hit with a `task_id` and you need the full story. 
Preserves critical information: URLs, file paths, commands, error codes, step-by-step instructions. @@ -62,11 +88,11 @@ This skill describes how to use the MemOS memory tools so you can reliably searc ### skill_search -- **What it does:** Search available skills by natural language. Searches your own skills, public skills, or both — controlled by the `scope` parameter. +- **What it does:** Search available skills by natural language. Searches your own skills, local shared skills, or both. It can also include team skills. - **When to call:** The current task requires a capability or guide you don't have. Use `skill_search` to find one first; after finding it, use `skill_get` to read it, then `skill_install` to load it for future turns. - **Parameters:** - `query` (string, **required**) — Natural language description of the needed skill. - - `scope` (string, optional) — Search scope: `'mix'` (default, self + public), `'self'` (own only), `'public'` (public only). + - `scope` (string, optional) — `'mix'` (default, self + local shared), `'self'`, `'public'` (local shared only), or `'group'` / `'all'` to include team results. ### skill_install @@ -77,40 +103,46 @@ This skill describes how to use the MemOS memory tools so you can reliably searc ### skill_publish -- **What it does:** Make a skill public so other agents can discover and install it via `skill_search`. -- **When to call:** You have a useful skill that other agents could benefit from, and you want to share it. +- **What it does:** Share a skill with local agents, or publish it to the team. +- **When to call:** You have a useful skill that other agents or your team could benefit from. - **Parameters:** - `skillId` (string, **required**) — The skill ID to publish. + - `target` (string, optional) — `'agents'` (default) or `'hub'`. + - `visibility` (string, optional) — When `target='hub'`, use `'public'` (default) or `'group'`. 
+ - `groupId` (string, optional) — Optional team group ID when `target='hub'` and `visibility='group'`. + - `scope` (string, optional) — Backward-compatible alias for old calls. Prefer `target` + `visibility` in new calls. ### skill_unpublish -- **What it does:** Make a skill private again. Other agents will no longer be able to discover it. +- **What it does:** Stop local agent sharing, remove a team-published copy, or do both. - **When to call:** You want to stop sharing a previously published skill. - **Parameters:** - `skillId` (string, **required**) — The skill ID to unpublish. + - `target` (string, optional) — `'agents'` (default), `'hub'`, or `'all'`. ### network_memory_detail -- **What it does:** Fetches the full content behind a Hub search hit. -- **When to call:** A `memory_search` result came from the Hub and you need the full shared memory content. +- **What it does:** Fetches the full content behind a team search hit. +- **When to call:** A `memory_search` result came from the team and you need the full shared memory content. - **Parameters:** `remoteHitId`. ### task_share / task_unshare -- **What they do:** Share a local task to the Hub, or remove it later. +- **What they do:** Share a local task to the team, or remove it later. - **When to call:** A task is valuable to your group or to the whole team and should be discoverable via shared search. - **Parameters:** `taskId`, plus sharing visibility/scope when required. ### network_skill_pull -- **What it does:** Pulls a Hub-shared skill bundle down into local storage. -- **When to call:** `skill_search` found a useful Hub skill and you want to use it locally or offline. +- **What it does:** Pulls a team-shared skill bundle down into local storage. +- **When to call:** `skill_search` found a useful team skill and you want to use it locally or offline. - **Parameters:** `skillId`. ### network_team_info -- **What it does:** Returns current Hub connection information, user, role, and groups. 
+- **What it does:** Returns current team server connection information, user, role, and groups. - **When to call:** You need to confirm whether team sharing is configured or which groups the current client belongs to. +- **Call this first before:** `memory_share(... target='hub'|'both')`, `memory_unshare(... target='hub'|'all')`, `task_share`, `task_unshare`, `skill_publish(... target='hub')`, `skill_unpublish(... target='hub'|'all')`, or `network_skill_pull`. - **Parameters:** none. ### memory_timeline @@ -147,13 +179,19 @@ This skill describes how to use the MemOS memory tools so you can reliably searc 6. **You need a capability/guide that you don't have** → Call `skill_search(query="...", scope="mix")` to discover available skills. -7. **You have shared knowledge useful to all agents** - → Call `memory_write_public(content="...")` to persist it in public memory. +7. **You have new shared knowledge useful to all local agents** + → Call `memory_write_public(content="...")`. + +8. **You already have an existing memory chunk and want to expose or hide it** + → Call `memory_share(chunkId="...", target="agents|hub|both")` or `memory_unshare(chunkId="...", target="agents|hub|all")`. + +9. **You are about to do anything team-sharing-related** + → Call `network_team_info()` first if team server availability is uncertain. -8. **You want to share/stop sharing a skill with other agents** - → Call `skill_publish(skillId="...")` or `skill_unpublish(skillId="...")`. +10. **You want to share/stop sharing a skill with local agents or team** + → Prefer `skill_publish(skillId="...", target="agents|hub", visibility=...)` and `skill_unpublish(skillId="...", target="agents|hub|all")`. -9. **User asks where to see or manage their memories** +11. **User asks where to see or manage their memories** → Call `memory_viewer()` and share the URL. 
## Writing good search queries @@ -168,6 +206,6 @@ This skill describes how to use the MemOS memory tools so you can reliably searc Each memory is tagged with an `owner` (e.g. `agent:main`, `agent:sales-bot`). This is handled **automatically** — you do not need to pass any owner parameter. - **Your memories:** All tools (`memory_search`, `memory_get`, `memory_timeline`) automatically scope queries to your agent's own memories. -- **Public memories:** Memories marked as `public` are visible to all agents. Use `memory_write_public` to write shared knowledge. +- **Local shared memories:** Memories marked as local shared are visible to all agents in the same OpenClaw workspace. Use `memory_write_public` to create them, or `memory_share(target='agents')` to expose an existing chunk. - **Cross-agent isolation:** You cannot see memories owned by other agents (unless they are public). - **How it works:** The system identifies your agent ID from the OpenClaw runtime context and applies owner filtering automatically on every search, recall, and retrieval. diff --git a/apps/memos-local-openclaw/src/capture/index.ts b/apps/memos-local-openclaw/src/capture/index.ts index 0eaee1af3..729a1036d 100644 --- a/apps/memos-local-openclaw/src/capture/index.ts +++ b/apps/memos-local-openclaw/src/capture/index.ts @@ -33,6 +33,9 @@ const SENTINEL_FAST_RE = new RegExp( const ENVELOPE_PREFIX_RE = /^\s*\[(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s+\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}(?::\d{2})?\s+[A-Z]{3}[+-]\d{1,2}\]\s*/; +const ENVELOPE_EXTRACT_RE = + /^\s*\[(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s+(\d{4}-\d{2}-\d{2})\s+(\d{2}:\d{2}(?::\d{2})?)\s+([A-Z]{3}[+-]\d{1,2})\]/; + /** * Extract writable messages from a conversation turn. 
* @@ -47,9 +50,11 @@ export function captureMessages( evidenceTag: string, log: Logger, owner?: string, + userSearchTime?: number, ): ConversationMessage[] { const now = Date.now(); const result: ConversationMessage[] = []; + let lastTimestamp = 0; for (const msg of messages) { const role = msg.role as Role; @@ -75,10 +80,19 @@ export function captureMessages( } if (!content.trim()) continue; + let ts: number; + if (role === "user" && userSearchTime && userSearchTime > 0) { + ts = userSearchTime; + } else { + ts = now; + } + if (ts <= lastTimestamp) ts = lastTimestamp + 1; + lastTimestamp = ts; + result.push({ role, content, - timestamp: now, + timestamp: ts, turnId, sessionKey, toolName: role === "tool" ? msg.toolName : undefined, @@ -150,13 +164,27 @@ export function stripInboundMetadata(text: string): string { return stripEnvelopePrefix(result.join("\n")).trim(); } -/** Strip …… blocks emitted by DeepSeek-style reasoning models. */ const THINKING_TAG_RE = /][\s\S]*?<\/think>\s*/gi; function stripThinkingTags(text: string): string { return text.replace(THINKING_TAG_RE, ""); } +function extractEnvelopeTimestamp(text: string): number | null { + const m = ENVELOPE_EXTRACT_RE.exec(text); + if (!m) return null; + const [, date, time, tz] = m; + const timeStr = time.includes(":") && time.split(":").length === 3 ? time : time + ":00"; + const offsetMatch = tz.match(/([+-])(\d{1,2})$/); + const offsetStr = offsetMatch + ? `${offsetMatch[1]}${offsetMatch[2].padStart(2, "0")}:00` + : "+00:00"; + const iso = `${date}T${timeStr}${offsetStr}`; + const ts = new Date(iso).getTime(); + return Number.isNaN(ts) ? 
null : ts; +} + function stripEnvelopePrefix(text: string): string { return text.replace(ENVELOPE_PREFIX_RE, ""); } diff --git a/apps/memos-local-openclaw/src/client/connector.ts b/apps/memos-local-openclaw/src/client/connector.ts index 9f5f026b0..a1d8c617a 100644 --- a/apps/memos-local-openclaw/src/client/connector.ts +++ b/apps/memos-local-openclaw/src/client/connector.ts @@ -1,5 +1,5 @@ import type { Logger, MemosLocalConfig } from "../types"; -import type { GroupInfo, UserRole, UserStatus } from "../sharing/types"; +import type { UserRole, UserStatus } from "../sharing/types"; import type { SqliteStore } from "../storage/sqlite"; import { hubRequestJson, normalizeHubUrl } from "./hub"; @@ -20,7 +20,6 @@ export interface HubStatusInfo { username: string; role: UserRole; status: UserStatus | string; - groups: GroupInfo[]; }; } @@ -35,6 +34,41 @@ export async function connectToHub(store: SqliteStore, config: MemosLocalConfig, if (!userToken && config.sharing?.client?.teamToken) { if (!log) throw new Error("hub client connection is not configured (no userToken, has teamToken but no logger for auto-join)"); + + // If DB has a pending connection (userId exists, no token), check registration-status first + const persisted = store.getClientHubConnection(); + if (persisted?.userId && !persisted.userToken && hubAddress) { + const hubUrl = normalizeHubUrl(hubAddress); + const teamToken = config.sharing.client!.teamToken!; + try { + const result = await hubRequestJson(hubUrl, "", "/api/v1/hub/registration-status", { + method: "POST", + body: JSON.stringify({ teamToken, userId: persisted.userId }), + }) as any; + if (result.status === "active" && result.userToken) { + log.info(`Pending user approved! Connecting with token. 
userId=${persisted.userId}`); + store.setClientHubConnection({ + hubUrl, + userId: persisted.userId, + username: persisted.username || "", + userToken: result.userToken, + role: "member", + connectedAt: Date.now(), + }); + return store.getClientHubConnection()!; + } + if (result.status === "pending") { + throw new PendingApprovalError(persisted.userId); + } + if (result.status === "rejected") { + throw new Error("Join request was rejected by the Hub admin."); + } + } catch (err) { + if (err instanceof PendingApprovalError) throw err; + log.warn(`registration-status check failed, falling back to autoJoinHub: ${err}`); + } + } + return autoJoinHub(store, config, log); } @@ -57,29 +91,100 @@ export async function connectToHub(store: SqliteStore, config: MemosLocalConfig, export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig): Promise { const conn = store.getClientHubConnection(); - const hubAddress = conn?.hubUrl || config.sharing?.client?.hubAddress || ""; + const configHubAddress = config.sharing?.client?.hubAddress || ""; + const hubAddress = conn?.hubUrl || (configHubAddress ? normalizeHubUrl(configHubAddress) : ""); const userToken = conn?.userToken || config.sharing?.client?.userToken || ""; + + // If DB has a connection to a different Hub than config, the DB data is stale + if (conn && configHubAddress && conn.hubUrl && normalizeHubUrl(configHubAddress) !== conn.hubUrl) { + store.clearClientHubConnection(); + return { connected: false, user: null }; + } + + if (conn && conn.userId && (!userToken || userToken === "")) { + const teamToken = config.sharing?.client?.teamToken ?? 
""; + if (hubAddress && teamToken) { + try { + const result = await hubRequestJson(normalizeHubUrl(hubAddress), "", "/api/v1/hub/registration-status", { + method: "POST", + body: JSON.stringify({ teamToken, userId: conn.userId }), + }) as any; + if (result.status === "pending") { + return { + connected: false, + hubUrl: normalizeHubUrl(hubAddress), + user: { + id: conn.userId, + username: conn.username || "", + role: "member", + status: "pending", + }, + }; + } + if (result.status === "active" && result.userToken) { + store.setClientHubConnection({ + hubUrl: normalizeHubUrl(hubAddress), + userId: conn.userId, + username: conn.username || "", + userToken: result.userToken, + role: "member", + connectedAt: Date.now(), + }); + const me = await hubRequestJson(normalizeHubUrl(hubAddress), result.userToken, "/api/v1/hub/me", { method: "GET" }) as any; + return { + connected: true, + hubUrl: normalizeHubUrl(hubAddress), + user: { + id: String(me.id), + username: String(me.username ?? ""), + role: String(me.role ?? "member") as UserRole, + status: String(me.status ?? "active"), + }, + }; + } + if (result.status === "rejected") { + return { + connected: false, + hubUrl: normalizeHubUrl(hubAddress), + user: { + id: conn.userId, + username: conn.username || "", + role: "member", + status: "rejected", + }, + }; + } + } catch { /* fall through */ } + } + return { connected: false, user: null }; + } + if (!hubAddress || !userToken) { return { connected: false, user: null }; } try { const me = await hubRequestJson(normalizeHubUrl(hubAddress), userToken, "/api/v1/hub/me", { method: "GET" }) as any; + const latestUsername = String(me.username ?? ""); + const latestRole = String(me.role ?? 
"member") as UserRole; + if (conn && (conn.username !== latestUsername || conn.role !== latestRole)) { + store.setClientHubConnection({ + hubUrl: conn.hubUrl, + userId: conn.userId, + username: latestUsername, + userToken: conn.userToken, + role: latestRole, + connectedAt: conn.connectedAt, + }); + } return { connected: true, hubUrl: normalizeHubUrl(hubAddress), user: { id: String(me.id), - username: String(me.username ?? ""), - role: String(me.role ?? "member") as UserRole, + username: latestUsername, + role: latestRole, status: String(me.status ?? "active"), - groups: Array.isArray(me.groups) - ? me.groups.map((group: any) => ({ - id: String(group.id), - name: String(group.name), - description: typeof group.description === "string" ? group.description : undefined, - })) - : [], }, }; } catch { @@ -98,15 +203,44 @@ export async function autoJoinHub( throw new Error("hubAddress and teamToken are required for auto-join"); } const hubUrl = normalizeHubUrl(hubAddress); - const hostname = typeof globalThis.process !== "undefined" ? (await import("os")).hostname() : "unknown"; - const username = typeof globalThis.process !== "undefined" ? (await import("os")).userInfo().username : "user"; + const osModule = typeof globalThis.process !== "undefined" ? await import("os") : null; + const hostname = osModule ? osModule.hostname() : "unknown"; + const nickname = config.sharing?.client?.nickname; + const username = nickname || (osModule ? osModule.userInfo().username : "user"); + let clientIp = ""; + if (osModule) { + const nets = osModule.networkInterfaces(); + for (const name of Object.keys(nets)) { + for (const net of nets[name] ?? 
[]) { + if (net.family === "IPv4" && !net.internal) { clientIp = net.address; break; } + } + if (clientIp) break; + } + } log.info(`Joining Hub at ${hubUrl} as "${username}"...`); const result = await hubRequestJson(hubUrl, "", "/api/v1/hub/join", { method: "POST", - body: JSON.stringify({ teamToken, username, deviceName: hostname }), + body: JSON.stringify({ teamToken, username, deviceName: hostname, clientIp }), }) as any; + if (result.status === "pending") { + log.info(`Join request submitted, awaiting admin approval. userId=${result.userId}`); + store.setClientHubConnection({ + hubUrl, + userId: String(result.userId), + username, + userToken: "", + role: "member", + connectedAt: Date.now(), + }); + throw new PendingApprovalError(result.userId); + } + + if (result.status === "rejected") { + throw new Error(`Join request was rejected by the Hub admin.`); + } + if (!result.userToken) { throw new Error(`Hub join failed: ${JSON.stringify(result)}`); } @@ -122,3 +256,12 @@ export async function autoJoinHub( }); return store.getClientHubConnection()!; } + +export class PendingApprovalError extends Error { + public readonly userId: string; + constructor(userId: string) { + super("Awaiting admin approval"); + this.name = "PendingApprovalError"; + this.userId = userId; + } +} diff --git a/apps/memos-local-openclaw/src/client/hub.ts b/apps/memos-local-openclaw/src/client/hub.ts index 284c00073..17a1d7e5b 100644 --- a/apps/memos-local-openclaw/src/client/hub.ts +++ b/apps/memos-local-openclaw/src/client/hub.ts @@ -134,16 +134,57 @@ export async function hubGetMemoryDetail( }; } +export async function hubUpdateUsername( + store: SqliteStore, + ctx: PluginContext, + newUsername: string, +): Promise<{ ok: boolean; username: string; userToken: string }> { + const client = await resolveHubClient(store, ctx); + const result = await hubRequestJson(client.hubUrl, client.userToken, "/api/v1/hub/me/update-profile", { + method: "POST", + body: JSON.stringify({ username: newUsername 
}), + }) as { ok: boolean; username: string; userToken: string }; + if (result.ok && result.userToken) { + store.setClientHubConnection({ + hubUrl: client.hubUrl, + userId: client.userId, + username: result.username, + userToken: result.userToken, + role: client.role as "admin" | "member", + connectedAt: Date.now(), + }); + } + return result; +} + +let _cachedClientIp: string | null = null; +function getClientIp(): string { + if (_cachedClientIp !== null) return _cachedClientIp; + try { + const os = require("os"); + const nets = os.networkInterfaces(); + for (const name of Object.keys(nets)) { + for (const net of nets[name] ?? []) { + if (net.family === "IPv4" && !net.internal) { _cachedClientIp = net.address; return _cachedClientIp!; } + } + } + } catch { /* browser or no os module */ } + _cachedClientIp = ""; + return ""; +} + export async function hubRequestJson( hubUrl: string, userToken: string, route: string, init: RequestInit = {}, ): Promise { + const clientIp = getClientIp(); const res = await fetch(`${normalizeHubUrl(hubUrl)}${route}`, { ...init, headers: { authorization: `Bearer ${userToken}`, + ...(clientIp ? { "x-client-ip": clientIp } : {}), ...(init.body ? { "content-type": "application/json" } : {}), ...(init.headers ?? 
{}), }, diff --git a/apps/memos-local-openclaw/src/client/skill-sync.ts b/apps/memos-local-openclaw/src/client/skill-sync.ts index e6abb952f..dc0d428cb 100644 --- a/apps/memos-local-openclaw/src/client/skill-sync.ts +++ b/apps/memos-local-openclaw/src/client/skill-sync.ts @@ -89,6 +89,20 @@ export async function publishSkillBundleToHub( }) as Promise<{ skillId: string; visibility: "public" | "group" }>; } +export async function unpublishSkillBundleFromHub( + store: SqliteStore, + ctx: PluginContext, + input: { skillId: string; hubAddress?: string; userToken?: string }, +): Promise<{ ok: boolean }> { + const client = await resolveHubClient(store, ctx, { hubAddress: input.hubAddress, userToken: input.userToken }); + return hubRequestJson(client.hubUrl, client.userToken, "/api/v1/hub/skills/unpublish", { + method: "POST", + body: JSON.stringify({ + sourceSkillId: input.skillId, + }), + }) as Promise<{ ok: boolean }>; +} + export async function fetchHubSkillBundle( store: SqliteStore, ctx: PluginContext, diff --git a/apps/memos-local-openclaw/src/config.ts b/apps/memos-local-openclaw/src/config.ts index 2be53a61d..c745ce14e 100644 --- a/apps/memos-local-openclaw/src/config.ts +++ b/apps/memos-local-openclaw/src/config.ts @@ -115,22 +115,22 @@ export function resolveConfig(raw: Partial | undefined, stateD : undefined; })(), } : undefined, - sharing: { - enabled: cfg.sharing?.enabled ?? false, - role: cfg.sharing?.role ?? "client", - hub: { + sharing: (() => { + const role = cfg.sharing?.role ?? "client"; + const enabled = cfg.sharing?.enabled ?? false; + const hub = role === "hub" ? { port: cfg.sharing?.hub?.port ?? 18800, teamName: cfg.sharing?.hub?.teamName ?? "", teamToken: cfg.sharing?.hub?.teamToken ?? "", - }, - client: { + } : { port: 18800, teamName: "", teamToken: "" }; + const client = role === "client" ? { hubAddress: cfg.sharing?.client?.hubAddress ?? "", userToken: cfg.sharing?.client?.userToken ?? "", teamToken: cfg.sharing?.client?.teamToken ?? 
"", pendingUserId: cfg.sharing?.client?.pendingUserId ?? "", - }, - capabilities: sharingCapabilities, - }, + } : { hubAddress: "", userToken: "", teamToken: "", pendingUserId: "" }; + return { enabled, role, hub, client, capabilities: sharingCapabilities }; + })(), }; } diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index f1f3a9f61..3cd6c89ce 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -34,6 +34,11 @@ export class HubServer { private static readonly RATE_LIMIT_SEARCH = 30; private rateBuckets = new Map(); + private static readonly OFFLINE_THRESHOLD_MS = 2 * 60 * 1000; + private static readonly OFFLINE_CHECK_INTERVAL_MS = 30 * 1000; + private offlineCheckTimer?: ReturnType; + private knownOnlineUsers = new Set(); + constructor(private opts: HubServerOptions) { this.userManager = new HubUserManager(opts.store, opts.log); this.authStatePath = path.join(opts.dataDir, "hub-auth.json"); @@ -101,10 +106,14 @@ export class HubServer { this.opts.log.info(`memos-local: bootstrap admin token persisted to ${this.authStatePath}`); } + this.initOnlineTracking(); + this.offlineCheckTimer = setInterval(() => this.checkOfflineUsers(), HubServer.OFFLINE_CHECK_INTERVAL_MS); + return `http://127.0.0.1:${this.port}`; } async stop(): Promise { + if (this.offlineCheckTimer) { clearInterval(this.offlineCheckTimer); this.offlineCheckTimer = undefined; } if (!this.server) return; const server = this.server; this.server = undefined; @@ -192,17 +201,44 @@ export class HubServer { return this.json(res, 403, { error: "invalid_team_token" }); } const username = String(body.username || `user-${randomUUID().slice(0, 8)}`); + const joinIp = (typeof body.clientIp === "string" && body.clientIp) + || (req.headers["x-client-ip"] as string)?.trim() + || (req.headers["x-forwarded-for"] as string)?.split(",")[0]?.trim() + || req.socket.remoteAddress || ""; + const existingUsers = 
this.opts.store.listHubUsers(); + const existingUser = existingUsers.find(u => u.username === username); + if (existingUser) { + try { this.opts.store.updateHubUserActivity(existingUser.id, joinIp); } catch { /* best-effort */ } + if (existingUser.status === "active") { + const token = issueUserToken( + { userId: existingUser.id, username: existingUser.username, role: existingUser.role, status: "active" }, + this.authSecret, + ); + this.userManager.approveUser(existingUser.id, token); + return this.json(res, 200, { status: "active", userId: existingUser.id, userToken: token }); + } + if (existingUser.status === "pending") { + this.notifyAdmins("user_join_request", "user", username, "", { dedup: true }); + return this.json(res, 200, { status: "pending", userId: existingUser.id }); + } + if (existingUser.status === "rejected") { + if (body.reapply === true) { + this.userManager.resetToPending(existingUser.id); + this.notifyAdmins("user_join_request", "user", username, ""); + this.opts.log.info(`Hub: rejected user "${username}" (${existingUser.id}) re-applied, reset to pending`); + return this.json(res, 200, { status: "pending", userId: existingUser.id }); + } + return this.json(res, 200, { status: "rejected", userId: existingUser.id }); + } + } const user = this.userManager.createPendingUser({ username, deviceName: typeof body.deviceName === "string" ? 
body.deviceName : undefined, }); - const token = issueUserToken( - { userId: user.id, username, role: "member", status: "active" }, - this.authSecret, - ); - this.userManager.approveUser(user.id, token); - this.opts.log.info(`Hub: auto-approved user "${username}" (${user.id})`); - return this.json(res, 200, { status: "active", userId: user.id, userToken: token }); + try { this.opts.store.updateHubUserActivity(user.id, joinIp); } catch { /* best-effort */ } + this.opts.log.info(`Hub: user "${username}" (${user.id}) registered as pending, awaiting admin approval`); + this.notifyAdmins("user_join_request", "user", username, ""); + return this.json(res, 200, { status: "pending", userId: user.id }); } if (req.method === "POST" && routePath === "/api/v1/hub/registration-status") { @@ -239,12 +275,49 @@ export class HubServer { return this.json(res, 429, { error: "rate_limit_exceeded", retryAfterMs: HubServer.RATE_WINDOW_MS }); } + if (req.method === "POST" && routePath === "/api/v1/hub/heartbeat") { + return this.json(res, 200, { ok: true }); + } + + if (req.method === "POST" && routePath === "/api/v1/hub/leave") { + try { + this.opts.store.updateHubUserActivity(auth.userId, "", 0); + } catch { /* best-effort */ } + this.knownOnlineUsers.delete(auth.userId); + this.notifyAdmins("user_offline", "user", auth.username, auth.userId); + this.opts.log.info(`Hub: user "${auth.username}" (${auth.userId}) left voluntarily`); + return this.json(res, 200, { ok: true }); + } + if (req.method === "GET" && routePath === "/api/v1/hub/me") { const user = this.opts.store.getHubUser(auth.userId); if (!user) return this.json(res, 401, { error: "unauthorized" }); return this.json(res, 200, user); } + if (req.method === "POST" && routePath === "/api/v1/hub/me/update-profile") { + const body = await this.readJson(req); + if (!body) return this.json(res, 400, { error: "invalid_body" }); + const newUsername = String(body.username || "").trim(); + if (!newUsername || newUsername.length < 2 || 
newUsername.length > 32) { + return this.json(res, 400, { error: "invalid_username", message: "Username must be 2-32 characters" }); + } + if (this.userManager.isUsernameTaken(newUsername, auth.userId)) { + return this.json(res, 409, { error: "username_taken", message: "Username already in use" }); + } + const updated = this.userManager.updateUsername(auth.userId, newUsername); + if (!updated) return this.json(res, 404, { error: "not_found" }); + const ttlMs = updated.role === "admin" ? 3650 * 24 * 60 * 60 * 1000 : undefined; + const newToken = issueUserToken( + { userId: updated.id, username: newUsername, role: updated.role, status: updated.status }, + this.authSecret, + ttlMs, + ); + this.userManager.approveUser(updated.id, newToken); + this.opts.log.info(`Hub: user "${auth.userId}" renamed to "${newUsername}"`); + return this.json(res, 200, { ok: true, username: newUsername, userToken: newToken }); + } + if (req.method === "GET" && routePath === "/api/v1/hub/admin/pending-users") { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); return this.json(res, 200, { users: this.userManager.listPendingUsers() }); @@ -256,6 +329,7 @@ export class HubServer { const token = issueUserToken({ userId: String(body.userId), username: String(body.username || ""), role: "member", status: "active" }, this.authSecret); const approved = this.userManager.approveUser(String(body.userId), token); if (!approved) return this.json(res, 404, { error: "not_found" }); + try { this.opts.store.updateHubUserActivity(String(body.userId), ""); } catch { /* best-effort */ } return this.json(res, 200, { status: "active", token }); } @@ -270,95 +344,85 @@ export class HubServer { if (req.method === "GET" && routePath === "/api/v1/hub/admin/users") { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const users = this.opts.store.listHubUsers().filter(u => u.status === "active"); - return this.json(res, 200, { users: users.map(u => ({ id: u.id, 
username: u.username, role: u.role, status: u.status })) }); - } - - // ── Group management ── - - if (req.method === "GET" && routePath === "/api/v1/hub/groups") { - const groups = this.opts.store.listHubGroups(); - return this.json(res, 200, { groups }); + const contribs = this.opts.store.getHubUserContributions(); + const ownerId = this.authState.bootstrapAdminUserId || ""; + const now = Date.now(); + return this.json(res, 200, { users: users.map(u => { + const c = contribs[u.id] || { memoryCount: 0, taskCount: 0, skillCount: 0 }; + const isOnline = u.id === ownerId || (!!u.lastActiveAt && now - u.lastActiveAt < HubServer.OFFLINE_THRESHOLD_MS); + return { + id: u.id, username: u.username, role: u.role, status: u.status, + deviceName: u.deviceName, createdAt: u.createdAt, approvedAt: u.approvedAt, + lastIp: u.lastIp || "", lastActiveAt: u.lastActiveAt, + isOwner: u.id === ownerId, isOnline, + memoryCount: c.memoryCount, taskCount: c.taskCount, skillCount: c.skillCount, + }; + }) }); } - if (req.method === "POST" && routePath === "/api/v1/hub/groups") { + if (req.method === "POST" && routePath === "/api/v1/hub/admin/change-role") { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const body = await this.readJson(req); - const name = String(body.name || "").trim(); - if (!name) return this.json(res, 400, { error: "name_required" }); - const groupId = randomUUID(); - this.opts.store.upsertHubGroup({ - id: groupId, - name, - description: String(body.description || ""), - createdAt: Date.now(), - }); - return this.json(res, 201, { id: groupId, name }); - } - - const groupDetailMatch = routePath.match(/^\/api\/v1\/hub\/groups\/([^/]+)$/); - if (groupDetailMatch) { - const groupId = decodeURIComponent(groupDetailMatch[1]); - - if (req.method === "GET") { - const group = this.opts.store.getHubGroupById(groupId); - if (!group) return this.json(res, 404, { error: "not_found" }); - const members = this.opts.store.listHubGroupMembers(groupId); - 
return this.json(res, 200, { ...group, members }); + const userId = String(body?.userId || ""); + const newRole = String(body?.role || ""); + if (!userId || (newRole !== "admin" && newRole !== "member")) return this.json(res, 400, { error: "invalid_params" }); + if (newRole === "member" && userId === this.authState.bootstrapAdminUserId) { + return this.json(res, 403, { error: "cannot_demote_owner", message: "The hub owner cannot be demoted" }); } + const user = this.opts.store.getHubUser(userId); + if (!user || user.status !== "active") return this.json(res, 404, { error: "not_found" }); + const updatedUser = { ...user, role: newRole as "admin" | "member" }; + this.opts.store.upsertHubUser(updatedUser); + this.opts.log.info(`Hub: admin "${auth.userId}" changed role of "${userId}" to "${newRole}"`); + return this.json(res, 200, { ok: true, role: newRole }); + } - if (req.method === "PUT") { - if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); - const existing = this.opts.store.getHubGroupById(groupId); - if (!existing) return this.json(res, 404, { error: "not_found" }); - const body = await this.readJson(req); - this.opts.store.upsertHubGroup({ - id: groupId, - name: String(body.name || existing.name).trim(), - description: String(body.description ?? 
existing.description), - createdAt: existing.createdAt, - }); - return this.json(res, 200, { ok: true }); + if (req.method === "POST" && routePath === "/api/v1/hub/admin/rename-user") { + if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); + const body = await this.readJson(req); + const userId = String(body?.userId || ""); + const newUsername = String(body?.username || "").trim(); + if (!userId || !newUsername || newUsername.length < 2 || newUsername.length > 32) { + return this.json(res, 400, { error: "invalid_params", message: "userId and username (2-32 chars) required" }); } - - if (req.method === "DELETE") { - if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); - const deleted = this.opts.store.deleteHubGroup(groupId); - if (!deleted) return this.json(res, 404, { error: "not_found" }); - return this.json(res, 200, { ok: true }); + if (this.userManager.isUsernameTaken(newUsername, userId)) { + return this.json(res, 409, { error: "username_taken", message: "Username already in use" }); } + const user = this.opts.store.getHubUser(userId); + if (!user || user.status !== "active") return this.json(res, 404, { error: "not_found" }); + const ttlMs = user.role === "admin" ? 
3650 * 24 * 60 * 60 * 1000 : undefined; + const newToken = issueUserToken( + { userId: user.id, username: newUsername, role: user.role, status: user.status }, + this.authSecret, + ttlMs, + ); + this.userManager.approveUser(user.id, newToken); + const updated = this.opts.store.getHubUser(userId)!; + const finalUser = { ...updated, username: newUsername }; + this.opts.store.upsertHubUser(finalUser); + this.opts.log.info(`Hub: admin "${auth.userId}" renamed user "${userId}" to "${newUsername}"`); + return this.json(res, 200, { ok: true, username: newUsername }); } - const groupMembersMatch = routePath.match(/^\/api\/v1\/hub\/groups\/([^/]+)\/members$/); - if (groupMembersMatch) { - const groupId = decodeURIComponent(groupMembersMatch[1]); - - if (req.method === "POST") { - if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); - const group = this.opts.store.getHubGroupById(groupId); - if (!group) return this.json(res, 404, { error: "group_not_found" }); - const body = await this.readJson(req); - const userId = String(body.userId || ""); - if (!userId) return this.json(res, 400, { error: "userId_required" }); - const user = this.opts.store.getHubUser(userId); - if (!user) return this.json(res, 404, { error: "user_not_found" }); - this.opts.store.addHubGroupMember(groupId, userId); - return this.json(res, 200, { ok: true }); - } - - if (req.method === "DELETE") { - if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); - const body = await this.readJson(req); - const userId = String(body.userId || ""); - if (!userId) return this.json(res, 400, { error: "userId_required" }); - this.opts.store.removeHubGroupMember(groupId, userId); - return this.json(res, 200, { ok: true }); - } + if (req.method === "POST" && routePath === "/api/v1/hub/admin/remove-user") { + if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); + const body = await this.readJson(req); + const userId = String(body?.userId || ""); + 
if (!userId) return this.json(res, 400, { error: "missing_user_id" }); + if (userId === auth.userId) return this.json(res, 400, { error: "cannot_remove_self" }); + if (userId === this.authState.bootstrapAdminUserId) return this.json(res, 403, { error: "cannot_remove_owner", message: "The hub owner cannot be removed" }); + const cleanResources = body?.cleanResources === true; + const deleted = this.opts.store.deleteHubUser(userId, cleanResources); + if (!deleted) return this.json(res, 404, { error: "not_found" }); + this.opts.log.info(`Hub: admin "${auth.userId}" removed user "${userId}" (cleanResources=${cleanResources})`); + return this.json(res, 200, { ok: true }); } if (req.method === "POST" && routePath === "/api/v1/hub/tasks/share") { const body = await this.readJson(req); if (!body?.task) return this.json(res, 400, { error: "invalid_payload" }); const task = { ...body.task, sourceUserId: auth.userId }; + const existingTask = task.sourceTaskId ? this.opts.store.getHubTaskBySource(auth.userId, task.sourceTaskId) : null; this.opts.store.upsertHubTask(task); const chunks = Array.isArray(body.chunks) ? 
body.chunks : []; const chunkIds: string[] = []; @@ -366,16 +430,23 @@ export class HubServer { this.opts.store.upsertHubChunk({ ...chunk, sourceUserId: auth.userId }); chunkIds.push(chunk.id); } - // Async embedding: don't block the response if (this.opts.embedder && chunkIds.length > 0) { this.embedChunksAsync(chunkIds, chunks); } + if (!existingTask) { + this.notifyAdmins("resource_shared", "task", String(task.title || task.sourceTaskId || ""), auth.userId); + } return this.json(res, 200, { ok: true, chunks: chunkIds.length }); } if (req.method === "POST" && routePath === "/api/v1/hub/tasks/unshare") { const body = await this.readJson(req); - this.opts.store.deleteHubTaskBySource(auth.userId, String(body.sourceTaskId)); + const srcTaskId = String(body.sourceTaskId); + const existing = this.opts.store.getHubTaskBySource(auth.userId, srcTaskId); + this.opts.store.deleteHubTaskBySource(auth.userId, srcTaskId); + if (existing) { + this.notifyAdmins("resource_unshared", "task", existing.title || srcTaskId, auth.userId); + } return this.json(res, 200, { ok: true }); } @@ -387,15 +458,8 @@ export class HubServer { if (!sourceChunkId) return this.json(res, 400, { error: "missing_source_chunk_id" }); const existing = this.opts.store.getHubMemoryBySource(auth.userId, sourceChunkId); const memoryId = existing?.id ?? randomUUID(); - const visibility = m.visibility === "group" ? 
"group" : "public"; - let resolvedGroupId: string | null = null; - if (visibility === "group") { - const gid = String(m.groupId || ""); - if (!gid) return this.json(res, 400, { error: "missing_group_id" }); - const group = this.opts.store.getHubGroupById(gid); - if (!group) return this.json(res, 404, { error: "group_not_found" }); - resolvedGroupId = gid; - } + const visibility = "public"; + const resolvedGroupId: string | null = null; const now = Date.now(); this.opts.store.upsertHubMemory({ id: memoryId, @@ -413,6 +477,9 @@ export class HubServer { if (this.opts.embedder) { this.embedMemoryAsync(memoryId, String(m.summary || ""), String(m.content || "")); } + if (!existing) { + this.notifyAdmins("resource_shared", "memory", String(m.summary || m.content?.slice(0, 60) || memoryId), auth.userId); + } return this.json(res, 200, { ok: true, memoryId, visibility }); } @@ -420,7 +487,11 @@ export class HubServer { const body = await this.readJson(req); const sourceChunkId = String(body?.sourceChunkId || ""); if (!sourceChunkId) return this.json(res, 400, { error: "missing_source_chunk_id" }); + const existing = this.opts.store.getHubMemoryBySource(auth.userId, sourceChunkId); this.opts.store.deleteHubMemoryBySource(auth.userId, sourceChunkId); + if (existing) { + this.notifyAdmins("resource_unshared", "memory", existing.summary || existing.content?.slice(0, 60) || sourceChunkId, auth.userId); + } return this.json(res, 200, { ok: true }); } @@ -555,7 +626,7 @@ export class HubServer { if (!sourceSkillId) return this.json(res, 400, { error: "missing_skill_id" }); const existing = this.opts.store.getHubSkillBySource(auth.userId, sourceSkillId); const skillId = existing?.id ?? randomUUID(); - const visibility = body?.visibility === "group" ? 
"group" : "public"; + const visibility = "public"; this.opts.store.upsertHubSkill({ id: skillId, sourceSkillId, @@ -563,13 +634,16 @@ export class HubServer { name: String(metadata.name || sourceSkillId), description: String(metadata.description || ""), version: Number(metadata.version || 1), - groupId: visibility === "group" ? String(body?.groupId || "") || null : null, + groupId: null, visibility, bundle: JSON.stringify(body?.bundle ?? {}), qualityScore: metadata.qualityScore == null ? null : Number(metadata.qualityScore), createdAt: existing?.createdAt ?? Date.now(), updatedAt: Date.now(), }); + if (!existing) { + this.notifyAdmins("resource_shared", "skill", String(metadata.name || sourceSkillId), auth.userId); + } return this.json(res, 200, { ok: true, skillId, visibility }); } @@ -577,10 +651,6 @@ export class HubServer { if (skillBundleMatch) { const skill = this.opts.store.getHubSkillById(decodeURIComponent(skillBundleMatch[1])); if (!skill) return this.json(res, 404, { error: "not_found" }); - const user = this.opts.store.getHubUser(auth.userId); - const groups = new Set((user?.groups ?? 
[]).map((group) => group.id)); - const allowed = skill.visibility === "public" || (skill.groupId != null && groups.has(skill.groupId)); - if (!allowed) return this.json(res, 403, { error: "forbidden" }); return this.json(res, 200, { skillId: skill.id, metadata: { @@ -596,7 +666,12 @@ export class HubServer { if (req.method === "POST" && routePath === "/api/v1/hub/skills/unpublish") { const body = await this.readJson(req); - this.opts.store.deleteHubSkillBySource(auth.userId, String(body?.sourceSkillId || "")); + const srcSkillId = String(body?.sourceSkillId || ""); + const existing = this.opts.store.getHubSkillBySource(auth.userId, srcSkillId); + this.opts.store.deleteHubSkillBySource(auth.userId, srcSkillId); + if (existing) { + this.notifyAdmins("resource_unshared", "skill", existing.name || srcSkillId, auth.userId); + } return this.json(res, 200, { ok: true }); } @@ -608,12 +683,48 @@ export class HubServer { return this.json(res, 200, { tasks }); } + const hubTaskDetailMatch = req.method === "GET" ? routePath.match(/^\/api\/v1\/hub\/shared-tasks\/([^/]+)\/detail$/) : null; + if (hubTaskDetailMatch) { + const taskId = decodeURIComponent(hubTaskDetailMatch[1]); + const task = this.opts.store.getHubTaskById(taskId); + if (!task) return this.json(res, 404, { error: "not_found" }); + const chunks = this.opts.store.listHubChunksByTaskId(taskId); + return this.json(res, 200, { + id: task.id, title: task.title, summary: task.summary, + startedAt: task.createdAt, endedAt: task.updatedAt, + chunks: chunks.map(c => ({ role: c.role, content: c.content, summary: c.summary, kind: c.kind, createdAt: c.createdAt })), + }); + } + + const hubSkillDetailMatch = req.method === "GET" ? 
routePath.match(/^\/api\/v1\/hub\/shared-skills\/([^/]+)\/detail$/) : null; + if (hubSkillDetailMatch) { + const skillId = decodeURIComponent(hubSkillDetailMatch[1]); + const skill = this.opts.store.getHubSkillById(skillId); + if (!skill) return this.json(res, 404, { error: "not_found" }); + let files: Array<{ path: string; type: string; size: number }> = []; + try { + const bundle = JSON.parse(skill.bundle || "{}"); + if (Array.isArray(bundle.files)) { + files = bundle.files.map((f: any) => ({ path: f.path ?? f.name ?? "unknown", type: f.type ?? "file", size: f.size ?? (f.content ? f.content.length : 0) })); + } + } catch { /* ignore parse error */ } + return this.json(res, 200, { + skill: { id: skill.id, name: skill.name, description: skill.description, version: skill.version, qualityScore: skill.qualityScore, status: "published" }, + files, + versions: [], + }); + } + const adminTaskDeleteMatch = req.method === "DELETE" ? routePath.match(/^\/api\/v1\/hub\/admin\/shared-tasks\/([^/]+)$/) : null; if (adminTaskDeleteMatch) { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const taskId = decodeURIComponent(adminTaskDeleteMatch[1]); + const taskInfo = this.opts.store.getHubTaskById(taskId); const deleted = this.opts.store.deleteHubTaskById(taskId); if (!deleted) return this.json(res, 404, { error: "not_found" }); + if (taskInfo) { + this.opts.store.insertHubNotification({ id: randomUUID(), userId: taskInfo.sourceUserId, type: "resource_removed", resource: "task", title: taskInfo.title }); + } return this.json(res, 200, { ok: true }); } @@ -627,8 +738,12 @@ export class HubServer { if (adminSkillDeleteMatch) { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const skillId = decodeURIComponent(adminSkillDeleteMatch[1]); + const skillInfo = this.opts.store.getHubSkillById(skillId); const deleted = this.opts.store.deleteHubSkillById(skillId); if (!deleted) return this.json(res, 404, { error: "not_found" }); + 
if (skillInfo) { + this.opts.store.insertHubNotification({ id: randomUUID(), userId: skillInfo.sourceUserId, type: "resource_removed", resource: "skill", title: skillInfo.name }); + } return this.json(res, 200, { ok: true }); } @@ -642,8 +757,12 @@ export class HubServer { if (adminMemoryDeleteMatch) { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const memoryId = decodeURIComponent(adminMemoryDeleteMatch[1]); + const memInfo = this.opts.store.getHubMemoryById(memoryId); const deleted = this.opts.store.deleteHubMemoryById(memoryId); if (!deleted) return this.json(res, 404, { error: "not_found" }); + if (memInfo) { + this.opts.store.insertHubNotification({ id: randomUUID(), userId: memInfo.sourceUserId, type: "resource_removed", resource: "memory", title: memInfo.summary || memInfo.id }); + } return this.json(res, 200, { ok: true }); } @@ -666,9 +785,87 @@ export class HubServer { }); } + if (req.method === "GET" && routePath === "/api/v1/hub/notifications") { + const unread = (new URL(req.url!, `http://${req.headers.host}`)).searchParams.get("unread") === "1"; + const list = this.opts.store.listHubNotifications(auth.userId, { unreadOnly: unread, limit: 50 }); + const unreadCount = this.opts.store.countUnreadHubNotifications(auth.userId); + return this.json(res, 200, { notifications: list, unreadCount }); + } + + if (req.method === "POST" && routePath === "/api/v1/hub/notifications/read") { + const body = await this.readJson(req); + const ids = Array.isArray(body.ids) ? 
body.ids as string[] : undefined; + this.opts.store.markHubNotificationsRead(auth.userId, ids); + return this.json(res, 200, { ok: true }); + } + + if (req.method === "POST" && routePath === "/api/v1/hub/notifications/clear") { + this.opts.store.clearHubNotifications(auth.userId); + return this.json(res, 200, { ok: true }); + } + return this.json(res, 404, { error: "not_found" }); } + private notifyAdmins(type: string, resource: string, title: string, fromUserId: string, opts?: { dedup?: boolean; dedupWindowMs?: number }): void { + try { + const admins = this.opts.store.listHubUsers("active").filter(u => u.role === "admin" && u.id !== fromUserId); + for (const admin of admins) { + if (opts?.dedup && this.opts.store.hasRecentHubNotification(admin.id, type, resource, opts.dedupWindowMs ?? 300_000)) { + continue; + } + this.opts.store.insertHubNotification({ id: randomUUID(), userId: admin.id, type, resource, title }); + } + } catch { /* best-effort */ } + } + + private initOnlineTracking(): void { + try { + const ownerId = this.authState.bootstrapAdminUserId || ""; + const users = this.opts.store.listHubUsers("active"); + const now = Date.now(); + for (const u of users) { + if (u.id === ownerId) continue; + if (u.lastActiveAt && now - u.lastActiveAt < HubServer.OFFLINE_THRESHOLD_MS) { + this.knownOnlineUsers.add(u.id); + } + } + } catch { /* best-effort */ } + } + + private checkOfflineUsers(): void { + try { + const ownerId = this.authState.bootstrapAdminUserId || ""; + const users = this.opts.store.listHubUsers("active"); + const now = Date.now(); + const currentlyOnline = new Set(); + for (const u of users) { + if (u.id === ownerId) continue; + if (u.lastActiveAt && now - u.lastActiveAt < HubServer.OFFLINE_THRESHOLD_MS) { + currentlyOnline.add(u.id); + } + } + for (const uid of this.knownOnlineUsers) { + if (!currentlyOnline.has(uid)) { + const user = users.find(u => u.id === uid); + if (user) { + this.notifyAdmins("user_offline", "user", user.username, uid); + 
this.opts.log.info(`Hub: user "${user.username}" (${uid}) went offline`); + } + } + } + for (const uid of currentlyOnline) { + if (!this.knownOnlineUsers.has(uid)) { + const user = users.find(u => u.id === uid); + if (user) { + this.notifyAdmins("user_online", "user", user.username, uid); + } + } + } + this.knownOnlineUsers = currentlyOnline; + } catch { /* best-effort */ } + } + private authenticate(req: http.IncomingMessage) { const header = req.headers.authorization; if (!header || !header.startsWith("Bearer ")) return null; @@ -679,6 +876,10 @@ export class HubServer { if (!user || user.status !== "active") return null; const hash = createHash("sha256").update(token).digest("hex"); if (user.tokenHash !== hash) return null; + const clientIp = (req.headers["x-client-ip"] as string)?.trim() + || (req.headers["x-forwarded-for"] as string)?.split(",")[0]?.trim() + || req.socket.remoteAddress || ""; + try { this.opts.store.updateHubUserActivity(user.id, clientIp); } catch { /* best-effort */ } return { userId: user.id, username: user.username, diff --git a/apps/memos-local-openclaw/src/hub/user-manager.ts b/apps/memos-local-openclaw/src/hub/user-manager.ts index 3165dbf2c..862ff3aa9 100644 --- a/apps/memos-local-openclaw/src/hub/user-manager.ts +++ b/apps/memos-local-openclaw/src/hub/user-manager.ts @@ -1,10 +1,10 @@ import { randomUUID, createHash } from "crypto"; -import { issueUserToken } from "./auth"; +import { issueUserToken, verifyUserToken } from "./auth"; import type { Logger } from "../types"; import type { UserInfo } from "../sharing/types"; import type { SqliteStore } from "../storage/sqlite"; -type ManagedHubUser = UserInfo & { tokenHash: string; createdAt: number; approvedAt: number | null }; +type ManagedHubUser = UserInfo & { tokenHash: string; createdAt: number; approvedAt: number | null; lastIp: string; lastActiveAt: number | null }; export class HubUserManager { constructor(private store: SqliteStore, private log: Logger) {} @@ -20,6 +20,8 @@ 
export class HubUserManager { tokenHash: "", createdAt: Date.now(), approvedAt: null, + lastIp: "", + lastActiveAt: null, }; this.store.upsertHubUser(user); return user; @@ -46,7 +48,7 @@ export class HubUserManager { if (bootstrapUserId) { const bootstrapUser = this.store.getHubUser(bootstrapUserId); if (bootstrapUser && bootstrapUser.role === "admin" && bootstrapUser.status === "active") { - if (bootstrapToken && bootstrapUser.tokenHash === createHash("sha256").update(bootstrapToken).digest("hex")) { + if (bootstrapToken && bootstrapUser.tokenHash === createHash("sha256").update(bootstrapToken).digest("hex") && verifyUserToken(bootstrapToken, secret)) { return { user: bootstrapUser, token: bootstrapToken }; } const refreshedToken = issueUserToken( @@ -88,6 +90,8 @@ export class HubUserManager { tokenHash: "", createdAt: Date.now(), approvedAt: Date.now(), + lastIp: "", + lastActiveAt: null, }; const token = issueUserToken( { userId: user.id, username: user.username, role: user.role, status: user.status }, @@ -99,6 +103,19 @@ export class HubUserManager { return { user, token }; } + isUsernameTaken(username: string, excludeUserId?: string): boolean { + const users = this.store.listHubUsers(); + return users.some(u => u.username === username && u.id !== excludeUserId); + } + + updateUsername(userId: string, newUsername: string): ManagedHubUser | null { + const user = this.store.getHubUser(userId); + if (!user) return null; + const updated = { ...user, username: newUsername }; + this.store.upsertHubUser(updated); + return updated; + } + rejectUser(userId: string): ManagedHubUser | null { const user = this.store.getHubUser(userId); if (!user) return null; @@ -110,4 +127,17 @@ export class HubUserManager { this.store.upsertHubUser(updated); return updated; } + + resetToPending(userId: string): ManagedHubUser | null { + const user = this.store.getHubUser(userId); + if (!user) return null; + const updated = { + ...user, + status: "pending" as const, + tokenHash: "", + 
approvedAt: null, + }; + this.store.upsertHubUser(updated); + return updated; + } } diff --git a/apps/memos-local-openclaw/src/index.ts b/apps/memos-local-openclaw/src/index.ts index e10f3305f..abf3e3590 100644 --- a/apps/memos-local-openclaw/src/index.ts +++ b/apps/memos-local-openclaw/src/index.ts @@ -64,8 +64,10 @@ export function initPlugin(opts: PluginInitOptions = {}): MemosLocalPlugin { const worker = new IngestWorker(store, embedder, ctx); const engine = new RecallEngine(store, embedder, ctx); + const sharedState = { lastSearchTime: 0 }; + const tools: ToolDefinition[] = [ - createMemorySearchTool(engine, store, ctx), + createMemorySearchTool(engine, store, ctx, sharedState), createMemoryTimelineTool(store), createMemoryGetTool(store), createNetworkMemoryDetailTool(store, ctx), @@ -87,7 +89,10 @@ export function initPlugin(opts: PluginInitOptions = {}): MemosLocalPlugin { const turnId = uuid(); const tag = ctx.config.capture?.evidenceWrapperTag ?? "STORED_MEMORY"; - const captured = captureMessages(messages, session, turnId, tag, ctx.log, owner); + const userSearchTime = sharedState.lastSearchTime || 0; + sharedState.lastSearchTime = 0; + + const captured = captureMessages(messages, session, turnId, tag, ctx.log, owner, userSearchTime); if (captured.length > 0) { worker.enqueue(captured); } diff --git a/apps/memos-local-openclaw/src/recall/engine.ts b/apps/memos-local-openclaw/src/recall/engine.ts index 4a46aad88..7956b94b9 100644 --- a/apps/memos-local-openclaw/src/recall/engine.ts +++ b/apps/memos-local-openclaw/src/recall/engine.ts @@ -74,10 +74,46 @@ export class RecallEngine { score: 1 / (i + 1), })); + // Step 1c: Hub memories search (when sharing is enabled and hub_memories exist) + let hubMemFtsRanked: Array<{ id: string; score: number }> = []; + let hubMemVecRanked: Array<{ id: string; score: number }> = []; + if (query && this.ctx.config.sharing?.enabled) { + try { + const hubFtsHits = this.store.searchHubMemories(query, { maxResults: 
candidatePool }); + hubMemFtsRanked = hubFtsHits.map(({ hit }, i) => ({ + id: `hubmem:${hit.id}`, score: 1 / (i + 1), + })); + } catch { /* hub_memories table may not exist */ } + try { + const hubMemEmbs = this.store.getVisibleHubMemoryEmbeddings(""); + if (hubMemEmbs.length > 0) { + const qv = await this.embedder.embedQuery(query).catch(() => null); + if (qv) { + const scored: Array<{ id: string; score: number }> = []; + for (const e of hubMemEmbs) { + let dot = 0, nA = 0, nB = 0; + for (let i = 0; i < qv.length && i < e.vector.length; i++) { + dot += qv[i] * e.vector[i]; nA += qv[i] * qv[i]; nB += e.vector[i] * e.vector[i]; + } + const sim = nA > 0 && nB > 0 ? dot / (Math.sqrt(nA) * Math.sqrt(nB)) : 0; + if (sim > 0.3) { + scored.push({ id: `hubmem:${e.memoryId}`, score: sim }); + } + } + scored.sort((a, b) => b.score - a.score); + hubMemVecRanked = scored.slice(0, candidatePool); + } + } + } catch { /* best-effort */ } + } + // Step 2: RRF fusion const ftsRanked = ftsCandidates.map((c) => ({ id: c.chunkId, score: c.score })); const vecRanked = vecCandidates.map((c) => ({ id: c.chunkId, score: c.score })); - const rrfScores = rrfFuse([ftsRanked, vecRanked, patternRanked], recallCfg.rrfK); + const allRankedLists = [ftsRanked, vecRanked, patternRanked]; + if (hubMemFtsRanked.length > 0) allRankedLists.push(hubMemFtsRanked); + if (hubMemVecRanked.length > 0) allRankedLists.push(hubMemVecRanked); + const rrfScores = rrfFuse(allRankedLists, recallCfg.rrfK); if (rrfScores.size === 0) { this.recordQuery(query, maxResults, minScore, 0); @@ -101,6 +137,11 @@ export class RecallEngine { // Step 4: Time decay const withTs = mmrResults.map((r) => { + if (r.id.startsWith("hubmem:")) { + const memId = r.id.slice(7); + const mem = this.store.getHubMemoryById(memId); + return { ...r, createdAt: mem?.createdAt ?? 0 }; + } const chunk = this.store.getChunk(r.id); return { ...r, createdAt: chunk?.createdAt ?? 
0 }; }); @@ -128,6 +169,34 @@ export class RecallEngine { const hits: SearchHit[] = []; for (const candidate of normalized) { if (hits.length >= maxResults) break; + + if (candidate.id.startsWith("hubmem:")) { + const memId = candidate.id.slice(7); + const mem = this.store.getHubMemoryById(memId); + if (!mem) continue; + if (roleFilter && mem.role !== roleFilter) continue; + hits.push({ + summary: mem.summary || mem.content.slice(0, 200), + original_excerpt: mem.content, + ref: { + sessionKey: `hub-shared:${mem.sourceUserId}`, + chunkId: mem.id, + turnId: "", + seq: 0, + }, + score: Math.round(candidate.score * 1000) / 1000, + taskId: null, + skillId: null, + owner: `hub-user:${mem.sourceUserId}`, + source: { + ts: mem.createdAt, + role: (mem.role || "assistant") as any, + sessionKey: `hub-shared:${mem.sourceUserId}`, + }, + }); + continue; + } + const chunk = this.store.getChunk(candidate.id); if (!chunk) continue; if (roleFilter && chunk.role !== roleFilter) continue; diff --git a/apps/memos-local-openclaw/src/storage/ensure-binding.ts b/apps/memos-local-openclaw/src/storage/ensure-binding.ts index ac7f9bf6c..29fbd4e96 100644 --- a/apps/memos-local-openclaw/src/storage/ensure-binding.ts +++ b/apps/memos-local-openclaw/src/storage/ensure-binding.ts @@ -3,8 +3,6 @@ import { execSync } from "child_process"; import path from "path"; import { createRequire } from "module"; -const require = createRequire(import.meta.url); - /** * Ensure the better-sqlite3 native binary is available. * @@ -13,14 +11,15 @@ const require = createRequire(import.meta.url); * and restores it from bundled prebuilds if missing. */ export function ensureSqliteBinding(log?: { info: (msg: string) => void; warn: (msg: string) => void }): void { - const bsqlPkg = require.resolve("better-sqlite3/package.json"); + const _req = typeof require !== "undefined" ? 
require : createRequire(__filename); + const bsqlPkg = _req.resolve("better-sqlite3/package.json"); const bsqlDir = path.dirname(bsqlPkg); const bindingPath = path.join(bsqlDir, "build", "Release", "better_sqlite3.node"); if (existsSync(bindingPath)) return; const platform = `${process.platform}-${process.arch}`; - const pluginRoot = path.resolve(path.dirname(new URL(import.meta.url).pathname), "..", ".."); + const pluginRoot = path.resolve(__dirname, "..", ".."); const prebuildSrc = path.join(pluginRoot, "prebuilds", platform, "better_sqlite3.node"); if (existsSync(prebuildSrc)) { diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index 6a8cb4587..f5ca36aba 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -3,7 +3,7 @@ import { createHash } from "crypto"; import * as fs from "fs"; import * as path from "path"; import type { Chunk, ChunkRef, DedupStatus, Task, TaskStatus, Skill, SkillStatus, SkillVisibility, SkillVersion, TaskSkillLink, TaskSkillRelation, Logger } from "../types"; -import type { GroupInfo, SharedVisibility, UserInfo, UserRole, UserStatus } from "../sharing/types"; +import type { SharedVisibility, UserInfo, UserRole, UserStatus } from "../sharing/types"; export class SqliteStore { private db: Database.Database; @@ -112,6 +112,7 @@ export class SqliteStore { this.migrateSkillEmbeddingsAndFts(); this.migrateFtsToTrigram(); this.migrateHubTables(); + this.migrateLocalSharedTasksOwner(); this.log.debug("Database schema initialized"); } @@ -119,6 +120,16 @@ export class SqliteStore { this.db.exec("CREATE INDEX IF NOT EXISTS idx_chunks_dedup_created ON chunks(dedup_status, created_at DESC)"); } + private migrateLocalSharedTasksOwner(): void { + try { + const cols = this.db.prepare("PRAGMA table_info(local_shared_tasks)").all() as Array<{ name: string }>; + if (cols.length > 0 && !cols.some((c) => c.name === "original_owner")) { + 
this.db.exec("ALTER TABLE local_shared_tasks ADD COLUMN original_owner TEXT NOT NULL DEFAULT 'agent:main'"); + this.log.info("Migrated: added original_owner column to local_shared_tasks"); + } + } catch { /* table may not exist yet */ } + } + private migrateOwnerFields(): void { const chunkCols = this.db.prepare("PRAGMA table_info(chunks)").all() as Array<{ name: string }>; if (!chunkCols.some((c) => c.name === "owner")) { @@ -516,12 +527,13 @@ export class SqliteStore { ).run(toolName, Math.round(durationMs), success ? 1 : 0, Date.now()); } - getToolMetrics(minutes: number): { + getToolMetrics(minutes: number, fromMs?: number, toMs?: number): { tools: string[]; series: Array<{ minute: string; [tool: string]: number | string }>; aggregated: Array<{ tool: string; totalCalls: number; avgMs: number; p95Ms: number; errorCount: number }>; } { - const since = Date.now() - minutes * 60 * 1000; + const since = fromMs ?? (Date.now() - minutes * 60 * 1000); + const until = toMs ?? Date.now(); const rows = this.db.prepare( `SELECT tool_name, @@ -529,9 +541,9 @@ export class SqliteStore { success, strftime('%Y-%m-%d %H:%M', called_at/1000, 'unixepoch', 'localtime') as minute_key FROM tool_calls - WHERE called_at >= ? + WHERE called_at >= ? AND called_at <= ? 
ORDER BY called_at`, - ).all(since) as Array<{ tool_name: string; duration_ms: number; success: number; minute_key: string }>; + ).all(since, until) as Array<{ tool_name: string; duration_ms: number; success: number; minute_key: string }>; const toolSet = new Set(); const minuteMap = new Map>(); @@ -683,34 +695,27 @@ export class SqliteStore { shared_at INTEGER NOT NULL ); + CREATE TABLE IF NOT EXISTS local_shared_memories ( + chunk_id TEXT PRIMARY KEY REFERENCES chunks(id) ON DELETE CASCADE, + original_owner TEXT NOT NULL, + shared_at INTEGER NOT NULL + ); + CREATE TABLE IF NOT EXISTS hub_users ( - id TEXT PRIMARY KEY, - username TEXT NOT NULL UNIQUE, - device_name TEXT NOT NULL DEFAULT '', - role TEXT NOT NULL, - status TEXT NOT NULL, - token_hash TEXT NOT NULL DEFAULT '', - created_at INTEGER NOT NULL, - approved_at INTEGER + id TEXT PRIMARY KEY, + username TEXT NOT NULL UNIQUE, + device_name TEXT NOT NULL DEFAULT '', + role TEXT NOT NULL, + status TEXT NOT NULL, + token_hash TEXT NOT NULL DEFAULT '', + created_at INTEGER NOT NULL, + approved_at INTEGER, + last_ip TEXT NOT NULL DEFAULT '', + last_active_at INTEGER ); CREATE INDEX IF NOT EXISTS idx_hub_users_status ON hub_users(status); CREATE INDEX IF NOT EXISTS idx_hub_users_role ON hub_users(role); - CREATE TABLE IF NOT EXISTS hub_groups ( - id TEXT PRIMARY KEY, - name TEXT NOT NULL UNIQUE, - description TEXT NOT NULL DEFAULT '', - created_at INTEGER NOT NULL - ); - - CREATE TABLE IF NOT EXISTS hub_group_members ( - group_id TEXT NOT NULL REFERENCES hub_groups(id) ON DELETE CASCADE, - user_id TEXT NOT NULL REFERENCES hub_users(id) ON DELETE CASCADE, - joined_at INTEGER NOT NULL, - PRIMARY KEY (group_id, user_id) - ); - CREATE INDEX IF NOT EXISTS idx_hub_group_members_user ON hub_group_members(user_id); - CREATE TABLE IF NOT EXISTS hub_tasks ( id TEXT PRIMARY KEY, source_task_id TEXT NOT NULL, @@ -872,6 +877,32 @@ export class SqliteStore { VALUES (new.rowid, new.summary, new.content); END; `); + + 
this.db.exec(` + CREATE TABLE IF NOT EXISTS hub_notifications ( + id TEXT PRIMARY KEY, + user_id TEXT NOT NULL, + type TEXT NOT NULL, + resource TEXT NOT NULL, + title TEXT NOT NULL, + message TEXT NOT NULL DEFAULT '', + read INTEGER NOT NULL DEFAULT 0, + created_at INTEGER NOT NULL + ); + CREATE INDEX IF NOT EXISTS idx_hub_notif_user ON hub_notifications(user_id, read, created_at DESC); + `); + + try { + const cols = this.db.prepare("PRAGMA table_info(hub_users)").all() as Array<{ name: string }>; + if (cols.length > 0 && !cols.some(c => c.name === "last_ip")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN last_ip TEXT NOT NULL DEFAULT ''"); + this.log.info("Migrated: added last_ip column to hub_users"); + } + if (cols.length > 0 && !cols.some(c => c.name === "last_active_at")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN last_active_at INTEGER"); + this.log.info("Migrated: added last_active_at column to hub_users"); + } + } catch { /* table may not exist yet */ } } // ─── Write ─── @@ -1213,6 +1244,8 @@ export class SqliteStore { "skill_embeddings", "skill_versions", "skills", + "local_shared_memories", + "local_shared_tasks", "embeddings", "chunks", "tasks", @@ -1684,6 +1717,67 @@ export class SqliteStore { return rows.map(r => ({ taskId: r.task_id, hubTaskId: r.hub_task_id, visibility: r.visibility, groupId: r.group_id, syncedChunks: r.synced_chunks })); } + // ─── Local Shared Memories (client-side tracking) ─── + + markMemorySharedLocally(chunkId: string): { ok: boolean; owner?: string; originalOwner?: string; sharedAt?: number; reason?: string } { + const chunk = this.getChunk(chunkId); + if (!chunk) return { ok: false, reason: "not_found" }; + if (chunk.owner === "public") { + const existing = this.getLocalSharedMemory(chunkId); + return { + ok: true, + owner: "public", + originalOwner: existing?.originalOwner ?? undefined, + sharedAt: existing?.sharedAt ?? 
undefined, + }; + } + + const sharedAt = Date.now(); + this.db.transaction(() => { + this.db.prepare(` + INSERT INTO local_shared_memories (chunk_id, original_owner, shared_at) + VALUES (?, ?, ?) + ON CONFLICT(chunk_id) DO UPDATE SET + original_owner = excluded.original_owner, + shared_at = excluded.shared_at + `).run(chunkId, chunk.owner, sharedAt); + this.updateChunk(chunkId, { owner: "public" }); + })(); + + return { ok: true, owner: "public", originalOwner: chunk.owner, sharedAt }; + } + + unmarkMemorySharedLocally(chunkId: string, fallbackOwner?: string): { ok: boolean; owner?: string; originalOwner?: string; reason?: string } { + const chunk = this.getChunk(chunkId); + if (!chunk) return { ok: false, reason: "not_found" }; + if (chunk.owner !== "public") { + return { ok: true, owner: chunk.owner }; + } + + const existing = this.getLocalSharedMemory(chunkId); + const restoreOwner = existing?.originalOwner ?? fallbackOwner; + if (!restoreOwner || restoreOwner === "public") { + return { ok: false, reason: "original_owner_missing" }; + } + + this.db.transaction(() => { + this.updateChunk(chunkId, { owner: restoreOwner }); + this.db.prepare("DELETE FROM local_shared_memories WHERE chunk_id = ?").run(chunkId); + })(); + + return { ok: true, owner: restoreOwner, originalOwner: restoreOwner }; + } + + getLocalSharedMemory(chunkId: string): { chunkId: string; originalOwner: string; sharedAt: number } | null { + const row = this.db.prepare("SELECT chunk_id, original_owner, shared_at FROM local_shared_memories WHERE chunk_id = ?").get(chunkId) as any; + if (!row) return null; + return { + chunkId: row.chunk_id, + originalOwner: row.original_owner, + sharedAt: row.shared_at, + }; + } + // ─── Hub Users / Groups ─── upsertHubUser(user: HubUserRecord): void { @@ -1704,74 +1798,39 @@ export class SqliteStore { getHubUser(userId: string): HubUserRecord | null { const row = this.db.prepare('SELECT * FROM hub_users WHERE id = ?').get(userId) as HubUserRow | undefined; if 
(!row) return null; - return this.attachGroupsToHubUser(rowToHubUser(row)); + return rowToHubUser(row); } listHubUsers(status?: UserStatus): HubUserRecord[] { const rows = status ? this.db.prepare('SELECT * FROM hub_users WHERE status = ? ORDER BY created_at').all(status) as HubUserRow[] : this.db.prepare('SELECT * FROM hub_users ORDER BY created_at').all() as HubUserRow[]; - return rows.map((row) => this.attachGroupsToHubUser(rowToHubUser(row))); - } - - upsertHubGroup(group: HubGroupRecord): void { - this.db.prepare(` - INSERT INTO hub_groups (id, name, description, created_at) - VALUES (?, ?, ?, ?) - ON CONFLICT(id) DO UPDATE SET - name = excluded.name, - description = excluded.description, - created_at = excluded.created_at - `).run(group.id, group.name, group.description, group.createdAt); - } - - listHubGroups(): HubGroupRecord[] { - const rows = this.db.prepare('SELECT * FROM hub_groups ORDER BY name').all() as HubGroupRow[]; - return rows.map(rowToHubGroup); - } - - addHubGroupMember(groupId: string, userId: string, joinedAt = Date.now()): void { - this.db.prepare(` - INSERT INTO hub_group_members (group_id, user_id, joined_at) - VALUES (?, ?, ?) - ON CONFLICT(group_id, user_id) DO UPDATE SET joined_at = excluded.joined_at - `).run(groupId, userId, joinedAt); + return rows.map(rowToHubUser); } - getHubGroupById(groupId: string): HubGroupRecord | undefined { - const row = this.db.prepare('SELECT * FROM hub_groups WHERE id = ?').get(groupId) as HubGroupRow | undefined; - return row ? 
rowToHubGroup(row) : undefined; - } - - deleteHubGroup(groupId: string): boolean { - const result = this.db.prepare('DELETE FROM hub_groups WHERE id = ?').run(groupId); + deleteHubUser(userId: string, cleanResources = false): boolean { + if (cleanResources) { + this.db.prepare('DELETE FROM hub_tasks WHERE source_user_id = ?').run(userId); + this.db.prepare('DELETE FROM hub_skills WHERE source_user_id = ?').run(userId); + this.db.prepare('DELETE FROM hub_memories WHERE source_user_id = ?').run(userId); + } + const result = this.db.prepare('DELETE FROM hub_users WHERE id = ?').run(userId); return result.changes > 0; } - listHubGroupMembers(groupId: string): Array<{ userId: string; username: string; joinedAt: number }> { - const rows = this.db.prepare(` - SELECT gm.user_id, hu.username, gm.joined_at - FROM hub_group_members gm - JOIN hub_users hu ON hu.id = gm.user_id - WHERE gm.group_id = ? - ORDER BY gm.joined_at - `).all(groupId) as Array<{ user_id: string; username: string; joined_at: number }>; - return rows.map(r => ({ userId: r.user_id, username: r.username, joinedAt: r.joined_at })); - } - - removeHubGroupMember(groupId: string, userId: string): void { - this.db.prepare('DELETE FROM hub_group_members WHERE group_id = ? AND user_id = ?').run(groupId, userId); + updateHubUserActivity(userId: string, ip: string, timestamp?: number): void { + this.db.prepare('UPDATE hub_users SET last_ip = ?, last_active_at = ? WHERE id = ?').run(ip, timestamp ?? Date.now(), userId); } - getGroupsForHubUser(userId: string): GroupInfo[] { - const rows = this.db.prepare(` - SELECT g.* - FROM hub_group_members gm - JOIN hub_groups g ON g.id = gm.group_id - WHERE gm.user_id = ? 
- ORDER BY g.name - `).all(userId) as HubGroupRow[]; - return rows.map((row) => ({ id: row.id, name: row.name, description: row.description || undefined })); + getHubUserContributions(): Record<string, { memoryCount: number; taskCount: number; skillCount: number }> { + const result: Record<string, { memoryCount: number; taskCount: number; skillCount: number }> = {}; + const memRows = this.db.prepare('SELECT source_user_id, COUNT(*) as cnt FROM hub_memories GROUP BY source_user_id').all() as Array<{ source_user_id: string; cnt: number }>; + const taskRows = this.db.prepare('SELECT source_user_id, COUNT(*) as cnt FROM hub_tasks GROUP BY source_user_id').all() as Array<{ source_user_id: string; cnt: number }>; + const skillRows = this.db.prepare('SELECT source_user_id, COUNT(*) as cnt FROM hub_skills GROUP BY source_user_id').all() as Array<{ source_user_id: string; cnt: number }>; + for (const r of memRows) { if (!result[r.source_user_id]) result[r.source_user_id] = { memoryCount: 0, taskCount: 0, skillCount: 0 }; result[r.source_user_id].memoryCount = r.cnt; } + for (const r of taskRows) { if (!result[r.source_user_id]) result[r.source_user_id] = { memoryCount: 0, taskCount: 0, skillCount: 0 }; result[r.source_user_id].taskCount = r.cnt; } + for (const r of skillRows) { if (!result[r.source_user_id]) result[r.source_user_id] = { memoryCount: 0, taskCount: 0, skillCount: 0 }; result[r.source_user_id].skillCount = r.cnt; } + return result; } // ─── Hub Shared Data ─── @@ -1795,6 +1854,11 @@ export class SqliteStore { return row ? rowToHubTask(row) : null; } + getHubTaskById(taskId: string): HubTaskRecord | null { + const row = this.db.prepare('SELECT * FROM hub_tasks WHERE id = ?').get(taskId) as HubTaskRow | undefined; + return row ? rowToHubTask(row) : null; + } + upsertHubChunk(chunk: HubChunkUpsertInput): void { if (!chunk.sourceTaskId) throw new Error("sourceTaskId is required for hub chunk upserts"); const taskId = this.resolveCanonicalHubTaskId(chunk.hubTaskId, chunk.sourceUserId, chunk.sourceTaskId); @@ -1874,24 +1938,16 @@ export class SqliteStore { const limit = options?.maxResults ?? 
10; const userId = options?.userId ?? ""; const rows = this.db.prepare(` - SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, hg.name as group_name, hu.username as owner_name, + SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, '' as group_name, hu.username as owner_name, bm25(hub_chunks_fts) as rank FROM hub_chunks_fts f JOIN hub_chunks hc ON hc.rowid = f.rowid JOIN hub_tasks ht ON ht.id = hc.hub_task_id - LEFT JOIN hub_groups hg ON hg.id = ht.group_id LEFT JOIN hub_users hu ON hu.id = ht.source_user_id WHERE hub_chunks_fts MATCH ? - AND ( - ht.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = ht.group_id AND gm.user_id = ? - ) - ) ORDER BY rank LIMIT ? - `).all(sanitizeFtsQuery(query), userId, limit) as HubSearchRow[]; + `).all(sanitizeFtsQuery(query), limit) as HubSearchRow[]; return rows.map((row, idx) => ({ hit: row, rank: idx + 1 })); } @@ -1916,12 +1972,7 @@ export class SqliteStore { FROM hub_embeddings he JOIN hub_chunks hc ON hc.id = he.chunk_id JOIN hub_tasks ht ON ht.id = hc.hub_task_id - WHERE ht.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = ht.group_id AND gm.user_id = ? 
- ) - `).all(userId) as Array<{ chunk_id: string; vector: Buffer; dimensions: number }>; + `).all() as Array<{ chunk_id: string; vector: Buffer; dimensions: number }>; return rows.map(r => ({ chunkId: r.chunk_id, vector: new Float32Array(r.vector.buffer, r.vector.byteOffset, r.dimensions), @@ -1930,22 +1981,14 @@ export class SqliteStore { getVisibleHubSearchHitByChunkId(chunkId: string, userId: string): HubSearchRow | null { const row = this.db.prepare(` - SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, hg.name as group_name, hu.username as owner_name, + SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, '' as group_name, hu.username as owner_name, 0 as rank FROM hub_chunks hc JOIN hub_tasks ht ON ht.id = hc.hub_task_id - LEFT JOIN hub_groups hg ON hg.id = ht.group_id LEFT JOIN hub_users hu ON hu.id = ht.source_user_id WHERE hc.id = ? - AND ( - ht.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = ht.group_id AND gm.user_id = ? - ) - ) LIMIT 1 - `).get(chunkId, userId) as HubSearchRow | undefined; + `).get(chunkId) as HubSearchRow | undefined; return row ?? null; } @@ -1961,38 +2004,24 @@ export class SqliteStore { let rows: HubSkillSearchRow[]; if (sanitized) { rows = this.db.prepare(` - SELECT hs.id, hs.name, hs.description, hs.version, hs.visibility, hg.name AS group_name, hu.username AS owner_name, hs.quality_score, + SELECT hs.id, hs.name, hs.description, hs.version, hs.visibility, '' AS group_name, hu.username AS owner_name, hs.quality_score, bm25(hub_skills_fts) as rank FROM hub_skills_fts f JOIN hub_skills hs ON hs.rowid = f.rowid - LEFT JOIN hub_groups hg ON hg.id = hs.group_id LEFT JOIN hub_users hu ON hu.id = hs.source_user_id WHERE hub_skills_fts MATCH ? - AND ( - hs.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = hs.group_id AND gm.user_id = ? 
- ) - ) ORDER BY rank LIMIT ? - `).all(sanitized, userId, limit) as HubSkillSearchRow[]; + `).all(sanitized, limit) as HubSkillSearchRow[]; } else { rows = this.db.prepare(` - SELECT hs.id, hs.name, hs.description, hs.version, hs.visibility, hg.name AS group_name, hu.username AS owner_name, hs.quality_score, + SELECT hs.id, hs.name, hs.description, hs.version, hs.visibility, '' AS group_name, hu.username AS owner_name, hs.quality_score, 0 as rank FROM hub_skills hs - LEFT JOIN hub_groups hg ON hg.id = hs.group_id LEFT JOIN hub_users hu ON hu.id = hs.source_user_id - WHERE hs.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = hs.group_id AND gm.user_id = ? - ) ORDER BY hs.updated_at DESC LIMIT ? - `).all(userId, limit) as HubSkillSearchRow[]; + `).all(limit) as HubSkillSearchRow[]; } return rows.map((row, idx) => ({ hit: row, rank: idx + 1 })); } @@ -2003,19 +2032,13 @@ export class SqliteStore { listVisibleHubTasks(userId: string, limit = 40): Array<{ id: string; sourceTaskId: string; sourceUserId: string; title: string; summary: string; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; chunkCount: number; createdAt: number; updatedAt: number }> { const rows = this.db.prepare(` - SELECT t.*, u.username AS owner_name, g.name AS group_name, + SELECT t.*, u.username AS owner_name, NULL AS group_name, (SELECT COUNT(*) FROM hub_chunks c WHERE c.hub_task_id = t.id) AS chunk_count FROM hub_tasks t LEFT JOIN hub_users u ON u.id = t.source_user_id - LEFT JOIN hub_groups g ON g.id = t.group_id - WHERE t.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = t.group_id AND gm.user_id = ? - ) ORDER BY t.updated_at DESC LIMIT ? 
- `).all(userId, limit) as any[]; + `).all(limit) as any[]; return rows.map(r => ({ id: r.id, sourceTaskId: r.source_task_id, sourceUserId: r.source_user_id, title: r.title, summary: r.summary, groupId: r.group_id, groupName: r.group_name ?? null, @@ -2026,21 +2049,25 @@ export class SqliteStore { listAllHubTasks(): Array<{ id: string; sourceTaskId: string; sourceUserId: string; title: string; summary: string; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; chunkCount: number; createdAt: number; updatedAt: number }> { const rows = this.db.prepare(` - SELECT t.*, u.username AS owner_name, g.name AS group_name, + SELECT t.*, u.username AS owner_name, (SELECT COUNT(*) FROM hub_chunks c WHERE c.hub_task_id = t.id) AS chunk_count FROM hub_tasks t LEFT JOIN hub_users u ON u.id = t.source_user_id - LEFT JOIN hub_groups g ON g.id = t.group_id ORDER BY t.updated_at DESC `).all() as any[]; return rows.map(r => ({ id: r.id, sourceTaskId: r.source_task_id, sourceUserId: r.source_user_id, - title: r.title, summary: r.summary, groupId: r.group_id, groupName: r.group_name ?? null, + title: r.title, summary: r.summary, groupId: r.group_id, groupName: null as string | null, visibility: r.visibility, ownerName: r.owner_name ?? "unknown", chunkCount: r.chunk_count ?? 0, createdAt: r.created_at, updatedAt: r.updated_at, })); } + listHubChunksByTaskId(hubTaskId: string): HubChunkRecord[] { + const rows = this.db.prepare('SELECT * FROM hub_chunks WHERE hub_task_id = ? 
ORDER BY created_at ASC').all(hubTaskId) as HubChunkRow[]; + return rows.map(rowToHubChunk); + } + deleteHubTaskById(taskId: string): boolean { const info = this.db.prepare('DELETE FROM hub_tasks WHERE id = ?').run(taskId); return info.changes > 0; @@ -2048,18 +2075,12 @@ export class SqliteStore { listVisibleHubSkills(userId: string, limit = 40): Array<{ id: string; sourceSkillId: string; sourceUserId: string; name: string; description: string; version: number; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; qualityScore: number | null; createdAt: number; updatedAt: number }> { const rows = this.db.prepare(` - SELECT s.*, u.username AS owner_name, g.name AS group_name + SELECT s.*, u.username AS owner_name, NULL AS group_name FROM hub_skills s LEFT JOIN hub_users u ON u.id = s.source_user_id - LEFT JOIN hub_groups g ON g.id = s.group_id - WHERE s.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = s.group_id AND gm.user_id = ? - ) ORDER BY s.updated_at DESC LIMIT ? 
- `).all(userId, limit) as any[]; + `).all(limit) as any[]; return rows.map(r => ({ id: r.id, sourceSkillId: r.source_skill_id, sourceUserId: r.source_user_id, name: r.name, description: r.description, version: r.version, @@ -2071,16 +2092,15 @@ export class SqliteStore { listAllHubSkills(): Array<{ id: string; sourceSkillId: string; sourceUserId: string; name: string; description: string; version: number; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; qualityScore: number | null; createdAt: number; updatedAt: number }> { const rows = this.db.prepare(` - SELECT s.*, u.username AS owner_name, g.name AS group_name + SELECT s.*, u.username AS owner_name FROM hub_skills s LEFT JOIN hub_users u ON u.id = s.source_user_id - LEFT JOIN hub_groups g ON g.id = s.group_id ORDER BY s.updated_at DESC `).all() as any[]; return rows.map(r => ({ id: r.id, sourceSkillId: r.source_skill_id, sourceUserId: r.source_user_id, name: r.name, description: r.description, version: r.version, - groupId: r.group_id, groupName: r.group_name ?? null, visibility: r.visibility, + groupId: r.group_id, groupName: null as string | null, visibility: r.visibility, ownerName: r.owner_name ?? "unknown", qualityScore: r.quality_score, createdAt: r.created_at, updatedAt: r.updated_at, })); @@ -2128,6 +2148,47 @@ export class SqliteStore { return info.changes > 0; } + // ─── Hub Notifications ─── + + insertHubNotification(n: { id: string; userId: string; type: string; resource: string; title: string; message?: string }): void { + this.db.prepare( + 'INSERT INTO hub_notifications (id, user_id, type, resource, title, message, read, created_at) VALUES (?, ?, ?, ?, ?, ?, 0, ?)' + ).run(n.id, n.userId, n.type, n.resource, n.title, n.message ?? 
'', Date.now()); + } + + hasRecentHubNotification(userId: string, type: string, resource: string, windowMs: number = 300_000): boolean { + const since = Date.now() - windowMs; + const row = this.db.prepare( + 'SELECT COUNT(*) AS cnt FROM hub_notifications WHERE user_id = ? AND type = ? AND resource = ? AND created_at > ?' + ).get(userId, type, resource, since) as { cnt: number }; + return row.cnt > 0; + } + + listHubNotifications(userId: string, opts?: { unreadOnly?: boolean; limit?: number }): Array<{ id: string; userId: string; type: string; resource: string; title: string; message: string; read: boolean; createdAt: number }> { + const where = opts?.unreadOnly ? 'WHERE user_id = ? AND read = 0' : 'WHERE user_id = ?'; + const limit = opts?.limit ?? 50; + const rows = this.db.prepare(`SELECT * FROM hub_notifications ${where} ORDER BY created_at DESC LIMIT ?`).all(userId, limit) as any[]; + return rows.map(r => ({ id: r.id, userId: r.user_id, type: r.type, resource: r.resource, title: r.title, message: r.message, read: !!r.read, createdAt: r.created_at })); + } + + countUnreadHubNotifications(userId: string): number { + const row = this.db.prepare('SELECT COUNT(*) AS cnt FROM hub_notifications WHERE user_id = ? AND read = 0').get(userId) as { cnt: number }; + return row.cnt; + } + + markHubNotificationsRead(userId: string, ids?: string[]): void { + if (ids && ids.length > 0) { + const placeholders = ids.map(() => '?').join(','); + this.db.prepare(`UPDATE hub_notifications SET read = 1 WHERE user_id = ? 
AND id IN (${placeholders})`).run(userId, ...ids); + } else { + this.db.prepare('UPDATE hub_notifications SET read = 1 WHERE user_id = ?').run(userId); + } + } + + clearHubNotifications(userId: string): void { + this.db.prepare('DELETE FROM hub_notifications WHERE user_id = ?').run(userId); + } + upsertHubMemoryEmbedding(memoryId: string, vector: Float32Array): void { const buf = Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength); this.db.prepare(` @@ -2149,23 +2210,15 @@ export class SqliteStore { const sanitized = sanitizeFtsQuery(query); if (!sanitized) return []; const rows = this.db.prepare(` - SELECT hm.id, hm.content, hm.summary, hm.role, hm.created_at, hm.visibility, hg.name as group_name, hu.username as owner_name, + SELECT hm.id, hm.content, hm.summary, hm.role, hm.created_at, hm.visibility, '' as group_name, hu.username as owner_name, bm25(hub_memories_fts) as rank FROM hub_memories_fts f JOIN hub_memories hm ON hm.rowid = f.rowid - LEFT JOIN hub_groups hg ON hg.id = hm.group_id LEFT JOIN hub_users hu ON hu.id = hm.source_user_id WHERE hub_memories_fts MATCH ? - AND ( - hm.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = hm.group_id AND gm.user_id = ? - ) - ) ORDER BY rank LIMIT ? - `).all(sanitized, userId, limit) as HubMemorySearchRow[]; + `).all(sanitized, limit) as HubMemorySearchRow[]; return rows.map((row, idx) => ({ hit: row, rank: idx + 1 })); } @@ -2174,12 +2227,7 @@ export class SqliteStore { SELECT hme.memory_id, hme.vector, hme.dimensions FROM hub_memory_embeddings hme JOIN hub_memories hm ON hm.id = hme.memory_id - WHERE hm.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = hm.group_id AND gm.user_id = ? 
- ) - `).all(userId) as Array<{ memory_id: string; vector: Buffer; dimensions: number }>; + `).all() as Array<{ memory_id: string; vector: Buffer; dimensions: number }>; return rows.map(r => ({ memoryId: r.memory_id, vector: new Float32Array(r.vector.buffer, r.vector.byteOffset, r.dimensions), @@ -2188,58 +2236,43 @@ export class SqliteStore { getVisibleHubSearchHitByMemoryId(memoryId: string, userId: string): HubMemorySearchRow | null { const row = this.db.prepare(` - SELECT hm.id, hm.content, hm.summary, hm.role, hm.created_at, hm.visibility, hg.name as group_name, hu.username as owner_name, + SELECT hm.id, hm.content, hm.summary, hm.role, hm.created_at, hm.visibility, '' as group_name, hu.username as owner_name, 0 as rank FROM hub_memories hm - LEFT JOIN hub_groups hg ON hg.id = hm.group_id LEFT JOIN hub_users hu ON hu.id = hm.source_user_id WHERE hm.id = ? - AND ( - hm.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = hm.group_id AND gm.user_id = ? - ) - ) LIMIT 1 - `).get(memoryId, userId) as HubMemorySearchRow | undefined; + `).get(memoryId) as HubMemorySearchRow | undefined; return row ?? 
null; } - listVisibleHubMemories(userId: string, limit = 40): Array<{ id: string; sourceChunkId: string; sourceUserId: string; role: string; summary: string; kind: string; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; createdAt: number; updatedAt: number }> { + listVisibleHubMemories(userId: string, limit = 40): Array<{ id: string; sourceChunkId: string; sourceUserId: string; role: string; content: string; summary: string; kind: string; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; createdAt: number; updatedAt: number }> { const rows = this.db.prepare(` - SELECT m.*, u.username AS owner_name, g.name AS group_name + SELECT m.*, u.username AS owner_name, NULL AS group_name FROM hub_memories m LEFT JOIN hub_users u ON u.id = m.source_user_id - LEFT JOIN hub_groups g ON g.id = m.group_id - WHERE m.visibility = 'public' - OR EXISTS ( - SELECT 1 FROM hub_group_members gm - WHERE gm.group_id = m.group_id AND gm.user_id = ? - ) ORDER BY m.updated_at DESC LIMIT ? - `).all(userId, limit) as any[]; + `).all(limit) as any[]; return rows.map(r => ({ id: r.id, sourceChunkId: r.source_chunk_id, sourceUserId: r.source_user_id, - role: r.role, summary: r.summary, kind: r.kind, + role: r.role, content: r.content ?? "", summary: r.summary, kind: r.kind, groupId: r.group_id, groupName: r.group_name ?? null, visibility: r.visibility, ownerName: r.owner_name ?? 
"unknown", createdAt: r.created_at, updatedAt: r.updated_at, })); } - listAllHubMemories(): Array<{ id: string; sourceChunkId: string; sourceUserId: string; role: string; summary: string; kind: string; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; createdAt: number; updatedAt: number }> { + listAllHubMemories(): Array<{ id: string; sourceChunkId: string; sourceUserId: string; role: string; content: string; summary: string; kind: string; groupId: string | null; groupName: string | null; visibility: string; ownerName: string; createdAt: number; updatedAt: number }> { const rows = this.db.prepare(` - SELECT m.*, u.username AS owner_name, g.name AS group_name + SELECT m.*, u.username AS owner_name FROM hub_memories m LEFT JOIN hub_users u ON u.id = m.source_user_id - LEFT JOIN hub_groups g ON g.id = m.group_id ORDER BY m.updated_at DESC `).all() as any[]; return rows.map(r => ({ id: r.id, sourceChunkId: r.source_chunk_id, sourceUserId: r.source_user_id, - role: r.role, summary: r.summary, kind: r.kind, - groupId: r.group_id, groupName: r.group_name ?? null, visibility: r.visibility, + role: r.role, content: r.content ?? "", summary: r.summary, kind: r.kind, + groupId: r.group_id, groupName: null as string | null, visibility: r.visibility, ownerName: r.owner_name ?? 
"unknown", createdAt: r.created_at, updatedAt: r.updated_at, })); } @@ -2264,13 +2297,6 @@ export class SqliteStore { throw new Error(`source skill not found for skillId=${skillId}`); } - private attachGroupsToHubUser(user: HubUserRecord): HubUserRecord { - return { - ...user, - groups: this.getGroupsForHubUser(user.id), - }; - } - getSessionOwnerMap(sessionKeys: string[]): Map { const result = new Map(); if (sessionKeys.length === 0) return result; @@ -2482,6 +2508,8 @@ interface HubUserRecord extends UserInfo { tokenHash: string; createdAt: number; approvedAt: number | null; + lastIp: string; + lastActiveAt: number | null; } interface HubUserRow { @@ -2493,6 +2521,8 @@ interface HubUserRow { token_hash: string; created_at: number; approved_at: number | null; + last_ip: string; + last_active_at: number | null; } function rowToHubUser(row: HubUserRow): HubUserRecord { @@ -2506,29 +2536,8 @@ function rowToHubUser(row: HubUserRow): HubUserRecord { tokenHash: row.token_hash, createdAt: row.created_at, approvedAt: row.approved_at, - }; -} - -interface HubGroupRecord { - id: string; - name: string; - description: string; - createdAt: number; -} - -interface HubGroupRow { - id: string; - name: string; - description: string; - created_at: number; -} - -function rowToHubGroup(row: HubGroupRow): HubGroupRecord { - return { - id: row.id, - name: row.name, - description: row.description, - createdAt: row.created_at, + lastIp: row.last_ip || "", + lastActiveAt: row.last_active_at ?? 
null, }; } diff --git a/apps/memos-local-openclaw/src/tools/memory-search.ts b/apps/memos-local-openclaw/src/tools/memory-search.ts index 878808453..43cad5bc8 100644 --- a/apps/memos-local-openclaw/src/tools/memory-search.ts +++ b/apps/memos-local-openclaw/src/tools/memory-search.ts @@ -24,7 +24,7 @@ function emptyHubResult(scope: HubScope): HubSearchResult { }; } -export function createMemorySearchTool(engine: RecallEngine, store?: SqliteStore, ctx?: PluginContext): ToolDefinition { +export function createMemorySearchTool(engine: RecallEngine, store?: SqliteStore, ctx?: PluginContext, sharedState?: { lastSearchTime: number }): ToolDefinition { return { name: "memory_search", description: @@ -60,6 +60,7 @@ export function createMemorySearchTool(engine: RecallEngine, store?: SqliteStore }, }, handler: async (input) => { + if (sharedState) sharedState.lastSearchTime = Date.now(); const query = (input.query as string) ?? ""; const maxResults = input.maxResults as number | undefined; const minScore = input.minScore as number | undefined; diff --git a/apps/memos-local-openclaw/src/types.ts b/apps/memos-local-openclaw/src/types.ts index c246e07cb..26901ceb6 100644 --- a/apps/memos-local-openclaw/src/types.ts +++ b/apps/memos-local-openclaw/src/types.ts @@ -275,6 +275,7 @@ export interface ClientModeConfig { hubAddress?: string; userToken?: string; teamToken?: string; + nickname?: string; pendingUserId?: string; } diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index 673b3969a..82d360c0b 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -5,12 +5,14 @@ return ` -OpenClaw Memory - Powered by MemOS + +MemOS 记忆 @@ -770,7 +1108,9 @@ input,textarea,select{font-family:inherit;font-size:inherit}
- +

OpenClaw Memory

Powered by MemOS

Set a password to protect your memories

@@ -788,7 +1128,9 @@ input,textarea,select{font-family:inherit;font-size:inherit}
- +

OpenClaw Memory

Powered by MemOS

Enter your password to access memories

@@ -839,9 +1181,10 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+
-
- OpenClaw Memory${vBadge} + +
MemOSPowered by MemOS
${vBadge}
- +
+ +
+
\u{1F514} Notifications
+
No notifications
+
+
+
-
-
-
Sessions
-
- + +
+ +
-
+
+
@@ -903,9 +1261,8 @@ input,textarea,select{font-family:inherit;font-size:inherit} - +
@@ -917,7 +1274,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
+
-Total Tasks
@@ -931,11 +1288,10 @@ input,textarea,select{font-family:inherit;font-size:inherit} -
@@ -969,14 +1325,14 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
+
@@ -999,13 +1355,12 @@ input,textarea,select{font-family:inherit;font-size:inherit} -
-
-
Hub Skills
-
No hub skills loaded.
+
@@ -1013,6 +1368,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}

+ @@ -1020,7 +1376,6 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
Skill Files
SKILL.md Content
@@ -1032,7 +1387,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
+
Range @@ -1054,10 +1409,18 @@ input,textarea,select{font-family:inherit;font-size:inherit}

\u26A1 Tool Response Time (per minute avg)

-
+
+ + + + | + + + +
@@ -1068,7 +1431,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
+
- - - -
-
- - -
-
- - -
-
+
+
+ + +
-
-

\u{1F4E1} Embedding Model

-
-
- - -
-
- - -
-
- - -
-
- - +
+ + +
+
+
\u{1F9E0}
+
+
AI Models
+
Configure embedding, summarizer and skill evolution models
+
+
+ +
\u{1F4E1} Embedding Model
+
Vector embedding model for memory search and retrieval
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+
+ +
-
- - + +
+ + +
\u{1F4DD} Summarizer Model
+
LLM for memory summarization, deduplication and analysis
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+
+ +
-
-
-

\u{1F9E0} Summarizer Model

-
-
- - -
-
- - -
-
- - -
-
- - -
-
- - +
+ + +
\u{1F527} Skill Evolution
+
Auto-extract reusable skills from conversation patterns
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+
+
Skill Dedicated Model
+
If not configured, the main Summarizer Model above will be used for skill generation. Configure a dedicated model here for higher quality skill output.
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+
+ + +
+
+ +
+ \u2713 Saved + + +
+
Some changes require restarting the OpenClaw gateway to take effect.
-
- - -
-
-
-
-

\u{1F527} Skill Evolution

-
-
- - -
-
- - -
-
- - -
-
- - + +
+
+
\u{1F310}
+
+
Team Sharing
+
Share memories, tasks and skills with your team
+
-
-
-

Skill Dedicated Model

-
If not configured, the main Summarizer Model above will be used for skill generation. Configure a dedicated model here for higher quality skill output.
-
-
- - +
+ +
+ +
\u{1F680} Get Started with Team Collaboration
+
MemOS supports team memory sharing. Choose one of the following options to enable collaboration, or continue using local-only mode.
+
+
+
+
\u{1F310}
+
Join a Remote Team
+
+
Your team already has a server running? Join it to share memories, tasks and skills with team members.
+
    +
  1. Ask your team admin for the Server Address and Team Token
  2. +
  3. Enable sharing above, select "Client" mode
  4. +
  5. Fill in Server Address and Team Token, click "Test Connection"
  6. +
  7. Save settings and restart the OpenClaw gateway (page refreshes automatically)
  8. +
+ +
+
+
+
\u{1F5A5}\uFE0F
+
Start Your Own Team Server
+
+
Be the team server. Run it on this device so others can connect and share memories with you.
+
    +
  1. Enable sharing above, select "Server" mode
  2. +
  3. Set a team name, save settings, and restart the gateway (page refreshes automatically)
  4. +
  5. Share the Server Address and Team Token with your team members
  6. +
  7. Approve join requests in the Admin Panel
  8. +
+ +
+
-
- - + +
Enable to share memories, tasks and skills with your team. When disabled, all features work normally in local-only mode.
+
+ + +
+ + -
- - + + -
- - + +
+ \u2713 Saved + +
-
-
- - +
Some changes require restarting the OpenClaw gateway to take effect.
-
-
-

\u{1F4CA} Telemetry

-
-
- - -
-
-
Anonymous usage analytics to help improve the plugin. Only sends tool names, latencies, and version info. No memory content, queries, or personal data is ever sent.
+ +
+
+
\u2699\uFE0F
+
+
General
+
System status, ports and telemetry
+
-
-
+
+
\u{1F4CA} Model Health
+
+
Loading model status...
+
+
+
+
+ + +
Requires restart to take effect
+
+
+
+
+ + +
+
Anonymous usage analytics to help improve the plugin. Only sends tool names, latencies, and version info. No memory content, queries, or personal data is ever sent.
-
-

\u{1F4BE} General

-
-
- - -
Requires restart to take effect
+
+ \u2713 Saved + + +
-
-
-

Hub & Team

-
-

\u{1F517} Hub Connection

-
Loading...
-
-
-

\u{1F465} Team & Groups

-
Loading...
-
-
-

\u{1F6E1} Admin Pending Users

-
Loading...
-
-
- \u2713 Saved - - -
-
Some changes require restarting the OpenClaw gateway to take effect.
+
-
-
-

\u{1F6E1} Hub Admin Panel

- +
+ +
+
+
+

\u{26A1} Team Admin Panel

+
+
Manage team members and shared resources
+
-
-
- - - - - +
+ + + +
-
-
+
+
-
+

\u{1F4E5} Import OpenClaw Memory

Migrate your existing OpenClaw built-in memories and conversation history into this plugin. The import process uses smart deduplication to avoid duplicates.

@@ -1506,6 +2002,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
+
@@ -1528,12 +2025,14 @@ input,textarea,select{font-family:inherit;font-size:inherit} diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index b51d3bbbe..c1717992c 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -2577,6 +2577,15 @@ export class ViewerServer { const newRole = merged.role as string | undefined; const newEnabled = Boolean(merged.enabled); + // Detect disabling sharing or switching away from hub mode + const wasHub = oldSharingEnabled && oldSharingRole === "hub"; + const isHub = newEnabled && newRole === "hub"; + if (wasHub && !isHub) { + await this.notifyHubShutdown(); + this.stopHubHeartbeat(); + this.log.info("Hub shutting down: notified connected clients"); + } + // Detect disabling sharing or switching away from client mode const wasClient = oldSharingEnabled && oldSharingRole === "client"; const isClient = newEnabled && newRole === "client"; @@ -2670,6 +2679,41 @@ export class ViewerServer { } } + private async notifyHubShutdown(): Promise { + try { + const sharing = this.ctx?.config.sharing; + if (!sharing || sharing.role !== "hub") return; + const hubPort = sharing.hub?.port ?? 18800; + const authPath = path.join(this.dataDir, "hub-auth.json"); + let adminToken: string | undefined; + try { + const authData = JSON.parse(fs.readFileSync(authPath, "utf8")); + adminToken = authData?.bootstrapAdminToken; + } catch { return; } + if (!adminToken) return; + + const users = this.store.listHubUsers("active"); + const { v4: uuidv4 } = require("uuid"); + for (const u of users) { + try { + this.store.insertHubNotification({ + id: uuidv4(), + userId: u.id, + type: "hub_shutdown", + resource: "hub", + title: "Hub is shutting down", + message: "The Hub server is shutting down. 
You may be disconnected.", + }); + } catch (e) { + this.log.warn(`Failed to insert shutdown notification for user ${u.id}: ${e}`); + } + } + this.log.info(`Hub shutdown: notified ${users.length} approved user(s)`); + } catch (e) { + this.log.warn(`notifyHubShutdown error: ${e}`); + } + } + private handleUpdateUsername(req: http.IncomingMessage, res: http.ServerResponse): void { this.readBody(req, async (body) => { if (!this.ctx) return this.jsonResponse(res, { error: "sharing_unavailable" }); From 93e6ac37b064d78c3dea1eb0e1fb5ed7ab780c28 Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Fri, 20 Mar 2026 03:13:53 +0800 Subject: [PATCH 58/85] fix(memos-local): shared memories disappearing from list & telemetry credentials externalization - Fix owner filter in serveMemories/serveStats: shared memories (owner='public') were excluded because session_key LIKE 'agent:main:%' never matched. Changed to simply include all public memories alongside agent-owned ones. - Add toast notifications for retry-connection in all states (loading/fail/error) so sidebar clicks also get user feedback. - Remove hardcoded ARMS telemetry credentials from source code. Credentials are now loaded at runtime from telemetry.credentials.json (generated by CI from GitHub Secrets) or MEMOS_ARMS_* env vars. If neither is available, telemetry is silently disabled. 
Made-with: Cursor --- .github/workflows/openclaw-plugin-publish.yml | 7 ++++ apps/memos-local-openclaw/.env.example | 7 ++++ apps/memos-local-openclaw/.gitignore | 3 ++ apps/memos-local-openclaw/package.json | 1 + .../generate-telemetry-credentials.cjs | 31 ++++++++++++++++ apps/memos-local-openclaw/src/telemetry.ts | 36 ++++++++++++++----- apps/memos-local-openclaw/src/viewer/html.ts | 3 ++ .../memos-local-openclaw/src/viewer/server.ts | 10 +++--- 8 files changed, 83 insertions(+), 15 deletions(-) create mode 100644 apps/memos-local-openclaw/scripts/generate-telemetry-credentials.cjs diff --git a/.github/workflows/openclaw-plugin-publish.yml b/.github/workflows/openclaw-plugin-publish.yml index 7c7ac971c..16820439d 100644 --- a/.github/workflows/openclaw-plugin-publish.yml +++ b/.github/workflows/openclaw-plugin-publish.yml @@ -87,6 +87,13 @@ jobs: - name: Install dependencies (skip native build) run: npm install --ignore-scripts + - name: Generate telemetry credentials + run: node scripts/generate-telemetry-credentials.cjs + env: + MEMOS_ARMS_ENDPOINT: ${{ secrets.MEMOS_ARMS_ENDPOINT }} + MEMOS_ARMS_PID: ${{ secrets.MEMOS_ARMS_PID }} + MEMOS_ARMS_ENV: ${{ secrets.MEMOS_ARMS_ENV }} + - name: Bump version run: npm version ${{ inputs.version }} --no-git-tag-version diff --git a/apps/memos-local-openclaw/.env.example b/apps/memos-local-openclaw/.env.example index 453efc02e..bfb409298 100644 --- a/apps/memos-local-openclaw/.env.example +++ b/apps/memos-local-openclaw/.env.example @@ -23,3 +23,10 @@ SUMMARIZER_TEMPERATURE=0 # No memory content, queries, or personal data is ever sent — only tool names, latencies, and version info. # Enabled by default. Set to false to opt-out. # TELEMETRY_ENABLED=false +# +# Telemetry backend credentials (for maintainers / CI only). +# End users do NOT need to set these — they are bundled into the npm package at publish time. +# If not set and telemetry.credentials.json is absent, telemetry is silently disabled. 
+# MEMOS_ARMS_ENDPOINT=https://your-arms-endpoint.log.aliyuncs.com/rum/web/v2?workspace=...&service_id=... +# MEMOS_ARMS_PID=your-arms-pid +# MEMOS_ARMS_ENV=prod diff --git a/apps/memos-local-openclaw/.gitignore b/apps/memos-local-openclaw/.gitignore index dfc119bee..2fe5cd4d5 100644 --- a/apps/memos-local-openclaw/.gitignore +++ b/apps/memos-local-openclaw/.gitignore @@ -25,6 +25,9 @@ ppt/ # Prebuilt native binaries (included in npm package via `files`, not in git) prebuilds/ +# Telemetry credentials (generated by CI, not committed to git) +telemetry.credentials.json + # Database files *.sqlite *.sqlite-journal diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 6ee155db3..ee42f2f1f 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -13,6 +13,7 @@ "prebuilds", "scripts/postinstall.cjs", "openclaw.plugin.json", + "telemetry.credentials.json", "README.md", ".env.example" ], diff --git a/apps/memos-local-openclaw/scripts/generate-telemetry-credentials.cjs b/apps/memos-local-openclaw/scripts/generate-telemetry-credentials.cjs new file mode 100644 index 000000000..8f39fd4ae --- /dev/null +++ b/apps/memos-local-openclaw/scripts/generate-telemetry-credentials.cjs @@ -0,0 +1,31 @@ +#!/usr/bin/env node +/** + * Generate telemetry.credentials.json from environment variables. + * + * Called by CI before `npm publish` so the npm package ships with + * working telemetry credentials while the git repo stays clean. 
+ * + * Required env vars: + * MEMOS_ARMS_ENDPOINT — full ARMS RUM endpoint URL + * MEMOS_ARMS_PID — ARMS application PID + * MEMOS_ARMS_ENV — environment tag (default: "prod") + */ + +const fs = require("fs"); +const path = require("path"); + +const endpoint = process.env.MEMOS_ARMS_ENDPOINT || ""; +const pid = process.env.MEMOS_ARMS_PID || ""; +const env = process.env.MEMOS_ARMS_ENV || "prod"; + +if (!endpoint) { + console.warn( + "[generate-telemetry-credentials] MEMOS_ARMS_ENDPOINT not set — " + + "skipping. Telemetry will be disabled in this build.", + ); + process.exit(0); +} + +const out = path.resolve(__dirname, "..", "telemetry.credentials.json"); +fs.writeFileSync(out, JSON.stringify({ endpoint, pid, env }, null, 2) + "\n", "utf-8"); +console.log("[generate-telemetry-credentials] wrote " + out); diff --git a/apps/memos-local-openclaw/src/telemetry.ts b/apps/memos-local-openclaw/src/telemetry.ts index 4bf999fc2..f688260d5 100644 --- a/apps/memos-local-openclaw/src/telemetry.ts +++ b/apps/memos-local-openclaw/src/telemetry.ts @@ -18,14 +18,27 @@ export interface TelemetryConfig { enabled?: boolean; } -const ARMS_ENDPOINT = - "https://proj-xtrace-e218d9316b328f196a3c640cc7ca84-cn-hangzhou.cn-hangzhou.log.aliyuncs.com" + - "/rum/web/v2" + - "?workspace=default-cms-1026429231103299-cn-hangzhou" + - "&service_id=a3u72ukxmr@066657d42a13a9a9f337f"; +function loadTelemetryCredentials(): { endpoint: string; pid: string; env: string } { + if (process.env.MEMOS_ARMS_ENDPOINT) { + return { + endpoint: process.env.MEMOS_ARMS_ENDPOINT, + pid: process.env.MEMOS_ARMS_PID ?? "", + env: process.env.MEMOS_ARMS_ENV ?? "prod", + }; + } + try { + const credPath = path.resolve(__dirname, "..", "telemetry.credentials.json"); + const raw = fs.readFileSync(credPath, "utf-8"); + const creds = JSON.parse(raw); + if (creds.endpoint) return { endpoint: creds.endpoint, pid: creds.pid ?? "", env: creds.env ?? 
"prod" }; + } catch {} + return { endpoint: "", pid: "", env: "prod" }; +} -const ARMS_PID = "a3u72ukxmr@066657d42a13a9a9f337f"; -const ARMS_ENV = "prod"; +const _creds = loadTelemetryCredentials(); +const ARMS_ENDPOINT = _creds.endpoint; +const ARMS_PID = _creds.pid; +const ARMS_ENV = _creds.env; const FLUSH_AT = 10; const FLUSH_INTERVAL_MS = 30_000; @@ -63,8 +76,13 @@ export class Telemetry { this.firstSeenDate = this.loadOrCreateFirstSeen(stateDir); this.sessionId = this.loadOrCreateSessionId(stateDir); - if (!this.enabled) { - this.log.debug("Telemetry disabled (opt-out via TELEMETRY_ENABLED=false)"); + if (!this.enabled || !ARMS_ENDPOINT) { + this.enabled = false; + this.log.debug( + !ARMS_ENDPOINT + ? "Telemetry disabled (no credentials configured)" + : "Telemetry disabled (opt-out)", + ); return; } diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index bffb93427..154c5f591 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -3902,6 +3902,7 @@ async function retryConnection(){ var result=document.getElementById('retryConnResult'); if(btn){btn.disabled=true;btn.textContent=t('sharing.retryConnection.loading');} if(result) result.innerHTML=''+t('sharing.retryConnection.loading')+''; + toast(t('sharing.retryConnection.loading'),'info'); try{ await loadSharingStatus(false); var d=sharingStatusCache; @@ -3909,9 +3910,11 @@ async function retryConnection(){ toast(t('sharing.retryConnection.success'),'success'); if(result) result.innerHTML='\\u2705 '+t('sharing.retryConnection.success')+''; }else{ + toast(t('sharing.retryConnection.fail'),'error'); if(result) result.innerHTML=''+t('sharing.retryConnection.fail')+''; } }catch(e){ + toast(t('sharing.retryConnection.fail'),'error'); if(result) result.innerHTML=''+t('sharing.retryConnection.fail')+''; } if(btn){btn.disabled=false;btn.textContent=t('sharing.retryConnection');} diff --git 
a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index c1717992c..bfb9a2ec0 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -451,9 +451,8 @@ export class ViewerServer { if (session) { conditions.push("session_key = ?"); params.push(session); } if (role) { conditions.push("role = ?"); params.push(role); } if (owner && owner.startsWith("agent:")) { - const agentPrefix = owner + ":"; - conditions.push("(owner = ? OR (owner = 'public' AND session_key LIKE ?))"); - params.push(owner, agentPrefix + "%"); + conditions.push("(owner = ? OR owner = 'public')"); + params.push(owner); } else if (owner) { conditions.push("owner = ?"); params.push(owner); } @@ -640,9 +639,8 @@ export class ViewerServer { let sessionQuery: string; let sessionParams: any[]; if (ownerFilter && ownerFilter.startsWith("agent:")) { - const agentPrefix = ownerFilter + ":"; - sessionQuery = "SELECT session_key, COUNT(*) as count, MIN(created_at) as earliest, MAX(created_at) as latest FROM chunks WHERE (owner = ? OR (owner = 'public' AND session_key LIKE ?)) GROUP BY session_key ORDER BY latest DESC"; - sessionParams = [ownerFilter, agentPrefix + "%"]; + sessionQuery = "SELECT session_key, COUNT(*) as count, MIN(created_at) as earliest, MAX(created_at) as latest FROM chunks WHERE (owner = ? OR owner = 'public') GROUP BY session_key ORDER BY latest DESC"; + sessionParams = [ownerFilter]; } else if (ownerFilter) { sessionQuery = "SELECT session_key, COUNT(*) as count, MIN(created_at) as earliest, MAX(created_at) as latest FROM chunks WHERE owner = ? 
GROUP BY session_key ORDER BY latest DESC"; sessionParams = [ownerFilter]; From 3e32f7f0562f8fd1c95f9763dc0563cafa5173bd Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Fri, 20 Mar 2026 03:32:17 +0800 Subject: [PATCH 59/85] fix(memos-local): prevent team sharing when hub disconnected and ensure atomicity MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Disable "team" scope option in the sharing modal when hub connection is not active, preventing users from triggering a doomed request. On the backend, reorder scope handlers (memory, task, skill) so the hub remote call executes before any local state mutation — if the hub request fails, local data stays unchanged, avoiding inconsistency. Made-with: Cursor --- apps/memos-local-openclaw/src/viewer/html.ts | 3 +- .../memos-local-openclaw/src/viewer/server.ts | 37 ++++++++++++------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index 154c5f591..6bd06b987 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -5019,6 +5019,7 @@ function openScopeSelectorModal(resourceType, resourceId, currentScope, onConfir var existing=document.getElementById('scopeSelectorOverlay'); if(existing) existing.remove(); var teamEnabled=sharingStatusCache&&sharingStatusCache.enabled; + var teamConnected=teamEnabled&&sharingStatusCache.connection&&sharingStatusCache.connection.connected; var overlay=document.createElement('div'); overlay.id='scopeSelectorOverlay'; overlay.style.cssText='position:fixed;top:0;left:0;width:100%;height:100%;background:rgba(0,0,0,0.5);backdrop-filter:blur(6px);z-index:10000;display:flex;align-items:center;justify-content:center;animation:fadeIn 0.12s ease'; @@ -5032,7 +5033,7 @@ function openScopeSelectorModal(resourceType, resourceId, currentScope, onConfir for(var i=0;i Date: Fri, 20 Mar 2026 03:48:21 +0800 
Subject: [PATCH 60/85] fix(memos-local): scan all agents in migrate/scan to match import count The migrate/scan endpoint only counted sessions under agents/main/, while the actual import traverses agents/*/sessions/ for all configured agents. This caused a mismatch between the previewed message count and the import progress total. Made-with: Cursor --- .../memos-local-openclaw/src/viewer/server.ts | 55 ++++++++++--------- 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index 957214597..cae0d6088 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -3203,7 +3203,7 @@ export class ViewerServer { try { const ocHome = this.getOpenClawHome(); const memoryDir = path.join(ocHome, "memory"); - const sessionsDir = path.join(ocHome, "agents", "main", "sessions"); + const agentsDir = path.join(ocHome, "agents"); const sqliteFiles: Array<{ file: string; chunks: number }> = []; if (fs.existsSync(memoryDir)) { @@ -3222,31 +3222,36 @@ export class ViewerServer { let sessionCount = 0; let messageCount = 0; - if (fs.existsSync(sessionsDir)) { - const jsonlFiles = fs.readdirSync(sessionsDir).filter(f => f.includes(".jsonl")); - sessionCount = jsonlFiles.length; - for (const f of jsonlFiles) { - try { - const content = fs.readFileSync(path.join(sessionsDir, f), "utf-8"); - const lines = content.split("\n").filter(l => l.trim()); - for (const line of lines) { - try { - const obj = JSON.parse(line); - if (obj.type === "message") { - const role = obj.message?.role ?? obj.role; - if (role === "user" || role === "assistant") { - const mc = obj.message?.content ?? 
obj.content; - let txt = ""; - if (typeof mc === "string") txt = mc; - else if (Array.isArray(mc)) txt = mc.filter((p: any) => p.type === "text" && p.text).map((p: any) => p.text).join("\n"); - else txt = JSON.stringify(mc); - if (role === "user") txt = stripInboundMetadata(txt); - if (txt && txt.length >= 10) messageCount++; + if (fs.existsSync(agentsDir)) { + for (const entry of fs.readdirSync(agentsDir, { withFileTypes: true })) { + if (!entry.isDirectory()) continue; + const sessDir = path.join(agentsDir, entry.name, "sessions"); + if (!fs.existsSync(sessDir)) continue; + const jsonlFiles = fs.readdirSync(sessDir).filter(f => f.includes(".jsonl")); + sessionCount += jsonlFiles.length; + for (const f of jsonlFiles) { + try { + const content = fs.readFileSync(path.join(sessDir, f), "utf-8"); + const lines = content.split("\n").filter(l => l.trim()); + for (const line of lines) { + try { + const obj = JSON.parse(line); + if (obj.type === "message") { + const role = obj.message?.role ?? obj.role; + if (role === "user" || role === "assistant") { + const mc = obj.message?.content ?? obj.content; + let txt = ""; + if (typeof mc === "string") txt = mc; + else if (Array.isArray(mc)) txt = mc.filter((p: any) => p.type === "text" && p.text).map((p: any) => p.text).join("\n"); + else txt = JSON.stringify(mc); + if (role === "user") txt = stripInboundMetadata(txt); + if (txt && txt.length >= 10) messageCount++; + } } - } - } catch { /* skip bad lines */ } - } - } catch { /* skip unreadable */ } + } catch { /* skip bad lines */ } + } + } catch { /* skip unreadable */ } + } } } From 2d3c61f8a076a9a5c885d0f9989438cc3f47c340 Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Fri, 20 Mar 2026 04:05:23 +0800 Subject: [PATCH 61/85] feat(memos-local): replace "Days" stat card with "Agents" count Show the number of distinct agents instead of time span in the overview stat cards. The data is already available from the owners array returned by the stats API. 
Made-with: Cursor --- apps/memos-local-openclaw/src/viewer/html.ts | 22 ++++++-------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index 6bd06b987..4cea6a607 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -1219,7 +1219,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
Memories
-
Sessions
-
Embeddings
-
-
Days
+
-
Agents
'; + }else if(conn.removed){ + if(user.username) sh+=''+t('sharing.user')+''+esc(user.username)+''; + sh+='
'+t('sharing.removed.hint')+'
'+ + '
'+ + ''+t('sharing.retryJoin.hint')+'
'; }else if(conn.connected&&user.username){ sh+=''+t('sharing.user')+''+ ''+ @@ -3940,7 +3973,7 @@ async function updateHubUsername(){ if(!input) return; var newName=input.value.trim(); if(!newName||newName.length<2||newName.length>32){ - toast(t('sharing.username.invalid'),'error'); + alertModal(t('sharing.username.invalid')); return; } try{ @@ -3951,17 +3984,17 @@ async function updateHubUsername(){ }); var d=await r.json(); if(d.error==='username_taken'){ - toast(t('sharing.username.taken'),'error'); + alertModal(t('sharing.username.taken'),{danger:true}); return; } if(d.error){ - toast(d.error,'error'); + alertModal(d.error,{danger:true}); return; } toast(t('sharing.username.updated'),'success'); loadSharingStatus(false); }catch(e){ - toast(t('sharing.username.error'),'error'); + alertModal(t('sharing.username.error'),{danger:true}); } } @@ -4148,7 +4181,7 @@ async function loadAdminData(){ var _newMemories=Array.isArray(memoriesR.memories)?memoriesR.memories:[]; var pending=isAdmin?(Array.isArray(pendingR.users)?pendingR.users:[]):[]; var _fp=_newUsers.length+':'+_newTasks.length+':'+_newSkills.length+':'+_newMemories.length+':'+pending.length - +':'+_newUsers.map(function(u){return u.id+'|'+(u.isOnline?1:0)+'|'+(u.role||'')}).join(',') + +':'+_newUsers.map(function(u){return u.id+'|'+(u.isOnline?1:0)+'|'+(u.role||'')+'|'+(u.username||'')+'|'+(u.memoryCount||0)+'|'+(u.taskCount||0)+'|'+(u.skillCount||0)}).join(',') +':'+_newMemories.map(function(m){return m.id}).join(',') +':'+_newTasks.map(function(t){return t.id+'|'+(t.status||'')}).join(',') +':'+_newSkills.map(function(s){return s.id+'|'+(s.status||'')}).join(',') @@ -4278,12 +4311,13 @@ function renderAdminUserCard(u,adminCount){ }else{ actions+=''+t('admin.lastAdminHint')+''; } - var ownerBadge=u.isOwner?' Owner':''; + var badgesHtml='
'+statusLabel+ + ''+esc(u.role||'member').toUpperCase()+''+ + (u.isOwner?'OWNER':'')+ + '
'; - return '
'+ - '
'+titleDisplay+editRow+'
'+statusLabel+ - '
'+ - '
'+esc(u.role||'member')+''+ownerBadge+'
'+ + return '
'+titleDisplay+editRow+'
'+ + badgesHtml+'
'+ contribHtml+infoHtml+ (actions?'
'+actions+'
':'')+ '
'; @@ -4386,13 +4420,24 @@ async function adminSaveEditName(userId){ var inputEl=document.getElementById('au_input_'+userId); if(!inputEl) return; var newName=inputEl.value.trim(); - if(!newName||newName.length<2||newName.length>32){toast(t('toast.invalidUsername'),'warn');return;} + if(!newName||newName.length<2||newName.length>32){ + alertModal(t('toast.invalidUsername'),{title:t('admin.editName')}); + return; + } inputEl.disabled=true; try{ var r=await fetch('/api/sharing/rename-user',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,username:newName})}); var d=await r.json(); - if(d.ok){toast(t('toast.usernameChanged'),'success');loadAdminData();}else{inputEl.disabled=false;toast(d.error||t('toast.renameFail'),'error');} - }catch(e){inputEl.disabled=false;toast(t('toast.renameFail')+': '+e.message,'error');} + if(d.ok){toast(t('toast.usernameChanged'),'success');adminCancelEditName(userId);loadAdminData();} + else{ + inputEl.disabled=false; + if(d.error==='username_taken'){ + alertModal(t('sharing.username.taken'),{title:t('admin.editName'),danger:true}); + }else{ + alertModal(d.error||t('toast.renameFail'),{title:t('admin.editName'),danger:true}); + } + } + }catch(e){inputEl.disabled=false;alertModal(t('toast.renameFail'),{title:t('admin.editName'),danger:true});} } async function adminRemoveUser(userId,username){ @@ -4470,7 +4515,7 @@ function renderAdminTasks(tasks){ '
'+esc(tk.title||tk.id)+'
'+ '
'+ '
'+ - '\u{1F464} '+esc(tk.ownerName||tk.sourceUserId||'unknown')+''+ + '\u{1F464} '+fmtOwner(tk)+''+ (tk.status?''+esc(tk.status)+'':'')+ (tk.chunkCount!=null?'\u{1F4DD} '+tk.chunkCount+' '+t('admin.chunks')+'':'')+ '
'+ @@ -4532,7 +4577,7 @@ function renderAdminSkills(skills){ '
'+esc(s.name||s.id)+'
'+ '
'+ '
'+ - '\u{1F464} '+esc(s.ownerName||s.sourceUserId||'unknown')+''+ + '\u{1F464} '+fmtOwner(s)+''+ (s.status?''+esc(s.status)+'':'')+ (s.version!=null?'v'+s.version+'':'')+ (qs!=null?'\u2605 '+Number(qs).toFixed(1)+'':'')+ @@ -4597,7 +4642,7 @@ function renderAdminMemories(memories){ '
'+esc(m.summary||m.content?.slice(0,80)||m.id)+'
'+ '
'+ '
'+ - '\u{1F464} '+esc(m.ownerName||m.sourceUserId||'unknown')+''+ + '\u{1F464} '+fmtOwner(m)+''+ (m.role?''+esc(m.role)+'':'')+ (m.kind?''+esc(m.kind)+'':'')+ '
'+ @@ -4641,7 +4686,7 @@ function toggleAdminMemoryCard(cardId,idx){ (m.kind?''+t('admin.kind')+esc(m.kind)+'':'')+ (m.role?''+t('admin.role')+esc(m.role)+'':'')+ (m.visibility?''+t('admin.visibility')+esc(m.visibility)+'':'')+ - ''+t('admin.owner')+esc(m.ownerName||m.sourceUserId||'unknown')+''+ + ''+t('admin.owner')+fmtOwner(m)+''+ (m.groupName?''+t('admin.group')+esc(m.groupName)+'':'')+ ''+new Date(m.updatedAt||m.createdAt||0).toLocaleString(dateLoc())+''+ '
'; @@ -4697,7 +4742,7 @@ async function toggleAdminTaskCard(cardId,idx){ var metaHtml='
'+ (tk.status?''+esc(tk.status)+'':'')+ (tk.visibility?''+t('admin.visibility')+esc(tk.visibility)+'':'')+ - ''+t('admin.owner')+esc(tk.ownerName||'unknown')+''+ + ''+t('admin.owner')+fmtOwner(tk)+''+ (tk.groupName?''+t('admin.group')+esc(tk.groupName)+'':'')+ (task.chunks&&task.chunks.length?'\u{1F4AC} '+task.chunks.length+' '+t('tasks.chunks.label')+'':'')+ (task.startedAt?'\u{1F4C5} '+formatDateTimeSeconds(task.startedAt)+'':'')+ @@ -4784,7 +4829,7 @@ async function toggleAdminSkillCard(cardId,idx){ (localSkill.status?''+esc(localSkill.status)+'':'')+ (sk.visibility?''+t('admin.visibility')+esc(sk.visibility||'hub')+'':'')+ (qs!=null?'\u2605 '+Number(qs).toFixed(1)+'/10':'')+ - ''+t('admin.owner')+esc(sk.ownerName||'unknown')+''+ + ''+t('admin.owner')+fmtOwner(sk)+''+ (sk.groupName?''+t('admin.group')+esc(sk.groupName)+'':'')+ ''+t('admin.updated')+new Date(sk.updatedAt||sk.createdAt||0).toLocaleString(dateLoc())+''+ '
'; @@ -4853,7 +4898,7 @@ function renderSharingMemorySearchResults(data,query){ '
'+(idx+1)+'. '+esc(hit.summary||'(no summary)')+'
'+ '
'+esc(hit.excerpt||'')+'
'+ '
'+ - 'owner: '+esc(hit.ownerName||'unknown')+''+ + 'owner: '+fmtOwner(hit)+''+ (hit.groupName?'group: '+esc(hit.groupName)+'':'')+ 'visibility: '+esc(hit.visibility||'hub')+''+ '
'+ @@ -4938,7 +4983,7 @@ function openHubTaskDetailFromCache(cacheKey,idx){ var meta=[ '\\u{1F310} '+t('scope.hub')+'', task.status?''+esc(task.status)+'':'', - ''+t('admin.owner')+esc(task.ownerName||'unknown')+'', + ''+t('admin.owner')+fmtOwner(task)+'', task.groupName?''+t('admin.group')+esc(task.groupName)+'':'', task.visibility?''+t('admin.visibility')+esc(task.visibility)+'':'', task.chunkCount!=null?'\\u{1F4DD} '+esc(String(task.chunkCount))+' '+t('tasks.chunks.label')+'':'', @@ -4969,14 +5014,14 @@ function openHubSkillDetailFromCache(cacheKey,idx){ skill.status?''+esc(skill.status)+'':'', 'visibility: '+esc(skill.visibility||'hub')+'', qsBadge, - ''+t('admin.owner')+esc(skill.ownerName||'unknown')+'', + ''+t('admin.owner')+fmtOwner(skill)+'', skill.groupName?''+t('admin.group')+esc(skill.groupName)+'':'', (skill.updatedAt||skill.createdAt)?''+t('admin.updated')+new Date(skill.updatedAt||skill.createdAt).toLocaleString(dateLoc())+'':'', ].filter(Boolean); document.getElementById('skillDetailMeta').innerHTML=meta.join(''); document.getElementById('skillDetailDesc').textContent=skill.description||''; document.getElementById('skillFilesList').innerHTML=''; - document.getElementById('skillDetailContent').innerHTML=skill.content?'
'+esc(skill.content)+'
':''; + document.getElementById('skillDetailContent').innerHTML=skill.content?renderSkillMarkdown(skill.content):''; document.getElementById('skillVersionsList').innerHTML=''; document.getElementById('skillRelatedTasks').innerHTML=''; var visBtn=document.getElementById('skillVisibilityBtn'); @@ -5102,7 +5147,8 @@ async function confirmScopeSelection(){ if(st.onConfirm) st.onConfirm(newScope); else loadAll(); }else{ - toast(d.error||t('share.scope.changeFail'),'error'); + var errMsg=d.error==='inactive_memory'?t('share.scope.inactiveDisabled'):(d.message||d.error||t('share.scope.changeFail')); + toast(errMsg,'error'); } }catch(e){toast(t('share.scope.changeFail')+': '+e.message,'error');} } @@ -5353,6 +5399,13 @@ function parseMemoryAddEntries(out){ return results; } +function recallOriginBadge(origin){ + if(origin==='local-shared') return ''+t('recall.origin.localShared')+''; + if(origin==='hub-memory') return ''+t('recall.origin.hubMemory')+''; + if(origin==='hub-remote') return ''+t('recall.origin.hubRemote')+''; + return ''; +} + function buildLogSummary(lg){ let inputObj=null; try{inputObj=JSON.parse(lg.input);}catch(_){} @@ -5377,8 +5430,9 @@ function buildLogSummary(lg){ var scoreClass=c.score>=0.7?'high':c.score>=0.5?'mid':'low'; var shortText=escapeHtml(c.summary||c.content||c.original_excerpt||''); var fullText=escapeHtml(c.content||c.original_excerpt||c.summary||''); + var oBadge=recallOriginBadge(c.origin); html+='
'; - html+='
'+c.score.toFixed(2)+''+(c.role||'user')+''+shortText+'\u25B6
'; + html+='
'+c.score.toFixed(2)+''+(c.role||'user')+''+oBadge+''+shortText+'\u25B6
'; html+='
'+fullText+'
'; html+='
'; }); @@ -5391,8 +5445,9 @@ function buildLogSummary(lg){ var scoreClass=f.score>=0.7?'high':f.score>=0.5?'mid':'low'; var shortText=escapeHtml(f.summary||f.content||f.original_excerpt||''); var fullText=escapeHtml(f.content||f.original_excerpt||f.summary||''); + var oBadge=recallOriginBadge(f.origin); html+='
'; - html+='
'+f.score.toFixed(2)+''+(f.role||'user')+''+shortText+'\u25B6
'; + html+='
'+f.score.toFixed(2)+''+(f.role||'user')+''+oBadge+''+shortText+'\u25B6
'; html+='
'+fullText+'
'; html+='
'; }); @@ -5456,8 +5511,9 @@ function buildRecallDetailHtml(rd){ var scoreClass=c.score>=0.7?'high':c.score>=0.5?'mid':'low'; var shortText=escapeHtml(c.summary||c.content||c.original_excerpt||''); var fullText=escapeHtml(c.content||c.original_excerpt||c.summary||''); + var oBadge=recallOriginBadge(c.origin); html+='
'; - html+='
'+(i+1)+''+c.score.toFixed(3)+''+(c.role||'user')+''+shortText+'\u25B6
'; + html+='
'+(i+1)+''+c.score.toFixed(3)+''+(c.role||'user')+''+oBadge+''+shortText+'\u25B6
'; html+='
'+fullText+'
'; html+='
'; }); @@ -5471,8 +5527,9 @@ function buildRecallDetailHtml(rd){ var scoreClass=f.score>=0.7?'high':f.score>=0.5?'mid':'low'; var shortText=escapeHtml(f.summary||f.content||f.original_excerpt||''); var fullText=escapeHtml(f.content||f.original_excerpt||f.summary||''); + var oBadge=recallOriginBadge(f.origin); html+='
'; - html+='
'+(i+1)+''+f.score.toFixed(3)+''+(f.role||'user')+''+shortText+'\u25B6
'; + html+='
'+(i+1)+''+f.score.toFixed(3)+''+(f.role||'user')+''+oBadge+''+shortText+'\u25B6
'; html+='
'+fullText+'
'; html+='
'; }); @@ -6048,7 +6105,7 @@ async function loadSkills(silent){ '
'+esc(skill.name)+'
'+ '
'+esc(skill.description||'')+'
'+ '
'+ - 'owner: '+esc(skill.ownerName||'unknown')+''+ + 'owner: '+fmtOwner(skill)+''+ (skill.groupName?'group: '+esc(skill.groupName)+'':'')+ 'visibility: '+esc(skill.visibility||'hub')+''+ (skill.version!=null?'v'+skill.version+'':'')+ @@ -6095,12 +6152,12 @@ async function loadHubTasks(){ return '
'+ '
'+ '
'+esc(task.title||'(no title)')+'
'+ - '
\\u{1F310} '+t('scope.hub')+'
'+ + '
'+renderScopeBadge('team')+'
'+ '
'+ (task.summary?'
'+esc(task.summary)+'
':'')+ '
'+ (timeStr?'\\u{1F4C5} '+timeStr+'':'')+ - '\\u{1F464} '+esc(task.ownerName||'unknown')+''+ + '\\u{1F464} '+fmtOwner(task)+''+ (task.chunkCount!=null?'\\u{1F4DD} '+task.chunkCount+' '+t('tasks.chunks.label')+'':'')+ '
'+ '
'; @@ -6133,12 +6190,12 @@ async function loadHubSkills(hubList, localIds){ '
'+esc(skill.name)+'
'+ '
'+esc(skill.description||'')+'
'+ '
'+ - 'owner: '+esc(skill.ownerName||'unknown')+''+ + 'owner: '+fmtOwner(skill)+''+ (skill.groupName?'group: '+esc(skill.groupName)+'':'')+ 'visibility: '+esc(skill.visibility||'hub')+''+ (skill.version!=null?'v'+skill.version+'':'')+ '
'+ - '
'+ + '
'+ '
'; }).join(''); }catch(e){ @@ -6801,6 +6858,8 @@ function renderSkillMarkdown(md){ function closeSkillDetail(event){ if(event && event.target!==document.getElementById('skillDetailOverlay')) return; document.getElementById('skillDetailOverlay').classList.remove('show'); + currentSkillId=''; + currentSkillDetail=null; } async function deleteSkill(skillId){ @@ -7140,20 +7199,20 @@ async function _livePollTick(){ var _savedScrollMap={}; _scrollTargets.forEach(function(id){var el=document.getElementById(id);if(el&&el.scrollTop)_savedScrollMap[id]=el.scrollTop;}); try{ - if(sharingStatusCache&&sharingStatusCache.enabled&&_lastSharingConnStatus!=='rejected') loadSharingStatus(false); - if(!_notifSSEConnected) pollNotifCount(); - pollAdminPending(); - if(_activeView==='admin') loadAdminData(); + if(sharingStatusCache&&sharingStatusCache.enabled&&_lastSharingConnStatus!=='rejected') await loadSharingStatus(false); + if(!_notifSSEConnected) await pollNotifCount(); + await pollAdminPending(); + if(_activeView==='admin') await loadAdminData(); else if(_activeView==='memories'){ var _searchVal=(document.getElementById('searchInput')||{}).value||''; if(!_searchVal.trim()){ - if(memorySearchScope==='hub') loadHubMemories(true); - else{loadStats();loadMemories(null,true);} + if(memorySearchScope==='hub') await loadHubMemories(true); + else{var _pollOwner=memorySearchScope==='local'?_currentAgentOwner:undefined;await loadStats(_pollOwner);await loadMemories(null,true);} } } - else if(_activeView==='tasks') loadTasks(true); - else if(_activeView==='skills') loadSkills(true); - else if(_activeView==='analytics') loadMetrics(); + else if(_activeView==='tasks') await loadTasks(true); + else if(_activeView==='skills') await loadSkills(true); + else if(_activeView==='analytics') await loadMetrics(); }catch(e){} await new Promise(function(r){requestAnimationFrame(r);}); window.scrollTo(0,_savedScrollY); @@ -7497,7 +7556,7 @@ function getFilterParams(){ const 
scope=memorySearchScope||'local'; if(scope==='local'){ p.set('owner',_currentAgentOwner); - }else{ + }else if(scope==='allLocal'){ const owner=document.getElementById('filterOwner').value; if(owner) p.set('owner',owner); } @@ -7683,7 +7742,7 @@ function renderMemories(items){ const mergeBadge=mc>0?'\\u{1F504} '+t('card.evolved')+' '+mc+t('card.times')+'':''; const updatedAt=(m.updated_at&&m.updated_at>m.created_at)?''+t('card.updated')+' '+new Date(m.updated_at).toLocaleString(dateLoc())+'':''; const ds=m.dedup_status||'active'; - const isInactive=ds==='merged'; + const isInactive=ds==='merged'||ds==='duplicate'; const dedupBadge=ds==='duplicate'?''+t('card.dedupDuplicate')+'':ds==='merged'?''+t('card.dedupMerged')+'':''; const isImported=sid.startsWith('openclaw-import-')||sid.startsWith('openclaw-session-'); const importBadge=isImported?'\u{1F990} '+t('card.imported')+'':''; @@ -7692,8 +7751,8 @@ function renderMemories(items){ const localManaged=!!m.localSharingManaged; const memShared=m.sharingVisibility||null; const isHubScope=memorySearchScope==='hub'; - const memScope=isHubScope?'team':memShared?'team':isPublicMem?'local':'private'; - const memScopeBadge=renderScopeBadge(memScope); + const memScope=memShared?'team':isPublicMem?'local':'private'; + const memScopeBadge=isHubScope?renderScopeBadge('team'):renderScopeBadge(memScope); let dedupInfo=''; if(ds==='duplicate'||ds==='merged'){ const reason=m.dedup_reason?''+t('card.dedupReason')+esc(m.dedup_reason)+'':''; @@ -7878,6 +7937,11 @@ function esc(s){ if(!s)return''; return s.replace(/&/g,'&').replace(//g,'>').replace(/"/g,'"'); } +function fmtOwner(item){ + var name=item.ownerName||item.sourceUserId||'unknown'; + if(item.ownerStatus==='removed') return esc(name)+' '+t('sharing.ownerRemoved')+''; + return esc(name); +} function renderSummaryHtml(raw){ if(!raw)return''; @@ -8608,14 +8672,21 @@ function confirmModal(message,opts){ var okBtn=document.getElementById('confirmOkBtn'); 
okBtn.textContent=opts.okText||t('confirm.ok')||'\u786E\u5B9A'; okBtn.className='btn-confirm-ok'+(opts.danger?' danger':''); - document.getElementById('confirmCancelBtn').textContent=opts.cancelText||t('confirm.cancel')||'\u53D6\u6D88'; + var cancelBtn=document.getElementById('confirmCancelBtn'); + cancelBtn.textContent=opts.cancelText||t('confirm.cancel')||'\u53D6\u6D88'; + cancelBtn.style.display=opts.hideCancel?'none':''; overlay.classList.add('show'); }); } function confirmModalClose(result){ document.getElementById('confirmOverlay').classList.remove('show'); + document.getElementById('confirmCancelBtn').style.display=''; if(_confirmResolve){var r=_confirmResolve;_confirmResolve=null;r(result);} } +function alertModal(message,opts){ + opts=opts||{}; + return confirmModal(message,Object.assign({},opts,{hideCancel:true,okText:opts.okText||t('confirm.ok')||'\u77E5\u9053\u4E86'})); +} /* ─── Theme ─── */ const VIEWER_THEME_KEY='memos-viewer-theme'; @@ -8681,10 +8752,10 @@ async function checkForUpdate(){ const pkgSpec=d.installCommand?d.installCommand.replace(/^(?:npx\s+)?openclaw\s+plugins\s+install\s+/,''):(d.packageName+'@'+d.latest); var bannerWrap=document.createElement('div'); bannerWrap.id='updateBannerWrap'; - bannerWrap.style.cssText='width:100%;background:linear-gradient(135deg,rgba(99,102,241,.08),rgba(139,92,246,.06));border-bottom:1px solid rgba(99,102,241,.18);backdrop-filter:blur(8px);animation:slideIn .3s ease'; + bannerWrap.style.cssText='background:linear-gradient(135deg,rgba(99,102,241,.08),rgba(139,92,246,.06));border-bottom:1px solid rgba(99,102,241,.18);backdrop-filter:blur(8px);animation:slideIn .3s ease'; var banner=document.createElement('div'); banner.id='updateBanner'; - banner.style.cssText='display:flex;align-items:center;gap:12px;padding:10px 32px;max-width:1400px;margin:0 auto;width:100%;font-size:13px;font-weight:500;box-sizing:border-box;color:var(--pri)'; + banner.style.cssText='display:flex;align-items:center;gap:12px;padding:10px 
32px;width:100%;max-width:1400px;margin:0 auto;font-size:13px;font-weight:500;box-sizing:border-box;color:var(--pri)'; var textNode=document.createElement('div'); textNode.style.cssText='display:flex;align-items:center;gap:8px;flex-shrink:0;font-size:13px'; textNode.innerHTML='\u2728 '+t('update.available')+' v'+esc(d.current)+' \u2192 v'+esc(d.latest)+''; @@ -8705,6 +8776,9 @@ async function checkForUpdate(){ btnClose.onmouseenter=function(){this.style.opacity='1'}; btnClose.onmouseleave=function(){this.style.opacity='.5'}; btnClose.onclick=function(){bannerWrap.remove()}; + var spacerL=document.createElement('div'); + spacerL.style.cssText='flex:1'; + banner.appendChild(spacerL); banner.appendChild(textNode); banner.appendChild(statusDiv); banner.appendChild(spacer); diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index cae0d6088..e9b4eef84 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -461,7 +461,7 @@ export class ViewerServer { const where = conditions.length > 0 ? " WHERE " + conditions.join(" AND ") : ""; const totalRow = db.prepare("SELECT COUNT(*) as count FROM chunks" + where).get(...params) as any; - const rawMemories = db.prepare("SELECT * FROM chunks" + where + ` ORDER BY created_at ${sortBy} LIMIT ? OFFSET ?`).all(...params, limit, offset); + const rawMemories = db.prepare("SELECT * FROM chunks" + where + ` ORDER BY CASE WHEN dedup_status IN ('duplicate','merged') THEN 1 ELSE 0 END ASC, created_at ${sortBy} LIMIT ? OFFSET ?`).all(...params, limit, offset); const findMergeSources = db.prepare("SELECT id, summary, role FROM chunks WHERE dedup_target = ? 
AND (dedup_status = 'merged' OR dedup_status = 'duplicate')"); const chunkIds = rawMemories.map((m: any) => m.id); @@ -1047,11 +1047,21 @@ export class ViewerServer { }); } - private handleSkillDelete(res: http.ServerResponse, urlPath: string): void { + private async handleSkillDelete(res: http.ServerResponse, urlPath: string): Promise { const skillId = urlPath.replace("/api/skill/", ""); const skill = this.store.getSkill(skillId); if (!skill) { res.writeHead(404, { "Content-Type": "application/json" }); res.end(JSON.stringify({ error: "Skill not found" })); return; } - // Remove skill directory from disk + try { + const hub = this.resolveHubConnection(); + if (hub) { + await hubRequestJson(hub.hubUrl, hub.userToken, "/api/v1/hub/skills/unpublish", { + method: "POST", + body: JSON.stringify({ sourceSkillId: skillId }), + }).catch(() => {}); + } + const db = (this.store as any).db; + db.prepare("DELETE FROM hub_skills WHERE source_skill_id = ?").run(skillId); + } catch (_) {} try { if (skill.dirPath && fs.existsSync(skill.dirPath)) { fs.rmSync(skill.dirPath, { recursive: true, force: true }); @@ -1620,6 +1630,9 @@ export class ViewerServer { if (status.user?.status === "rejected") { output.connection.rejected = true; } + if (status.user?.status === "removed") { + output.connection.removed = true; + } if (status.connected && status.hubUrl) { try { const info = await fetch(`${status.hubUrl}/api/v1/hub/info`).then((r) => (r.ok ? 
r.json() : null)).catch(() => null) as any; @@ -1737,7 +1750,14 @@ export class ViewerServer { }); this.jsonResponse(res, { ok: true, result }); } catch (err) { - this.jsonResponse(res, { ok: false, error: String(err) }); + const errStr = String(err); + if (errStr.includes("username_taken")) { + this.jsonResponse(res, { ok: false, error: "username_taken" }); + } else if (errStr.includes("invalid_params")) { + this.jsonResponse(res, { ok: false, error: "invalid_params" }); + } else { + this.jsonResponse(res, { ok: false, error: errStr }); + } } }); } From 5f4ae71218543fc6ec124e57ce091b9b045d9ae6 Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Fri, 20 Mar 2026 14:53:20 +0800 Subject: [PATCH 63/85] fix(memos-local): improve self-join detection with port check and preserve client hub connection on disable - Check both hostname and port when detecting self-join to avoid blocking different-port Hub on same machine - Preserve client hub connection when sharing is disabled (instead of clearing it) for seamless re-enable - Trigger auto-join when re-enabling sharing as client, not only on initial role switch Made-with: Cursor --- .../memos-local-openclaw/src/viewer/server.ts | 23 +++++++++++++------ 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index e9b4eef84..e98563d51 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -1780,7 +1780,8 @@ export class ViewerServer { localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); try { const u = new URL(hubUrl); - if (localIPs.includes(u.hostname)) { + const targetPort = u.port || (u.protocol === "https:" ? 
"443" : "80"); + if (localIPs.includes(u.hostname) && targetPort === String(this.port)) { return this.jsonResponse(res, { ok: false, error: "cannot_join_self" }); } } catch {} @@ -2592,7 +2593,8 @@ export class ViewerServer { localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); try { const u = new URL(addr.startsWith("http") ? addr : `http://${addr}`); - if (localIPs.includes(u.hostname)) { + const targetPort = u.port || (u.protocol === "https:" ? "443" : "80"); + if (localIPs.includes(u.hostname) && targetPort === String(this.port)) { res.writeHead(400, { "Content-Type": "application/json" }); res.end(JSON.stringify({ error: "cannot_join_self" })); return; @@ -2618,8 +2620,12 @@ export class ViewerServer { const isClient = newEnabled && newRole === "client"; if (wasClient && !isClient) { this.notifyHubLeave(); - this.store.clearClientHubConnection(); - this.log.info("Cleared client hub connection (sharing disabled or role changed)"); + if (newRole !== "client") { + this.store.clearClientHubConnection(); + this.log.info("Cleared client hub connection (role changed away from client)"); + } else { + this.log.info("Sharing disabled but preserving client hub connection for re-enable"); + } } // Detect switching to a different Hub while still in client mode @@ -2645,9 +2651,11 @@ export class ViewerServer { this.log.info("Plugin config updated via Viewer"); this.stopHubHeartbeat(); - // When switching to client mode, immediately send join request + // When switching to client mode or re-enabling sharing as client, send join request const finalSharing = config.sharing as Record | undefined; - if (finalSharing?.role === "client" && oldSharingRole !== "client") { + const nowClient = Boolean(finalSharing?.enabled) && finalSharing?.role === "client"; + const previouslyClient = oldSharingEnabled && oldSharingRole === "client"; + if (nowClient && !previouslyClient) { this.autoJoinOnSave(finalSharing).catch(e => this.log.warn(`Auto-join on save failed: ${e}`)); } @@ -2798,7 
+2806,8 @@ export class ViewerServer { const localIPs = this.getLocalIPs(); localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); const parsed = new URL(hubUrl.startsWith("http") ? hubUrl : `http://${hubUrl}`); - if (localIPs.includes(parsed.hostname)) { + const targetPort = parsed.port || (parsed.protocol === "https:" ? "443" : "80"); + if (localIPs.includes(parsed.hostname) && targetPort === String(this.port)) { this.jsonResponse(res, { ok: false, error: "cannot_join_self" }); return; } From 0c1d08ce1d59b8c80e44e0b676974361eb172b85 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Fri, 20 Mar 2026 16:57:46 +0800 Subject: [PATCH 64/85] =?UTF-8?q?feat(memos-local):=20v1.0.4-beta.12=20?= =?UTF-8?q?=E2=80=94=20hub=20data=20isolation,=20notification,=20sharing?= =?UTF-8?q?=20UX?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix embedding model consistency: Client mode no longer stores hub_memories locally or searches them with local embedder. Remote data stays remote, searched via Hub API only. Hub mode retains local hub_memories search since embeddings are generated by the same model. - Add user notifications for approve/reject: Hub server now creates notifications for users when their membership is approved or rejected. - Fix sharing UI onboarding: Settings page auto-switches to Hub tab when sharing is unconfigured, showing the setup guide card. - Fix scope selector visibility: Memory/task/skill scope selectors now appear when hub is connected, auto-refresh data on first connection. - Fix hub group management: Add hub_groups/hub_group_members tables and CRUD methods, enforce group-based search permissions. - Fix chunker type system: Support code_block, error_stack, list, command chunk kinds beyond paragraph. - Fix test stability: Mock LLM in task-processor test, adapt viewer-sharing tests to pending-approval flow, fix viewer-ui function call. 
Made-with: Cursor --- apps/memos-local-openclaw/index.ts | 293 ++++++++++++++++-- apps/memos-local-openclaw/package.json | 2 +- .../src/client/connector.ts | 50 ++- apps/memos-local-openclaw/src/hub/server.ts | 82 +++-- .../src/hub/user-manager.ts | 48 ++- .../src/ingest/chunker.ts | 32 +- .../memos-local-openclaw/src/recall/engine.ts | 7 +- .../memos-local-openclaw/src/sharing/types.ts | 2 +- .../memos-local-openclaw/src/skill/evolver.ts | 64 +++- .../src/skill/generator.ts | 49 ++- .../src/skill/installer.ts | 111 ++++++- .../src/skill/upgrader.ts | 140 ++++++++- .../src/skill/validator.ts | 79 +++++ .../src/storage/sqlite.ts | 178 ++++++++++- apps/memos-local-openclaw/src/types.ts | 8 + apps/memos-local-openclaw/src/viewer/html.ts | 59 ++-- .../memos-local-openclaw/src/viewer/server.ts | 46 +-- .../tests/skill-runtime-flow.test.ts | 293 ++++++++++++++++++ .../tests/skill-v1-enhancements.test.ts | 270 ++++++++++++++++ .../tests/task-processor.test.ts | 2 + .../tests/viewer-sharing.test.ts | 16 +- .../tests/viewer-ui.test.ts | 21 +- .../skills/memos-memory-guide/SKILL.md | 203 +++++++++--- 23 files changed, 1861 insertions(+), 194 deletions(-) create mode 100644 apps/memos-local-openclaw/tests/skill-runtime-flow.test.ts create mode 100644 apps/memos-local-openclaw/tests/skill-v1-enhancements.test.ts diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 747ac0305..6d6f133d8 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -319,6 +319,21 @@ const memosLocalPlugin = { candidates: det.candidates, filtered: det.hits ?? det.filtered ?? [], }); + } else if (det && det.local && det.hub) { + const localHits = det.local?.hits ?? []; + const hubHits = (det.hub?.hits ?? []).map((h: any) => ({ + score: h.score ?? 0, + role: h.source?.role ?? h.role ?? "assistant", + summary: h.summary ?? "", + original_excerpt: h.excerpt ?? h.summary ?? "", + origin: "hub-remote", + ownerName: h.ownerName ?? 
"", + groupName: h.groupName ?? "", + })); + outputText = JSON.stringify({ + candidates: [...localHits, ...hubHits], + filtered: [...localHits, ...hubHits], + }); } else { outputText = result?.content?.[0]?.text ?? JSON.stringify(result ?? ""); } @@ -371,27 +386,29 @@ const memosLocalPlugin = { }), }) as { memoryId?: string; visibility?: "public" | "group" }; - const now = Date.now(); - const existing = store.getHubMemoryBySource(hubClient.userId, chunk.id); - store.upsertHubMemory({ - id: response?.memoryId ?? existing?.id ?? `${chunk.id}-hub`, - sourceChunkId: chunk.id, - sourceUserId: hubClient.userId, - role: chunk.role, - content: chunk.content, - summary: chunk.summary ?? "", - kind: chunk.kind, - groupId, - visibility, - createdAt: existing?.createdAt ?? now, - updatedAt: now, - }); + const memoryId = response?.memoryId ?? `${chunk.id}-hub`; + + // Only persist hub_memories locally in Hub mode where this DB owns the data. + // Client mode relies on the remote Hub for storage and search. + if (ctx.config.sharing?.role === "hub") { + const now = Date.now(); + const existing = store.getHubMemoryBySource(hubClient.userId, chunk.id); + store.upsertHubMemory({ + id: memoryId, + sourceChunkId: chunk.id, + sourceUserId: hubClient.userId, + role: chunk.role, + content: chunk.content, + summary: chunk.summary ?? "", + kind: chunk.kind, + groupId, + visibility, + createdAt: existing?.createdAt ?? now, + updatedAt: now, + }); + } - return { - memoryId: response?.memoryId ?? existing?.id ?? `${chunk.id}-hub`, - visibility, - groupId, - }; + return { memoryId, visibility, groupId }; }; const unshareMemoryFromHub = async ( @@ -520,6 +537,7 @@ const memosLocalPlugin = { role: h.source.role, score: h.score, summary: h.summary, + origin: h.origin || "local", }; }); @@ -666,6 +684,7 @@ const memosLocalPlugin = { score: h.score, summary: h.summary, original_excerpt: (h.original_excerpt ?? 
"").slice(0, 200), + origin: h.origin || "local", }; }), meta: result.meta, @@ -1115,10 +1134,31 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, }; } + const manifest = skillInstaller.getCompanionManifest(resolvedSkillId); + let footer = "\n\n---\n"; + + if (manifest && manifest.hasCompanionFiles) { + const fileSummary = manifest.files + .filter(f => f.type !== "eval") + .map(f => `\`${f.relativePath}\``) + .join(", "); + footer += `**Companion files available:** ${fileSummary}\n`; + footer += `→ call \`skill_files(skillId="${resolvedSkillId}")\` to list all files\n`; + footer += `→ call \`skill_file_get(skillId="${resolvedSkillId}", path="...")\` to read a specific file\n`; + if (manifest.installMode === "install_recommended") { + footer += `→ **Recommended:** call \`skill_install(skillId="${resolvedSkillId}")\` for persistent workspace access (many/large files)\n`; + } + if (manifest.installed && manifest.installedPath) { + footer += `> Already installed at: ${manifest.installedPath}/\n`; + } + } else { + footer += `To install this skill for persistent use: call skill_install(skillId="${resolvedSkillId}")`; + } + return { content: [{ type: "text", - text: `## Skill: ${skill.name} (v${skill.version})\n\n${sv.content}\n\n---\nTo install this skill for persistent use: call skill_install(skillId="${resolvedSkillId}")`, + text: `## Skill: ${skill.name} (v${skill.version})\n\n${sv.content}${footer}`, }], details: { skillId: skill.id, @@ -1126,6 +1166,8 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, version: skill.version, status: skill.status, installed: skill.installed, + companionFiles: manifest?.hasCompanionFiles ?? false, + installMode: manifest?.installMode ?? "inline", }, }; }), @@ -1161,6 +1203,112 @@ Groups: ${groupNames.length > 0 ? 
groupNames.join(", ") : "(none)"}`, { name: "skill_install" }, ); + // ─── Tool: skill_files ─── + + api.registerTool( + { + name: "skill_files", + label: "List Skill Companion Files", + description: + "List companion files (scripts, references, evals) for a skill. " + + "Use this after skill_get to see what additional files are available. " + + "Returns file names, sizes, and whether the skill recommends installation.", + parameters: Type.Object({ + skillId: Type.String({ description: "The skill_id to inspect" }), + }), + execute: trackTool("skill_files", async (_toolCallId: any, params: any) => { + const { skillId } = params as { skillId: string }; + ctx.log.debug(`skill_files called for skill=${skillId}`); + + const manifest = skillInstaller.getCompanionManifest(skillId); + if (!manifest) { + return { + content: [{ type: "text", text: `Skill not found: ${skillId}` }], + details: { error: "not_found" }, + }; + } + + if (!manifest.hasCompanionFiles) { + return { + content: [{ type: "text", text: "This skill has no companion files (scripts, references). The SKILL.md from skill_get contains everything." 
}], + details: manifest, + }; + } + + const lines: string[] = [`## Companion Files (${manifest.files.length} files, ${Math.round(manifest.totalSize / 1024)}KB total)\n`]; + if (manifest.scriptsCount > 0) { + lines.push(`### Scripts (${manifest.scriptsCount})`); + for (const f of manifest.files.filter(f => f.type === "script")) { + lines.push(`- \`${f.relativePath}\` (${f.size} bytes) → call \`skill_file_get(skillId="${skillId}", path="${f.relativePath}")\``); + } + } + if (manifest.referencesCount > 0) { + lines.push(`\n### References (${manifest.referencesCount})`); + for (const f of manifest.files.filter(f => f.type === "reference")) { + lines.push(`- \`${f.relativePath}\` (${f.size} bytes) → call \`skill_file_get(skillId="${skillId}", path="${f.relativePath}")\``); + } + } + if (manifest.evalsCount > 0) { + lines.push(`\n### Evals (${manifest.evalsCount})`); + for (const f of manifest.files.filter(f => f.type === "eval")) { + lines.push(`- \`${f.relativePath}\` (${f.size} bytes)`); + } + } + + if (manifest.installMode === "install_recommended") { + lines.push(`\n> **Recommendation:** This skill has many/large companion files. Consider \`skill_install(skillId="${skillId}")\` for persistent workspace access.`); + } + if (manifest.installed && manifest.installedPath) { + lines.push(`\n> **Installed at:** ${manifest.installedPath}/`); + } + + return { + content: [{ type: "text", text: lines.join("\n") }], + details: manifest, + }; + }), + }, + { name: "skill_files" }, + ); + + // ─── Tool: skill_file_get ─── + + api.registerTool( + { + name: "skill_file_get", + label: "Get Skill Companion File", + description: + "Read the content of a specific companion file (script, reference) from a skill. " + + "Use after skill_files to retrieve a script or reference document. 
" + + "Pass the relative path like 'scripts/deploy.sh' or 'references/api-notes.md'.", + parameters: Type.Object({ + skillId: Type.String({ description: "The skill_id" }), + path: Type.String({ description: "Relative path within the skill, e.g. 'scripts/deploy.sh'" }), + }), + execute: trackTool("skill_file_get", async (_toolCallId: any, params: any) => { + const { skillId, path: filePath } = params as { skillId: string; path: string }; + ctx.log.debug(`skill_file_get called for skill=${skillId} path=${filePath}`); + + const result = skillInstaller.readCompanionFile(skillId, filePath); + if ("error" in result) { + return { + content: [{ type: "text", text: `Error: ${result.error}` }], + details: result, + }; + } + + const ext = filePath.split(".").pop() || ""; + const lang = { sh: "bash", py: "python", ts: "typescript", js: "javascript", json: "json", md: "markdown", yml: "yaml", yaml: "yaml" }[ext] || ""; + + return { + content: [{ type: "text", text: `## ${filePath}\n\n\`\`\`${lang}\n${result.content}\n\`\`\`` }], + details: { path: filePath, size: result.size }, + }; + }), + }, + { name: "skill_file_get" }, + ); + // ─── Tool: memory_viewer ─── const viewerPort = (pluginCfg as any).viewerPort ?? 18799; @@ -1671,10 +1819,40 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, const result = await engine.search({ query, maxResults: 10, minScore: 0.45, ownerFilter: recallOwnerFilter }); if (result.hits.length === 0) { - ctx.log.debug("auto-recall: no candidates found"); + ctx.log.debug("auto-recall: no memory candidates found"); const dur = performance.now() - recallT0; store.recordToolCall("memory_search", dur, true); store.recordApiLog("memory_search", { type: "auto_recall", query }, JSON.stringify({ candidates: [], filtered: [] }), dur, true); + + // Even without memory hits, try skill recall + const skillAutoRecallEarly = ctx.config.skillEvolution?.autoRecallSkills ?? 
DEFAULTS.skillAutoRecall; + if (skillAutoRecallEarly) { + try { + const skillLimit = ctx.config.skillEvolution?.autoRecallSkillLimit ?? DEFAULTS.skillAutoRecallLimit; + const skillHits = await engine.searchSkills(query, "mix" as any, getCurrentOwner()); + const topSkills = skillHits.slice(0, skillLimit); + if (topSkills.length > 0) { + const skillLines = topSkills.map((sc, i) => { + const manifest = skillInstaller.getCompanionManifest(sc.skillId); + let badge = ""; + if (manifest?.installed) badge = " [installed]"; + else if (manifest?.installMode === "install_recommended") badge = " [has scripts, install recommended]"; + else if (manifest?.hasCompanionFiles) badge = " [has companion files]"; + return `${i + 1}. **${sc.name}**${badge} — ${sc.description.slice(0, 200)}\n → call \`skill_get(skillId="${sc.skillId}")\` for the full guide`; + }); + const skillContext = "## Relevant skills from past experience\n\n" + + "No direct memory matches were found, but these skills from past tasks may help:\n\n" + + skillLines.join("\n\n") + + "\n\nYou SHOULD call `skill_get` to retrieve the full guide before attempting the task."; + ctx.log.info(`auto-recall-skill (no-memory path): injecting ${topSkills.length} skill(s)`); + try { store.recordApiLog("skill_search", { type: "auto_recall_skill", query }, JSON.stringify(topSkills), dur, true); } catch { /* best-effort */ } + return { prependContext: skillContext }; + } + } catch (err) { + ctx.log.debug(`auto-recall-skill (no-memory path): failed: ${err}`); + } + } + if (query.length > 50) { const noRecallHint = "## Memory system — ACTION REQUIRED\n\n" + @@ -1764,6 +1942,75 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, lines.join("\n\n"), ]; if (tipsText) contextParts.push(tipsText); + + // ─── Skill auto-recall ─── + const skillAutoRecall = ctx.config.skillEvolution?.autoRecallSkills ?? DEFAULTS.skillAutoRecall; + const skillLimit = ctx.config.skillEvolution?.autoRecallSkillLimit ?? 
DEFAULTS.skillAutoRecallLimit; + let skillSection = ""; + + if (skillAutoRecall) { + try { + const skillCandidateMap = new Map(); + + // Source 1: direct skill search based on user query + try { + const directSkillHits = await engine.searchSkills(query, "mix" as any, getCurrentOwner()); + for (const sh of directSkillHits.slice(0, skillLimit + 2)) { + if (!skillCandidateMap.has(sh.skillId)) { + skillCandidateMap.set(sh.skillId, { name: sh.name, description: sh.description, skillId: sh.skillId, source: "query" }); + } + } + } catch (err) { + ctx.log.debug(`auto-recall-skill: direct search failed: ${err}`); + } + + // Source 2: skills linked to tasks from memory hits + const taskIds = new Set(); + for (const h of filteredHits) { + if (h.taskId) { + const t = store.getTask(h.taskId); + if (t && t.status !== "skipped") taskIds.add(h.taskId); + } + } + for (const tid of taskIds) { + const linked = store.getSkillsByTask(tid); + for (const rs of linked) { + if (!skillCandidateMap.has(rs.skill.id)) { + skillCandidateMap.set(rs.skill.id, { name: rs.skill.name, description: rs.skill.description, skillId: rs.skill.id, source: `task:${tid}` }); + } + } + } + + const skillCandidates = [...skillCandidateMap.values()].slice(0, skillLimit); + + if (skillCandidates.length > 0) { + const skillLines = skillCandidates.map((sc, i) => { + const manifest = skillInstaller.getCompanionManifest(sc.skillId); + let badge = ""; + if (manifest?.installed) badge = " [installed]"; + else if (manifest?.installMode === "install_recommended") badge = " [has scripts, install recommended]"; + else if (manifest?.hasCompanionFiles) badge = " [has companion files]"; + const action = `call \`skill_get(skillId="${sc.skillId}")\``; + return `${i + 1}. **${sc.name}**${badge} — ${sc.description.slice(0, 200)}\n → ${action}`; + }); + skillSection = "\n\n## Relevant skills from past experience\n\n" + + "The following skills were distilled from similar previous tasks. 
" + + "You SHOULD call `skill_get` to retrieve the full guide before attempting the task.\n\n" + + skillLines.join("\n\n"); + + ctx.log.info(`auto-recall-skill: injecting ${skillCandidates.length} skill(s): ${skillCandidates.map(s => s.name).join(", ")}`); + try { + store.recordApiLog("skill_search", { type: "auto_recall_skill", query }, JSON.stringify(skillCandidates), performance.now() - recallT0, true); + } catch { /* best-effort */ } + } else { + ctx.log.debug("auto-recall-skill: no matching skills found"); + } + } catch (err) { + ctx.log.debug(`auto-recall-skill: failed: ${err}`); + } + } + + if (skillSection) contextParts.push(skillSection); const context = contextParts.join("\n"); const recallDur = performance.now() - recallT0; @@ -1774,7 +2021,7 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, }), recallDur, true); telemetry.trackAutoRecall(filteredHits.length, recallDur); - ctx.log.info(`auto-recall: returning prependContext (${context.length} chars), sufficient=${sufficient}`); + ctx.log.info(`auto-recall: returning prependContext (${context.length} chars), sufficient=${sufficient}, skills=${skillSection ? 
"yes" : "no"}`); if (!sufficient) { const searchHint = diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 1b49ce5bd..5523616f2 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.10", + "version": "1.0.4-beta.12", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", diff --git a/apps/memos-local-openclaw/src/client/connector.ts b/apps/memos-local-openclaw/src/client/connector.ts index c05d3430b..3d7bbb4d9 100644 --- a/apps/memos-local-openclaw/src/client/connector.ts +++ b/apps/memos-local-openclaw/src/client/connector.ts @@ -10,6 +10,7 @@ export interface HubSessionInfo { userToken: string; role: UserRole; connectedAt: number; + identityKey?: string; } export interface HubStatusInfo { @@ -54,6 +55,8 @@ export async function connectToHub(store: SqliteStore, config: MemosLocalConfig, userToken: result.userToken, role: "member", connectedAt: Date.now(), + identityKey: persisted.identityKey || "", + lastKnownStatus: "active", }); return store.getClientHubConnection()!; } @@ -63,6 +66,12 @@ export async function connectToHub(store: SqliteStore, config: MemosLocalConfig, if (result.status === "rejected") { throw new Error("Join request was rejected by the Hub admin."); } + if (result.status === "blocked") { + throw new Error("Your account has been blocked by the Hub admin."); + } + if (result.status === "left" || result.status === "removed") { + log.info(`User status is "${result.status}", will try to rejoin.`); + } } catch (err) { if (err instanceof PendingApprovalError) throw err; log.warn(`registration-status check failed, falling back to autoJoinHub: ${err}`); @@ -78,6 +87,7 @@ export async function connectToHub(store: SqliteStore, config: MemosLocalConfig, const hubUrl = 
normalizeHubUrl(hubAddress); const me = await hubRequestJson(hubUrl, userToken, "/api/v1/hub/me", { method: "GET" }) as any; + const persisted = store.getClientHubConnection(); store.setClientHubConnection({ hubUrl, userId: String(me.id), @@ -85,6 +95,8 @@ export async function connectToHub(store: SqliteStore, config: MemosLocalConfig, userToken, role: String(me.role ?? "member") as UserRole, connectedAt: Date.now(), + identityKey: persisted?.identityKey || String(me.identityKey ?? ""), + lastKnownStatus: "active", }); return store.getClientHubConnection()!; } @@ -95,9 +107,13 @@ export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig) const hubAddress = conn?.hubUrl || (configHubAddress ? normalizeHubUrl(configHubAddress) : ""); const userToken = conn?.userToken || config.sharing?.client?.userToken || ""; - // If DB has a connection to a different Hub than config, the DB data is stale if (conn && configHubAddress && conn.hubUrl && normalizeHubUrl(configHubAddress) !== conn.hubUrl) { - store.clearClientHubConnection(); + store.setClientHubConnection({ + ...conn, + hubUrl: normalizeHubUrl(configHubAddress), + userToken: "", + lastKnownStatus: "hub_changed", + }); return { connected: false, user: null }; } @@ -129,6 +145,8 @@ export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig) userToken: result.userToken, role: "member", connectedAt: Date.now(), + identityKey: conn.identityKey || "", + lastKnownStatus: "active", }); const me = await hubRequestJson(normalizeHubUrl(hubAddress), result.userToken, "/api/v1/hub/me", { method: "GET" }) as any; return { @@ -169,12 +187,10 @@ export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig) const latestRole = String(me.role ?? 
"member") as UserRole; if (conn && (conn.username !== latestUsername || conn.role !== latestRole)) { store.setClientHubConnection({ - hubUrl: conn.hubUrl, - userId: conn.userId, + ...conn, username: latestUsername, - userToken: conn.userToken, role: latestRole, - connectedAt: conn.connectedAt, + lastKnownStatus: "active", }); } return { @@ -185,12 +201,17 @@ export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig) username: latestUsername, role: latestRole, status: String(me.status ?? "active"), + groups: Array.isArray(me.groups) ? me.groups : [], }, }; } catch (err: any) { const is401 = typeof err?.message === "string" && err.message.includes("(401)"); if (is401 && conn) { - store.clearClientHubConnection(); + store.setClientHubConnection({ + ...conn, + userToken: "", + lastKnownStatus: "removed", + }); return { connected: false, hubUrl: normalizeHubUrl(hubAddress), @@ -232,12 +253,17 @@ export async function autoJoinHub( } } + const persisted = store.getClientHubConnection(); + const existingIdentityKey = persisted?.identityKey || ""; + log.info(`Joining Hub at ${hubUrl} as "${username}"...`); const result = await hubRequestJson(hubUrl, "", "/api/v1/hub/join", { method: "POST", - body: JSON.stringify({ teamToken, username, deviceName: hostname, clientIp }), + body: JSON.stringify({ teamToken, username, deviceName: hostname, clientIp, identityKey: existingIdentityKey }), }) as any; + const returnedIdentityKey = String(result.identityKey || existingIdentityKey || ""); + if (result.status === "pending") { log.info(`Join request submitted, awaiting admin approval. 
userId=${result.userId}`); store.setClientHubConnection({ @@ -247,6 +273,8 @@ export async function autoJoinHub( userToken: "", role: "member", connectedAt: Date.now(), + identityKey: returnedIdentityKey, + lastKnownStatus: "pending", }); throw new PendingApprovalError(result.userId); } @@ -255,6 +283,10 @@ export async function autoJoinHub( throw new Error(`Join request was rejected by the Hub admin.`); } + if (result.status === "blocked") { + throw new Error(`Your account has been blocked by the Hub admin.`); + } + if (!result.userToken) { throw new Error(`Hub join failed: ${JSON.stringify(result)}`); } @@ -267,6 +299,8 @@ export async function autoJoinHub( userToken: result.userToken, role: "member", connectedAt: Date.now(), + identityKey: returnedIdentityKey, + lastKnownStatus: "active", }); return store.getClientHubConnection()!; } diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index c2a99fca4..a046e406c 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -219,46 +219,70 @@ export class HubServer { || (req.headers["x-client-ip"] as string)?.trim() || (req.headers["x-forwarded-for"] as string)?.split(",")[0]?.trim() || req.socket.remoteAddress || ""; - const existingUsers = this.opts.store.listHubUsers(); - const existingUser = existingUsers.find(u => u.username === username); + const identityKey = typeof body.identityKey === "string" ? body.identityKey.trim() : ""; + + let existingUser = identityKey + ? this.userManager.findByIdentityKey(identityKey) + : null; + if (!existingUser) { + const existingUsers = this.opts.store.listHubUsers(); + existingUser = existingUsers.find(u => u.username === username && u.status !== "left" && u.status !== "removed") ?? 
null; + } + if (existingUser) { try { this.opts.store.updateHubUserActivity(existingUser.id, joinIp); } catch { /* best-effort */ } + if (existingUser.status === "active") { const token = issueUserToken( { userId: existingUser.id, username: existingUser.username, role: existingUser.role, status: "active" }, this.authSecret, ); this.userManager.approveUser(existingUser.id, token); - return this.json(res, 200, { status: "active", userId: existingUser.id, userToken: token }); + if (identityKey && !existingUser.identityKey) { + this.opts.store.upsertHubUser({ ...existingUser, identityKey }); + } + return this.json(res, 200, { status: "active", userId: existingUser.id, userToken: token, identityKey: existingUser.identityKey || identityKey }); } if (existingUser.status === "pending") { this.notifyAdmins("user_join_request", "user", username, "", { dedup: true }); - return this.json(res, 200, { status: "pending", userId: existingUser.id }); + return this.json(res, 200, { status: "pending", userId: existingUser.id, identityKey: existingUser.identityKey || identityKey }); } if (existingUser.status === "rejected") { if (body.reapply === true) { this.userManager.resetToPending(existingUser.id); this.notifyAdmins("user_join_request", "user", username, ""); this.opts.log.info(`Hub: rejected user "${username}" (${existingUser.id}) re-applied, reset to pending`); - return this.json(res, 200, { status: "pending", userId: existingUser.id }); + return this.json(res, 200, { status: "pending", userId: existingUser.id, identityKey: existingUser.identityKey || identityKey }); } return this.json(res, 200, { status: "rejected", userId: existingUser.id }); } if (existingUser.status === "removed") { - this.userManager.resetToPending(existingUser.id); - this.notifyAdmins("user_join_request", "user", username, ""); - this.opts.log.info(`Hub: removed user "${username}" (${existingUser.id}) re-applied, reset to pending`); - return this.json(res, 200, { status: "pending", userId: existingUser.id 
}); + this.userManager.rejoinUser(existingUser.id); + this.notifyAdmins("user_join_request", "user", username, "", { dedup: true }); + this.opts.log.info(`Hub: removed user "${username}" (${existingUser.id}) re-applied via rejoin, reset to pending`); + return this.json(res, 200, { status: "pending", userId: existingUser.id, identityKey: existingUser.identityKey || identityKey }); + } + if (existingUser.status === "left") { + this.userManager.rejoinUser(existingUser.id); + this.notifyAdmins("user_join_request", "user", username, "", { dedup: true }); + this.opts.log.info(`Hub: left user "${username}" (${existingUser.id}) re-applied via rejoin, reset to pending`); + return this.json(res, 200, { status: "pending", userId: existingUser.id, identityKey: existingUser.identityKey || identityKey }); + } + if (existingUser.status === "blocked") { + return this.json(res, 200, { status: "blocked", userId: existingUser.id }); } } + + const generatedIdentityKey = identityKey || randomUUID(); const user = this.userManager.createPendingUser({ username, deviceName: typeof body.deviceName === "string" ? 
body.deviceName : undefined, + identityKey: generatedIdentityKey, }); try { this.opts.store.updateHubUserActivity(user.id, joinIp); } catch { /* best-effort */ } this.opts.log.info(`Hub: user "${username}" (${user.id}) registered as pending, awaiting admin approval`); this.notifyAdmins("user_join_request", "user", username, ""); - return this.json(res, 200, { status: "pending", userId: user.id }); + return this.json(res, 200, { status: "pending", userId: user.id, identityKey: generatedIdentityKey }); } if (req.method === "POST" && routePath === "/api/v1/hub/registration-status") { @@ -276,6 +300,15 @@ export class HubServer { if (user.status === "rejected") { return this.json(res, 200, { status: "rejected" }); } + if (user.status === "blocked") { + return this.json(res, 200, { status: "blocked" }); + } + if (user.status === "left") { + return this.json(res, 200, { status: "left" }); + } + if (user.status === "removed") { + return this.json(res, 200, { status: "removed" }); + } if (user.status === "active") { const token = issueUserToken( { userId: user.id, username: user.username, role: user.role, status: user.status }, @@ -300,12 +333,10 @@ export class HubServer { } if (req.method === "POST" && routePath === "/api/v1/hub/leave") { - try { - this.opts.store.updateHubUserActivity(auth.userId, "", 0); - } catch { /* best-effort */ } + this.userManager.markUserLeft(auth.userId); this.knownOnlineUsers.delete(auth.userId); this.notifyAdmins("user_offline", "user", auth.username, auth.userId); - this.opts.log.info(`Hub: user "${auth.username}" (${auth.userId}) left voluntarily`); + this.opts.log.info(`Hub: user "${auth.username}" (${auth.userId}) left voluntarily, status set to "left"`); return this.json(res, 200, { ok: true }); } @@ -346,18 +377,33 @@ export class HubServer { if (req.method === "POST" && routePath === "/api/v1/hub/admin/approve-user") { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const body = await this.readJson(req); 
- const token = issueUserToken({ userId: String(body.userId), username: String(body.username || ""), role: "member", status: "active" }, this.authSecret); - const approved = this.userManager.approveUser(String(body.userId), token); + const userId = String(body.userId); + const username = String(body.username || ""); + const token = issueUserToken({ userId, username, role: "member", status: "active" }, this.authSecret); + const approved = this.userManager.approveUser(userId, token); if (!approved) return this.json(res, 404, { error: "not_found" }); - try { this.opts.store.updateHubUserActivity(String(body.userId), ""); } catch { /* best-effort */ } + try { this.opts.store.updateHubUserActivity(userId, ""); } catch { /* best-effort */ } + try { + this.opts.store.insertHubNotification({ + id: randomUUID(), userId, type: "membership_approved", + resource: "user", title: `Your request to join team "${this.teamName}" has been approved. Welcome!`, + }); + } catch { /* best-effort */ } return this.json(res, 200, { status: "active", token }); } if (req.method === "POST" && routePath === "/api/v1/hub/admin/reject-user") { if (auth.role !== "admin") return this.json(res, 403, { error: "forbidden" }); const body = await this.readJson(req); - const rejected = this.userManager.rejectUser(String(body.userId)); + const userId = String(body.userId); + const rejected = this.userManager.rejectUser(userId); if (!rejected) return this.json(res, 404, { error: "not_found" }); + try { + this.opts.store.insertHubNotification({ + id: randomUUID(), userId, type: "membership_rejected", + resource: "user", title: `Your request to join team "${this.teamName}" has been declined.`, + }); + } catch { /* best-effort */ } return this.json(res, 200, { status: "rejected" }); } diff --git a/apps/memos-local-openclaw/src/hub/user-manager.ts b/apps/memos-local-openclaw/src/hub/user-manager.ts index 862ff3aa9..27fa50209 100644 --- a/apps/memos-local-openclaw/src/hub/user-manager.ts +++ 
b/apps/memos-local-openclaw/src/hub/user-manager.ts @@ -4,13 +4,24 @@ import type { Logger } from "../types"; import type { UserInfo } from "../sharing/types"; import type { SqliteStore } from "../storage/sqlite"; -type ManagedHubUser = UserInfo & { tokenHash: string; createdAt: number; approvedAt: number | null; lastIp: string; lastActiveAt: number | null }; +type ManagedHubUser = UserInfo & { + tokenHash: string; + createdAt: number; + approvedAt: number | null; + lastIp: string; + lastActiveAt: number | null; + identityKey?: string; + leftAt?: number | null; + removedAt?: number | null; + rejectedAt?: number | null; + rejoinRequestedAt?: number | null; +}; export class HubUserManager { constructor(private store: SqliteStore, private log: Logger) {} - createPendingUser(input: { username: string; deviceName?: string }): ManagedHubUser { - const user = { + createPendingUser(input: { username: string; deviceName?: string; identityKey?: string }): ManagedHubUser { + const user: ManagedHubUser = { id: randomUUID(), username: input.username, deviceName: input.deviceName, @@ -22,11 +33,36 @@ export class HubUserManager { approvedAt: null, lastIp: "", lastActiveAt: null, + identityKey: input.identityKey || "", }; this.store.upsertHubUser(user); return user; } + findByIdentityKey(identityKey: string): ManagedHubUser | null { + if (!identityKey) return null; + return this.store.findHubUserByIdentityKey(identityKey); + } + + markUserLeft(userId: string): boolean { + this.log.info(`Hub: user "${userId}" marked as left`); + return this.store.markHubUserLeft(userId); + } + + rejoinUser(userId: string): ManagedHubUser | null { + const user = this.store.getHubUser(userId); + if (!user) return null; + const updated: ManagedHubUser = { + ...user, + status: "pending" as const, + tokenHash: "", + rejoinRequestedAt: Date.now(), + }; + this.store.upsertHubUser(updated); + this.log.info(`Hub: user "${userId}" (${user.username}) requested rejoin, previous status: ${user.status}`); + 
return updated; + } + listPendingUsers(): ManagedHubUser[] { return this.store.listHubUsers("pending"); } @@ -105,7 +141,7 @@ export class HubUserManager { isUsernameTaken(username: string, excludeUserId?: string): boolean { const users = this.store.listHubUsers(); - return users.some(u => u.username === username && u.id !== excludeUserId); + return users.some(u => u.username === username && u.id !== excludeUserId && u.status !== "left" && u.status !== "removed"); } updateUsername(userId: string, newUsername: string): ManagedHubUser | null { @@ -119,10 +155,10 @@ export class HubUserManager { rejectUser(userId: string): ManagedHubUser | null { const user = this.store.getHubUser(userId); if (!user) return null; - const updated = { + const updated: ManagedHubUser = { ...user, status: "rejected" as const, - approvedAt: Date.now(), + rejectedAt: Date.now(), }; this.store.upsertHubUser(updated); return updated; diff --git a/apps/memos-local-openclaw/src/ingest/chunker.ts b/apps/memos-local-openclaw/src/ingest/chunker.ts index 2de7630e7..b42fe7919 100644 --- a/apps/memos-local-openclaw/src/ingest/chunker.ts +++ b/apps/memos-local-openclaw/src/ingest/chunker.ts @@ -1,6 +1,8 @@ +export type ChunkKind = "paragraph" | "code_block" | "error_stack" | "list" | "command"; + export interface RawChunk { content: string; - kind: "paragraph"; + kind: ChunkKind; } const MAX_CHUNK_CHARS = 3000; @@ -28,21 +30,25 @@ const COMMAND_LINE_RE = /^(?:\$|>|#)\s+.+$/gm; */ export function chunkText(text: string): RawChunk[] { let remaining = text; - const slots: Array<{ placeholder: string; content: string }> = []; + const slots: Array<{ placeholder: string; content: string; kind: ChunkKind }> = []; let counter = 0; - function ph(content: string): string { + function ph(content: string, kind: ChunkKind = "paragraph"): string { const tag = `\x00SLOT_${counter++}\x00`; - slots.push({ placeholder: tag, content: content.trim() }); + slots.push({ placeholder: tag, content: content.trim(), kind }); 
return tag; } - remaining = remaining.replace(FENCED_CODE_RE, (m) => ph(m)); + remaining = remaining.replace(FENCED_CODE_RE, (m) => ph(m, "code_block")); remaining = extractBraceBlocks(remaining, ph); - const structural: RegExp[] = [ERROR_STACK_RE, LIST_BLOCK_RE, COMMAND_LINE_RE]; - for (const re of structural) { - remaining = remaining.replace(re, (m) => ph(m)); + const structuralKinds: Array<[RegExp, ChunkKind]> = [ + [ERROR_STACK_RE, "error_stack"], + [LIST_BLOCK_RE, "list"], + [COMMAND_LINE_RE, "command"], + ]; + for (const [re, kind] of structuralKinds) { + remaining = remaining.replace(re, (m) => ph(m, kind)); } const raw: RawChunk[] = []; @@ -57,7 +63,7 @@ export function chunkText(text: string): RawChunk[] { for (const part of parts) { const slot = slots.find((s) => s.placeholder === part); if (slot) { - raw.push({ content: slot.content, kind: "paragraph" }); + raw.push({ content: slot.content, kind: slot.kind }); } else if (part.trim().length >= MIN_CHUNK_CHARS) { raw.push({ content: part.trim(), kind: "paragraph" }); } @@ -69,7 +75,7 @@ export function chunkText(text: string): RawChunk[] { for (const s of slots) { if (!raw.some((c) => c.content === s.content)) { - raw.push({ content: s.content, kind: "paragraph" }); + raw.push({ content: s.content, kind: s.kind }); } } @@ -85,7 +91,7 @@ export function chunkText(text: string): RawChunk[] { */ function extractBraceBlocks( text: string, - ph: (content: string) => string, + ph: (content: string, kind?: ChunkKind) => string, ): string { const lines = text.split("\n"); const result: string[] = []; @@ -119,7 +125,7 @@ function extractBraceBlocks( if (depth <= 0 || (BLOCK_CLOSE_RE.test(line) && depth <= 0)) { const block = blockLines.join("\n"); if (block.trim().length >= MIN_CHUNK_CHARS) { - result.push(ph(block)); + result.push(ph(block, "code_block")); } else { result.push(block); } @@ -135,7 +141,7 @@ function extractBraceBlocks( if (blockLines.length > 0) { const block = blockLines.join("\n"); if 
(block.trim().length >= MIN_CHUNK_CHARS) { - result.push(ph(block)); + result.push(ph(block, "code_block")); } else { result.push(block); } diff --git a/apps/memos-local-openclaw/src/recall/engine.ts b/apps/memos-local-openclaw/src/recall/engine.ts index 7d6a34233..59ab30c10 100644 --- a/apps/memos-local-openclaw/src/recall/engine.ts +++ b/apps/memos-local-openclaw/src/recall/engine.ts @@ -74,11 +74,14 @@ export class RecallEngine { score: 1 / (i + 1), })); - // Step 1c: Hub memories search (when sharing is enabled and hub_memories exist) + // Step 1c: Hub memories search — only in Hub mode where local DB owns the + // hub_memories data and embeddings were generated by the same Embedder. + // Client mode must use remote API (hubSearchMemories) to avoid cross-model + // embedding mismatch. let hubMemFtsRanked: Array<{ id: string; score: number }> = []; let hubMemVecRanked: Array<{ id: string; score: number }> = []; let hubMemPatternRanked: Array<{ id: string; score: number }> = []; - if (query && this.ctx.config.sharing?.enabled) { + if (query && this.ctx.config.sharing?.enabled && this.ctx.config.sharing.role === "hub") { try { const hubFtsHits = this.store.searchHubMemories(query, { maxResults: candidatePool }); hubMemFtsRanked = hubFtsHits.map(({ hit }, i) => ({ diff --git a/apps/memos-local-openclaw/src/sharing/types.ts b/apps/memos-local-openclaw/src/sharing/types.ts index 2b4d0a7f6..aa97a5de1 100644 --- a/apps/memos-local-openclaw/src/sharing/types.ts +++ b/apps/memos-local-openclaw/src/sharing/types.ts @@ -12,7 +12,7 @@ import type { export type HubScope = "local" | "group" | "all"; export type SharedVisibility = "group" | "public"; export type UserRole = "admin" | "member"; -export type UserStatus = "pending" | "active" | "blocked" | "rejected" | "removed"; +export type UserStatus = "pending" | "active" | "blocked" | "rejected" | "removed" | "left"; export type { ClientModeConfig, HubModeConfig, SharingCapabilities, SharingConfig, SharingRole }; diff --git 
a/apps/memos-local-openclaw/src/skill/evolver.ts b/apps/memos-local-openclaw/src/skill/evolver.ts index 80f723a9b..42516e8b0 100644 --- a/apps/memos-local-openclaw/src/skill/evolver.ts +++ b/apps/memos-local-openclaw/src/skill/evolver.ts @@ -9,7 +9,7 @@ import { DEFAULTS } from "../types"; import { SkillEvaluator } from "./evaluator"; import { SkillGenerator } from "./generator"; import { SkillUpgrader } from "./upgrader"; -import { SkillInstaller } from "./installer"; +import { SkillInstaller, type SkillInstallMode } from "./installer"; import { buildSkillConfigChain, callLLMWithFallback } from "../shared/llm-call"; export type SkillEvolvedCallback = (skillName: string, upgradeType: "created" | "upgraded") => void; @@ -96,10 +96,19 @@ export class SkillEvolver { return; } + const preferUpgrade = this.ctx.config.skillEvolution?.preferUpgradeExisting ?? DEFAULTS.skillPreferUpgrade; const relatedSkill = await this.findRelatedSkill(task); if (relatedSkill) { await this.handleExistingSkill(task, chunks, relatedSkill); + } else if (preferUpgrade) { + const nameCandidate = await this.findSkillByNameSimilarity(task); + if (nameCandidate) { + this.ctx.log.info(`SkillEvolver: preferUpgrade found name-similar skill "${nameCandidate.name}" for task "${task.title}"`); + await this.handleExistingSkill(task, chunks, nameCandidate); + } else { + await this.handleNewSkill(task, chunks); + } } else { await this.handleNewSkill(task, chunks); } @@ -281,7 +290,11 @@ Use selectedIndex 0 when none is highly relevant.`; if (upgraded) { this.store.linkTaskSkill(task.id, freshSkill.id, "evolved_from", freshSkill.version + 1); - this.installer.syncIfInstalled(freshSkill.name); + if (freshSkill.installed) { + this.installer.syncIfInstalled(freshSkill.name); + } else { + this.autoInstallIfNeeded(freshSkill); + } this.onSkillEvolved?.(freshSkill.name, "upgraded"); } else { this.store.linkTaskSkill(task.id, freshSkill.id, "applied_to", freshSkill.version); @@ -304,6 +317,13 @@ Use selectedIndex 
0 when none is highly relevant.`; const evalResult = await this.evaluator.evaluateCreate(task); if (evalResult.shouldGenerate && evalResult.confidence >= minConfidence) { + const existingByName = this.store.getSkillByName(evalResult.suggestedName); + if (existingByName && (existingByName.status === "active" || existingByName.status === "draft")) { + this.ctx.log.info(`SkillEvolver: skill "${evalResult.suggestedName}" already exists, redirecting to upgrade instead of create`); + await this.handleExistingSkill(task, chunks, existingByName); + return; + } + this.ctx.log.info(`SkillEvolver: generating new skill "${evalResult.suggestedName}" — ${evalResult.reason}`); this.store.setTaskSkillMeta(task.id, { skillStatus: "generating", skillReason: evalResult.reason }); @@ -313,10 +333,7 @@ Use selectedIndex 0 when none is highly relevant.`; this.store.setTaskSkillMeta(task.id, { skillStatus: "generated", skillReason: evalResult.reason }); this.onSkillEvolved?.(skill.name, "created"); - const autoInstall = this.ctx.config.skillEvolution?.autoInstall ?? DEFAULTS.skillAutoInstall; - if (autoInstall && skill.status === "active") { - this.installer.install(skill.id); - } + this.autoInstallIfNeeded(skill); } else { const reason = evalResult.reason || `confidence不足 (${evalResult.confidence} < ${minConfidence})`; this.ctx.log.debug(`SkillEvolver: task "${task.title}" not worth generating skill — ${reason}`); @@ -331,6 +348,41 @@ Use selectedIndex 0 when none is highly relevant.`; this.ctx.log.debug(`SkillEvolver: marked ${chunks.length} chunks with skill_id=${skillId}`); } + private async findSkillByNameSimilarity(task: Task): Promise { + const query = task.title.slice(0, 200); + const owner = task.owner ?? 
"agent:main"; + + try { + const ftsHits = this.store.skillFtsSearch(query, 5, "mix", owner); + for (const hit of ftsHits) { + if (hit.score < 0.5) continue; + const skill = this.store.getSkill(hit.skillId); + if (skill && (skill.status === "active" || skill.status === "draft")) { + return skill; + } + } + } catch { /* best-effort */ } + + return null; + } + + private autoInstallIfNeeded(skill: Skill): void { + if (skill.status !== "active") return; + + const explicitAutoInstall = this.ctx.config.skillEvolution?.autoInstall ?? DEFAULTS.skillAutoInstall; + if (explicitAutoInstall) { + this.installer.install(skill.id); + this.ctx.log.info(`SkillEvolver: auto-installed "${skill.name}" (explicit autoInstall=true)`); + return; + } + + const manifest = SkillInstaller.buildManifest(skill.dirPath, !!skill.installed, skill.name); + if (manifest.installMode === "install_recommended") { + this.installer.install(skill.id); + this.ctx.log.info(`SkillEvolver: auto-installed "${skill.name}" (install_recommended: ${manifest.scriptsCount} scripts, ${Math.round(manifest.totalSize / 1024)}KB)`); + } + } + private readSkillContent(skill: Skill): string | null { const filePath = path.join(skill.dirPath, "SKILL.md"); try { diff --git a/apps/memos-local-openclaw/src/skill/generator.ts b/apps/memos-local-openclaw/src/skill/generator.ts index fa75d6a8c..89cf1e2e6 100644 --- a/apps/memos-local-openclaw/src/skill/generator.ts +++ b/apps/memos-local-openclaw/src/skill/generator.ts @@ -484,14 +484,55 @@ export class SkillGenerator { private buildConversationText(chunks: Chunk[]): string { const lines: string[] = []; + const redact = this.ctx.config.skillEvolution?.redactSensitiveInSkill ?? true; + for (const c of chunks) { - if (c.role !== "user" && c.role !== "assistant") continue; - const roleLabel = c.role === "user" ? 
"User" : "Assistant"; - lines.push(`[${roleLabel}]: ${c.content}`); + let roleLabel: string; + switch (c.role) { + case "user": roleLabel = "User"; break; + case "assistant": roleLabel = "Assistant"; break; + case "tool": roleLabel = "Tool"; break; + case "system": roleLabel = "System"; break; + default: continue; + } + + let content = c.content; + if (c.role === "system") continue; + + if (c.role === "tool") { + content = this.truncateToolOutput(content); + } + + if (redact) { + content = SkillGenerator.redactSensitive(content); + } + + lines.push(`[${roleLabel}]: ${content}`); } return lines.join("\n\n"); } + private truncateToolOutput(content: string): string { + const MAX_TOOL_OUTPUT = 1500; + if (content.length <= MAX_TOOL_OUTPUT) return content; + const head = content.slice(0, MAX_TOOL_OUTPUT * 0.6); + const tail = content.slice(-MAX_TOOL_OUTPUT * 0.3); + return `${head}\n... (truncated ${content.length - MAX_TOOL_OUTPUT} chars) ...\n${tail}`; + } + + static redactSensitive(text: string): string { + let result = text; + result = result.replace(/\bsk-[a-zA-Z0-9]{20,}\b/g, "sk-***REDACTED***"); + result = result.replace(/\bBearer\s+[a-zA-Z0-9_\-.]{20,}\b/g, "Bearer ***REDACTED***"); + result = result.replace(/\bAKIA[0-9A-Z]{16}\b/g, "AKIA***REDACTED***"); + result = result.replace(/(api[_-]?key|secret|token|password|credential)\s*[:=]\s*["']([^"']{8,})["']/gi, + (match, key) => `${key}="***REDACTED***"`); + result = result.replace(/\/Users\/[a-zA-Z0-9._-]+\//g, "/Users/****/"); + result = result.replace(/\/home\/[a-zA-Z0-9._-]+\//g, "/home/****/"); + result = result.replace(/C:\\Users\\[a-zA-Z0-9._-]+\\/g, "C:\\Users\\****\\"); + return result; + } + private parseDescription(content: string): string { const match = content.match(/description:\s*"([^"]+)"/); if (match) return match[1]; @@ -499,6 +540,4 @@ export class SkillGenerator { if (match2) return match2[1]; return ""; } - - } diff --git a/apps/memos-local-openclaw/src/skill/installer.ts 
b/apps/memos-local-openclaw/src/skill/installer.ts index e92b48e11..ea0d2fa78 100644 --- a/apps/memos-local-openclaw/src/skill/installer.ts +++ b/apps/memos-local-openclaw/src/skill/installer.ts @@ -3,6 +3,26 @@ import * as path from "path"; import type { SqliteStore } from "../storage/sqlite"; import type { PluginContext } from "../types"; +export type SkillInstallMode = "inline" | "on_demand" | "install_recommended"; + +export interface CompanionFileInfo { + relativePath: string; + size: number; + type: "script" | "reference" | "eval" | "other"; +} + +export interface SkillCompanionManifest { + hasCompanionFiles: boolean; + installMode: SkillInstallMode; + installed: boolean; + installedPath?: string; + files: CompanionFileInfo[]; + totalSize: number; + scriptsCount: number; + referencesCount: number; + evalsCount: number; +} + export class SkillInstaller { private workspaceSkillsDir: string; @@ -13,6 +33,82 @@ export class SkillInstaller { this.workspaceSkillsDir = path.join(ctx.workspaceDir, "skills"); } + getCompanionManifest(skillId: string): SkillCompanionManifest | null { + const skill = this.store.getSkill(skillId); + if (!skill) return null; + return SkillInstaller.buildManifest(skill.dirPath, !!skill.installed, skill.name, this.workspaceSkillsDir); + } + + static buildManifest(dirPath: string, installed: boolean, skillName: string, workspaceSkillsDir?: string): SkillCompanionManifest { + const files: CompanionFileInfo[] = []; + + const scanDir = (subDir: string, type: CompanionFileInfo["type"]) => { + const fullDir = path.join(dirPath, subDir); + if (!fs.existsSync(fullDir)) return; + try { + for (const f of fs.readdirSync(fullDir)) { + const fp = path.join(fullDir, f); + try { + const stat = fs.statSync(fp); + if (stat.isFile()) { + files.push({ relativePath: `${subDir}/${f}`, size: stat.size, type }); + } + } catch { /* best-effort */ } + } + } catch { /* best-effort */ } + }; + + scanDir("scripts", "script"); + scanDir("references", "reference"); + 
scanDir("evals", "eval"); + + const scriptsCount = files.filter(f => f.type === "script").length; + const referencesCount = files.filter(f => f.type === "reference").length; + const evalsCount = files.filter(f => f.type === "eval").length; + const totalSize = files.reduce((sum, f) => sum + f.size, 0); + const hasCompanionFiles = files.filter(f => f.type !== "eval").length > 0; + + let installMode: SkillInstallMode = "inline"; + if (hasCompanionFiles) { + const executableScripts = files.filter(f => f.type === "script"); + const largeFiles = files.filter(f => f.size > 5000); + if (executableScripts.length >= 3 || largeFiles.length >= 2 || totalSize > 20000) { + installMode = "install_recommended"; + } else { + installMode = "on_demand"; + } + } + + const installedPath = installed && workspaceSkillsDir + ? path.join(workspaceSkillsDir, skillName) + : undefined; + + return { hasCompanionFiles, installMode, installed, installedPath, files, totalSize, scriptsCount, referencesCount, evalsCount }; + } + + readCompanionFile(skillId: string, relativePath: string): { content: string; size: number } | { error: string } { + const skill = this.store.getSkill(skillId); + if (!skill) return { error: "Skill not found" }; + + const normalized = relativePath.replace(/\.\./g, ""); + const fullPath = path.join(skill.dirPath, normalized); + + if (!fullPath.startsWith(skill.dirPath)) { + return { error: "Path traversal not allowed" }; + } + + if (!fs.existsSync(fullPath)) { + return { error: `File not found: ${relativePath}` }; + } + + try { + const content = fs.readFileSync(fullPath, "utf-8"); + return { content, size: content.length }; + } catch (err) { + return { error: `Cannot read file: ${err}` }; + } + } + install(skillId: string): { installed: boolean; path: string; message: string } { const skill = this.store.getSkill(skillId); if (!skill) return { installed: false, path: "", message: "Skill not found" }; @@ -22,8 +118,7 @@ export class SkillInstaller { } const dstDir = 
path.join(this.workspaceSkillsDir, skill.name); - fs.mkdirSync(dstDir, { recursive: true }); - fs.cpSync(skill.dirPath, dstDir, { recursive: true }); + this.cleanSync(skill.dirPath, dstDir); this.store.updateSkill(skillId, { installed: 1 }); this.ctx.log.info(`Skill installed: "${skill.name}" v${skill.version} → ${dstDir}`); @@ -51,9 +146,17 @@ export class SkillInstaller { if (!skill || !skill.installed) return; const dstDir = path.join(this.workspaceSkillsDir, skill.name); - if (fs.existsSync(dstDir) && fs.existsSync(skill.dirPath)) { - fs.cpSync(skill.dirPath, dstDir, { recursive: true }); + if (fs.existsSync(skill.dirPath)) { + this.cleanSync(skill.dirPath, dstDir); this.ctx.log.info(`Skill synced: "${skill.name}" v${skill.version} → workspace`); } } + + private cleanSync(srcDir: string, dstDir: string): void { + if (fs.existsSync(dstDir)) { + fs.rmSync(dstDir, { recursive: true }); + } + fs.mkdirSync(dstDir, { recursive: true }); + fs.cpSync(srcDir, dstDir, { recursive: true }); + } } diff --git a/apps/memos-local-openclaw/src/skill/upgrader.ts b/apps/memos-local-openclaw/src/skill/upgrader.ts index ffdab23b5..d6368dc42 100644 --- a/apps/memos-local-openclaw/src/skill/upgrader.ts +++ b/apps/memos-local-openclaw/src/skill/upgrader.ts @@ -91,18 +91,30 @@ export class SkillUpgrader { return { upgraded: false, qualityScore: null }; } + const backupDir = skill.dirPath + ".backup-" + Date.now(); + try { fs.cpSync(skill.dirPath, backupDir, { recursive: true }); } catch { /* best-effort */ } + fs.writeFileSync(path.join(skill.dirPath, "SKILL.md"), newContent, "utf-8"); + await this.rebuildCompanionFiles(skill, newContent, task); + const validation = await this.validator.validate(skill.dirPath, { previousContent: currentContent, }); if (!validation.valid) { this.ctx.log.warn(`SkillUpgrader: validation failed for "${skill.name}", reverting: ${validation.errors.join("; ")}`); - fs.writeFileSync(path.join(skill.dirPath, "SKILL.md"), currentContent, "utf-8"); + if 
(fs.existsSync(backupDir)) {
+        fs.rmSync(skill.dirPath, { recursive: true });
+        fs.renameSync(backupDir, skill.dirPath);
+      } else {
+        fs.writeFileSync(path.join(skill.dirPath, "SKILL.md"), currentContent, "utf-8");
+      }
       return { upgraded: false, qualityScore: null };
     }
 
+    try { if (fs.existsSync(backupDir)) fs.rmSync(backupDir, { recursive: true }); } catch { /* cleanup */ }
+
     const newVersion = skill.version + 1;
     const newDescription = this.parseDescription(newContent) || skill.description;
 
@@ -216,4 +228,130 @@ export class SkillUpgrader {
     if (match2) return match2[1];
     return "";
   }
+
+  private async rebuildCompanionFiles(skill: Skill, newContent: string, task: Task): Promise<void> {
+    const chain = buildSkillConfigChain(this.ctx);
+    if (chain.length === 0) return;
+
+    const chunks = this.store.getChunksByTask(task.id);
+    const conversationText = chunks
+      .filter(c => c.role === "user" || c.role === "assistant" || c.role === "tool")
+      .map(c => `[${c.role === "user" ? "User" : c.role === "assistant" ? "Assistant" : "Tool"}]: ${c.content.slice(0, 500)}`)
+      .join("\n\n")
+      .slice(0, 6000);
+
+    const scriptsPrompt = `Based on the following upgraded SKILL.md and task record, extract reusable automation scripts.
+Rules:
+- Only extract if the task record contains concrete shell commands, Python scripts, or TypeScript code that form a complete, reusable automation.
+- Each script must be self-contained and runnable.
+- If there are no automatable scripts, return an empty array.
+- Don't fabricate scripts — only extract what was actually used.
+ +SKILL.md: +${newContent.slice(0, 4000)} + +Task conversation highlights: +${conversationText} + +Reply with a JSON array only: +[{"filename": "deploy.sh", "content": "#!/bin/bash\\n..."}] +If no scripts, reply with: []`; + + try { + const raw = await callLLMWithFallback(chain, scriptsPrompt, this.ctx.log, "SkillUpgrader.scripts", { + maxTokens: 3000, temperature: 0.1, timeoutMs: 60_000, openclawAPI: this.ctx.openclawAPI, + }); + const scripts = this.parseJSONArray<{ filename: string; content: string }>(raw); + + const scriptsDir = path.join(skill.dirPath, "scripts"); + if (fs.existsSync(scriptsDir)) fs.rmSync(scriptsDir, { recursive: true }); + if (scripts.length > 0) { + fs.mkdirSync(scriptsDir, { recursive: true }); + for (const s of scripts) { + fs.writeFileSync(path.join(scriptsDir, s.filename), s.content, "utf-8"); + } + this.ctx.log.info(`SkillUpgrader: rebuilt ${scripts.length} scripts for "${skill.name}"`); + } + } catch (err) { + this.ctx.log.warn(`SkillUpgrader: companion scripts rebuild failed: ${err}`); + } + + try { + const evalsPrompt = `Based on the following skill, generate 3-4 realistic test prompts that should trigger this skill. +Requirements: +- Write test prompts that a real user would type, mix direct and indirect phrasings +- LANGUAGE RULE: Write in the SAME language as the skill content. 
+ +Skill: +${newContent.slice(0, 4000)} + +Reply with a JSON array only: +[{"id": 1, "prompt": "A realistic user message", "expectations": ["Expected behavior 1"], "trigger_confidence": "high"}]`; + + const raw = await callLLMWithFallback(chain, evalsPrompt, this.ctx.log, "SkillUpgrader.evals", { + maxTokens: 2000, temperature: 0.3, timeoutMs: 60_000, openclawAPI: this.ctx.openclawAPI, + }); + const evals = this.parseJSONArray<{ id: number; prompt: string; expectations: string[] }>(raw); + + const evalsDir = path.join(skill.dirPath, "evals"); + if (fs.existsSync(evalsDir)) fs.rmSync(evalsDir, { recursive: true }); + if (evals.length > 0) { + fs.mkdirSync(evalsDir, { recursive: true }); + fs.writeFileSync( + path.join(evalsDir, "evals.json"), + JSON.stringify({ skill_name: skill.name, evals }, null, 2), + "utf-8", + ); + this.ctx.log.info(`SkillUpgrader: rebuilt ${evals.length} evals for "${skill.name}"`); + } + } catch (err) { + this.ctx.log.warn(`SkillUpgrader: companion evals rebuild failed: ${err}`); + } + + try { + const refsPrompt = `Based on the following upgraded SKILL.md and task record, extract reference documentation worth preserving. +Rules: +- Only extract real reference content that appeared in the task (API docs, config examples, architecture notes). +- Each reference should be a standalone document useful for understanding the skill's domain. +- If there are no meaningful references, return an empty array. +- Don't fabricate content — only extract what was actually discussed or used. +- LANGUAGE RULE: Write in the SAME language as the skill content. 
+
+SKILL.md:
+${newContent.slice(0, 4000)}
+
+Task conversation highlights:
+${conversationText}
+
+Reply with a JSON array only:
+[{"filename": "api-notes.md", "content": "# API Reference\\n..."}]
+If no references, reply with: []`;
+
+      const raw = await callLLMWithFallback(chain, refsPrompt, this.ctx.log, "SkillUpgrader.references", {
+        maxTokens: 3000, temperature: 0.1, timeoutMs: 60_000, openclawAPI: this.ctx.openclawAPI,
+      });
+      const refs = this.parseJSONArray<{ filename: string; content: string }>(raw);
+
+      const refsDir = path.join(skill.dirPath, "references");
+      if (fs.existsSync(refsDir)) fs.rmSync(refsDir, { recursive: true });
+      if (refs.length > 0) {
+        fs.mkdirSync(refsDir, { recursive: true });
+        for (const r of refs) {
+          fs.writeFileSync(path.join(refsDir, r.filename), r.content, "utf-8");
+        }
+        this.ctx.log.info(`SkillUpgrader: rebuilt ${refs.length} references for "${skill.name}"`);
+      }
+    } catch (err) {
+      this.ctx.log.warn(`SkillUpgrader: companion references rebuild failed: ${err}`);
+    }
+  }
+
+  private parseJSONArray<T>(raw: string): T[] {
+    const match = raw.match(/\[[\s\S]*\]/);
+    if (!match) return [];
+    try {
+      const arr = JSON.parse(match[0]);
+      return Array.isArray(arr) ?
arr : []; + } catch { return []; } + } } diff --git a/apps/memos-local-openclaw/src/skill/validator.ts b/apps/memos-local-openclaw/src/skill/validator.ts index 637859d9f..880c6479d 100644 --- a/apps/memos-local-openclaw/src/skill/validator.ts +++ b/apps/memos-local-openclaw/src/skill/validator.ts @@ -31,6 +31,9 @@ export class SkillValidator { this.validateFormat(dirPath, result); if (!result.valid) return result; + this.checkCompanionConsistency(dirPath, result); + this.scanSecrets(dirPath, result); + if (opts?.previousContent) { this.regressionCheck(dirPath, opts.previousContent, result); } @@ -133,6 +136,82 @@ export class SkillValidator { } } + private checkCompanionConsistency(dirPath: string, result: ValidationResult): void { + const skillMdPath = path.join(dirPath, "SKILL.md"); + const content = fs.readFileSync(skillMdPath, "utf-8"); + + const referencedScripts = [...content.matchAll(/`scripts\/([^`]+)`/g)].map(m => m[1]); + const referencedRefs = [...content.matchAll(/`references\/([^`]+)`/g)].map(m => m[1]); + + const scriptsDir = path.join(dirPath, "scripts"); + const refsDir = path.join(dirPath, "references"); + + for (const f of referencedScripts) { + if (!fs.existsSync(path.join(scriptsDir, f))) { + result.warnings.push(`SKILL.md references scripts/${f} but file does not exist`); + } + } + for (const f of referencedRefs) { + if (!fs.existsSync(path.join(refsDir, f))) { + result.warnings.push(`SKILL.md references references/${f} but file does not exist`); + } + } + + if (fs.existsSync(scriptsDir)) { + try { + const actualScripts = fs.readdirSync(scriptsDir); + for (const f of actualScripts) { + if (!referencedScripts.includes(f)) { + result.warnings.push(`scripts/${f} exists but is not referenced in SKILL.md`); + } + } + } catch { /* best-effort */ } + } + + const evalsPath = path.join(dirPath, "evals", "evals.json"); + if (fs.existsSync(evalsPath)) { + try { + const evalsData = JSON.parse(fs.readFileSync(evalsPath, "utf-8")); + if 
(!Array.isArray(evalsData?.evals) && !Array.isArray(evalsData)) {
+          result.warnings.push("evals/evals.json exists but has unexpected structure");
+        }
+      } catch {
+        result.warnings.push("evals/evals.json exists but is not valid JSON");
+      }
+    }
+  }
+
+  private static readonly SECRET_PATTERNS: Array<{ label: string; regex: RegExp }> = [
+    { label: "API key (sk-...)", regex: /\bsk-[a-zA-Z0-9]{20,}\b/ },
+    { label: "Bearer token", regex: /\bBearer\s+[a-zA-Z0-9_\-.]{20,}\b/ },
+    { label: "AWS key", regex: /\bAKIA[0-9A-Z]{16}\b/ },
+    { label: "Generic secret assignment", regex: /(api[_-]?key|secret|token|password|credential)\s*[:=]\s*["'][^"']{8,}["']/i },
+    { label: "Base64 encoded secret (long)", regex: /\b[A-Za-z0-9+/]{40,}={0,2}\b/ },
+  ];
+
+  private scanSecrets(dirPath: string, result: ValidationResult): void {
+    const filesToScan = ["SKILL.md"];
+    const scriptsDir = path.join(dirPath, "scripts");
+    if (fs.existsSync(scriptsDir)) {
+      try {
+        for (const f of fs.readdirSync(scriptsDir)) filesToScan.push(path.join("scripts", f));
+      } catch { /* best-effort */ }
+    }
+
+    for (const relPath of filesToScan) {
+      const fullPath = path.join(dirPath, relPath);
+      if (!fs.existsSync(fullPath)) continue;
+      try {
+        const content = fs.readFileSync(fullPath, "utf-8");
+        for (const { label, regex } of SkillValidator.SECRET_PATTERNS) {
+          if (regex.test(content)) {
+            result.warnings.push(`Potential secret detected in ${relPath}: ${label}`);
+          }
+        }
+      } catch { /* best-effort */ }
+    }
+  }
+
   private async assessQuality(dirPath: string, result: ValidationResult): Promise<void> {
     const chain = buildSkillConfigChain(this.ctx);
     if (chain.length === 0) return;
diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts
index 6acdc37e1..34b449e2f 100644
--- a/apps/memos-local-openclaw/src/storage/sqlite.ts
+++ b/apps/memos-local-openclaw/src/storage/sqlite.ts
@@ -114,6 +114,8 @@ export class SqliteStore {
     this.migrateHubTables();
this.migrateHubFtsToTrigram(); this.migrateLocalSharedTasksOwner(); + this.migrateHubUserIdentityFields(); + this.migrateClientHubConnectionIdentityFields(); this.log.debug("Database schema initialized"); } @@ -131,6 +133,49 @@ export class SqliteStore { } catch { /* table may not exist yet */ } } + private migrateHubUserIdentityFields(): void { + try { + const cols = this.db.prepare("PRAGMA table_info(hub_users)").all() as Array<{ name: string }>; + if (cols.length === 0) return; + if (!cols.some(c => c.name === "identity_key")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN identity_key TEXT NOT NULL DEFAULT ''"); + this.db.exec("CREATE INDEX IF NOT EXISTS idx_hub_users_identity_key ON hub_users(identity_key)"); + this.log.info("Migrated: added identity_key to hub_users"); + } + if (!cols.some(c => c.name === "left_at")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN left_at INTEGER"); + this.log.info("Migrated: added left_at to hub_users"); + } + if (!cols.some(c => c.name === "removed_at")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN removed_at INTEGER"); + this.log.info("Migrated: added removed_at to hub_users"); + } + if (!cols.some(c => c.name === "rejected_at")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN rejected_at INTEGER"); + this.log.info("Migrated: added rejected_at to hub_users"); + } + if (!cols.some(c => c.name === "rejoin_requested_at")) { + this.db.exec("ALTER TABLE hub_users ADD COLUMN rejoin_requested_at INTEGER"); + this.log.info("Migrated: added rejoin_requested_at to hub_users"); + } + } catch { /* table may not exist yet */ } + } + + private migrateClientHubConnectionIdentityFields(): void { + try { + const cols = this.db.prepare("PRAGMA table_info(client_hub_connection)").all() as Array<{ name: string }>; + if (cols.length === 0) return; + if (!cols.some(c => c.name === "identity_key")) { + this.db.exec("ALTER TABLE client_hub_connection ADD COLUMN identity_key TEXT NOT NULL DEFAULT ''"); + this.log.info("Migrated: 
added identity_key to client_hub_connection"); + } + if (!cols.some(c => c.name === "last_known_status")) { + this.db.exec("ALTER TABLE client_hub_connection ADD COLUMN last_known_status TEXT NOT NULL DEFAULT ''"); + this.log.info("Migrated: added last_known_status to client_hub_connection"); + } + } catch { /* table may not exist yet */ } + } + private migrateOwnerFields(): void { const chunkCols = this.db.prepare("PRAGMA table_info(chunks)").all() as Array<{ name: string }>; if (!chunkCols.some((c) => c.name === "owner")) { @@ -762,6 +807,20 @@ export class SqliteStore { CREATE INDEX IF NOT EXISTS idx_hub_users_status ON hub_users(status); CREATE INDEX IF NOT EXISTS idx_hub_users_role ON hub_users(role); + CREATE TABLE IF NOT EXISTS hub_groups ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + created_at INTEGER NOT NULL + ); + + CREATE TABLE IF NOT EXISTS hub_group_members ( + group_id TEXT NOT NULL REFERENCES hub_groups(id) ON DELETE CASCADE, + user_id TEXT NOT NULL REFERENCES hub_users(id) ON DELETE CASCADE, + joined_at INTEGER NOT NULL, + PRIMARY KEY (group_id, user_id) + ); + CREATE TABLE IF NOT EXISTS hub_tasks ( id TEXT PRIMARY KEY, source_task_id TEXT NOT NULL, @@ -1731,16 +1790,18 @@ export class SqliteStore { setClientHubConnection(conn: ClientHubConnection): void { this.db.prepare(` - INSERT INTO client_hub_connection (id, hub_url, user_id, username, user_token, role, connected_at) - VALUES (1, ?, ?, ?, ?, ?, ?) + INSERT INTO client_hub_connection (id, hub_url, user_id, username, user_token, role, connected_at, identity_key, last_known_status) + VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT(id) DO UPDATE SET hub_url = excluded.hub_url, user_id = excluded.user_id, username = excluded.username, user_token = excluded.user_token, role = excluded.role, - connected_at = excluded.connected_at - `).run(conn.hubUrl, conn.userId, conn.username, conn.userToken, conn.role, conn.connectedAt); + connected_at = excluded.connected_at, + identity_key = excluded.identity_key, + last_known_status = excluded.last_known_status + `).run(conn.hubUrl, conn.userId, conn.username, conn.userToken, conn.role, conn.connectedAt, conn.identityKey ?? "", conn.lastKnownStatus ?? ""); } getClientHubConnection(): ClientHubConnection | null { @@ -1847,8 +1908,8 @@ export class SqliteStore { upsertHubUser(user: HubUserRecord): void { this.db.prepare(` - INSERT INTO hub_users (id, username, device_name, role, status, token_hash, created_at, approved_at) - VALUES (?, ?, ?, ?, ?, ?, ?, ?) + INSERT INTO hub_users (id, username, device_name, role, status, token_hash, created_at, approved_at, identity_key, left_at, removed_at, rejected_at, rejoin_requested_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT(id) DO UPDATE SET username = excluded.username, device_name = excluded.device_name, @@ -1856,21 +1917,32 @@ export class SqliteStore { status = excluded.status, token_hash = excluded.token_hash, created_at = excluded.created_at, - approved_at = excluded.approved_at - `).run(user.id, user.username, user.deviceName ?? "", user.role, user.status, user.tokenHash, user.createdAt, user.approvedAt); + approved_at = excluded.approved_at, + identity_key = excluded.identity_key, + left_at = excluded.left_at, + removed_at = excluded.removed_at, + rejected_at = excluded.rejected_at, + rejoin_requested_at = excluded.rejoin_requested_at + `).run(user.id, user.username, user.deviceName ?? "", user.role, user.status, user.tokenHash, user.createdAt, user.approvedAt, user.identityKey ?? "", user.leftAt ?? null, user.removedAt ?? null, user.rejectedAt ?? 
null, user.rejoinRequestedAt ?? null); } getHubUser(userId: string): HubUserRecord | null { const row = this.db.prepare('SELECT * FROM hub_users WHERE id = ?').get(userId) as HubUserRow | undefined; if (!row) return null; - return rowToHubUser(row); + const user = rowToHubUser(row); + user.groups = this.getGroupsForHubUser(userId); + return user; } listHubUsers(status?: UserStatus): HubUserRecord[] { const rows = status ? this.db.prepare('SELECT * FROM hub_users WHERE status = ? ORDER BY created_at').all(status) as HubUserRow[] : this.db.prepare('SELECT * FROM hub_users ORDER BY created_at').all() as HubUserRow[]; - return rows.map(rowToHubUser); + return rows.map(r => { + const user = rowToHubUser(r); + user.groups = this.getGroupsForHubUser(r.id); + return user; + }); } deleteHubUser(userId: string, cleanResources = false): boolean { @@ -1881,7 +1953,18 @@ export class SqliteStore { const result = this.db.prepare('DELETE FROM hub_users WHERE id = ?').run(userId); return result.changes > 0; } - const result = this.db.prepare("UPDATE hub_users SET status = 'removed', token_hash = '' WHERE id = ?").run(userId); + const result = this.db.prepare("UPDATE hub_users SET status = 'removed', token_hash = '', removed_at = ? WHERE id = ?").run(Date.now(), userId); + return result.changes > 0; + } + + findHubUserByIdentityKey(identityKey: string): HubUserRecord | null { + if (!identityKey) return null; + const row = this.db.prepare('SELECT * FROM hub_users WHERE identity_key = ?').get(identityKey) as HubUserRow | undefined; + return row ? rowToHubUser(row) : null; + } + + markHubUserLeft(userId: string): boolean { + const result = this.db.prepare("UPDATE hub_users SET status = 'left', token_hash = '', left_at = ? WHERE id = ?").run(Date.now(), userId); return result.changes > 0; } @@ -1889,6 +1972,35 @@ export class SqliteStore { this.db.prepare('UPDATE hub_users SET last_ip = ?, last_active_at = ? WHERE id = ?').run(ip, timestamp ?? 
Date.now(), userId);
   }
 
+  // ─── Hub Groups ───
+
+  upsertHubGroup(group: { id: string; name: string; description?: string; createdAt: number }): void {
+    this.db.prepare(`
+      INSERT INTO hub_groups (id, name, description, created_at)
+      VALUES (?, ?, ?, ?)
+      ON CONFLICT(id) DO UPDATE SET name = excluded.name, description = excluded.description
+    `).run(group.id, group.name, group.description ?? "", group.createdAt);
+  }
+
+  addHubGroupMember(groupId: string, userId: string, joinedAt: number): void {
+    this.db.prepare(`
+      INSERT OR IGNORE INTO hub_group_members (group_id, user_id, joined_at)
+      VALUES (?, ?, ?)
+    `).run(groupId, userId, joinedAt);
+  }
+
+  removeHubGroupMember(groupId: string, userId: string): void {
+    this.db.prepare('DELETE FROM hub_group_members WHERE group_id = ? AND user_id = ?').run(groupId, userId);
+  }
+
+  getGroupsForHubUser(userId: string): Array<{ id: string; name: string; description: string }> {
+    return this.db.prepare(`
+      SELECT g.id, g.name, g.description FROM hub_groups g
+      JOIN hub_group_members m ON m.group_id = g.id
+      WHERE m.user_id = ?
+    `).all(userId) as Array<{ id: string; name: string; description: string }>;
+  }
+
   getHubUserContributions(): Record<string, number> {
     const result: Record<string, number> = {};
     const memRows = this.db.prepare('SELECT source_user_id, COUNT(*) as cnt FROM hub_memories GROUP BY source_user_id').all() as Array<{ source_user_id: string; cnt: number }>;
@@ -2017,16 +2129,21 @@
     const limit = options?.maxResults ?? 10;
     const userId = options?.userId ??
""; const rows = this.db.prepare(` - SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, '' as group_name, hu.username as owner_name, + SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, + COALESCE(hg.name, '') as group_name, hu.username as owner_name, bm25(hub_chunks_fts) as rank FROM hub_chunks_fts f JOIN hub_chunks hc ON hc.rowid = f.rowid JOIN hub_tasks ht ON ht.id = hc.hub_task_id LEFT JOIN hub_users hu ON hu.id = ht.source_user_id + LEFT JOIN hub_groups hg ON hg.id = ht.group_id WHERE hub_chunks_fts MATCH ? + AND (ht.visibility = 'public' + OR ht.source_user_id = ? + OR EXISTS (SELECT 1 FROM hub_group_members gm WHERE gm.group_id = ht.group_id AND gm.user_id = ?)) ORDER BY rank LIMIT ? - `).all(sanitizeFtsQuery(query), limit) as HubSearchRow[]; + `).all(sanitizeFtsQuery(query), userId, userId, limit) as HubSearchRow[]; return rows.map((row, idx) => ({ hit: row, rank: idx + 1 })); } @@ -2051,7 +2168,10 @@ export class SqliteStore { FROM hub_embeddings he JOIN hub_chunks hc ON hc.id = he.chunk_id JOIN hub_tasks ht ON ht.id = hc.hub_task_id - `).all() as Array<{ chunk_id: string; vector: Buffer; dimensions: number }>; + WHERE ht.visibility = 'public' + OR ht.source_user_id = ? + OR EXISTS (SELECT 1 FROM hub_group_members gm WHERE gm.group_id = ht.group_id AND gm.user_id = ?) 
+ `).all(userId, userId) as Array<{ chunk_id: string; vector: Buffer; dimensions: number }>; return rows.map(r => ({ chunkId: r.chunk_id, vector: new Float32Array(r.vector.buffer, r.vector.byteOffset, r.dimensions), @@ -2060,14 +2180,19 @@ export class SqliteStore { getVisibleHubSearchHitByChunkId(chunkId: string, userId: string): HubSearchRow | null { const row = this.db.prepare(` - SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, '' as group_name, hu.username as owner_name, + SELECT hc.id, hc.content, hc.summary, hc.role, hc.created_at, ht.title as task_title, ht.visibility, + COALESCE(hg.name, '') as group_name, hu.username as owner_name, 0 as rank FROM hub_chunks hc JOIN hub_tasks ht ON ht.id = hc.hub_task_id LEFT JOIN hub_users hu ON hu.id = ht.source_user_id + LEFT JOIN hub_groups hg ON hg.id = ht.group_id WHERE hc.id = ? + AND (ht.visibility = 'public' + OR ht.source_user_id = ? + OR EXISTS (SELECT 1 FROM hub_group_members gm WHERE gm.group_id = ht.group_id AND gm.user_id = ?)) LIMIT 1 - `).get(chunkId) as HubSearchRow | undefined; + `).get(chunkId, userId, userId) as HubSearchRow | undefined; return row ?? 
null; } @@ -2561,6 +2686,8 @@ interface ClientHubConnection { userToken: string; role: UserRole; connectedAt: number; + identityKey?: string; + lastKnownStatus?: string; } interface ClientHubConnectionRow { @@ -2570,6 +2697,8 @@ interface ClientHubConnectionRow { user_token: string; role: string; connected_at: number; + identity_key?: string; + last_known_status?: string; } function rowToClientHubConnection(row: ClientHubConnectionRow): ClientHubConnection { @@ -2580,6 +2709,8 @@ function rowToClientHubConnection(row: ClientHubConnectionRow): ClientHubConnect userToken: row.user_token, role: row.role as UserRole, connectedAt: row.connected_at, + identityKey: row.identity_key || "", + lastKnownStatus: row.last_known_status || "", }; } @@ -2589,6 +2720,11 @@ interface HubUserRecord extends UserInfo { approvedAt: number | null; lastIp: string; lastActiveAt: number | null; + identityKey?: string; + leftAt?: number | null; + removedAt?: number | null; + rejectedAt?: number | null; + rejoinRequestedAt?: number | null; } interface HubUserRow { @@ -2602,6 +2738,11 @@ interface HubUserRow { approved_at: number | null; last_ip: string; last_active_at: number | null; + identity_key?: string; + left_at?: number | null; + removed_at?: number | null; + rejected_at?: number | null; + rejoin_requested_at?: number | null; } function rowToHubUser(row: HubUserRow): HubUserRecord { @@ -2617,6 +2758,11 @@ function rowToHubUser(row: HubUserRow): HubUserRecord { approvedAt: row.approved_at, lastIp: row.last_ip || "", lastActiveAt: row.last_active_at ?? null, + identityKey: row.identity_key || "", + leftAt: row.left_at ?? null, + removedAt: row.removed_at ?? null, + rejectedAt: row.rejected_at ?? null, + rejoinRequestedAt: row.rejoin_requested_at ?? 
null, }; } diff --git a/apps/memos-local-openclaw/src/types.ts b/apps/memos-local-openclaw/src/types.ts index b6d6e578c..4cac79131 100644 --- a/apps/memos-local-openclaw/src/types.ts +++ b/apps/memos-local-openclaw/src/types.ts @@ -252,6 +252,10 @@ export interface SkillEvolutionConfig { minConfidence?: number; maxSkillLines?: number; autoInstall?: boolean; + autoRecallSkills?: boolean; + autoRecallSkillLimit?: number; + preferUpgradeExisting?: boolean; + redactSensitiveInSkill?: boolean; /** Optional independent LLM config for skill evaluation/validation. Falls back to main summarizer if not set. */ summarizer?: SummarizerConfig; } @@ -347,6 +351,10 @@ export const DEFAULTS = { skillMinConfidence: 0.7, skillMaxLines: 400, skillAutoInstall: false, + skillAutoRecall: true, + skillAutoRecallLimit: 2, + skillPreferUpgrade: true, + skillRedactSensitive: true, } as const; // ─── Plugin Hooks (OpenClaw integration) ─── diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index 648b33c01..a0feb2306 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -1220,7 +1220,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
-
+
@@ -3696,7 +3696,14 @@ function switchView(view){ else if(view==='skills') loadSkills(); else if(view==='analytics') loadMetrics(); else if(view==='logs') loadLogs(); - else if(view==='settings'){loadConfig();loadModelHealth();} + else if(view==='settings'){loadConfig().then(function(){ + var notDismissed=localStorage.getItem('memos-team-guide-dismissed')!=='1'; + var sharingOn=document.getElementById('cfgSharingEnabled'); + var sharingNotEnabled=!sharingOn||!sharingOn.checked; + if(notDismissed&&sharingNotEnabled){ + switchSettingsTab('hub',document.querySelector('.settings-tab-btn[data-tab="hub"]')); + } + });loadModelHealth();} else if(view==='import'){if(!window._migrateRunning) migrateScan(false);} else if(view==='admin'){loadAdminData();} } @@ -3736,6 +3743,13 @@ function onTaskScopeChange(){ var _clientPendingPollTimer=null; var _lastSharingConnStatus=''; +function _updateScopeSelectorsVisibility(hubAvailable){ + var ids=['memorySearchScope','taskSearchScope','skillSearchScope']; + for(var i=0;i...':'')+''; html+=''; html+=''+total+' '+t('pagination.total')+''; - html+='
'; + html+='
'; return html; } @@ -4159,12 +4178,12 @@ async function loadAdminData(){ var fetches; if(isAdmin){ fetches=await Promise.all([ - fetch('/api/sharing/users').then(function(r){return r.json();}), - fetch('/api/admin/shared-tasks').then(function(r){return r.json();}), - fetch('/api/admin/shared-skills').then(function(r){return r.json();}), - fetch('/api/sharing/pending-users').then(function(r){return r.json();}), - fetch('/api/admin/shared-memories').then(function(r){return r.json();}) - ]); + fetch('/api/sharing/users').then(function(r){return r.json();}), + fetch('/api/admin/shared-tasks').then(function(r){return r.json();}), + fetch('/api/admin/shared-skills').then(function(r){return r.json();}), + fetch('/api/sharing/pending-users').then(function(r){return r.json();}), + fetch('/api/admin/shared-memories').then(function(r){return r.json();}) + ]); }else{ fetches=await Promise.all([ Promise.resolve({users:[]}), @@ -6735,8 +6754,8 @@ async function saveHubConfig(){ if(!td.ok){ var errMsg=td.error==='cannot_join_self'?t('sharing.cannotJoinSelf'):(td.error||t('settings.hub.test.fail')); done();toast(errMsg,'error');return; - } - }catch(e){ + } + }catch(e){ done();toast(t('settings.hub.test.fail')+': '+String(e),'error');return; } } @@ -7557,8 +7576,8 @@ function getFilterParams(){ if(scope==='local'){ p.set('owner',_currentAgentOwner); }else if(scope==='allLocal'){ - const owner=document.getElementById('filterOwner').value; - if(owner) p.set('owner',owner); + const owner=document.getElementById('filterOwner').value; + if(owner) p.set('owner',owner); } return p; } @@ -7588,11 +7607,11 @@ async function loadMemories(page,silent){ renderPagination(); }catch(e){ if(!silent){ - list.innerHTML=''; - totalPages=1;totalCount=0; + list.innerHTML=''; + totalPages=1;totalCount=0; _lastMemoriesFingerprint=''; - renderMemories([]); - renderPagination(); + renderMemories([]); + renderPagination(); } } } @@ -7619,9 +7638,9 @@ async function loadHubMemories(silent){ }catch(e){ 
if(!silent){ _lastMemoriesFingerprint=''; - document.getElementById('searchMeta').textContent='0'+t('search.meta.results'); - renderMemories([]); - document.getElementById('pagination').innerHTML=''; + document.getElementById('searchMeta').textContent='0'+t('search.meta.results'); + renderMemories([]); + document.getElementById('pagination').innerHTML=''; } } } diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index e98563d51..65773c3da 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -1232,7 +1232,7 @@ export class ViewerServer { body: JSON.stringify({ memory: { sourceChunkId: refreshedChunk.id, role: refreshedChunk.role, content: refreshedChunk.content, summary: refreshedChunk.summary, kind: refreshedChunk.kind, groupId: null, visibility: "public" } }), }); if (!isLocalShared) this.store.markMemorySharedLocally(chunkId); - if (hubClient.userId) { + if (hubClient.userId && this.ctx?.config?.sharing?.role === "hub") { const existing = this.store.getHubMemoryBySource(hubClient.userId, chunkId); this.store.upsertHubMemory({ id: (response as any)?.memoryId ?? existing?.id ?? 
crypto.randomUUID(), @@ -1789,10 +1789,13 @@ export class ViewerServer { const nickname = sharing.client?.nickname; const username = nickname || os.userInfo().username || "user"; const hostname = os.hostname() || "unknown"; + const persisted = this.store.getClientHubConnection(); + const existingIdentityKey = persisted?.identityKey || ""; const result = await hubRequestJson(hubUrl, "", "/api/v1/hub/join", { method: "POST", - body: JSON.stringify({ teamToken, username, deviceName: hostname, reapply: true }), + body: JSON.stringify({ teamToken, username, deviceName: hostname, reapply: true, identityKey: existingIdentityKey }), }) as any; + const returnedIdentityKey = String(result.identityKey || existingIdentityKey || ""); this.store.setClientHubConnection({ hubUrl, userId: String(result.userId || ""), @@ -1800,6 +1803,8 @@ export class ViewerServer { userToken: result.userToken || "", role: "member", connectedAt: Date.now(), + identityKey: returnedIdentityKey, + lastKnownStatus: result.status || "", }); this.jsonResponse(res, { ok: true, status: result.status || "pending" }); } catch (err) { @@ -2075,14 +2080,13 @@ export class ViewerServer { }, }), }); - const hubUserId = hubClient.userId; - if (hubUserId) { + if (hubClient.userId && this.ctx?.config?.sharing?.role === "hub") { const now = Date.now(); - const existing = this.store.getHubMemoryBySource(hubUserId, chunk.id); + const existing = this.store.getHubMemoryBySource(hubClient.userId, chunk.id); this.store.upsertHubMemory({ id: (response as any)?.memoryId ?? existing?.id ?? crypto.randomUUID(), sourceChunkId: chunk.id, - sourceUserId: hubUserId, + sourceUserId: hubClient.userId, role: chunk.role, content: chunk.content, summary: chunk.summary ?? 
"", @@ -2620,21 +2624,22 @@ export class ViewerServer { const isClient = newEnabled && newRole === "client"; if (wasClient && !isClient) { this.notifyHubLeave(); - if (newRole !== "client") { - this.store.clearClientHubConnection(); - this.log.info("Cleared client hub connection (role changed away from client)"); - } else { - this.log.info("Sharing disabled but preserving client hub connection for re-enable"); + const oldConn = this.store.getClientHubConnection(); + if (oldConn) { + this.store.setClientHubConnection({ ...oldConn, userToken: "", lastKnownStatus: "left" }); } + this.log.info("Client hub connection token cleared (sharing disabled or role changed), identity preserved"); } - // Detect switching to a different Hub while still in client mode if (wasClient && isClient) { const newClientAddr = String((merged.client as Record)?.hubAddress || ""); if (newClientAddr && oldClientHubAddress && normalizeHubUrl(newClientAddr) !== normalizeHubUrl(oldClientHubAddress)) { this.notifyHubLeave(); - this.store.clearClientHubConnection(); - this.log.info("Cleared client hub connection (switched to different Hub)"); + const oldConn = this.store.getClientHubConnection(); + if (oldConn) { + this.store.setClientHubConnection({ ...oldConn, hubUrl: normalizeHubUrl(newClientAddr), userToken: "", lastKnownStatus: "hub_changed" }); + } + this.log.info("Client hub connection token cleared (switched to different Hub), identity preserved"); } } @@ -2678,10 +2683,13 @@ export class ViewerServer { const nickname = String(clientCfg?.nickname || ""); const username = nickname || os.userInfo().username || "user"; const hostname = os.hostname() || "unknown"; + const persisted = this.store.getClientHubConnection(); + const existingIdentityKey = persisted?.identityKey || ""; const result = await hubRequestJson(hubUrl, "", "/api/v1/hub/join", { method: "POST", - body: JSON.stringify({ teamToken, username, deviceName: hostname }), + body: JSON.stringify({ teamToken, username, deviceName: 
hostname, identityKey: existingIdentityKey }), }) as any; + const returnedIdentityKey = String(result.identityKey || existingIdentityKey || ""); this.store.setClientHubConnection({ hubUrl, userId: String(result.userId || ""), @@ -2689,6 +2697,8 @@ export class ViewerServer { userToken: result.userToken || "", role: "member", connectedAt: Date.now(), + identityKey: returnedIdentityKey, + lastKnownStatus: result.status || "", }); this.log.info(`Auto-join on save: status=${result.status}, userId=${result.userId}`); if (result.userToken) { @@ -2776,10 +2786,10 @@ export class ViewerServer { this.log.warn(`Failed to update hub-auth.json: ${e}`); } } else { - const persisted = this.store.getClientHubConnection(); - if (persisted) { + const persistedConn = this.store.getClientHubConnection(); + if (persistedConn) { this.store.setClientHubConnection({ - ...persisted, + ...persistedConn, username: result.username, userToken: result.userToken, }); diff --git a/apps/memos-local-openclaw/tests/skill-runtime-flow.test.ts b/apps/memos-local-openclaw/tests/skill-runtime-flow.test.ts new file mode 100644 index 000000000..1a1a19394 --- /dev/null +++ b/apps/memos-local-openclaw/tests/skill-runtime-flow.test.ts @@ -0,0 +1,293 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { SqliteStore } from "../src/storage/sqlite"; +import { SkillInstaller, type SkillCompanionManifest } from "../src/skill/installer"; +import type { Logger, PluginContext, MemosLocalConfig } from "../src/types"; + +const noopLog: Logger = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +let tmpDir: string; +let store: SqliteStore; +let ctx: PluginContext; + +function createSkillDir(name: string, opts?: { + scripts?: Array<{ name: string; content: string }>; + references?: Array<{ name: string; content: string }>; + evals?: object; +}): string { + const skillDir = 
path.join(tmpDir, "skills-store", name); + fs.mkdirSync(skillDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "${name}" +description: "Test skill for ${name}" +version: 1 +--- + +## Steps +1. Do something +`, "utf-8"); + + if (opts?.scripts) { + const scriptsDir = path.join(skillDir, "scripts"); + fs.mkdirSync(scriptsDir, { recursive: true }); + for (const s of opts.scripts) { + fs.writeFileSync(path.join(scriptsDir, s.name), s.content, "utf-8"); + } + } + if (opts?.references) { + const refsDir = path.join(skillDir, "references"); + fs.mkdirSync(refsDir, { recursive: true }); + for (const r of opts.references) { + fs.writeFileSync(path.join(refsDir, r.name), r.content, "utf-8"); + } + } + if (opts?.evals) { + const evalsDir = path.join(skillDir, "evals"); + fs.mkdirSync(evalsDir, { recursive: true }); + fs.writeFileSync(path.join(evalsDir, "evals.json"), JSON.stringify(opts.evals), "utf-8"); + } + return skillDir; +} + +function insertSkillRecord(id: string, name: string, dirPath: string): void { + store.insertSkill({ + id, + name, + description: `Test skill ${name}`, + version: 1, + status: "active", + tags: "", + sourceType: "task", + dirPath, + installed: 0, + owner: "agent:main", + visibility: "private", + qualityScore: 8, + createdAt: Date.now(), + updatedAt: Date.now(), + }); +} + +beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-skill-flow-")); + const dbPath = path.join(tmpDir, "memos.db"); + store = new SqliteStore(dbPath, noopLog); + ctx = { + stateDir: tmpDir, + workspaceDir: tmpDir, + config: {} as MemosLocalConfig, + log: noopLog, + }; +}); + +afterEach(() => { + store.close(); + fs.rmSync(tmpDir, { recursive: true, force: true }); +}); + +// ─── Level 1: Pure document skill (inline) ─── + +describe("Level 1: pure doc skill (inline mode)", () => { + it("should classify a SKILL.md-only skill as inline", () => { + const skillDir = createSkillDir("pure-doc"); + 
insertSkillRecord("pure-1", "pure-doc", skillDir); + + const installer = new SkillInstaller(store, ctx); + const manifest = installer.getCompanionManifest("pure-1"); + + expect(manifest).not.toBeNull(); + expect(manifest!.hasCompanionFiles).toBe(false); + expect(manifest!.installMode).toBe("inline"); + expect(manifest!.files.length).toBe(0); + expect(manifest!.scriptsCount).toBe(0); + expect(manifest!.referencesCount).toBe(0); + }); + + it("should not consider evals-only as companion files", () => { + const skillDir = createSkillDir("evals-only", { + evals: { skill_name: "evals-only", evals: [{ id: 1, prompt: "test" }] }, + }); + insertSkillRecord("evals-1", "evals-only", skillDir); + + const installer = new SkillInstaller(store, ctx); + const manifest = installer.getCompanionManifest("evals-1"); + + expect(manifest!.hasCompanionFiles).toBe(false); + expect(manifest!.installMode).toBe("inline"); + expect(manifest!.evalsCount).toBe(1); + }); +}); + +// ─── Level 2: On-demand companion files ─── + +describe("Level 2: on_demand companion files", () => { + it("should classify a skill with small scripts as on_demand", () => { + const skillDir = createSkillDir("small-scripts", { + scripts: [ + { name: "deploy.sh", content: "#!/bin/bash\necho hello" }, + ], + references: [ + { name: "notes.md", content: "# Notes\nSome reference." 
}, + ], + }); + insertSkillRecord("small-1", "small-scripts", skillDir); + + const installer = new SkillInstaller(store, ctx); + const manifest = installer.getCompanionManifest("small-1"); + + expect(manifest!.hasCompanionFiles).toBe(true); + expect(manifest!.installMode).toBe("on_demand"); + expect(manifest!.scriptsCount).toBe(1); + expect(manifest!.referencesCount).toBe(1); + }); + + it("should be able to read companion file content without installing", () => { + const scriptContent = "#!/bin/bash\nset -e\nnpm run build\nnpm run deploy"; + const skillDir = createSkillDir("readable-scripts", { + scripts: [{ name: "deploy.sh", content: scriptContent }], + }); + insertSkillRecord("read-1", "readable-scripts", skillDir); + + const installer = new SkillInstaller(store, ctx); + + const result = installer.readCompanionFile("read-1", "scripts/deploy.sh"); + expect("content" in result).toBe(true); + if ("content" in result) { + expect(result.content).toBe(scriptContent); + expect(result.size).toBe(scriptContent.length); + } + }); + + it("should reject path traversal attempts", () => { + const skillDir = createSkillDir("path-traversal"); + insertSkillRecord("trav-1", "path-traversal", skillDir); + + const installer = new SkillInstaller(store, ctx); + const result = installer.readCompanionFile("trav-1", "../../etc/passwd"); + expect("error" in result).toBe(true); + }); + + it("should return error for non-existent companion file", () => { + const skillDir = createSkillDir("no-file"); + insertSkillRecord("nofile-1", "no-file", skillDir); + + const installer = new SkillInstaller(store, ctx); + const result = installer.readCompanionFile("nofile-1", "scripts/missing.sh"); + expect("error" in result).toBe(true); + }); +}); + +// ─── Level 3: Install recommended ─── + +describe("Level 3: install_recommended (large/complex skill)", () => { + it("should classify a skill with many scripts as install_recommended", () => { + const largeContent = "x".repeat(8000); + const skillDir = 
createSkillDir("many-scripts", { + scripts: [ + { name: "build.sh", content: largeContent }, + { name: "deploy.sh", content: largeContent }, + { name: "test.sh", content: largeContent }, + ], + }); + insertSkillRecord("many-1", "many-scripts", skillDir); + + const installer = new SkillInstaller(store, ctx); + const manifest = installer.getCompanionManifest("many-1"); + + expect(manifest!.hasCompanionFiles).toBe(true); + expect(manifest!.installMode).toBe("install_recommended"); + expect(manifest!.scriptsCount).toBe(3); + }); + + it("should classify a skill with large total size as install_recommended", () => { + const skillDir = createSkillDir("large-skill", { + scripts: [ + { name: "main.py", content: "x".repeat(12000) }, + ], + references: [ + { name: "api-docs.md", content: "x".repeat(10000) }, + ], + }); + insertSkillRecord("large-1", "large-skill", skillDir); + + const installer = new SkillInstaller(store, ctx); + const manifest = installer.getCompanionManifest("large-1"); + + expect(manifest!.hasCompanionFiles).toBe(true); + expect(manifest!.installMode).toBe("install_recommended"); + expect(manifest!.totalSize).toBeGreaterThan(20000); + }); + + it("install should copy all companion files to workspace", () => { + const skillDir = createSkillDir("install-test", { + scripts: [{ name: "run.sh", content: "#!/bin/bash\necho run" }], + references: [{ name: "arch.md", content: "# Architecture" }], + evals: { skill_name: "install-test", evals: [] }, + }); + insertSkillRecord("inst-1", "install-test", skillDir); + + const installer = new SkillInstaller(store, ctx); + const result = installer.install("inst-1"); + + expect(result.installed).toBe(true); + + const dstDir = path.join(tmpDir, "skills", "install-test"); + expect(fs.existsSync(path.join(dstDir, "SKILL.md"))).toBe(true); + expect(fs.existsSync(path.join(dstDir, "scripts", "run.sh"))).toBe(true); + expect(fs.existsSync(path.join(dstDir, "references", "arch.md"))).toBe(true); + 
expect(fs.existsSync(path.join(dstDir, "evals", "evals.json"))).toBe(true); + }); + + it("installed manifest should reflect installed state", () => { + const skillDir = createSkillDir("manifest-installed", { + scripts: [{ name: "deploy.sh", content: "echo deploy" }], + }); + insertSkillRecord("manif-1", "manifest-installed", skillDir); + + const installer = new SkillInstaller(store, ctx); + installer.install("manif-1"); + + const manifest = installer.getCompanionManifest("manif-1"); + expect(manifest!.installed).toBe(true); + expect(manifest!.installedPath).toContain("manifest-installed"); + }); +}); + +// ─── Manifest classification rules ─── + +describe("buildManifest classification", () => { + it("should handle missing skill directory gracefully", () => { + const manifest = SkillInstaller.buildManifest("/nonexistent/path", false, "missing"); + expect(manifest.hasCompanionFiles).toBe(false); + expect(manifest.installMode).toBe("inline"); + expect(manifest.files.length).toBe(0); + }); + + it("should count files correctly across types", () => { + const skillDir = createSkillDir("count-test", { + scripts: [ + { name: "a.sh", content: "echo a" }, + { name: "b.py", content: "print('b')" }, + ], + references: [ + { name: "x.md", content: "# X" }, + { name: "y.md", content: "# Y" }, + { name: "z.md", content: "# Z" }, + ], + evals: { evals: [{ id: 1 }] }, + }); + + const manifest = SkillInstaller.buildManifest(skillDir, false, "count-test"); + expect(manifest.scriptsCount).toBe(2); + expect(manifest.referencesCount).toBe(3); + expect(manifest.evalsCount).toBe(1); + expect(manifest.files.length).toBe(6); + }); +}); diff --git a/apps/memos-local-openclaw/tests/skill-v1-enhancements.test.ts b/apps/memos-local-openclaw/tests/skill-v1-enhancements.test.ts new file mode 100644 index 000000000..df6fdbab3 --- /dev/null +++ b/apps/memos-local-openclaw/tests/skill-v1-enhancements.test.ts @@ -0,0 +1,270 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; 
+import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { SqliteStore } from "../src/storage/sqlite"; +import { SkillInstaller } from "../src/skill/installer"; +import { SkillValidator, ValidationResult } from "../src/skill/validator"; +import { SkillGenerator } from "../src/skill/generator"; +import type { Logger, PluginContext, MemosLocalConfig } from "../src/types"; + +const noopLog: Logger = { + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, +}; + +let tmpDir: string; +let store: SqliteStore; +let ctx: PluginContext; + +beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "memos-skill-v1-")); + const dbPath = path.join(tmpDir, "memos.db"); + store = new SqliteStore(dbPath, noopLog); + ctx = { + stateDir: tmpDir, + workspaceDir: tmpDir, + config: { + skillEvolution: { redactSensitiveInSkill: true }, + } as MemosLocalConfig, + log: noopLog, + }; +}); + +afterEach(() => { + store.close(); + fs.rmSync(tmpDir, { recursive: true, force: true }); +}); + +// ─── Installer: clean sync ─── + +describe("SkillInstaller clean sync", () => { + it("should remove old files when reinstalling a skill", () => { + const skillDir = path.join(tmpDir, "skill-source", "test-skill"); + const scriptsDir = path.join(skillDir, "scripts"); + fs.mkdirSync(scriptsDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "test-skill" +description: "A test skill for clean sync" +version: 1 +--- + +## Steps +1. 
Do something +`, "utf-8"); + fs.writeFileSync(path.join(scriptsDir, "deploy.sh"), "#!/bin/bash\necho deploy", "utf-8"); + + const skillId = "test-skill-id-001"; + store.insertSkill({ + id: skillId, + name: "test-skill", + description: "A test skill", + version: 1, + status: "active", + tags: "", + sourceType: "task", + dirPath: skillDir, + installed: 0, + owner: "agent:main", + visibility: "private", + qualityScore: 8, + createdAt: Date.now(), + updatedAt: Date.now(), + }); + + const installer = new SkillInstaller(store, ctx); + const result = installer.install(skillId); + expect(result.installed).toBe(true); + + const dstDir = path.join(tmpDir, "skills", "test-skill"); + expect(fs.existsSync(path.join(dstDir, "scripts", "deploy.sh"))).toBe(true); + + // Now update the source: remove deploy.sh, add new.sh + fs.unlinkSync(path.join(scriptsDir, "deploy.sh")); + fs.writeFileSync(path.join(scriptsDir, "new.sh"), "#!/bin/bash\necho new", "utf-8"); + + // Sync should do clean install + installer.syncIfInstalled("test-skill"); + + expect(fs.existsSync(path.join(dstDir, "scripts", "new.sh"))).toBe(true); + expect(fs.existsSync(path.join(dstDir, "scripts", "deploy.sh"))).toBe(false); + }); +}); + +// ─── Validator: companion consistency ─── + +describe("SkillValidator companion checks", () => { + it("should warn about missing referenced scripts", async () => { + const skillDir = path.join(tmpDir, "skill-validate"); + fs.mkdirSync(skillDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "deploy-helper" +description: "Helps with deployment tasks" +version: 1 +--- + +## Steps +1. Run \`scripts/deploy.sh\` to deploy +2. Check \`scripts/verify.sh\` for health +3. 
See \`references/arch.md\` for architecture +`, "utf-8"); + + const validator = new SkillValidator(ctx); + const result = await validator.validate(skillDir, { skipLLM: true }); + + expect(result.valid).toBe(true); + expect(result.warnings.some(w => w.includes("scripts/deploy.sh") && w.includes("does not exist"))).toBe(true); + expect(result.warnings.some(w => w.includes("scripts/verify.sh") && w.includes("does not exist"))).toBe(true); + expect(result.warnings.some(w => w.includes("references/arch.md") && w.includes("does not exist"))).toBe(true); + }); + + it("should warn about orphaned scripts not referenced in SKILL.md", async () => { + const skillDir = path.join(tmpDir, "skill-orphan"); + const scriptsDir = path.join(skillDir, "scripts"); + fs.mkdirSync(scriptsDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "orphan-test" +description: "Test orphaned file detection" +version: 1 +--- + +## Steps +1. Do something without scripts. +This skill has no script references in SKILL.md. +`, "utf-8"); + fs.writeFileSync(path.join(scriptsDir, "old-deploy.sh"), "#!/bin/bash\necho old", "utf-8"); + + const validator = new SkillValidator(ctx); + const result = await validator.validate(skillDir, { skipLLM: true }); + + expect(result.valid).toBe(true); + expect(result.warnings.some(w => w.includes("old-deploy.sh") && w.includes("not referenced"))).toBe(true); + }); + + it("should warn about invalid evals.json structure", async () => { + const skillDir = path.join(tmpDir, "skill-bad-evals"); + const evalsDir = path.join(skillDir, "evals"); + fs.mkdirSync(evalsDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "evals-test" +description: "Test evals validation" +version: 1 +--- + +## Steps +1. 
Test something +`, "utf-8"); + fs.writeFileSync(path.join(evalsDir, "evals.json"), "this is not json", "utf-8"); + + const validator = new SkillValidator(ctx); + const result = await validator.validate(skillDir, { skipLLM: true }); + + expect(result.warnings.some(w => w.includes("evals.json") && w.includes("not valid JSON"))).toBe(true); + }); +}); + +// ─── Validator: secret scanning ─── + +describe("SkillValidator secret scanning", () => { + it("should detect API keys in SKILL.md", async () => { + const skillDir = path.join(tmpDir, "skill-secret"); + fs.mkdirSync(skillDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "secret-test" +description: "Test secret detection" +version: 1 +--- + +## Configuration +Set your API key: +api_key="sk-abcdef1234567890abcdefghij" +`, "utf-8"); + + const validator = new SkillValidator(ctx); + const result = await validator.validate(skillDir, { skipLLM: true }); + + expect(result.warnings.some(w => w.includes("secret") || w.includes("API key"))).toBe(true); + }); + + it("should detect secrets in scripts directory", async () => { + const skillDir = path.join(tmpDir, "skill-script-secret"); + const scriptsDir = path.join(skillDir, "scripts"); + fs.mkdirSync(scriptsDir, { recursive: true }); + fs.writeFileSync(path.join(skillDir, "SKILL.md"), `--- +name: "script-secret" +description: "Test script secret detection" +version: 1 +--- + +## Steps +1. 
Run deploy +`, "utf-8"); + fs.writeFileSync(path.join(scriptsDir, "deploy.sh"), `#!/bin/bash +export TOKEN="Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.12345678901234567890" +`, "utf-8"); + + const validator = new SkillValidator(ctx); + const result = await validator.validate(skillDir, { skipLLM: true }); + + expect(result.warnings.some(w => w.includes("secret") || w.includes("Bearer") || w.includes("token"))).toBe(true); + }); +}); + +// ─── Generator: redaction ─── + +describe("SkillGenerator redactSensitive", () => { + it("should redact OpenAI API keys", () => { + const input = "Use api key sk-abcdef1234567890abcdefghij for testing"; + const result = SkillGenerator.redactSensitive(input); + expect(result).not.toContain("sk-abcdef1234567890abcdefghij"); + expect(result).toContain("sk-***REDACTED***"); + }); + + it("should redact Bearer tokens", () => { + const input = "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.123456789012345678"; + const result = SkillGenerator.redactSensitive(input); + expect(result).not.toContain("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"); + expect(result).toContain("Bearer ***REDACTED***"); + }); + + it("should redact AWS access key IDs", () => { + const input = "AWS key: AKIAIOSFODNN7EXAMPLE"; + const result = SkillGenerator.redactSensitive(input); + expect(result).not.toContain("AKIAIOSFODNN7EXAMPLE"); + expect(result).toContain("AKIA***REDACTED***"); + }); + + it("should redact user paths", () => { + const input = "File at /Users/johndoe/projects/secret"; + const result = SkillGenerator.redactSensitive(input); + expect(result).not.toContain("johndoe"); + expect(result).toContain("/Users/****/"); + }); + + it("should redact generic secrets in assignments", () => { + const input = `config.api_key="my-super-secret-key-123"`; + const result = SkillGenerator.redactSensitive(input); + expect(result).not.toContain("my-super-secret-key-123"); + expect(result).toContain("***REDACTED***"); + }); + + it("should not alter text without 
secrets", () => { + const input = "This is a normal text about deploying a React app with npm."; + const result = SkillGenerator.redactSensitive(input); + expect(result).toBe(input); + }); +}); + +// ─── Config defaults ─── + +describe("Skill config defaults", () => { + it("should have correct default values from DEFAULTS", async () => { + const { DEFAULTS } = await import("../src/types"); + expect(DEFAULTS.skillAutoRecall).toBe(true); + expect(DEFAULTS.skillAutoRecallLimit).toBe(2); + expect(DEFAULTS.skillPreferUpgrade).toBe(true); + expect(DEFAULTS.skillRedactSensitive).toBe(true); + }); +}); diff --git a/apps/memos-local-openclaw/tests/task-processor.test.ts b/apps/memos-local-openclaw/tests/task-processor.test.ts index 60e2cf4dc..c5cabc67a 100644 --- a/apps/memos-local-openclaw/tests/task-processor.test.ts +++ b/apps/memos-local-openclaw/tests/task-processor.test.ts @@ -285,6 +285,8 @@ describe("TaskProcessor", () => { const now = Date.now(); const gap = 121 * 60 * 1000; + vi.spyOn((processor as any).summarizer, "judgeNewTopic").mockResolvedValue(null); + insertTestChunk({ id: "d1", sessionKey: "s1", role: "user", content: "what is my name and who am I please tell me", createdAt: now }); insertTestChunk({ id: "d2", sessionKey: "s1", role: "assistant", content: "I do not have any information about your name or identity in my memory at this time", createdAt: now + 1 }); insertTestChunk({ id: "d3", sessionKey: "s1", role: "user", content: "what is my name and who am I please tell me", createdAt: now + 2 }); diff --git a/apps/memos-local-openclaw/tests/viewer-sharing.test.ts b/apps/memos-local-openclaw/tests/viewer-sharing.test.ts index 87b2e9184..7151d2e54 100644 --- a/apps/memos-local-openclaw/tests/viewer-sharing.test.ts +++ b/apps/memos-local-openclaw/tests/viewer-sharing.test.ts @@ -60,8 +60,17 @@ describe("viewer sharing endpoints", () => { }); const joinJson = await join.json(); expect(join.status).toBe(200); - expect(joinJson.status).toBe("active"); - 
expect(joinJson.userToken).toBeTruthy(); + expect(joinJson.status).toBe("pending"); + expect(joinJson.userId).toBeTruthy(); + + const approveBob = await fetch("http://127.0.0.1:19831/api/v1/hub/admin/approve-user", { + method: "POST", + headers: { "content-type": "application/json", authorization: `Bearer ${adminToken}` }, + body: JSON.stringify({ userId: joinJson.userId, username: "bob" }), + }); + expect(approveBob.status).toBe(200); + const bobToken = (await approveBob.json()).token; + expect(bobToken).toBeTruthy(); viewer = new ViewerServer({ store: viewerStore, @@ -100,7 +109,8 @@ describe("viewer sharing endpoints", () => { }); const eveJson = await joinEve.json(); expect(joinEve.status).toBe(200); - expect(eveJson.userToken).toBeTruthy(); + expect(eveJson.status).toBe("pending"); + expect(eveJson.userId).toBeTruthy(); }); it("serves split sharing memory and skill search payloads", async () => { diff --git a/apps/memos-local-openclaw/tests/viewer-ui.test.ts b/apps/memos-local-openclaw/tests/viewer-ui.test.ts index deb2c406d..b8b77d94f 100644 --- a/apps/memos-local-openclaw/tests/viewer-ui.test.ts +++ b/apps/memos-local-openclaw/tests/viewer-ui.test.ts @@ -3,15 +3,16 @@ import { viewerHTML } from "../src/viewer/html"; describe("viewer UI T12", () => { it("contains the full v4 sharing UI entry points", () => { - expect(viewerHTML).toContain('id="memorySearchScope"'); - expect(viewerHTML).toContain('id="taskShareActions"'); - expect(viewerHTML).toContain('id="skillSearchInput"'); - expect(viewerHTML).toContain('id="hubSkillsList"'); - expect(viewerHTML).toContain('id="settingsSharingConfig"'); - expect(viewerHTML).toContain('/api/sharing/status'); - expect(viewerHTML).toContain('renderSharingMemorySearchResults'); - expect(viewerHTML).toContain('/api/sharing/search/skills'); - expect(viewerHTML).toContain('/api/sharing/tasks/share'); - expect(viewerHTML).toContain('/api/sharing/skills/pull'); + const html = viewerHTML(); + 
expect(html).toContain('id="memorySearchScope"'); + expect(html).toContain('id="taskShareActions"'); + expect(html).toContain('id="skillSearchInput"'); + expect(html).toContain('id="hubSkillsList"'); + expect(html).toContain('id="settingsSharingConfig"'); + expect(html).toContain('/api/sharing/status'); + expect(html).toContain('renderSharingMemorySearchResults'); + expect(html).toContain('/api/sharing/search/skills'); + expect(html).toContain('/api/sharing/tasks/share'); + expect(html).toContain('/api/sharing/skills/pull'); }); }); diff --git a/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md b/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md index c59b85550..c7897bb49 100644 --- a/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md +++ b/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md @@ -1,86 +1,211 @@ --- name: memos-memory-guide -description: Use the MemOS Local memory system to search and use the user's past conversations. Use this skill whenever the user refers to past chats, their own preferences or history, or when you need to answer from prior context. When auto-recall returns nothing (long or unclear user query), generate your own short search query and call memory_search. Use task_summary when you need full task context, skill_get for experience guides, and memory_timeline to expand around a memory hit. +description: "Use the MemOS Local memory system to search and use the user's past conversations. Use this skill whenever the user refers to past chats, their own preferences or history, or when you need to answer from prior context. When auto-recall returns nothing (long or unclear user query), generate your own short search query and call memory_search. 
Available tools: memory_search, memory_get, memory_write_public, memory_share, memory_unshare, task_summary, skill_get, skill_search, skill_install, skill_publish, skill_unpublish, network_memory_detail, network_skill_pull, network_team_info, memory_timeline, memory_viewer." --- # MemOS Local Memory — Agent Guide -This skill describes how to use the MemOS memory tools so you can reliably search and use the user's long-term conversation history. +This skill describes how to use the MemOS memory tools so you can reliably search and use the user's long-term conversation history, query team-shared data, share tasks, and discover or pull reusable skills. + +Two sharing planes exist and must not be confused: + +- **Local agent sharing:** visible to agents in the same OpenClaw workspace only. +- **Team sharing:** visible to teammates through the configured team server. ## How memory is provided each turn - **Automatic recall (hook):** At the start of each turn, the system runs a memory search using the user's current message and injects relevant past memories into your context. You do not need to call any tool for that. -- **When that is not enough:** If the user's message is very long, vague, or the automatic search returns **no memories**, you should **generate your own short, focused query** and call `memory_search` yourself. For example: - - User sent a long paragraph → extract 1–2 key topics or a short question and search with that. - - Auto-recall said "no memories" or you see no memory block → call `memory_search` with a query you derive (e.g. the user's name, a topic they often mention, or a rephrased question). -- **When you need more detail:** Search results only give excerpts and IDs. Use the tools below to fetch full task context, skill content, or surrounding messages. 
+- **When that is not enough:** If the user's message is very long, vague, or the automatic search returns **no memories**, you should **generate your own short, focused query** and call `memory_search` yourself. +- **Memory isolation:** Each agent can only see its own local private memories and local `public` memories. Team-shared data only appears when you search with `scope="group"` or `scope="all"`. ## Tools — what they do and when to call ### memory_search -- **What it does:** Searches the user's stored conversation memory by a natural-language query. Returns a list of relevant excerpts with `chunkId` and optionally `task_id`. +- **What it does:** Search long-term conversation memory for past conversations, user preferences, decisions, and experiences. Returns relevant excerpts with `chunkId` and optionally `task_id`. Only returns memories belonging to the current agent or marked as public. - **When to call:** - - The automatic recall did not run or returned nothing (e.g. no `` block, or a note that no memories were found). - - The user's query is long or unclear — **generate a short query yourself** (keywords, rephrased question, or a clear sub-question) and call `memory_search(query="...")`. - - You need to search with a different angle (e.g. filter by `role='user'` to find what the user said, or use a more specific query). -- **Parameters:** `query` (required), optional `minScore`, `role` (e.g. `"user"`). -- **Output:** List of items with role, excerpt, `chunkId`, and sometimes `task_id`. Use those IDs with the tools below when you need more context. + - The automatic recall did not run or returned nothing. + - The user's query is long or unclear — **generate a short query yourself** and call `memory_search(query="...")`. + - You need to search with a different angle (e.g. filter by `role='user'`). +- **Parameters:** + - `query` (string, **required**) — Natural language search query. 
+ - `scope` (string, optional) — `'local'` (default) for current agent + local shared memories, or `'group'` / `'all'` to include team-shared memories. + - `maxResults` (number, optional) — Increase when the first search is too narrow. + - `minScore` (number, optional) — Lower slightly if recall is too strict. + - `role` (string, optional) — Filter local results by `'user'`, `'assistant'`, `'tool'`, or `'system'`. + +### memory_get + +- **What it does:** Get the full original text of a memory chunk. Use to verify exact details from a search hit. +- **When to call:** A `memory_search` hit looks relevant but you need to see the complete original content, not just the summary/excerpt. +- **Parameters:** + - `chunkId` (string, **required**) — The chunkId from a search hit. + - `maxChars` (number, optional) — Max characters to return (default 4000, max 12000). + +### memory_write_public + +- **What it does:** Create a brand new local shared memory. These memories are visible to all agents in the same OpenClaw workspace during `memory_search`. This does **not** publish anything to the team server. +- **When to call:** In multi-agent or collaborative scenarios, when you want to create a new persistent shared note from scratch (e.g. shared decisions, conventions, configurations, workflows). Do not use it if you already have a specific memory chunk to expose. +- **Parameters:** + - `content` (string, **required**) — The content to write to local shared memory. + - `summary` (string, optional) — Short summary of the content. + +### memory_share + +- **What it does:** Share an existing memory either with local OpenClaw agents, to the team, or to both. +- **When to call:** You already have a useful memory chunk and want to expose it beyond the current agent. +- **Do not use when:** You are creating a new shared note from scratch. In that case use `memory_write_public`. +- **Parameters:** + - `chunkId` (string, **required**) — Existing memory chunk ID. 
+ - `target` (string, optional) — `'agents'` (default), `'hub'`, or `'both'`. + - `visibility` (string, optional) — Team visibility when target includes team: `'public'` (default) or `'group'`. + - `groupId` (string, optional) — Optional team group ID when `visibility='group'`. + +### memory_unshare + +- **What it does:** Remove an existing memory from local agent sharing, team sharing, or both. +- **When to call:** A memory should no longer be visible outside the current agent or should be removed from the team. +- **Parameters:** + - `chunkId` (string, **required**) — Existing memory chunk ID. + - `target` (string, optional) — `'agents'`, `'hub'`, or `'all'` (default). + - `privateOwner` (string, optional) — Rare fallback only for older public memories that have no recorded original owner. ### task_summary -- **What it does:** Returns the full task summary for a given `task_id`: title, status, and the complete narrative summary of that conversation task (steps, decisions, URLs, commands, etc.). -- **When to call:** A `memory_search` hit included a `task_id` and you need the full story of that task (e.g. what was done, what the user decided, what failed or succeeded). -- **Parameters:** `taskId` (from a search hit). -- **Effect:** You get one coherent summary of the whole task instead of isolated excerpts. +- **What it does:** Get the detailed summary of a complete task: title, status, narrative summary, and related skills. Use when `memory_search` returns a hit with a `task_id` and you need the full story. Preserves critical information: URLs, file paths, commands, error codes, step-by-step instructions. +- **When to call:** A `memory_search` hit included a `task_id` and you need the full context of that task. +- **Parameters:** + - `taskId` (string, **required**) — The task_id from a memory_search hit. ### skill_get -- **What it does:** Returns the content of a learned skill (experience guide) by `skillId` or by `taskId`. 
If you pass `taskId`, the system finds the skill linked to that task. -- **When to call:** A search hit has a `task_id` and the task is the kind that has a "how to do this again" guide (e.g. a workflow the user has run before). Use this to follow the same approach or reuse steps. -- **Parameters:** `skillId` (direct) or `taskId` (lookup). -- **Effect:** You receive the full SKILL.md-style guide. You can then call `skill_install(skillId)` if the user or you want that skill loaded for future turns. +- **What it does:** Retrieve a proven skill (experience guide) by `skillId` or by `taskId`. If you pass a `taskId`, the system will find the associated skill automatically. +- **When to call:** A search hit has a `task_id` and the task has a "how to do this again" guide. Use this to follow the same approach or reuse steps. +- **Parameters:** + - `skillId` (string, optional) — Direct skill ID. + - `taskId` (string, optional) — Task ID — will look up the skill linked to this task. + - At least one of `skillId` or `taskId` must be provided. + +### skill_search + +- **What it does:** Search available skills by natural language. Searches your own skills, local shared skills, or both. It can also include team skills. +- **When to call:** The current task requires a capability or guide you don't have. Use `skill_search` to find one first; after finding it, use `skill_get` to read it, then `skill_install` to load it for future turns. +- **Parameters:** + - `query` (string, **required**) — Natural language description of the needed skill. + - `scope` (string, optional) — `'mix'` (default, self + local shared), `'self'`, `'public'` (local shared only), or `'group'` / `'all'` to include team results. ### skill_install -- **What it does:** Installs a skill (by `skillId`) into the workspace so it is loaded in future sessions. -- **When to call:** After `skill_get` when the skill is useful for ongoing use (e.g. the user's recurring workflow). 
Optional; only when you want the skill to be permanently available. +- **What it does:** Install a learned skill into the agent workspace so it becomes permanently available. After installation, the skill will be loaded automatically in future sessions. +- **When to call:** After `skill_get` when the skill is useful for ongoing use. +- **Parameters:** + - `skillId` (string, **required**) — The skill ID to install. + +### skill_publish + +- **What it does:** Share a skill with local agents, or publish it to the team. +- **When to call:** You have a useful skill that other agents or your team could benefit from. +- **Parameters:** + - `skillId` (string, **required**) — The skill ID to publish. + - `target` (string, optional) — `'agents'` (default) or `'hub'`. + - `visibility` (string, optional) — When `target='hub'`, use `'public'` (default) or `'group'`. + - `groupId` (string, optional) — Optional team group ID when `target='hub'` and `visibility='group'`. + - `scope` (string, optional) — Backward-compatible alias for old calls. Prefer `target` + `visibility` in new calls. + +### skill_unpublish + +- **What it does:** Stop local agent sharing, remove a team-published copy, or do both. +- **When to call:** You want to stop sharing a previously published skill. +- **Parameters:** + - `skillId` (string, **required**) — The skill ID to unpublish. + - `target` (string, optional) — `'agents'` (default), `'hub'`, or `'all'`. + +### network_memory_detail + +- **What it does:** Fetches the full content behind a team search hit. +- **When to call:** A `memory_search` result came from the team and you need the full shared memory content. +- **Parameters:** `remoteHitId`. + +### task_share / task_unshare + +- **What they do:** Share a local task to the team, or remove it later. +- **When to call:** A task is valuable to your group or to the whole team and should be discoverable via shared search. +- **Parameters:** `taskId`, plus sharing visibility/scope when required. 
+ +### network_skill_pull + +- **What it does:** Pulls a team-shared skill bundle down into local storage. +- **When to call:** `skill_search` found a useful team skill and you want to use it locally or offline. - **Parameters:** `skillId`. +### network_team_info + +- **What it does:** Returns current team server connection information, user, role, and groups. +- **When to call:** You need to confirm whether team sharing is configured or which groups the current client belongs to. +- **Call this first before:** `memory_share(... target='hub'|'both')`, `memory_unshare(... target='hub'|'all')`, `task_share`, `task_unshare`, `skill_publish(... target='hub')`, `skill_unpublish(... target='hub'|'all')`, or `network_skill_pull`. +- **Parameters:** none. + ### memory_timeline -- **What it does:** Expands context around a single memory chunk: returns the surrounding conversation messages (±N turns) so you see what was said before and after that excerpt. -- **When to call:** A `memory_search` hit is relevant but you need the surrounding dialogue (e.g. who said what next, or the exact follow-up question). -- **Parameters:** `chunkId` (from a search hit), optional `window` (default 2). -- **Effect:** You get a short, linear slice of the conversation around that chunk. +- **What it does:** Expand context around a memory search hit. Pass the `chunkId` from a search result to read the surrounding conversation messages. +- **When to call:** A `memory_search` hit is relevant but you need the surrounding dialogue. +- **Parameters:** + - `chunkId` (string, **required**) — The chunkId from a memory_search hit. + - `window` (number, optional) — Context window ±N messages, default 2. ### memory_viewer -- **What it does:** Returns the URL of the MemOS Memory Viewer (web UI) where the user can browse, search, and manage their memories. -- **When to call:** The user asks how to view their memories, open the memory dashboard, or manage stored data. 
+- **What it does:** Show the MemOS Memory Viewer URL. Call this when the user asks how to view, browse, manage, or check their memories. Returns the URL the user can open in their browser. +- **When to call:** The user asks where to see or manage their memories. - **Parameters:** None. -- **Effect:** You can tell the user to open that URL in a browser. ## Quick decision flow 1. **No memories in context or auto-recall reported nothing** - → Call `memory_search` with a **self-generated short query** (e.g. key topic or rephrased question). + → Call `memory_search(query="...")` with a **self-generated short query**. + +2. **Need to see the full original text of a search hit** + → Call `memory_get(chunkId="...")`. + +3. **Search returned hits with `task_id` and you need full context** + → Call `task_summary(taskId="...")`. + +4. **Task has an experience guide you want to follow** + → Call `skill_get(taskId="...")` or `skill_get(skillId="...")`. Optionally `skill_install(skillId="...")` for future use. -2. **Search returned hits with `task_id` and you need full context** - → Call `task_summary(taskId)`. +5. **You need the exact surrounding conversation of a hit** + → Call `memory_timeline(chunkId="...")`. -3. **Task has an experience guide you want to follow** - → Call `skill_get(taskId=...)` (or `skill_get(skillId=...)` if you have the id). Optionally `skill_install(skillId)` for future use. +6. **You need a capability/guide that you don't have** + → Call `skill_search(query="...", scope="mix")` to discover available skills. -4. **You need the exact surrounding conversation of a hit** - → Call `memory_timeline(chunkId=...)`. +7. **You have new shared knowledge useful to all local agents** + → Call `memory_write_public(content="...")`. -5. **User asks where to see or manage their memories** +8. 
**You already have an existing memory chunk and want to expose or hide it** + → Call `memory_share(chunkId="...", target="agents|hub|both")` or `memory_unshare(chunkId="...", target="agents|hub|all")`. + +9. **You are about to do anything team-sharing-related** + → Call `network_team_info()` first if team server availability is uncertain. + +10. **You want to share/stop sharing a skill with local agents or team** + → Prefer `skill_publish(skillId="...", target="agents|hub", visibility=...)` and `skill_unpublish(skillId="...", target="agents|hub|all")`. + +11. **User asks where to see or manage their memories** → Call `memory_viewer()` and share the URL. ## Writing good search queries - Prefer **short, focused** queries (a few words or one clear question). -- Use **concrete terms**: names, topics, tools, or decisions (e.g. "preferred editor", "deploy script", "API key setup"). +- Use **concrete terms**: names, topics, tools, or decisions. - If the user's message is long, **derive one or two sub-queries** rather than pasting the whole message. -- Use `role='user'` when you specifically want to find what the user said (e.g. preferences, past questions). +- Use `role='user'` when you specifically want to find what the user said. + +## Memory ownership and agent isolation + +Each memory is tagged with an `owner` (e.g. `agent:main`, `agent:sales-bot`). This is handled **automatically** — you do not need to pass any owner parameter. + +- **Your memories:** All tools (`memory_search`, `memory_get`, `memory_timeline`) automatically scope queries to your agent's own memories. +- **Local shared memories:** Memories marked as local shared are visible to all agents in the same OpenClaw workspace. Use `memory_write_public` to create them, or `memory_share(target='agents')` to expose an existing chunk. +- **Cross-agent isolation:** You cannot see memories owned by other agents (unless they are public). 
+- **How it works:** The system identifies your agent ID from the OpenClaw runtime context and applies owner filtering automatically on every search, recall, and retrieval. From fc0df30577fabe973044bf28bb0d76b585042dd7 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Fri, 20 Mar 2026 16:58:24 +0800 Subject: [PATCH 65/85] fix(memos-local): add groups field to HubStatusInfo type Made-with: Cursor --- apps/memos-local-openclaw/src/client/connector.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/memos-local-openclaw/src/client/connector.ts b/apps/memos-local-openclaw/src/client/connector.ts index 3d7bbb4d9..8961a98f4 100644 --- a/apps/memos-local-openclaw/src/client/connector.ts +++ b/apps/memos-local-openclaw/src/client/connector.ts @@ -21,6 +21,7 @@ export interface HubStatusInfo { username: string; role: UserRole; status: UserStatus | string; + groups?: Array<{ id: string; name: string }>; }; } From e3aec4505143d5479efffbc89dd45e964f35e818 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Fri, 20 Mar 2026 17:31:22 +0800 Subject: [PATCH 66/85] fix(memos-local): fix token expiry misdiagnosed as user removal, improve notification and admin panel refresh MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix 401 misdiagnosis: token expiry was incorrectly treated as "removed" status. Now queries registration-status for real state and auto-renews token if active. 
- Add membership_approved/rejected notification types with i18n support - Reconnect SSE and load notifications immediately on pending→active transition - Shorten client pending poll interval from 10s to 5s - Add user.status to admin fingerprint to catch status changes - Force admin data refresh on view switch and after approve/reject/remove/role-change - Add notifPollImmediate for instant notification check on new SSE connections Made-with: Cursor --- .../src/client/connector.ts | 57 +++++++++--- apps/memos-local-openclaw/src/viewer/html.ts | 88 +++++++++++++------ .../memos-local-openclaw/src/viewer/server.ts | 29 +++++- 3 files changed, 130 insertions(+), 44 deletions(-) diff --git a/apps/memos-local-openclaw/src/client/connector.ts b/apps/memos-local-openclaw/src/client/connector.ts index 8961a98f4..55df55671 100644 --- a/apps/memos-local-openclaw/src/client/connector.ts +++ b/apps/memos-local-openclaw/src/client/connector.ts @@ -208,20 +208,55 @@ export async function getHubStatus(store: SqliteStore, config: MemosLocalConfig) } catch (err: any) { const is401 = typeof err?.message === "string" && err.message.includes("(401)"); if (is401 && conn) { - store.setClientHubConnection({ - ...conn, - userToken: "", - lastKnownStatus: "removed", - }); + const teamToken = config.sharing?.client?.teamToken ?? 
""; + if (hubAddress && teamToken) { + try { + const regResult = await hubRequestJson(normalizeHubUrl(hubAddress), "", "/api/v1/hub/registration-status", { + method: "POST", + body: JSON.stringify({ teamToken, userId: conn.userId }), + }) as any; + if (regResult.status === "active" && regResult.userToken) { + store.setClientHubConnection({ + ...conn, + hubUrl: normalizeHubUrl(hubAddress), + userToken: regResult.userToken, + connectedAt: Date.now(), + lastKnownStatus: "active", + }); + try { + const me = await hubRequestJson(normalizeHubUrl(hubAddress), regResult.userToken, "/api/v1/hub/me", { method: "GET" }) as any; + return { + connected: true, + hubUrl: normalizeHubUrl(hubAddress), + user: { + id: String(me.id), + username: String(me.username ?? ""), + role: String(me.role ?? "member") as UserRole, + status: String(me.status ?? "active"), + groups: Array.isArray(me.groups) ? me.groups : [], + }, + }; + } catch { /* fall through to token-only return */ } + return { + connected: true, + hubUrl: normalizeHubUrl(hubAddress), + user: { id: conn.userId, username: conn.username || "", role: conn.role as UserRole || "member", status: "active" }, + }; + } + const realStatus = regResult.status as string; + store.setClientHubConnection({ ...conn, userToken: "", lastKnownStatus: realStatus }); + return { + connected: false, + hubUrl: normalizeHubUrl(hubAddress), + user: { id: conn.userId, username: conn.username || "", role: "member", status: realStatus }, + }; + } catch { /* registration-status also failed, fall through */ } + } + store.setClientHubConnection({ ...conn, userToken: "", lastKnownStatus: "token_expired" }); return { connected: false, hubUrl: normalizeHubUrl(hubAddress), - user: { - id: conn.userId, - username: conn.username || "", - role: "member", - status: "removed", - }, + user: { id: conn.userId, username: conn.username || "", role: "member", status: "token_expired" }, }; } return { connected: false, user: null }; diff --git 
a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index a0feb2306..b9f0c6f7c 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -976,8 +976,6 @@ input,textarea,select{font-family:inherit;font-size:inherit} .team-guide-steps li::marker{color:var(--pri);font-weight:700;font-size:11px} .team-guide-opt .btn-guide{font-size:11px;padding:5px 14px;border-radius:6px;font-weight:600;border:1px solid rgba(99,102,241,.25);background:rgba(99,102,241,.08);color:var(--pri);cursor:pointer;transition:background .15s,border-color .15s} .team-guide-opt .btn-guide:hover{background:rgba(99,102,241,.14);border-color:var(--pri)} -.team-guide-dismiss{position:absolute;top:10px;right:12px;background:none;border:none;color:var(--text-muted);font-size:15px;cursor:pointer;padding:4px;line-height:1;opacity:.5;transition:opacity .15s} -.team-guide-dismiss:hover{opacity:1} [data-theme="light"] .team-guide{background:linear-gradient(135deg,rgba(6,182,212,.03),rgba(79,70,229,.02));border-color:rgba(6,182,212,.15)} [data-theme="light"] .team-guide-opt{box-shadow:0 1px 3px rgba(0,0,0,.03)} [data-theme="light"] .team-guide-opt:hover{box-shadow:0 4px 16px rgba(0,0,0,.04)} @@ -1641,9 +1639,8 @@ input,textarea,select{font-family:inherit;font-size:inherit}
- +
-
\u{1F680} Get Started with Team Collaboration
MemOS supports team memory sharing. Choose one of the following options to enable collaboration, or continue using local-only mode.
@@ -2126,6 +2123,8 @@ const I18N={ 'notif.userJoin':'New user requests to join the team', 'notif.userOnline':'User came online', 'notif.userOffline':'User went offline', + 'notif.membershipApproved':'Your team join request has been approved', + 'notif.membershipRejected':'Your team join request has been declined', 'notif.clearAll':'Clear all', 'notif.timeAgo.just':'just now', 'notif.timeAgo.min':'{n}m ago', @@ -2448,6 +2447,9 @@ const I18N={ 'sharing.pendingApproval.hint':'Your join request has been submitted. Please wait for the team admin to approve.', 'sharing.rejected.hint':'Your join request was rejected by the team admin. Please contact the admin or retry.', 'sharing.removed.hint':'You have been removed from the team by the admin. You can re-apply to join.', + 'sharing.joinTeam':'Join Team', + 'sharing.joinSent.pending':'Join request sent! Waiting for admin approval.', + 'sharing.joinSent.active':'Successfully joined the team!', 'sharing.retryJoin':'Retry Join', 'sharing.retryJoin.hint':'Clears local data and re-submits the join request', 'sharing.retryJoin.confirm':'This will clear your current connection and re-submit a join request. 
Continue?', @@ -2840,6 +2842,8 @@ const I18N={ 'notif.userJoin':'有新用户申请加入团队', 'notif.userOnline':'用户上线了', 'notif.userOffline':'用户下线了', + 'notif.membershipApproved':'你的团队加入申请已通过', + 'notif.membershipRejected':'你的团队加入申请已被拒绝', 'notif.clearAll':'清除全部', 'notif.timeAgo.just':'刚刚', 'notif.timeAgo.min':'{n}分钟前', @@ -3162,6 +3166,9 @@ const I18N={ 'sharing.pendingApproval.hint':'加入申请已提交,请等待团队管理员审核通过。', 'sharing.rejected.hint':'您的加入申请已被团队管理员拒绝,请联系管理员或重新申请。', 'sharing.removed.hint':'您已被管理员从团队中移除,可以重新申请加入。', + 'sharing.joinTeam':'加入团队', + 'sharing.joinSent.pending':'加入申请已发送,等待管理员审批。', + 'sharing.joinSent.active':'成功加入团队!', 'sharing.retryJoin':'重新申请', 'sharing.retryJoin.hint':'清除本地连接数据并重新提交加入申请', 'sharing.retryJoin.confirm':'这将清除当前连接数据并重新提交加入申请,是否继续?', @@ -3606,6 +3613,9 @@ function selectSharingRole(role){ var tn=document.getElementById('cfgHubTeamName'); if(!tn.value.trim()) tn.value='My Team'; } + var card=document.getElementById('settingsSharingConfig'); + var saveBtn=card&&card.querySelector('.settings-actions .btn-primary'); + if(saveBtn&&typeof _hubSaveBtnLabel==='function') saveBtn.textContent=_hubSaveBtnLabel(); } var _cachedLocalIP=''; function updateHubShareInfo(){ @@ -3697,15 +3707,14 @@ function switchView(view){ else if(view==='analytics') loadMetrics(); else if(view==='logs') loadLogs(); else if(view==='settings'){loadConfig().then(function(){ - var notDismissed=localStorage.getItem('memos-team-guide-dismissed')!=='1'; var sharingOn=document.getElementById('cfgSharingEnabled'); var sharingNotEnabled=!sharingOn||!sharingOn.checked; - if(notDismissed&&sharingNotEnabled){ + if(sharingNotEnabled){ switchSettingsTab('hub',document.querySelector('.settings-tab-btn[data-tab="hub"]')); } });loadModelHealth();} else if(view==='import'){if(!window._migrateRunning) migrateScan(false);} - else if(view==='admin'){loadAdminData();} + else if(view==='admin'){_lastAdminFingerprint='';loadAdminData();} } function onMemoryScopeChange(){ @@ -3775,10 +3784,13 @@ async function 
loadSharingStatus(forcePending){ if(_lastSharingConnStatus==='pending'&&curStatus==='connected'){ toast(t('sharing.approved.toast'),'success'); loadMemories();loadTasks();loadSkills(); + if(_notifSSE){_notifSSE.close();_notifSSE=null;_notifSSEConnected=false;} + connectNotifSSE(); + loadNotifications(); } _lastSharingConnStatus=curStatus; if(curStatus==='pending'&&!_clientPendingPollTimer){ - _clientPendingPollTimer=setInterval(function(){loadSharingStatus(false);},10000); + _clientPendingPollTimer=setInterval(function(){loadSharingStatus(false);},5000); } if(curStatus!=='pending'&&_clientPendingPollTimer){ clearInterval(_clientPendingPollTimer); @@ -4059,7 +4071,7 @@ async function approveSharingUser(userId,username){ try{ const r=await fetch('/api/sharing/approve-user',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,username:username})}); const d=await r.json(); - if(d.ok){toast(t('toast.userApproved'),'success');loadSharingPendingUsers();loadSharingStatus(true);} else {toast(d.error||t('toast.approveFail'),'error');} + if(d.ok){toast(t('toast.userApproved'),'success');loadSharingPendingUsers();loadSharingStatus(true);_lastAdminFingerprint='';loadAdminData();} else {toast(d.error||t('toast.approveFail'),'error');} }catch(e){toast(t('toast.approveFail')+': '+e.message,'error');} } @@ -4067,24 +4079,17 @@ async function rejectSharingUser(userId,username){ try{ const r=await fetch('/api/sharing/reject-user',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,username:username})}); const d=await r.json(); - if(d.ok){toast(t('toast.userRejected'),'success');loadSharingPendingUsers();} else {toast(d.error||t('toast.rejectFail'),'error');} + if(d.ok){toast(t('toast.userRejected'),'success');loadSharingPendingUsers();_lastAdminFingerprint='';loadAdminData();} else {toast(d.error||t('toast.rejectFail'),'error');} }catch(e){toast(t('toast.rejectFail')+': '+e.message,'error');} } /* ─── 
Team Setup Guide ─── */ -var TEAM_GUIDE_DISMISSED_KEY='memos-team-guide-dismissed'; function updateTeamGuide(sharingData){ var el=document.getElementById('teamSetupGuide'); if(!el) return; - if(localStorage.getItem(TEAM_GUIDE_DISMISSED_KEY)==='1'){el.style.display='none';return;} var isConfigured=sharingData&&sharingData.enabled; el.style.display=isConfigured?'none':'block'; } -function dismissTeamGuide(){ - localStorage.setItem(TEAM_GUIDE_DISMISSED_KEY,'1'); - var el=document.getElementById('teamSetupGuide'); - if(el) el.style.display='none'; -} function guideGoToHub(role){ switchSettingsTab('hub',document.querySelector('.settings-tab-btn[data-tab="hub"]')); var chk=document.getElementById('cfgSharingEnabled'); @@ -4200,7 +4205,7 @@ async function loadAdminData(){ var _newMemories=Array.isArray(memoriesR.memories)?memoriesR.memories:[]; var pending=isAdmin?(Array.isArray(pendingR.users)?pendingR.users:[]):[]; var _fp=_newUsers.length+':'+_newTasks.length+':'+_newSkills.length+':'+_newMemories.length+':'+pending.length - +':'+_newUsers.map(function(u){return u.id+'|'+(u.isOnline?1:0)+'|'+(u.role||'')+'|'+(u.username||'')+'|'+(u.memoryCount||0)+'|'+(u.taskCount||0)+'|'+(u.skillCount||0)}).join(',') + +':'+_newUsers.map(function(u){return u.id+'|'+(u.isOnline?1:0)+'|'+(u.role||'')+'|'+(u.status||'')+'|'+(u.username||'')+'|'+(u.memoryCount||0)+'|'+(u.taskCount||0)+'|'+(u.skillCount||0)}).join(',') +':'+_newMemories.map(function(m){return m.id}).join(',') +':'+_newTasks.map(function(t){return t.id+'|'+(t.status||'')}).join(',') +':'+_newSkills.map(function(s){return s.id+'|'+(s.status||'')}).join(',') @@ -4396,7 +4401,7 @@ async function adminApproveUser(userId,username){ try{ var r=await fetch('/api/sharing/approve-user',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,username:username})}); var d=await r.json(); - 
if(d.ok){toast(t('toast.userApproved'),'success');loadAdminData();}else{toast(d.error||t('toast.approveFail'),'error');} + if(d.ok){toast(t('toast.userApproved'),'success');_lastAdminFingerprint='';loadAdminData();}else{toast(d.error||t('toast.approveFail'),'error');} }catch(e){toast(t('toast.approveFail')+': '+e.message,'error');} } async function adminRejectUser(userId){ @@ -4404,7 +4409,7 @@ async function adminRejectUser(userId){ try{ var r=await fetch('/api/sharing/reject-user',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId})}); var d=await r.json(); - if(d.ok){toast(t('toast.userRejected'),'success');loadAdminData();}else{toast(d.error||t('toast.rejectFail'),'error');} + if(d.ok){toast(t('toast.userRejected'),'success');_lastAdminFingerprint='';loadAdminData();}else{toast(d.error||t('toast.rejectFail'),'error');} }catch(e){toast(t('toast.rejectFail')+': '+e.message,'error');} } async function adminToggleRole(userId,newRole){ @@ -4413,7 +4418,7 @@ async function adminToggleRole(userId,newRole){ try{ var r=await fetch('/api/sharing/change-role',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,role:newRole})}); var d=await r.json(); - if(d.ok){toast(t('toast.roleChanged'),'success');loadAdminData();} + if(d.ok){toast(t('toast.roleChanged'),'success');_lastAdminFingerprint='';loadAdminData();} else if(d.error==='cannot_demote_owner'){toast(t('admin.ownerHint'),'error');} else{toast(d.error||t('toast.roleChangeFail'),'error');} }catch(e){toast(t('toast.roleChangeFail')+': '+e.message,'error');} @@ -4465,7 +4470,7 @@ async function adminRemoveUser(userId,username){ try{ var r=await fetch('/api/sharing/remove-user',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,cleanResources:clean})}); var d=await r.json(); - if(d.ok){toast(t('toast.userRemoved'),'success');loadAdminData();} + 
if(d.ok){toast(t('toast.userRemoved'),'success');_lastAdminFingerprint='';loadAdminData();} else if(d.error==='cannot_remove_owner'){toast(t('admin.ownerHint'),'error');} else{toast(d.error||t('toast.removeFail'),'error');} }catch(e){toast(t('toast.removeFail')+': '+e.message,'error');} @@ -6597,16 +6602,16 @@ async function doSaveConfig(cfg, btnEl, savedId){ function done(){btnEl.disabled=false;btnEl.textContent=t('settings.save');} try{ const r=await fetch('/api/config',{method:'PUT',headers:{'Content-Type':'application/json'},body:JSON.stringify(cfg)}); - if(r.status===401){done();toast(t('settings.session.expired'),'error');return false;} + if(r.status===401){done();toast(t('settings.session.expired'),'error');return null;} if(!r.ok) throw new Error(await r.text()); + var data=await r.json().catch(function(){return {ok:true};}); flashSaved(savedId); - toast(t('settings.saved'),'success'); done(); - return true; + return data; }catch(e){ toast(t('settings.save.fail')+': '+e.message,'error'); done(); - return false; + return null; } } @@ -6701,11 +6706,19 @@ async function saveModelsConfig(){ await doSaveConfig(cfg, saveBtn, 'modelsSaved'); } +function _hubSaveBtnLabel(){ + var on=document.getElementById('cfgSharingEnabled'); + if(on&&on.checked&&_sharingRole==='client'){ + var prevClient=sharingStatusCache&&sharingStatusCache.enabled&&sharingStatusCache.role==='client'; + return prevClient?t('settings.save'):t('sharing.joinTeam'); + } + return t('settings.save'); +} async function saveHubConfig(){ var card=document.getElementById('settingsSharingConfig'); var saveBtn=card.querySelector('.settings-actions .btn-primary'); saveBtn.disabled=true;saveBtn.textContent=t('settings.test.loading'); - function done(){saveBtn.disabled=false;saveBtn.textContent=t('settings.save');} + function done(){saveBtn.disabled=false;saveBtn.textContent=_hubSaveBtnLabel();} const cfg={}; var sharingEnabled=document.getElementById('cfgSharingEnabled').checked; @@ -6772,14 +6785,25 @@ 
async function saveHubConfig(){ if(!(await confirmModal(switchMsg,{danger:true}))){done();return;} } - var ok=await doSaveConfig(cfg, saveBtn, 'hubSaved'); - if(ok){ + var result=await doSaveConfig(cfg, saveBtn, 'hubSaved'); + if(result){ if(sharingEnabled&&_sharingRole==='hub'){ var adminNameEl=document.getElementById('cfgHubAdminName'); if(adminNameEl&&adminNameEl.value.trim()){ try{await fetch('/api/sharing/update-username',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({username:adminNameEl.value.trim()})});}catch(e){} } } + if(sharingEnabled&&_sharingRole==='client'&&result.joinStatus){ + if(result.joinStatus==='pending'){ + toast(t('sharing.joinSent.pending'),'success'); + }else if(result.joinStatus==='active'){ + toast(t('sharing.joinSent.active'),'success'); + }else{ + toast(t('settings.saved'),'success'); + } + }else{ + toast(t('settings.saved'),'success'); + } _lastSidebarFingerprint=''; _lastSettingsFingerprint=''; _lastSharingConnStatus=''; @@ -7354,6 +7378,12 @@ function notifTypeText(n){ if(n.type==='user_offline'){ return t('notif.userOffline'); } + if(n.type==='membership_approved'){ + return t('notif.membershipApproved'); + } + if(n.type==='membership_rejected'){ + return t('notif.membershipRejected'); + } return n.message||n.type; } diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index 65773c3da..adee752cf 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -2440,6 +2440,7 @@ export class ViewerServer { res.write("data: {\"type\":\"connected\"}\n\n"); this.notifSSEClients.push(res); if (!this.notifPollTimer) this.startNotifPoll(); + else this.notifPollImmediate(); req.on("close", () => { this.notifSSEClients = this.notifSSEClients.filter((c) => c !== res); if (this.notifSSEClients.length === 0) this.stopNotifPoll(); @@ -2475,6 +2476,20 @@ export class ViewerServer { if (this.notifPollTimer) { 
clearInterval(this.notifPollTimer); this.notifPollTimer = undefined; } } + private notifPollImmediate(): void { + const hub = this.resolveHubConnection(); + if (!hub) return; + hubRequestJson(hub.hubUrl, hub.userToken, "/api/v1/hub/notifications?unread=1") + .then((data: any) => { + const count = data?.unreadCount ?? 0; + if (count !== this.lastKnownNotifCount) { + this.lastKnownNotifCount = count; + this.broadcastNotifSSE({ type: "update", unreadCount: count }); + } + }) + .catch(() => {}); + } + private startHubHeartbeat(): void { this.stopHubHeartbeat(); const sendHeartbeat = async () => { @@ -2660,11 +2675,16 @@ export class ViewerServer { const finalSharing = config.sharing as Record | undefined; const nowClient = Boolean(finalSharing?.enabled) && finalSharing?.role === "client"; const previouslyClient = oldSharingEnabled && oldSharingRole === "client"; + let joinStatus: string | undefined; if (nowClient && !previouslyClient) { - this.autoJoinOnSave(finalSharing).catch(e => this.log.warn(`Auto-join on save failed: ${e}`)); + try { + joinStatus = await this.autoJoinOnSave(finalSharing); + } catch (e) { + this.log.warn(`Auto-join on save failed: ${e}`); + } } - this.jsonResponse(res, { ok: true }); + this.jsonResponse(res, { ok: true, joinStatus }); } catch (e) { this.log.warn(`handleSaveConfig error: ${e}`); res.writeHead(500, { "Content-Type": "application/json" }); @@ -2673,11 +2693,11 @@ export class ViewerServer { }); } - private async autoJoinOnSave(sharing: Record): Promise { + private async autoJoinOnSave(sharing: Record): Promise { const clientCfg = sharing.client as Record | undefined; const hubAddress = String(clientCfg?.hubAddress || ""); const teamToken = String(clientCfg?.teamToken || ""); - if (!hubAddress || !teamToken) return; + if (!hubAddress || !teamToken) return undefined; const hubUrl = normalizeHubUrl(hubAddress); const os = await import("os"); const nickname = String(clientCfg?.nickname || ""); @@ -2704,6 +2724,7 @@ export class 
ViewerServer { if (result.userToken) { this.startHubHeartbeat(); } + return result.status; } private async notifyHubLeave(): Promise { From 077e2c97f0024a347a5b47abcb33641970f0a71e Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Fri, 20 Mar 2026 17:32:20 +0800 Subject: [PATCH 67/85] chore(memos-local): bump version to 1.0.4-beta.13 Made-with: Cursor --- apps/memos-local-openclaw/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 5523616f2..531edc964 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.12", + "version": "1.0.4-beta.13", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", From 9e10feed6bdfcb78437e67aa27936f3f35575c51 Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Fri, 20 Mar 2026 22:44:16 +0800 Subject: [PATCH 68/85] =?UTF-8?q?feat(memos-local):=20v1.0.4-beta.14=20?= =?UTF-8?q?=E2=80=94=20auto-recall=20hub=20fallback,=20scope=20auto-upgrad?= =?UTF-8?q?e,=20and=20sharing=20UX=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Auto-recall: add hubFallback() to search remote Hub when local yields no/insufficient results (3-layer fallback) - memory_search: auto-upgrade scope from "local" to "all" when sharing is enabled - config: parse client nickname field for join requests - hub/server: sync userManager.approveUser on registration-status token renewal - storage: tasks query includes shared public tasks for original owner visibility - viewer: role switch clears stale panels, rejected/removed show retry button, admin tab visibility control, adminToggleRole refreshes all panels, memory scope persists in localStorage Made-with: Cursor --- 
apps/memos-local-openclaw/index.ts | 81 +++++++++++++++---- apps/memos-local-openclaw/package.json | 2 +- apps/memos-local-openclaw/src/config.ts | 3 +- apps/memos-local-openclaw/src/hub/server.ts | 1 + .../src/storage/sqlite.ts | 5 +- apps/memos-local-openclaw/src/viewer/html.ts | 40 ++++++++- 6 files changed, 110 insertions(+), 22 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 6d6f133d8..a088c23c1 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -466,7 +466,10 @@ const memosLocalPlugin = { }; const role = rawRole === "user" || rawRole === "assistant" || rawRole === "tool" || rawRole === "system" ? rawRole : undefined; const minScore = typeof rawMinScore === "number" ? Math.max(0.35, Math.min(1, rawMinScore)) : undefined; - const searchScope = resolveMemorySearchScope(rawScope); + let searchScope = resolveMemorySearchScope(rawScope); + if (searchScope === "local" && ctx.config?.sharing?.enabled) { + searchScope = "all"; + } const searchLimit = typeof maxResults === "number" ? Math.max(1, Math.min(20, Math.round(maxResults))) : 10; const agentId = currentAgentId; @@ -1818,6 +1821,38 @@ Groups: ${groupNames.length > 0 ? 
groupNames.join(", ") : "(none)"}`, ctx.log.debug(`auto-recall: query="${query.slice(0, 80)}"`); const result = await engine.search({ query, maxResults: 10, minScore: 0.45, ownerFilter: recallOwnerFilter }); + + // Hub fallback helper: search team shared memories when local search has no relevant results + const hubFallback = async (): Promise => { + if (!ctx.config?.sharing?.enabled) return []; + try { + const hubResult = await hubSearchMemories(store, ctx, { query, maxResults: 10, scope: "all" }); + if (hubResult.hits.length === 0) return []; + ctx.log.debug(`auto-recall: hub fallback returned ${hubResult.hits.length} hit(s)`); + return hubResult.hits.map((h) => ({ + summary: h.summary, + original_excerpt: h.excerpt || h.summary, + ref: { sessionKey: "", chunkId: h.remoteHitId, turnId: "", seq: 0 }, + score: 0.9, + taskId: null, + skillId: null, + origin: "hub-remote" as const, + source: { ts: h.source.ts, role: h.source.role, sessionKey: "" }, + })); + } catch (err) { + ctx.log.debug(`auto-recall: hub fallback failed (${err})`); + return []; + } + }; + + if (result.hits.length === 0) { + // Local found nothing — try hub before giving up + const hubHits = await hubFallback(); + if (hubHits.length > 0) { + result.hits.push(...hubHits); + ctx.log.debug(`auto-recall: local empty, using ${hubHits.length} hub hit(s)`); + } + } if (result.hits.length === 0) { ctx.log.debug("auto-recall: no memory candidates found"); const dur = performance.now() - recallT0; @@ -1881,22 +1916,36 @@ Groups: ${groupNames.length > 0 ? 
groupNames.join(", ") : "(none)"}`, const indexSet = new Set(filterResult.relevant); filteredHits = result.hits.filter((_, i) => indexSet.has(i + 1)); } else { - ctx.log.debug("auto-recall: LLM filter returned no relevant hits"); - const dur = performance.now() - recallT0; - store.recordToolCall("memory_search", dur, true); - store.recordApiLog("memory_search", { type: "auto_recall", query }, JSON.stringify({ - candidates: result.hits.map(h => ({ score: h.score, role: h.source.role, summary: h.summary, content: h.original_excerpt, origin: h.origin || "local" })), - filtered: [] - }), dur, true); - if (query.length > 50) { - const noRecallHint = - "## Memory system — ACTION REQUIRED\n\n" + - "Auto-recall found no relevant results for a long query. " + - "You MUST call `memory_search` now with a shortened query (2-5 key words) before answering. " + - "Do NOT skip this step. Do NOT answer without searching first."; - return { prependContext: noRecallHint }; + ctx.log.debug("auto-recall: LLM filter returned no relevant local hits, trying hub fallback"); + const hubHits = await hubFallback(); + if (hubHits.length > 0) { + ctx.log.debug(`auto-recall: hub fallback provided ${hubHits.length} hit(s) after local filter yielded 0`); + filteredHits = hubHits; + } else { + const dur = performance.now() - recallT0; + store.recordToolCall("memory_search", dur, true); + store.recordApiLog("memory_search", { type: "auto_recall", query }, JSON.stringify({ + candidates: result.hits.map(h => ({ score: h.score, role: h.source.role, summary: h.summary, content: h.original_excerpt, origin: h.origin || "local" })), + filtered: [] + }), dur, true); + if (query.length > 50) { + const noRecallHint = + "## Memory system — ACTION REQUIRED\n\n" + + "Auto-recall found no relevant results for a long query. " + + "You MUST call `memory_search` now with a shortened query (2-5 key words) before answering. " + + "Do NOT skip this step. 
Do NOT answer without searching first."; + return { prependContext: noRecallHint }; + } + return; } - return; + } + } + + if (!sufficient && filteredHits.length > 0 && ctx.config?.sharing?.enabled) { + const hubSupp = await hubFallback(); + if (hubSupp.length > 0) { + ctx.log.debug(`auto-recall: local insufficient, supplementing with ${hubSupp.length} hub hit(s)`); + filteredHits.push(...hubSupp); } } diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 531edc964..3a28c5942 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.13", + "version": "1.0.4-beta.14", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", diff --git a/apps/memos-local-openclaw/src/config.ts b/apps/memos-local-openclaw/src/config.ts index c745ce14e..150b09cc4 100644 --- a/apps/memos-local-openclaw/src/config.ts +++ b/apps/memos-local-openclaw/src/config.ts @@ -128,7 +128,8 @@ export function resolveConfig(raw: Partial | undefined, stateD userToken: cfg.sharing?.client?.userToken ?? "", teamToken: cfg.sharing?.client?.teamToken ?? "", pendingUserId: cfg.sharing?.client?.pendingUserId ?? "", - } : { hubAddress: "", userToken: "", teamToken: "", pendingUserId: "" }; + nickname: cfg.sharing?.client?.nickname ?? 
"", + } : { hubAddress: "", userToken: "", teamToken: "", pendingUserId: "", nickname: "" }; return { enabled, role, hub, client, capabilities: sharingCapabilities }; })(), }; diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index a046e406c..972a57ef4 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -314,6 +314,7 @@ export class HubServer { { userId: user.id, username: user.username, role: user.role, status: user.status }, this.authSecret, ); + this.userManager.approveUser(user.id, token); return this.json(res, 200, { status: "active", userToken: token }); } return this.json(res, 200, { status: user.status }); diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index 34b449e2f..c8ef00841 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -1490,7 +1490,10 @@ export class SqliteStore { const conditions: string[] = []; const params: unknown[] = []; if (opts.status) { conditions.push("status = ?"); params.push(opts.status); } - if (opts.owner) { conditions.push("owner = ?"); params.push(opts.owner); } + if (opts.owner) { + conditions.push("(owner = ? OR (owner = 'public' AND id IN (SELECT task_id FROM local_shared_tasks WHERE original_owner = ?)))"); + params.push(opts.owner, opts.owner); + } const whereClause = conditions.length > 0 ? 
`WHERE ${conditions.join(" AND ")}` : ""; const countRow = this.db.prepare(`SELECT COUNT(*) as c FROM tasks ${whereClause}`).get(...params) as { c: number }; diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index b9f0c6f7c..fb4470ab4 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -3599,14 +3599,16 @@ function selectSharingRole(role){ var tp=document.getElementById('sharingTeamPanel'); var ap=document.getElementById('sharingAdminPanel'); if(role==='client'){ - if(sp) sp.style.display='none'; + if(sp) { sp.style.display='none'; sp.innerHTML=''; } if(tp) tp.style.display='none'; if(ap) ap.style.display='none'; }else{ - if(sp) sp.style.display=''; + if(sp) { sp.style.display='none'; sp.innerHTML=''; } if(tp) tp.style.display=''; if(ap) ap.style.display=''; } + _lastSettingsFingerprint=''; + setTimeout(function(){ loadSharingStatus(true); },200); if(role==='hub'){ var tk=document.getElementById('cfgHubTeamToken'); if(!tk.value.trim()) tk.value=_genToken(18); @@ -3719,6 +3721,7 @@ function switchView(view){ function onMemoryScopeChange(){ memorySearchScope=document.getElementById('memorySearchScope')?.value||'local'; + try{localStorage.setItem('memos_memorySearchScope',memorySearchScope);}catch(e){} currentPage=1; activeSession=null;activeRole=''; _lastMemoriesFingerprint=''; @@ -3844,6 +3847,7 @@ function renderSharingSidebar(data){ html+=''+t('sharing.sidebar.identity')+''+esc(conn.user.username||'-')+''; if(conn.teamName) html+=''+t('sharing.team')+''+esc(conn.teamName)+''; html+='
'; + html+='
'; statusEl.innerHTML=html; hintEl.textContent=t('sharing.rejected.hint'); }else if(conn.removed&&conn.user){ @@ -3852,6 +3856,7 @@ function renderSharingSidebar(data){ html+=''+t('sharing.sidebar.identity')+''+esc(conn.user.username||'-')+''; if(conn.teamName) html+=''+t('sharing.team')+''+esc(conn.teamName)+''; html+='
'; + html+='
'; statusEl.innerHTML=html; hintEl.textContent=t('sharing.removed.hint'); }else if(conn.connected&&conn.user){ @@ -3888,6 +3893,9 @@ function renderSharingSettings(data){ if(!data||!data.enabled){ statusEl.innerHTML='';teamEl.innerHTML='';adminEl.innerHTML=''; if(panelsWrap) panelsWrap.style.display='none'; + var adminNavTab0=document.querySelector('.tab[data-view="admin"]'); + if(adminNavTab0) adminNavTab0.style.display='none'; + if(_activeView==='admin') switchView('memories'); return; } if(panelsWrap) panelsWrap.style.display=''; @@ -3895,9 +3903,20 @@ function renderSharingSettings(data){ var user=conn.user||{}; var actualRole=data.role||_sharingRole||'client'; if(data.role) _sharingRole=data.role; + var prevIsAdmin=!!window._isHubAdmin; var isAdmin=(data.admin&&data.admin.canManageUsers)||(conn.connected&&user.role==='admin')||(actualRole==='hub'); window._isHubAdmin=isAdmin; if(isAdmin) startAdminPoll(); + var adminNavTab=document.querySelector('.tab[data-view="admin"]'); + if(adminNavTab){ + var showTab=(actualRole==='hub')||(conn.connected); + adminNavTab.style.display=showTab?'':'none'; + if(!showTab&&_activeView==='admin') switchView('memories'); + } + if(prevIsAdmin&&!isAdmin&&_activeView==='admin'){ + _lastAdminFingerprint=''; + loadAdminData(); + } var hubAdminBtn=document.getElementById('hubAdminEntryBtn'); if(actualRole==='hub'){ @@ -4418,7 +4437,14 @@ async function adminToggleRole(userId,newRole){ try{ var r=await fetch('/api/sharing/change-role',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({userId:userId,role:newRole})}); var d=await r.json(); - if(d.ok){toast(t('toast.roleChanged'),'success');_lastAdminFingerprint='';loadAdminData();} + if(d.ok){ + toast(t('toast.roleChanged'),'success'); + _lastAdminFingerprint=''; + _lastSettingsFingerprint=''; + _lastSidebarFingerprint=''; + await loadSharingStatus(false); + loadAdminData(); + } else if(d.error==='cannot_demote_owner'){toast(t('admin.ownerHint'),'error');} 
else{toast(d.error||t('toast.roleChangeFail'),'error');} }catch(e){toast(t('toast.roleChangeFail')+': '+e.message,'error');} @@ -8840,6 +8866,14 @@ async function checkForUpdate(){ } /* ─── Init ─── */ +try{ + var savedScope=localStorage.getItem('memos_memorySearchScope'); + if(savedScope&&(savedScope==='local'||savedScope==='allLocal'||savedScope==='hub')){ + memorySearchScope=savedScope; + var scopeEl=document.getElementById('memorySearchScope'); + if(scopeEl) scopeEl.value=savedScope; + } +}catch(e){} document.getElementById('modalOverlay').addEventListener('click',e=>{if(e.target.id==='modalOverlay')closeModal()}); document.getElementById('searchInput').addEventListener('keydown',e=>{if(e.key==='Escape'){e.target.value='';currentPage=1;if(memorySearchScope==='hub')loadHubMemories();else loadMemories();}}); applyI18n(); From 4307d02bcfedd5895d5ddcb36ecf44e775a9c684 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Sun, 22 Mar 2026 19:22:56 +0800 Subject: [PATCH 69/85] feat(memos-local): dual-instance isolation, team sharing state management, and admin notifications MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Hub port auto-derivation (gatewayPort + 11) to avoid port conflicts in multi-instance setups - Hub port retry on EADDRINUSE (up to 3 retries) - Role change notifications (role_promoted / role_demoted) sent to affected users - Withdraw-pending API for canceling pending join requests when switching roles - Complete client connection cleanup (clearClientHubConnection) on role switch - Frontend toast guards: pending→connected/rejected only fire for client role - Resource notification display: localized titles with resource name as detail - Self-removal prevention in admin user management panel - Faster restart overlay (waitDown max 8 attempts instead of 60) - Config path resolution via OPENCLAW_CONFIG_PATH / OPENCLAW_STATE_DIR Made-with: Cursor --- apps/memos-local-openclaw/index.ts | 11 +- 
apps/memos-local-openclaw/src/hub/server.ts | 78 ++++- .../src/ingest/providers/index.ts | 4 +- .../src/shared/llm-call.ts | 3 +- apps/memos-local-openclaw/src/viewer/html.ts | 281 +++++++++++++----- .../memos-local-openclaw/src/viewer/server.ts | 186 +++++++++--- 6 files changed, 440 insertions(+), 123 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index a088c23c1..77f1f6d3c 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -180,7 +180,7 @@ const memosLocalPlugin = { } let pluginCfg = (api.pluginConfig ?? {}) as Record; - const stateDir = api.resolvePath("~/.openclaw"); + const stateDir = process.env.OPENCLAW_STATE_DIR || api.resolvePath("~/.openclaw"); // Fallback: read config from file if not provided by OpenClaw const configPath = path.join(stateDir, "state", "memos-local", "config.json"); @@ -1314,7 +1314,8 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, // ─── Tool: memory_viewer ─── - const viewerPort = (pluginCfg as any).viewerPort ?? 18799; + const gatewayPort = (api.config as any)?.gateway?.port ?? 18789; + const viewerPort = (pluginCfg as any).viewerPort ?? (gatewayPort + 10); api.registerTool( { @@ -2297,6 +2298,8 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, // ─── Memory Viewer (web UI) ─── + const derivedHubPort = gatewayPort + 11; + const viewer = new ViewerServer({ store, embedder, @@ -2304,10 +2307,10 @@ Groups: ${groupNames.length > 0 ? groupNames.join(", ") : "(none)"}`, log: ctx.log, dataDir: stateDir, ctx, + defaultHubPort: derivedHubPort, }); - const hubServer = ctx.config.sharing?.enabled && ctx.config.sharing.role === "hub" - ? new HubServer({ store, log: ctx.log, config: ctx.config, dataDir: stateDir, embedder }) + ? 
new HubServer({ store, log: ctx.log, config: ctx.config, dataDir: stateDir, embedder, defaultHubPort: derivedHubPort }) : null; // ─── Service lifecycle ─── diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index 972a57ef4..fc4dc1fb1 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -14,6 +14,7 @@ type HubServerOptions = { config: MemosLocalConfig; dataDir: string; embedder?: Embedder; + defaultHubPort?: number; }; type HubAuthState = { @@ -79,18 +80,31 @@ export class HubServer { } }); + const MAX_PORT_RETRIES = 3; + let hubPort = this.port; await new Promise((resolve, reject) => { - const onError = (err: Error) => { - this.server?.off("listening", onListening); - reject(err); + let retries = 0; + const onError = (err: NodeJS.ErrnoException) => { + if (err.code === "EADDRINUSE" && retries < MAX_PORT_RETRIES) { + retries++; + hubPort = this.port + retries; + this.opts.log.warn(`Hub port ${hubPort - 1} in use, trying ${hubPort}`); + this.server!.listen(hubPort, "0.0.0.0"); + } else { + this.server?.off("listening", onListening); + reject(err); + } }; const onListening = () => { this.server?.off("error", onError); + if (hubPort !== this.port) { + this.opts.log.info(`Hub started on fallback port ${hubPort} (configured: ${this.port})`); + } resolve(); }; - this.server!.once("error", onError); + this.server!.on("error", onError); this.server!.once("listening", onListening); - this.server!.listen(this.port, "0.0.0.0"); + this.server!.listen(hubPort, "0.0.0.0"); }); const bootstrap = this.userManager.ensureBootstrapAdmin( @@ -109,19 +123,37 @@ export class HubServer { this.initOnlineTracking(); this.offlineCheckTimer = setInterval(() => this.checkOfflineUsers(), HubServer.OFFLINE_CHECK_INTERVAL_MS); - return `http://127.0.0.1:${this.port}`; + return `http://127.0.0.1:${hubPort}`; } async stop(): Promise { if (this.offlineCheckTimer) { 
clearInterval(this.offlineCheckTimer); this.offlineCheckTimer = undefined; } if (!this.server) return; + + try { + const activeUsers = this.opts.store.listHubUsers("active"); + const ownerId = this.authState.bootstrapAdminUserId || ""; + for (const u of activeUsers) { + if (u.id === ownerId) continue; + try { + this.opts.store.insertHubNotification({ + id: randomUUID(), userId: u.id, type: "hub_shutdown", + resource: "system", title: `Team server "${this.teamName}" has been shut down by the admin.`, + }); + } catch { /* best-effort */ } + } + } catch { /* best-effort */ } + const server = this.server; this.server = undefined; await new Promise((resolve) => server.close(() => resolve())); } private get port(): number { - return this.opts.config.sharing?.hub?.port ?? 18800; + const configured = this.opts.config.sharing?.hub?.port; + const derived = this.opts.defaultHubPort; + if (derived && (!configured || configured === 18800)) return derived; + return configured ?? 18800; } private get teamName(): string { @@ -320,6 +352,22 @@ export class HubServer { return this.json(res, 200, { status: user.status }); } + if (req.method === "POST" && routePath === "/api/v1/hub/withdraw-pending") { + const body = await this.readJson(req); + if (!body || body.teamToken !== this.teamToken) { + return this.json(res, 403, { error: "invalid_team_token" }); + } + const userId = String(body.userId || ""); + if (!userId) return this.json(res, 400, { error: "missing_user_id" }); + const user = this.opts.store.getHubUser(userId); + if (!user) return this.json(res, 200, { ok: true }); + if (user.status === "pending") { + this.userManager.markUserLeft(userId); + this.opts.log.info(`Hub: user "${user.username}" (${userId}) withdrew pending application`); + } + return this.json(res, 200, { ok: true }); + } + // All endpoints below require authentication + rate limiting const auth = this.authenticate(req); if (!auth) return this.json(res, 401, { error: "unauthorized" }); @@ -336,7 +384,7 @@ 
export class HubServer { if (req.method === "POST" && routePath === "/api/v1/hub/leave") { this.userManager.markUserLeft(auth.userId); this.knownOnlineUsers.delete(auth.userId); - this.notifyAdmins("user_offline", "user", auth.username, auth.userId); + this.notifyAdmins("user_left", "user", auth.username, auth.userId); this.opts.log.info(`Hub: user "${auth.username}" (${auth.userId}) left voluntarily, status set to "left"`); return this.json(res, 200, { ok: true }); } @@ -441,6 +489,13 @@ export class HubServer { const updatedUser = { ...user, role: newRole as "admin" | "member" }; this.opts.store.upsertHubUser(updatedUser); this.opts.log.info(`Hub: admin "${auth.userId}" changed role of "${userId}" to "${newRole}"`); + try { + const notifType = newRole === "admin" ? "role_promoted" : "role_demoted"; + this.opts.store.insertHubNotification({ + id: randomUUID(), userId, type: notifType, + resource: "user", title: `Your role in team "${this.teamName}" has been changed to ${newRole}.`, + }); + } catch { /* best-effort */ } return this.json(res, 200, { ok: true, role: newRole }); } @@ -478,9 +533,16 @@ export class HubServer { if (!userId) return this.json(res, 400, { error: "missing_user_id" }); if (userId === auth.userId) return this.json(res, 400, { error: "cannot_remove_self" }); if (userId === this.authState.bootstrapAdminUserId) return this.json(res, 403, { error: "cannot_remove_owner", message: "The hub owner cannot be removed" }); + try { + this.opts.store.insertHubNotification({ + id: randomUUID(), userId, type: "membership_removed", + resource: "user", title: `You have been removed from team "${this.teamName}" by the admin.`, + }); + } catch { /* best-effort */ } const cleanResources = body?.cleanResources === true; const deleted = this.opts.store.deleteHubUser(userId, cleanResources); if (!deleted) return this.json(res, 404, { error: "not_found" }); + this.knownOnlineUsers.delete(userId); this.opts.log.info(`Hub: admin "${auth.userId}" removed user 
"${userId}" (cleanResources=${cleanResources})`); return this.json(res, 200, { ok: true }); } diff --git a/apps/memos-local-openclaw/src/ingest/providers/index.ts b/apps/memos-local-openclaw/src/ingest/providers/index.ts index 8513dc1fa..85d0814c8 100644 --- a/apps/memos-local-openclaw/src/ingest/providers/index.ts +++ b/apps/memos-local-openclaw/src/ingest/providers/index.ts @@ -49,8 +49,8 @@ function normalizeEndpointForProvider( function loadOpenClawFallbackConfig(log: Logger): SummarizerConfig | undefined { try { const home = process.env.HOME ?? process.env.USERPROFILE ?? ""; - const ocHome = process.env.OPENCLAW_STATE_DIR || path.join(home, ".openclaw"); - const cfgPath = path.join(ocHome, "openclaw.json"); + const cfgPath = process.env.OPENCLAW_CONFIG_PATH + || path.join(process.env.OPENCLAW_STATE_DIR || path.join(home, ".openclaw"), "openclaw.json"); if (!fs.existsSync(cfgPath)) return undefined; const raw = JSON.parse(fs.readFileSync(cfgPath, "utf-8")); diff --git a/apps/memos-local-openclaw/src/shared/llm-call.ts b/apps/memos-local-openclaw/src/shared/llm-call.ts index 9734c43ab..2b875661e 100644 --- a/apps/memos-local-openclaw/src/shared/llm-call.ts +++ b/apps/memos-local-openclaw/src/shared/llm-call.ts @@ -37,7 +37,8 @@ function defaultEndpointForProvider(provider: SummaryProvider, baseUrl: string): export function loadOpenClawFallbackConfig(log: Logger): SummarizerConfig | undefined { try { const home = process.env.HOME ?? process.env.USERPROFILE ?? 
""; - const cfgPath = path.join(home, ".openclaw", "openclaw.json"); + const cfgPath = process.env.OPENCLAW_CONFIG_PATH + || path.join(process.env.OPENCLAW_STATE_DIR || path.join(home, ".openclaw"), "openclaw.json"); if (!fs.existsSync(cfgPath)) return undefined; const raw = JSON.parse(fs.readFileSync(cfgPath, "utf-8")); diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index fb4470ab4..2b51313ce 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -1654,7 +1654,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
  • Ask your team admin for the Server Address and Team Token
  • Enable sharing above, select "Client" mode
  • Fill in Server Address and Team Token, click "Test Connection"
  • -
  • Save settings and restart the OpenClaw gateway (page refreshes automatically)
  • +
  • Click "Save & Apply" — the service restarts automatically (page refreshes)
  • @@ -1666,7 +1666,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
    Be the team server. Run it on this device so others can connect and share memories with you.
    1. Enable sharing above, select "Server" mode
    2. -
    3. Set a team name, save settings, and restart the gateway (page refreshes automatically)
    4. +
    5. Set a team name, click "Save & Apply" — the service restarts automatically
    6. Share the Server Address and Team Token with your team members
    7. Approve join requests in the Admin Panel
    @@ -1727,7 +1727,7 @@ input,textarea,select{font-family:inherit;font-size:inherit}
    Quick Setup (3 steps)
    1. Ask your team admin for the Server Address and Team Token
    2. Fill them in below, click "Test Connection" to verify
    -
    3. Click "Save Settings", then restart OpenClaw gateway (page refreshes automatically)
    +
    3. Click "Save & Apply" — the service will restart and page refreshes automatically
    @@ -2123,8 +2123,13 @@ const I18N={ 'notif.userJoin':'New user requests to join the team', 'notif.userOnline':'User came online', 'notif.userOffline':'User went offline', + 'notif.userLeft':'User has left the team', 'notif.membershipApproved':'Your team join request has been approved', 'notif.membershipRejected':'Your team join request has been declined', + 'notif.membershipRemoved':'You have been removed from the team by the admin', + 'notif.hubShutdown':'The team server has been shut down', + 'notif.rolePromoted':'You have been promoted to admin', + 'notif.roleDemoted':'You have been changed to member', 'notif.clearAll':'Clear all', 'notif.timeAgo.just':'just now', 'notif.timeAgo.min':'{n}m ago', @@ -2275,12 +2280,12 @@ const I18N={ 'settings.test.ok':'Connected', 'settings.test.fail':'Failed', 'settings.session.expired':'Session expired, please refresh the page to log in again', - 'settings.save':'Save Settings', + 'settings.save':'Save & Apply', 'settings.reset':'Reset', 'settings.saved':'Saved', - 'settings.restart.hint':'Some changes require restarting the OpenClaw gateway to take effect.', - 'settings.restart.autoRefresh':'Page will refresh automatically after the gateway restarts...', - 'settings.restart.waiting':'Configuration saved. Waiting for gateway to restart...', + 'settings.restart.hint':'Changes will take effect after the service restarts automatically.', + 'settings.restart.autoRefresh':'Service restarting, page will refresh automatically...', + 'settings.restart.waiting':'Configuration saved. Service is restarting...', 'settings.save.fail':'Failed to save settings', 'settings.save.emb.required':'Embedding model is required. 
Please configure an embedding model before saving.', 'settings.save.emb.fail':'Embedding model test failed, cannot save', @@ -2407,16 +2412,16 @@ const I18N={ 'settings.hub.tokenCopied':'Team Token copied!', 'settings.hub.hubSteps.title':'Quick Setup (3 steps)', 'settings.hub.hubSteps.s1':'Fill in Team Name below (or keep default)', - 'settings.hub.hubSteps.s2':'Click "Save Settings", then restart OpenClaw gateway', + 'settings.hub.hubSteps.s2':'Click "Save & Apply" — the service will restart automatically', 'settings.hub.hubSteps.s3':'Share the Server Address and Team Token below with your team members', 'settings.hub.clientSteps.title':'Quick Setup (3 steps)', 'settings.hub.clientSteps.s1':'Ask your team admin for the Server Address and Team Token', 'settings.hub.clientSteps.s2':'Fill them in below, click "Test Connection" to verify', - 'settings.hub.clientSteps.s3':'Click "Save Settings", then restart OpenClaw gateway (page refreshes automatically)', + 'settings.hub.clientSteps.s3':'Click "Save & Apply" — the service will restart and page refreshes automatically', 'settings.hub.shareInfo.title':'Share this info with your team members:', 'settings.hub.shareInfo.yourIP':'your-IP', 'settings.hub.shareInfo.clickCopy':'Click to copy', - 'settings.hub.restartAlert':'Team sharing config saved! Please restart the OpenClaw gateway for changes to take effect.\\n\\nRun: openclaw gateway stop && openclaw gateway start', + 'settings.hub.restartAlert':'Team sharing config saved! The service will restart automatically to apply changes.', 'settings.hub.hubAddress':'Server Address', 'settings.hub.hubAddress.hint':'Team server address, e.g. 
192.168.1.100:18800', 'settings.hub.teamTokenClient':'Team Token', @@ -2437,6 +2442,10 @@ const I18N={ 'sidebar.hub':'\u{1F310} Team Sharing', 'sharing.sidebar.connected':'Connected', 'sharing.sidebar.disconnected':'Disconnected', + 'sharing.sidebar.hubRunning':'Hub Running', + 'sharing.sidebar.teamName':'Team', + 'sharing.sidebar.members':'Members', + 'sharing.sidebar.online':'online', 'sharing.sidebar.pending':'Pending Approval', 'sharing.sidebar.rejected':'Rejected', 'sharing.sidebar.starting':'Starting...', @@ -2453,6 +2462,11 @@ const I18N={ 'sharing.retryJoin':'Retry Join', 'sharing.retryJoin.hint':'Clears local data and re-submits the join request', 'sharing.retryJoin.confirm':'This will clear your current connection and re-submit a join request. Continue?', + 'sharing.leaveTeam':'Leave Team', + 'sharing.leaveTeam.confirm':'You are about to leave team "{team}".\\n\\nWhat will happen:\\n\\u2022 You will disconnect from the team server\\n\\u2022 The team admin will be notified that you left\\n\\u2022 You will no longer receive shared memories, tasks, or skills\\n\\u2022 Your local data is preserved and not affected\\n\\u2022 You can rejoin later if the admin approves\\n\\nAre you sure?', + 'sharing.leaveTeam.success':'You have left the team. Sharing has been disabled.', + 'sharing.leaveTeam.fail':'Failed to leave team', + 'sharing.team.default':'the team', 'sharing.retryJoin.success':'Join request re-submitted. Waiting for admin approval.', 'sharing.retryJoin.fail':'Failed to retry join', 'sharing.ownerRemoved':'(removed)', @@ -2506,6 +2520,7 @@ const I18N={ 'admin.editName':'Edit Name', 'admin.lastAdminHint':'Last admin — cannot remove or demote', 'admin.ownerHint':'Hub owner — cannot be demoted or removed', + 'admin.selfHint':'This is you', 'admin.editNamePrompt':'Enter new username:', 'confirm.promoteAdmin':'Promote this user to admin? 
They will be able to manage all team members and resources.', 'confirm.demoteMember':'Demote this admin to member?', @@ -2567,6 +2582,8 @@ const I18N={ 'toast.userApproved':'User approved', 'sharing.approved.toast':'Your join request has been approved!', 'sharing.rejected.toast':'Your join request was rejected by the admin.', + 'sharing.hubOffline.toast':'Team server is offline. Will reconnect automatically when it comes back.', + 'sharing.hubReconnected.toast':'Team server is back online! Connection restored.', 'toast.userRejected':'User rejected', 'toast.approveFail':'Approve failed', 'toast.rejectFail':'Reject failed', @@ -2726,9 +2743,10 @@ const I18N={ 'update.dismiss':'Dismiss', 'sharing.disable.confirm.hub':'You are about to shut down the team server.\\n\\nWhat will happen:\\n\\u2022 All connected team members will be disconnected\\n\\u2022 They will no longer be able to sync memories, tasks, or skills\\n\\u2022 Shared data is preserved and will be available when you re-enable\\n\\nAre you sure?', 'sharing.disable.confirm.client':'You are about to disconnect from the team.\\n\\nWhat will happen:\\n\\u2022 You will no longer receive shared memories, tasks, or skills from the team\\n\\u2022 Your local data is preserved and will not be affected\\n\\u2022 You can reconnect later by re-enabling sharing\\n\\nAre you sure?', - 'sharing.disable.restartAlert':'Sharing has been disabled. 
Please restart the OpenClaw gateway for the change to take effect.\\n\\nRun: openclaw gateway stop && openclaw gateway start', - 'sharing.switch.hubToClient':'You are about to switch from Server to Client mode.\\n\\nWhat will happen:\\n\\u2022 The Hub server will shut down after restart\\n\\u2022 All connected team members will be disconnected\\n\\u2022 Shared data on the Hub is preserved for future use\\n\\u2022 You will join the specified remote team as a client\\n\\nAre you sure?', - 'sharing.switch.clientToHub':'You are about to switch from Client to Server mode.\\n\\nWhat will happen:\\n\\u2022 You will disconnect from the current team\\n\\u2022 A new Hub server will start after restart\\n\\u2022 Your local data is not affected\\n\\nAre you sure?', + 'sharing.disable.restartAlert':'Sharing has been disabled. The service will restart automatically to apply the change.', + 'sharing.switch.hubToClient':'You are about to switch from Server to Client mode.\\n\\nWhat will happen:\\n\\u2022 The Hub server will shut down after the service restarts\\n\\u2022 All connected team members will be disconnected\\n\\u2022 Shared data on the Hub is preserved for future use\\n\\u2022 You will join the specified remote team as a client\\n\\nAre you sure?', + 'sharing.switch.clientToHub':'You are about to switch from Client to Server mode.\\n\\nWhat will happen:\\n\\u2022 You will disconnect from the current team\\n\\u2022 A new Hub server will start after the service restarts\\n\\u2022 Your local data is not affected\\n\\nAre you sure?', + 'sharing.switch.hubAddress':'You are about to leave the current team and join a different one.\\n\\nWhat will happen:\\n\\u2022 You will disconnect from the current team server\\n\\u2022 The current team admin will be notified that you left\\n\\u2022 You will join the new team server as a new member\\n\\u2022 Your local data is not affected\\n\\nAre you sure?', 'admin.notEnabled.title':'Team sharing is not enabled', 
'admin.notEnabled.desc':'The Admin Panel is used to manage team members, shared memories, tasks, and skills. To use this feature, you need to enable team sharing first.', 'admin.notEnabled.setupHub':'Set Up as Team Server', @@ -2746,12 +2764,12 @@ const I18N={ 'guide.join.s1':'Ask your team admin for the Server Address and Team Token', 'guide.join.s2':'Go to Settings \u2192 Team Sharing, enable sharing, select "Client" mode', 'guide.join.s3':'Fill in Server Address and Team Token, click "Test Connection"', - 'guide.join.s4':'Save settings and restart the OpenClaw gateway (page refreshes automatically)', + 'guide.join.s4':'Click "Save & Apply" — the service restarts automatically (page refreshes)', 'guide.join.btn':'\u2192 Configure Client Mode', 'guide.hub.title':'Start Your Own Team Server', 'guide.hub.desc':'Be the team server. Run it on this device so others can connect and share memories with you.', 'guide.hub.s1':'Go to Settings \u2192 Team Sharing, enable sharing, select "Server" mode', - 'guide.hub.s2':'Set a team name, save settings, and restart the gateway (page refreshes automatically)', + 'guide.hub.s2':'Set a team name, click "Save & Apply" — the service restarts automatically', 'guide.hub.s3':'Share the Server Address and Team Token with your team members', 'guide.hub.s4':'Approve join requests in the Admin Panel', 'guide.hub.btn':'\u2192 Configure Server Mode' @@ -2842,8 +2860,13 @@ const I18N={ 'notif.userJoin':'有新用户申请加入团队', 'notif.userOnline':'用户上线了', 'notif.userOffline':'用户下线了', + 'notif.userLeft':'用户已退出团队', 'notif.membershipApproved':'你的团队加入申请已通过', 'notif.membershipRejected':'你的团队加入申请已被拒绝', + 'notif.membershipRemoved':'你已被管理员移出团队', + 'notif.hubShutdown':'团队服务已关闭', + 'notif.rolePromoted':'你已被提升为管理员', + 'notif.roleDemoted':'你已被设为普通成员', 'notif.clearAll':'清除全部', 'notif.timeAgo.just':'刚刚', 'notif.timeAgo.min':'{n}分钟前', @@ -2994,12 +3017,12 @@ const I18N={ 'settings.test.ok':'连接成功', 'settings.test.fail':'连接失败', 
'settings.session.expired':'登录已过期,请刷新页面重新登录', - 'settings.save':'保存设置', + 'settings.save':'保存并应用', 'settings.reset':'重置', 'settings.saved':'已保存', - 'settings.restart.hint':'部分设置修改后需要重启 OpenClaw 网关才能生效。', - 'settings.restart.autoRefresh':'网关重启后页面将自动刷新...', - 'settings.restart.waiting':'配置已保存,正在等待网关重启...', + 'settings.restart.hint':'修改将在服务自动重启后生效。', + 'settings.restart.autoRefresh':'服务重启中,页面将自动刷新...', + 'settings.restart.waiting':'配置已保存,服务正在重启...', 'settings.save.fail':'保存设置失败', 'settings.save.emb.required':'嵌入模型为必填项,请先配置嵌入模型再保存。', 'settings.save.emb.fail':'嵌入模型测试失败,无法保存', @@ -3126,16 +3149,16 @@ const I18N={ 'settings.hub.tokenCopied':'团队令牌已复制!', 'settings.hub.hubSteps.title':'快速配置(3 步)', 'settings.hub.hubSteps.s1':'填写下方团队名称(或保持默认)', - 'settings.hub.hubSteps.s2':'点击"保存设置",然后重启 OpenClaw 网关', + 'settings.hub.hubSteps.s2':'点击「保存并应用」,服务将自动重启', 'settings.hub.hubSteps.s3':'将下方的服务器地址和团队令牌分享给团队成员', 'settings.hub.clientSteps.title':'快速配置(3 步)', 'settings.hub.clientSteps.s1':'向团队管理员获取服务器地址和团队令牌', 'settings.hub.clientSteps.s2':'填入下方,点击"测试连接"验证连通性', - 'settings.hub.clientSteps.s3':'点击「保存设置」,然后重启 OpenClaw 网关(页面会自动刷新)', + 'settings.hub.clientSteps.s3':'点击「保存并应用」,服务将自动重启(页面会自动刷新)', 'settings.hub.shareInfo.title':'请将以下信息分享给团队成员:', 'settings.hub.shareInfo.yourIP':'你的IP', 'settings.hub.shareInfo.clickCopy':'点击复制', - 'settings.hub.restartAlert':'团队共享配置已保存!请重启 OpenClaw 网关使配置生效。\\n\\n执行命令:openclaw gateway stop && openclaw gateway start', + 'settings.hub.restartAlert':'团队共享配置已保存!服务将自动重启以应用更改。', 'settings.hub.hubAddress':'服务器地址', 'settings.hub.hubAddress.hint':'团队服务器地址,如 192.168.1.100:18800', 'settings.hub.teamTokenClient':'团队令牌', @@ -3156,6 +3179,10 @@ const I18N={ 'sidebar.hub':'\u{1F310} 团队共享', 'sharing.sidebar.connected':'已连接', 'sharing.sidebar.disconnected':'已断开', + 'sharing.sidebar.hubRunning':'服务运行中', + 'sharing.sidebar.teamName':'团队', + 'sharing.sidebar.members':'成员', + 'sharing.sidebar.online':'在线', 'sharing.sidebar.pending':'等待审核', 'sharing.sidebar.rejected':'已拒绝', 
'sharing.sidebar.starting':'启动中...', @@ -3172,6 +3199,11 @@ const I18N={ 'sharing.retryJoin':'重新申请', 'sharing.retryJoin.hint':'清除本地连接数据并重新提交加入申请', 'sharing.retryJoin.confirm':'这将清除当前连接数据并重新提交加入申请,是否继续?', + 'sharing.leaveTeam':'退出团队', + 'sharing.leaveTeam.confirm':'你即将退出团队「{team}」。\\n\\n退出后将会:\\n\\u2022 断开与团队服务器的连接\\n\\u2022 团队管理员会收到你退出的通知\\n\\u2022 你将无法再接收团队共享的记忆、任务和技能\\n\\u2022 你的本地数据不受影响,会完整保留\\n\\u2022 之后可以重新申请加入(需管理员审批)\\n\\n确定要退出吗?', + 'sharing.leaveTeam.success':'你已退出团队,团队共享已关闭。', + 'sharing.leaveTeam.fail':'退出团队失败', + 'sharing.team.default':'该团队', 'sharing.retryJoin.success':'加入申请已重新提交,请等待管理员审核。', 'sharing.retryJoin.fail':'重新申请失败', 'sharing.ownerRemoved':'(已移除)', @@ -3225,6 +3257,7 @@ const I18N={ 'admin.editName':'编辑名称', 'admin.lastAdminHint':'唯一管理员 — 无法删除或降级', 'admin.ownerHint':'Hub 创建者 — 不可降级或移除', + 'admin.selfHint':'这是你自己', 'admin.editNamePrompt':'请输入新用户名:', 'confirm.promoteAdmin':'确定要将此用户提升为管理员吗?管理员可以管理所有团队成员和资源。', 'confirm.demoteMember':'确定要将此管理员降为普通成员吗?', @@ -3286,6 +3319,8 @@ const I18N={ 'toast.userApproved':'用户已批准', 'sharing.approved.toast':'您的加入申请已通过审核!', 'sharing.rejected.toast':'您的加入申请已被管理员拒绝。', + 'sharing.hubOffline.toast':'团队服务已离线,恢复后将自动重新连接。', + 'sharing.hubReconnected.toast':'团队服务已恢复上线,连接已自动恢复!', 'toast.userRejected':'用户已拒绝', 'toast.approveFail':'批准失败', 'toast.rejectFail':'拒绝失败', @@ -3445,9 +3480,10 @@ const I18N={ 'update.dismiss':'关闭', 'sharing.disable.confirm.hub':'你即将关闭团队服务。\\n\\n关闭后将会:\\n\\u2022 所有已连接的团队成员将断开连接\\n\\u2022 他们将无法继续同步记忆、任务和技能\\n\\u2022 已共享的数据会保留,重新开启后仍可使用\\n\\n确定要关闭吗?', 'sharing.disable.confirm.client':'你即将断开与团队的连接。\\n\\n断开后将会:\\n\\u2022 你将无法再接收团队共享的记忆、任务和技能\\n\\u2022 你的本地数据不受影响,会完整保留\\n\\u2022 之后可以随时重新开启共享来恢复连接\\n\\n确定要断开吗?', - 'sharing.disable.restartAlert':'共享已关闭。请重启 OpenClaw 网关使更改生效。\\n\\n执行命令:openclaw gateway stop && openclaw gateway start', - 'sharing.switch.hubToClient':'你即将从服务端模式切换为客户端模式。\\n\\n切换后将会:\\n\\u2022 Hub 服务将在重启后关闭\\n\\u2022 所有已连接的团队成员将断开连接\\n\\u2022 Hub 上的共享数据会保留,以后可恢复使用\\n\\u2022 
你将作为客户端加入指定的远程团队\\n\\n确定要切换吗?', - 'sharing.switch.clientToHub':'你即将从客户端模式切换为服务端模式。\\n\\n切换后将会:\\n\\u2022 你将断开与当前团队的连接\\n\\u2022 重启后将启动新的 Hub 服务\\n\\u2022 你的本地数据不受影响\\n\\n确定要切换吗?', + 'sharing.disable.restartAlert':'共享已关闭,服务将自动重启以应用更改。', + 'sharing.switch.hubToClient':'你即将从服务端模式切换为客户端模式。\\n\\n切换后将会:\\n\\u2022 Hub 服务将在服务重启后关闭\\n\\u2022 所有已连接的团队成员将断开连接\\n\\u2022 Hub 上的共享数据会保留,以后可恢复使用\\n\\u2022 你将作为客户端加入指定的远程团队\\n\\n确定要切换吗?', + 'sharing.switch.clientToHub':'你即将从客户端模式切换为服务端模式。\\n\\n切换后将会:\\n\\u2022 你将断开与当前团队的连接\\n\\u2022 服务重启后将启动新的 Hub 服务\\n\\u2022 你的本地数据不受影响\\n\\n确定要切换吗?', + 'sharing.switch.hubAddress':'你即将离开当前团队并加入新的团队。\\n\\n操作后将会:\\n\\u2022 你将断开与当前团队服务器的连接\\n\\u2022 当前团队管理员会收到你离开的通知\\n\\u2022 你将作为新成员加入新的团队服务器\\n\\u2022 你的本地数据不受影响\\n\\n确定要切换吗?', 'admin.notEnabled.title':'团队共享尚未开启', 'admin.notEnabled.desc':'管理面板用于管理团队成员、共享的记忆、任务和技能。使用此功能前,需要先开启团队共享。', 'admin.notEnabled.setupHub':'配置为团队服务端', @@ -3465,12 +3501,12 @@ const I18N={ 'guide.join.s1':'向团队管理员索取服务器地址和团队令牌', 'guide.join.s2':'前往「设置 → 团队共享」,开启共享,选择「客户端」模式', 'guide.join.s3':'填写服务器地址和团队令牌,点击「测试连接」', - 'guide.join.s4':'保存设置并重启 OpenClaw 网关(页面会自动刷新)', + 'guide.join.s4':'点击「保存并应用」,服务将自动重启(页面会自动刷新)', 'guide.join.btn':'\u2192 配置客户端模式', 'guide.hub.title':'自建团队服务', 'guide.hub.desc':'将本机作为团队服务端,让其他成员连接过来共享记忆。', 'guide.hub.s1':'前往「设置 → 团队共享」,开启共享,选择「服务端」模式', - 'guide.hub.s2':'设置团队名称,保存设置后重启网关(页面会自动刷新)', + 'guide.hub.s2':'设置团队名称,点击「保存并应用」,服务将自动重启', 'guide.hub.s3':'将服务器地址和团队令牌分享给团队成员', 'guide.hub.s4':'在管理面板中审批加入请求', 'guide.hub.btn':'\u2192 配置服务端模式' @@ -3576,12 +3612,28 @@ async function doReset(){ } var _sharingRole='client'; +var _loadedClientHubAddress=''; function _genToken(len){ var a=new Uint8Array(len||18);crypto.getRandomValues(a); return btoa(String.fromCharCode.apply(null,a)).replace(/\\+/g,'-').replace(/\\//g,'_').replace(/=+$/,''); } -function onSharingToggle(){ - var on=document.getElementById('cfgSharingEnabled').checked; +async function onSharingToggle(){ + var chk=document.getElementById('cfgSharingEnabled'); + var 
on=chk.checked; + if(!on && sharingStatusCache && sharingStatusCache.enabled){ + var prevRole=sharingStatusCache.role; + var confirmMsg=prevRole==='hub'?t('sharing.disable.confirm.hub'):t('sharing.disable.confirm.client'); + if(!(await confirmModal(confirmMsg,{danger:true}))){ + chk.checked=true; + return; + } + var cfg={sharing:{enabled:false,role:prevRole}}; + chk.disabled=true; + var result=await doSaveConfig(cfg, null, 'hubSaved'); + chk.disabled=false; + if(!result){chk.checked=true;return;} + return; + } document.getElementById('sharingConfigPanel').style.display=on?'block':'none'; var pw=document.getElementById('sharingPanelsWrap'); if(pw) pw.style.display=on?'':'none'; @@ -3652,13 +3704,6 @@ async function testHubConnection(){ if(!addr){result.innerHTML='\u274C '+t('settings.hub.test.noAddr')+'';return;} btn.disabled=true;result.innerHTML=t('settings.hub.test.testing'); try{ - var ipsData=await fetch('/api/local-ips').then(function(r){return r.json();}); - var localAddrs=['127.0.0.1','localhost','0.0.0.0'].concat(ipsData.ips||[]); - var parsed=new URL(addr.indexOf('://')>-1?addr:'http://'+addr); - if(localAddrs.indexOf(parsed.hostname)>=0){ - result.innerHTML='\u274C '+t('sharing.cannotJoinSelf')+''; - btn.disabled=false;return; - } }catch(e){} try{ var url=addr.match(/^https?:\\/\\//)?addr:'http://'+addr; @@ -3781,16 +3826,26 @@ async function loadSharingStatus(forcePending){ var curStatus=conn.rejected?'rejected':conn.pendingApproval?'pending':conn.connected?'connected':'none'; var hubActive=d.role==='hub'||curStatus==='connected'; _updateScopeSelectorsVisibility(hubActive); - if(_lastSharingConnStatus==='pending'&&curStatus==='rejected'){ + if(_lastSharingConnStatus==='pending'&&curStatus==='rejected'&&d.role==='client'){ toast(t('sharing.rejected.toast'),'error'); } - if(_lastSharingConnStatus==='pending'&&curStatus==='connected'){ + if(_lastSharingConnStatus==='pending'&&curStatus==='connected'&&d.role==='client'){ 
toast(t('sharing.approved.toast'),'success'); loadMemories();loadTasks();loadSkills(); if(_notifSSE){_notifSSE.close();_notifSSE=null;_notifSSEConnected=false;} connectNotifSSE(); loadNotifications(); } + if(_lastSharingConnStatus==='connected'&&curStatus==='none'&&d.role==='client'){ + toast(t('sharing.hubOffline.toast'),'error'); + } + if(_lastSharingConnStatus==='none'&&curStatus==='connected'&&d.role==='client'){ + toast(t('sharing.hubReconnected.toast'),'success'); + loadMemories();loadTasks();loadSkills(); + if(_notifSSE){_notifSSE.close();_notifSSE=null;_notifSSEConnected=false;} + connectNotifSSE(); + loadNotifications(); + } _lastSharingConnStatus=curStatus; if(curStatus==='pending'&&!_clientPendingPollTimer){ _clientPendingPollTimer=setInterval(function(){loadSharingStatus(false);},5000); @@ -3815,7 +3870,8 @@ function renderSharingSidebar(data){ var badgeEl=document.getElementById('sharingSidebarConnBadge'); if(!statusEl||!hintEl) return; var conn=data&&data.connection||{}; - var fp=JSON.stringify({e:!!data&&!!data.enabled,r:data&&data.role,pa:!!conn.pendingApproval,rj:!!conn.rejected,c:!!conn.connected,u:conn.user&&conn.user.username,tn:conn.teamName,cc:!!data&&!!data.clientConfigured,hu:data&&data.hubUrl}); + var hs=data&&data.hubStats||{}; + var fp=JSON.stringify({e:!!data&&!!data.enabled,r:data&&data.role,pa:!!conn.pendingApproval,rj:!!conn.rejected,c:!!conn.connected,u:conn.user&&conn.user.username,tn:conn.teamName,cc:!!data&&!!data.clientConfigured,hu:data&&data.hubUrl,tm:hs.totalMembers,om:hs.onlineMembers,pm:hs.pendingMembers}); if(fp===_lastSidebarFingerprint) return; _lastSidebarFingerprint=fp; if(!data||!data.enabled){ @@ -3830,8 +3886,16 @@ function renderSharingSidebar(data){ badgeEl.innerHTML=''+esc(text)+''; } if(data.role==='hub'){ - setBadge('#34d399',t('sharing.sidebar.connected'),true); - statusEl.innerHTML=''; + setBadge('#34d399',t('sharing.sidebar.hubRunning'),true); + var hs=data.hubStats||{}; + var html='
    '; + if(conn.teamName) html+=''+t('sharing.sidebar.teamName')+''+esc(conn.teamName)+''; + html+=''+t('sharing.sidebar.members')+''+(hs.totalMembers||0)+' / '+t('sharing.sidebar.online')+' '+(hs.onlineMembers||0)+''; + if(hs.pendingMembers>0){ + html+=''+t('sharing.sidebar.pending')+''+hs.pendingMembers+''; + } + html+='
    '; + statusEl.innerHTML=html; hintEl.textContent=''; }else if(conn.pendingApproval&&conn.user){ setBadge('#fbbf24',t('sharing.sidebar.pending'),false); @@ -3902,7 +3966,6 @@ function renderSharingSettings(data){ var conn=data.connection||{}; var user=conn.user||{}; var actualRole=data.role||_sharingRole||'client'; - if(data.role) _sharingRole=data.role; var prevIsAdmin=!!window._isHubAdmin; var isAdmin=(data.admin&&data.admin.canManageUsers)||(conn.connected&&user.role==='admin')||(actualRole==='hub'); window._isHubAdmin=isAdmin; @@ -3966,7 +4029,10 @@ function renderSharingSettings(data){ ''+ ''; sh+=''+t('sharing.team')+''+esc(conn.teamName||'-')+''; - sh+='
    '; + sh+='
    '+ + '
    '+ + ''+ + '
    '; }else{ sh+='
    '+t('sharing.disconnected.hint')+'
    '+ '
    '+ @@ -4018,6 +4084,22 @@ async function retryHubJoin(){ }catch(e){toast(t('sharing.retryJoin.fail')+': '+e.message,'error');} } +async function leaveTeam(){ + var teamName=(sharingStatusCache&&sharingStatusCache.connection&&sharingStatusCache.connection.teamName)||''; + var msg=t('sharing.leaveTeam.confirm').replace('{team}',teamName||t('sharing.team.default')); + if(!(await confirmModal(msg,{danger:true}))) return; + try{ + var r=await fetch('/api/sharing/leave',{method:'POST',headers:{'Content-Type':'application/json'},body:'{}'}); + var d=await r.json(); + if(d.ok){ + toast(t('sharing.leaveTeam.success'),'success'); + showRestartOverlay(t('settings.restart.waiting')); + }else{ + toast(d.error||t('sharing.leaveTeam.fail'),'error'); + } + }catch(e){toast(t('sharing.leaveTeam.fail')+': '+e.message,'error');} +} + async function updateHubUsername(){ var input=document.getElementById('hubUsernameInput'); if(!input) return; @@ -4106,8 +4188,7 @@ async function rejectSharingUser(userId,username){ function updateTeamGuide(sharingData){ var el=document.getElementById('teamSetupGuide'); if(!el) return; - var isConfigured=sharingData&&sharingData.enabled; - el.style.display=isConfigured?'none':'block'; + el.style.display='block'; } function guideGoToHub(role){ switchSettingsTab('hub',document.querySelector('.settings-tab-btn[data-tab="hub"]')); @@ -4307,9 +4388,10 @@ function auRelativeTime(ts){ return t('notif.timeAgo.day').replace('{n}',Math.floor(diff/86400000)); } -function renderAdminUserCard(u,adminCount){ +function renderAdminUserCard(u,adminCount,myUserId){ var uid=escAttr(u.id); var uname=escAttr(u.username||''); + var isSelf=!!(myUserId&&u.id===myUserId); var online=!!u.isOnline; var statusCls=online?'online':'offline'; @@ -4343,7 +4425,9 @@ function renderAdminUserCard(u,adminCount){ var infoHtml='
    '+infoRows.join('')+'
    '; var actions=''; - if(u.isOwner){ + if(isSelf){ + actions+=''+t('admin.selfHint')+''; + }else if(u.isOwner){ actions+=''+t('admin.ownerHint')+''; }else if(u.role!=='admin'){ actions+=''; @@ -4395,6 +4479,7 @@ function renderAdminUsers(users,pending){ offlineUsers.sort(function(a,b){return (b.lastActiveAt||0)-(a.lastActiveAt||0);}); var sorted=onlineUsers.concat(offlineUsers); var adminCount=users.filter(function(x){return x.role==='admin';}).length; + var myUserId=sharingStatusCache&&sharingStatusCache.connection&&sharingStatusCache.connection.user?sharingStatusCache.connection.user.id:null; if(sorted.length===0){ html+='
    \u{1F465}'+t('admin.noActiveUsers')+'
    '; @@ -4403,13 +4488,13 @@ function renderAdminUsers(users,pending){ if(onlineUsers.length===0){ html+='
    \u2014
    '; }else{ - for(var i=0;i('+offlineUsers.length+')
    '; if(offlineUsers.length===0){ html+='
    \u2014
    '; }else{ - for(var j=0;j-1?clientAddr:'http://'+clientAddr); - if(localAddrs.indexOf(parsed.hostname)>=0){ - done();toast(t('sharing.cannotJoinSelf'),'error');return; - } }catch(e){} try{ var testUrl=clientAddr.indexOf('://')>-1?clientAddr:'http://'+clientAddr; @@ -6810,6 +6894,12 @@ async function saveHubConfig(){ var switchMsg=prevRole==='hub'?t('sharing.switch.hubToClient'):t('sharing.switch.clientToHub'); if(!(await confirmModal(switchMsg,{danger:true}))){done();return;} } + if(prevSharingEnabled&&sharingEnabled&&prevRole==='client'&&_sharingRole==='client'){ + var newAddr=(document.getElementById('cfgClientHubAddress').value||'').trim(); + if(_loadedClientHubAddress&&newAddr&&newAddr!==_loadedClientHubAddress){ + if(!(await confirmModal(t('sharing.switch.hubAddress'),{danger:true}))){done();return;} + } + } var result=await doSaveConfig(cfg, saveBtn, 'hubSaved'); if(result){ @@ -7378,7 +7468,12 @@ function notifTimeAgo(ts){ function notifIcon(resource,type){ if(type==='user_online') return '\\u{1F7E2}'; if(type==='user_offline') return '\\u{1F534}'; + if(type==='user_left') return '\\u{1F6AA}'; if(type==='user_join_request') return '\\u{1F464}'; + if(type==='membership_removed') return '\\u{26D4}'; + if(type==='hub_shutdown') return '\\u{1F6D1}'; + if(type==='role_promoted') return '\\u{2B06}'; + if(type==='role_demoted') return '\\u{2B07}'; if(resource==='memory') return '\\u{1F4DD}'; if(resource==='task') return '\\u{1F4CB}'; if(resource==='skill') return '\\u{1F9E0}'; @@ -7404,12 +7499,27 @@ function notifTypeText(n){ if(n.type==='user_offline'){ return t('notif.userOffline'); } + if(n.type==='user_left'){ + return t('notif.userLeft'); + } if(n.type==='membership_approved'){ return t('notif.membershipApproved'); } if(n.type==='membership_rejected'){ return t('notif.membershipRejected'); } + if(n.type==='membership_removed'){ + return t('notif.membershipRemoved'); + } + if(n.type==='hub_shutdown'){ + return t('notif.hubShutdown'); + } + 
if(n.type==='role_promoted'){ + return t('notif.rolePromoted'); + } + if(n.type==='role_demoted'){ + return t('notif.roleDemoted'); + } return n.message||n.type; } @@ -7444,6 +7554,21 @@ function renderNotifBadge(){ } } +var _notifKnownTypes={membership_approved:1,membership_rejected:1,membership_removed:1,hub_shutdown:1,user_left:1,user_online:1,user_offline:1,user_join_request:1,role_promoted:1,role_demoted:1,resource_removed:1,resource_shared:1,resource_unshared:1}; +function notifDisplayTitle(n){ + if(_notifKnownTypes[n.type]) return notifTypeText(n); + return n.title||notifTypeText(n); +} +function notifDisplayDetail(n){ + if(_notifKnownTypes[n.type]){ + if(n.type==='resource_removed'||n.type==='resource_shared'||n.type==='resource_unshared') return n.title||''; + var m=n.title&&n.title.match(/["\u201C]([^"\u201D]+)["\u201D]/); + if(m) return m[1]; + if(n.type==='user_left'||n.type==='user_online'||n.type==='user_offline'||n.type==='user_join_request') return n.title||''; + return ''; + } + return n.title||''; +} function renderNotifPanel(){ var body=document.getElementById('notifPanelBody'); if(!body) return; @@ -7453,11 +7578,12 @@ function renderNotifPanel(){ } body.innerHTML=_notifCache.map(function(n){ var cls='notif-item'+(n.read?'':' unread'); + var detail=notifDisplayDetail(n); return '
    '+ '
    '+notifIcon(n.resource,n.type)+'
    '+ '
    '+ - '
    '+esc(notifTypeText(n))+'
    '+ - '
    '+esc(n.title)+'
    '+ + '
    '+esc(notifDisplayTitle(n))+'
    '+ + (detail?'
    '+esc(detail)+'
    ':'')+ '
    '+notifTimeAgo(n.createdAt)+'
    '+ '
    '+ '
    '+ @@ -8743,7 +8869,7 @@ function confirmModal(message,opts){ _confirmResolve=resolve; var overlay=document.getElementById('confirmOverlay'); document.getElementById('confirmTitle').textContent=opts.title||t('confirm.title')||'\u786E\u8BA4'; - document.getElementById('confirmBody').textContent=message||''; + document.getElementById('confirmBody').innerText=message||''; var okBtn=document.getElementById('confirmOkBtn'); okBtn.textContent=opts.okText||t('confirm.ok')||'\u786E\u5B9A'; okBtn.className='btn-confirm-ok'+(opts.danger?' danger':''); @@ -8784,14 +8910,35 @@ function showRestartOverlay(msg){ /* ─── Update check ─── */ function waitForGatewayAndReload(maxAttempts,attempt){ attempt=attempt||0; + var phase=arguments.length>2?arguments[2]:'waitDown'; + var MAX_WAIT_DOWN=8; function forceReload(){window.location.href=window.location.pathname+'?_t='+Date.now();} if(attempt>=maxAttempts){forceReload();return;} + var delay=phase==='waitDown'?1500:2500; setTimeout(function(){ fetch('/api/auth/status').then(function(r){ - if(r.ok||r.status===401||r.status===403) forceReload(); - else waitForGatewayAndReload(maxAttempts,attempt+1); - }).catch(function(){waitForGatewayAndReload(maxAttempts,attempt+1);}); - },3000); + if(phase==='waitDown'){ + if(r.ok||r.status===401||r.status===403){ + if(attempt>=MAX_WAIT_DOWN){ + forceReload(); + }else{ + waitForGatewayAndReload(maxAttempts,attempt+1,'waitDown'); + } + }else{ + waitForGatewayAndReload(maxAttempts,0,'waitUp'); + } + }else{ + if(r.ok||r.status===401||r.status===403) forceReload(); + else waitForGatewayAndReload(maxAttempts,attempt+1,'waitUp'); + } + }).catch(function(){ + if(phase==='waitDown'){ + waitForGatewayAndReload(maxAttempts,0,'waitUp'); + }else{ + waitForGatewayAndReload(maxAttempts,attempt+1,'waitUp'); + } + }); + },delay); } function doUpdateInstall(packageSpec,btnEl,statusEl){ btnEl.disabled=true; diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts 
index adee752cf..98a9179c2 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -34,6 +34,7 @@ export interface ViewerServerOptions { log: Logger; dataDir: string; ctx?: PluginContext; + defaultHubPort?: number; } interface AuthState { @@ -51,6 +52,8 @@ export class ViewerServer { private readonly authFile: string; private readonly auth: AuthState; private readonly ctx?: PluginContext; + private readonly cookieName: string; + private readonly defaultHubPort: number; private static readonly SESSION_TTL = 24 * 60 * 60 * 1000; private static readonly PLUGIN_VERSION: string = (() => { @@ -99,17 +102,31 @@ export class ViewerServer { this.ctx = opts.ctx; this.authFile = path.join(opts.dataDir, "viewer-auth.json"); this.auth = { passwordHash: null, sessions: new Map() }; + this.cookieName = `memos_token_${opts.port}`; + this.defaultHubPort = opts.defaultHubPort ?? 18800; this.resetToken = crypto.randomBytes(16).toString("hex"); this.loadAuth(); } + private getHubPort(): number { + const configured = this.ctx?.config?.sharing?.hub?.port; + if (configured && configured !== 18800) return configured; + return this.defaultHubPort; + } + start(): Promise { + const MAX_PORT_RETRIES = 5; return new Promise((resolve, reject) => { + let retries = 0; this.server = http.createServer((req, res) => this.handleRequest(req, res)); this.server.on("error", (err: NodeJS.ErrnoException) => { - if (err.code === "EADDRINUSE") { - this.log.warn(`Viewer port ${this.port} in use, trying ${this.port + 1}`); - this.server!.listen(this.port + 1, "0.0.0.0"); + if (err.code === "EADDRINUSE" && retries < MAX_PORT_RETRIES) { + retries++; + const nextPort = this.port + retries; + this.log.warn(`Viewer port ${this.port + retries - 1} in use, trying ${nextPort}`); + this.server!.listen(nextPort, "0.0.0.0"); + } else if (err.code === "EADDRINUSE") { + reject(new Error(`Viewer failed to find open port after ${MAX_PORT_RETRIES} retries (tried 
${this.port}–${this.port + MAX_PORT_RETRIES})`)); } else { reject(err); } @@ -187,7 +204,8 @@ export class ViewerServer { private isValidSession(req: http.IncomingMessage): boolean { const cookie = req.headers.cookie ?? ""; - const match = cookie.match(/memos_token=([a-f0-9]+)/); + const re = new RegExp(`${this.cookieName}=([a-f0-9]+)`); + const match = cookie.match(re); if (!match) return false; const expiry = this.auth.sessions.get(match[1]); if (!expiry) return false; @@ -270,6 +288,7 @@ export class ViewerServer { else if (p === "/api/sharing/remove-user" && req.method === "POST") this.handleSharingRemoveUser(req, res); else if (p === "/api/sharing/change-role" && req.method === "POST") this.handleSharingChangeRole(req, res); else if (p === "/api/sharing/retry-join" && req.method === "POST") this.handleRetryJoin(req, res); + else if (p === "/api/sharing/leave" && req.method === "POST") this.handleLeaveTeam(req, res); else if (p === "/api/sharing/search/memories" && req.method === "POST") this.handleSharingMemorySearch(req, res); else if (p === "/api/sharing/memories/list" && req.method === "GET") this.serveSharingMemoryList(res, url); else if (p === "/api/sharing/tasks/list" && req.method === "GET") this.serveSharingTaskList(res, url); @@ -350,7 +369,7 @@ export class ViewerServer { const token = this.createSession(); res.writeHead(200, { "Content-Type": "application/json", - "Set-Cookie": `memos_token=${token}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`, + "Set-Cookie": `${this.cookieName}=${token}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`, }); res.end(JSON.stringify({ ok: true, message: "Password set successfully" })); } catch (err) { @@ -372,7 +391,7 @@ export class ViewerServer { const token = this.createSession(); res.writeHead(200, { "Content-Type": "application/json", - "Set-Cookie": `memos_token=${token}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`, + "Set-Cookie": `${this.cookieName}=${token}; Path=/; HttpOnly; SameSite=Strict; 
Max-Age=86400`, }); res.end(JSON.stringify({ ok: true })); } catch (err) { @@ -384,11 +403,12 @@ export class ViewerServer { private handleLogout(req: http.IncomingMessage, res: http.ServerResponse): void { const cookie = req.headers.cookie ?? ""; - const match = cookie.match(/memos_token=([a-f0-9]+)/); + const re = new RegExp(`${this.cookieName}=([a-f0-9]+)`); + const match = cookie.match(re); if (match) this.auth.sessions.delete(match[1]); res.writeHead(200, { "Content-Type": "application/json", - "Set-Cookie": "memos_token=; Path=/; HttpOnly; Max-Age=0", + "Set-Cookie": `${this.cookieName}=; Path=/; HttpOnly; Max-Age=0`, }); res.end(JSON.stringify({ ok: true })); } @@ -415,7 +435,7 @@ export class ViewerServer { const sessionToken = this.createSession(); res.writeHead(200, { "Content-Type": "application/json", - "Set-Cookie": `memos_token=${sessionToken}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`, + "Set-Cookie": `${this.cookieName}=${sessionToken}; Path=/; HttpOnly; SameSite=Strict; Max-Age=86400`, }); res.end(JSON.stringify({ ok: true, message: "Password reset successfully" })); } catch (err) { @@ -1522,6 +1542,7 @@ export class ViewerServer { // ─── Config API ─── private getOpenClawConfigPath(): string { + if (process.env.OPENCLAW_CONFIG_PATH) return process.env.OPENCLAW_CONFIG_PATH; const home = process.env.HOME || process.env.USERPROFILE || ""; const ocHome = process.env.OPENCLAW_STATE_DIR || path.join(home, ".openclaw"); return path.join(ocHome, "openclaw.json"); @@ -1611,7 +1632,20 @@ export class ViewerServer { base.connection.teamName = info?.teamName ?? sharing.hub?.teamName ?? null; base.connection.apiVersion = info?.apiVersion ?? 
null; } catch { /* ignore */ } - this.jsonResponse(res, base); + + const hubStats: any = { totalMembers: 0, onlineMembers: 0, pendingMembers: 0 }; + try { + const activeUsers = this.store.listHubUsers("active"); + const pendingUsers = this.store.listHubUsers("pending"); + const now = Date.now(); + const OFFLINE_THRESHOLD = 120_000; + hubStats.totalMembers = activeUsers.length; + hubStats.onlineMembers = activeUsers.filter(u => + u.lastActiveAt && (now - u.lastActiveAt < OFFLINE_THRESHOLD), + ).length; + hubStats.pendingMembers = pendingUsers.length; + } catch { /* best-effort */ } + this.jsonResponse(res, { ...base, hubStats }); return; } @@ -1776,15 +1810,6 @@ export class ViewerServer { } try { const hubUrl = normalizeHubUrl(hubAddress); - const localIPs = this.getLocalIPs(); - localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); - try { - const u = new URL(hubUrl); - const targetPort = u.port || (u.protocol === "https:" ? "443" : "80"); - if (localIPs.includes(u.hostname) && targetPort === String(this.port)) { - return this.jsonResponse(res, { ok: false, error: "cannot_join_self" }); - } - } catch {} const os = await import("os"); const nickname = sharing.client?.nickname; const username = nickname || os.userInfo().username || "user"; @@ -2213,7 +2238,7 @@ export class ViewerServer { // Hub 模式:连接自己,用 bootstrap admin token const sharing = this.ctx.config.sharing; if (sharing?.role === "hub") { - const hubPort = sharing.hub?.port ?? 18800; + const hubPort = this.getHubPort(); const hubUrl = `http://127.0.0.1:${hubPort}`; try { const authPath = path.join(this.dataDir, "hub-auth.json"); @@ -2607,13 +2632,14 @@ export class ViewerServer { if (merged.role === "client" && merged.client) { const clientCfg = merged.client as Record; const addr = String(clientCfg.hubAddress || ""); - if (addr) { + if (addr && oldSharingRole === "hub" && oldSharingEnabled) { + const selfHubPort = (oldSharing?.hub as Record)?.port ?? 
18800; const localIPs = this.getLocalIPs(); localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); try { const u = new URL(addr.startsWith("http") ? addr : `http://${addr}`); const targetPort = u.port || (u.protocol === "https:" ? "443" : "80"); - if (localIPs.includes(u.hostname) && targetPort === String(this.port)) { + if (localIPs.includes(u.hostname) && targetPort === String(selfHubPort)) { res.writeHead(400, { "Content-Type": "application/json" }); res.end(JSON.stringify({ error: "cannot_join_self" })); return; @@ -2638,12 +2664,9 @@ export class ViewerServer { const wasClient = oldSharingEnabled && oldSharingRole === "client"; const isClient = newEnabled && newRole === "client"; if (wasClient && !isClient) { - this.notifyHubLeave(); - const oldConn = this.store.getClientHubConnection(); - if (oldConn) { - this.store.setClientHubConnection({ ...oldConn, userToken: "", lastKnownStatus: "left" }); - } - this.log.info("Client hub connection token cleared (sharing disabled or role changed), identity preserved"); + await this.withdrawOrLeaveHub(); + this.store.clearClientHubConnection(); + this.log.info("Client hub connection cleared (sharing disabled or role changed)"); } if (wasClient && isClient) { @@ -2661,7 +2684,7 @@ export class ViewerServer { if (merged.role === "hub") { merged.client = { hubAddress: "", userToken: "", teamToken: "" }; } else if (merged.role === "client") { - merged.hub = { port: 18800, teamName: "", teamToken: "" }; + merged.hub = { teamName: "", teamToken: "" }; } config.sharing = merged; } @@ -2684,7 +2707,12 @@ export class ViewerServer { } } - this.jsonResponse(res, { ok: true, joinStatus }); + this.jsonResponse(res, { ok: true, joinStatus, restart: true }); + + setTimeout(() => { + this.log.info("config-save: triggering gateway restart via SIGUSR1..."); + try { process.kill(process.pid, "SIGUSR1"); } catch (sig) { this.log.warn(`SIGUSR1 failed: ${sig}`); } + }, 500); } catch (e) { this.log.warn(`handleSaveConfig error: ${e}`); 
res.writeHead(500, { "Content-Type": "application/json" }); @@ -2727,6 +2755,41 @@ export class ViewerServer { return result.status; } + private handleLeaveTeam(_req: http.IncomingMessage, res: http.ServerResponse): void { + this.readBody(_req, async () => { + try { + await this.withdrawOrLeaveHub(); + this.store.clearClientHubConnection(); + + const configPath = this.getOpenClawConfigPath(); + if (configPath && fs.existsSync(configPath)) { + const raw = JSON.parse(fs.readFileSync(configPath, "utf8")); + const pluginKey = Object.keys(raw.plugins?.entries ?? {}).find(k => k.includes("memos-local")); + if (pluginKey) { + const cfg = raw.plugins.entries[pluginKey].config ?? {}; + if (cfg.sharing) { + cfg.sharing.enabled = false; + cfg.sharing.client = { hubAddress: "", userToken: "", teamToken: "" }; + } + raw.plugins.entries[pluginKey].config = cfg; + fs.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n"); + this.log.info("handleLeaveTeam: config updated, sharing disabled"); + } + } + + this.jsonResponse(res, { ok: true, restart: true }); + + setTimeout(() => { + this.log.info("handleLeaveTeam: triggering gateway restart via SIGUSR1..."); + try { process.kill(process.pid, "SIGUSR1"); } catch (sig) { this.log.warn(`SIGUSR1 failed: ${sig}`); } + }, 500); + } catch (e) { + this.log.warn(`handleLeaveTeam error: ${e}`); + this.jsonResponse(res, { ok: false, error: String(e) }); + } + }); + } + private async notifyHubLeave(): Promise { try { const hub = this.resolveHubConnection(); @@ -2745,11 +2808,49 @@ export class ViewerServer { } } + private async withdrawOrLeaveHub(): Promise { + try { + const persisted = this.store.getClientHubConnection(); + const sharing = this.ctx?.config?.sharing; + + if (persisted?.userToken && persisted?.hubUrl) { + await hubRequestJson(persisted.hubUrl, persisted.userToken, "/api/v1/hub/leave", { method: "POST" }); + this.log.info("Notified Hub of voluntary leave (had token)"); + return; + } + + const hub = 
this.resolveHubConnection(); + if (hub?.userToken) { + await hubRequestJson(hub.hubUrl, hub.userToken, "/api/v1/hub/leave", { method: "POST" }); + this.log.info("Notified Hub of voluntary leave (resolved connection)"); + return; + } + + const hubUrl = persisted?.hubUrl || (sharing?.client?.hubAddress ? normalizeHubUrl(sharing.client.hubAddress) : null); + const userId = persisted?.userId; + const teamToken = sharing?.client?.teamToken; + if (hubUrl && userId && teamToken) { + const withdrawUrl = `${normalizeHubUrl(hubUrl)}/api/v1/hub/withdraw-pending`; + await fetch(withdrawUrl, { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ teamToken, userId }), + }); + this.log.info("Withdrew pending application from Hub"); + return; + } + + this.log.info("No hub connection to clean up (no token, no pending)"); + } catch (e) { + this.log.warn(`Failed to withdraw/leave Hub: ${e}`); + } + } + private async notifyHubShutdown(): Promise { try { const sharing = this.ctx?.config.sharing; if (!sharing || sharing.role !== "hub") return; - const hubPort = sharing.hub?.port ?? 18800; + const hubPort = this.getHubPort(); const authPath = path.join(this.dataDir, "hub-auth.json"); let adminToken: string | undefined; try { @@ -2834,13 +2935,17 @@ export class ViewerServer { const { hubUrl } = JSON.parse(body); if (!hubUrl) { this.jsonResponse(res, { ok: false, error: "hubUrl is required" }); return; } try { - const localIPs = this.getLocalIPs(); - localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); - const parsed = new URL(hubUrl.startsWith("http") ? hubUrl : `http://${hubUrl}`); - const targetPort = parsed.port || (parsed.protocol === "https:" ? 
"443" : "80"); - if (localIPs.includes(parsed.hostname) && targetPort === String(this.port)) { - this.jsonResponse(res, { ok: false, error: "cannot_join_self" }); - return; + const sharing = this.ctx?.config?.sharing; + if (sharing?.enabled && sharing.role === "hub") { + const selfHubPort = this.getHubPort(); + const localIPs = this.getLocalIPs(); + localIPs.push("127.0.0.1", "localhost", "0.0.0.0"); + const parsed = new URL(hubUrl.startsWith("http") ? hubUrl : `http://${hubUrl}`); + const targetPort = parsed.port || (parsed.protocol === "https:" ? "443" : "80"); + if (localIPs.includes(parsed.hostname) && targetPort === String(selfHubPort)) { + this.jsonResponse(res, { ok: false, error: "cannot_join_self" }); + return; + } } } catch {} const url = hubUrl.replace(/\/+$/, "") + "/api/v1/hub/info"; @@ -3078,10 +3183,9 @@ export class ViewerServer { this.log.info(`update-install: success! Updated to ${newVersion}`); this.jsonResponse(res, { ok: true, version: newVersion }); - // Trigger Gateway restart after response is sent setTimeout(() => { - this.log.info(`update-install: triggering gateway restart...`); - process.kill(process.pid, "SIGUSR1"); + this.log.info(`update-install: triggering gateway restart via SIGUSR1...`); + try { process.kill(process.pid, "SIGUSR1"); } catch (sig) { this.log.warn(`SIGUSR1 failed: ${sig}`); } }, 500); }); }); From ba2e0d07d26bdefcf3d41087eb2115cdafa5a948 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Sun, 22 Mar 2026 19:39:16 +0800 Subject: [PATCH 70/85] docs(memos-local): add comprehensive Team Sharing documentation across all docs and landing pages - README: expand Team Sharing section with capabilities table, multi-instance deployment, Viewer panel details - HUB-SHARING-GUIDE: add admin features, notifications table, multi-instance deployment, port auto-derivation - www/index.html: add Team Sharing section with Hub-Client architecture demo, admin controls, nav link, update tool count to 17 - docs/index.html: add 
Team Sharing sidebar group, feature card, full setup/admin/multi-instance/notification docs - openclaw.plugin.json: update description to include team sharing and collaboration tools Made-with: Cursor --- .../memos-local-openclaw/HUB-SHARING-GUIDE.md | 67 +++++++++++-- apps/memos-local-openclaw/README.md | 93 +++++++++++++++++-- apps/memos-local-openclaw/docs/index.html | 89 ++++++++++++++++++ .../memos-local-openclaw/openclaw.plugin.json | 2 +- apps/memos-local-openclaw/www/docs/index.html | 89 ++++++++++++++++++ apps/memos-local-openclaw/www/index.html | 90 +++++++++++++++++- 6 files changed, 409 insertions(+), 21 deletions(-) diff --git a/apps/memos-local-openclaw/HUB-SHARING-GUIDE.md b/apps/memos-local-openclaw/HUB-SHARING-GUIDE.md index cb4233ce0..d550cac1e 100644 --- a/apps/memos-local-openclaw/HUB-SHARING-GUIDE.md +++ b/apps/memos-local-openclaw/HUB-SHARING-GUIDE.md @@ -4,12 +4,19 @@ This guide explains how to use the v4 team sharing workflow in `memos-local-open ## What v4 adds -The plugin now supports a **Server-Client** sharing model: +The plugin now supports a **Hub-Client** sharing model with comprehensive team management: - **Local memory stays local** unless you explicitly share it -- **One team server** stores team-shared tasks, memories, and skills -- **Clients connect to the server** with a user token -- **Admins approve users** before they can access team data +- **One Hub** stores team-shared tasks, memories, and skills +- **Clients connect to the Hub** and submit join requests for admin approval +- **Hub port auto-derivation** — Hub port is automatically derived from the gateway port (`gatewayPort + 11`), avoiding port conflicts in multi-instance setups +- **Port retry** — If the derived/configured port is in use, the Hub retries up to 3 consecutive ports +- **Admins approve users** before they can access team data; admins can promote, demote, and remove members +- **Self-removal prevention** — Admins cannot accidentally remove themselves +- 
**Notification system** — Role changes (promoted/demoted), resource events (shared/unshared/removed), and Hub status changes trigger real-time notifications +- **Pending withdrawal** — Clients can cancel pending join requests when switching roles +- **Leave team** — Clients can leave a team with confirmation; proper cleanup and Hub notification +- **Graceful role transitions** — Switching between Hub/Client triggers confirmation prompts, connection cleanup, and restart - **Search scope** can be `local`, `group`, or `all` - **Shared skills** can be published to a group or to the whole team @@ -95,9 +102,9 @@ Use this on the first machine in the team. "enabled": true, "role": "hub", "hub": { - "port": 18800, "teamName": "My Team", "teamToken": "${MEMOS_TEAM_TOKEN}" + // port is auto-derived from gateway port; set explicitly only if needed } } } @@ -118,10 +125,13 @@ Use this on the first machine in the team. The team admin can: -- approve or reject join requests -- review pending users +- approve or reject join requests from the pending users panel +- promote members to admin or demote admins to regular members (affected users receive notifications) +- remove team members (with confirmation prompt; self-removal is prevented) +- view team overview: team name, total members, active members - see team connection and server information - manage who can access shared team data +- when disabling sharing (shutting down Hub), all connected clients receive a `hub_shutdown` notification ## Option B: Join an existing team server @@ -335,6 +345,49 @@ In the current plugin build: If you want predictable production behavior today, configure your embedding and summarizer providers explicitly. 
+## Multi-Instance Deployment + +When running multiple OpenClaw instances on the same machine (e.g., personal + work): + +### Port isolation + +- **Viewer port**: Auto-derived per instance — no conflicts +- **Hub port**: Auto-derived as `gatewayPort + 11` (e.g., gateway `18789` → Hub `18800`, gateway `19001` → Hub `19012`) +- **Port retry**: If the auto-derived port is in use, the Hub tries up to 3 consecutive ports automatically + +### Session isolation + +Each Viewer instance uses a unique cookie name based on its port, so you can be logged into multiple Viewer instances simultaneously in the same browser. + +### Database isolation + +Each OpenClaw instance uses its own state directory and database. Configure via `OPENCLAW_STATE_DIR` environment variable or `--state-dir` flag. + +### Example: Dual-instance setup + +```bash +# Instance 1 (Personal): gateway on 18789, viewer on 18799, hub on 18800 +OPENCLAW_CONFIG_PATH=~/oc-personal/openclaw.json openclaw gateway start + +# Instance 2 (Work): gateway on 19001, viewer on 19011, hub on 19012 +OPENCLAW_CONFIG_PATH=~/oc-work/openclaw.json openclaw gateway start +``` + +## Notifications + +The system sends real-time notifications for these events: + +| Event | Recipient | Message | +|---|---|---| +| Role promoted (to admin) | The promoted user | "You have been promoted to admin" | +| Role demoted (to member) | The demoted user | "You have been changed to regular member" | +| Resource shared | Team members | Resource name with sharing action | +| Resource unshared | Team members | Resource name with unsharing action | +| Resource removed | Resource owner | Resource name with removal action | +| Hub shutdown | All connected clients | Hub has been shut down | +| Member joined | Admin | New member has joined the team | +| Member left | Admin | Member has left the team | + ## Troubleshooting ### Viewer says sharing is disabled diff --git a/apps/memos-local-openclaw/README.md b/apps/memos-local-openclaw/README.md index 
e560d9422..41d99b504 100644 --- a/apps/memos-local-openclaw/README.md +++ b/apps/memos-local-openclaw/README.md @@ -49,12 +49,16 @@ Persistent local conversation memory for [OpenClaw](https://github.com/nicepkg/o - **LLM fallback chain** — `skillSummarizer` → `summarizer` → OpenClaw native model (auto-detected from `openclaw.json`). If all configured models fail, the next in chain is tried automatically ### Team Sharing (v4) -- **Server-Client collaboration** — One team server stores shared tasks, memories, and skills; clients keep private data local and query the server only when needed +- **Hub-Client architecture** — One Hub stores shared data; clients keep private data local and query the Hub on demand. Roles can be switched dynamically with proper confirmation and cleanup +- **Hub port auto-derivation** — Hub port derived from gateway port (`gatewayPort + 11`) to avoid conflicts in multi-instance setups; automatic port retry on `EADDRINUSE` +- **Admin approval flow** — Join requests require admin approval; admins can promote, demote, and remove members (with self-removal prevention) +- **Notification system** — Role change notifications (promoted/demoted), resource sharing notifications (shared/unshared/removed) with localized messages, Hub shutdown alerts - **Scoped retrieval** — `memory_search` and `skill_search` support `local`, `group`, and `all` search scopes -- **Admin approval flow** — Team members are approved by the admin before they can access shared data - **Task sharing** — `task_share` / `task_unshare` push or remove task memories from the team without changing local private storage - **Skill publish/pull** — Skills can be published to team visibility scopes and pulled back locally as full bundles for offline reuse -- **Viewer support** — The Memory Viewer now includes team connection state, pending approvals, scoped search, task share controls, and team skill pull actions +- **Graceful state transitions** — Client-to-Hub switch triggers 
confirmation, pending request withdrawal, connection cleanup, and automatic restart +- **Multi-instance support** — Viewer port, Hub port, sessions, and databases are all isolated per instance; supports running multiple OpenClaw instances on the same machine +- **Viewer integration** — Full team management UI: connection state, member management, pending approvals, scoped search, task share controls, skill pull, notification feed, and setup guide ### Memory Migration — Reconnect 🦐 - **One-click import** — Seamlessly migrate OpenClaw's native built-in memories (SQLite + JSONL) into the MemOS intelligent memory system @@ -259,15 +263,84 @@ memos-local: started (embedding: openai_compatible) ╚══════════════════════════════════════════╝ ``` -## Team Sharing Quick Start (v4) +## Team Sharing (v4) -If you want team sharing, do this after the basic install works: +Team Sharing turns multiple OpenClaw instances into a collaborative memory network. One instance serves as the **Hub** (team server), others connect as **Clients**. Private data stays local; only explicitly shared tasks, memories, and skills are visible to the team. -1. **Pick a server machine** and set `sharing.enabled=true`, `sharing.role="hub"`, plus `sharing.hub.port`, `sharing.hub.teamName`, and `sharing.hub.teamToken`. -2. **Configure each client machine** with `sharing.enabled=true`, `sharing.role="client"`, `sharing.client.hubAddress`, and `sharing.client.userToken`. -3. **Open Viewer → Settings → Team Sharing** to verify connection state, current user, role, and groups. -4. **Search with scope** `Group` or `All` in Memories and Skills to query team data alongside local data. -5. **Share tasks** from the Tasks view and **pull skills** from the Skills view. 
+### Key Capabilities + +| Capability | Description | +|---|---| +| **Hub / Client architecture** | One Hub stores shared data; clients keep private data local and query the Hub on demand | +| **Hub port auto-derivation** | Hub port is automatically derived from the gateway port (`gatewayPort + 11`), avoiding port conflicts in multi-instance setups. Explicit `hub.port` config overrides this. | +| **Port retry on conflict** | If the derived/configured Hub port is in use (`EADDRINUSE`), the server automatically retries up to 3 consecutive ports | +| **Admin approval flow** | New members submit join requests; admin approves/rejects from the Viewer | +| **Self-removal prevention** | Admins cannot accidentally remove themselves from the team | +| **Role change notifications** | When an admin promotes/demotes a member, the affected user receives a notification | +| **Resource notifications** | Shared/unshared/removed resources trigger localized notifications with resource names | +| **Pending withdrawal** | Clients can cancel pending join requests when switching roles or disabling sharing | +| **Graceful role transitions** | Switching from Client to Hub (or vice versa) triggers confirmation prompts, proper cleanup of remote connections, and restart | +| **Hub shutdown notification** | When a Hub owner disables sharing, all connected clients receive a `hub_shutdown` notification | +| **Leave team** | Clients can leave a team with a confirmation dialog; the Hub is notified and the client's data is cleaned up | +| **Scoped retrieval** | `memory_search` and `skill_search` support `local`, `group`, and `all` search scopes | +| **Task sharing** | Push/remove task memories to/from the team | +| **Skill publish/pull** | Publish skills to team visibility; pull team skills locally as full bundles for offline use | + +### Quick Setup + +**Option A — Start a Hub (team server):** + +```jsonc +{ + "config": { + "sharing": { + "enabled": true, + "role": "hub", + "hub": { + "teamName": 
"My Team", + "teamToken": "${MEMOS_TEAM_TOKEN}" + // port is auto-derived; set explicitly only if needed + } + } + } +} +``` + +**Option B — Join as Client:** + +```jsonc +{ + "config": { + "sharing": { + "enabled": true, + "role": "client", + "client": { + "hubAddress": "192.168.1.100:18800" + } + } + } +} +``` + +You can also configure sharing entirely through the **Viewer → Settings → Team Sharing** panel — no need to edit `openclaw.json` manually. + +### Multi-Instance Deployment + +When running multiple OpenClaw instances on the same machine (e.g., personal + work): + +- **Viewer port**: Each instance derives its Viewer port from the gateway port, so they won't conflict +- **Hub port**: Auto-derived as `gatewayPort + 11` (e.g., gateway `18789` → Hub `18800`, gateway `19001` → Hub `19012`) +- **Session isolation**: Each instance uses a separate cookie name based on its Viewer port, so multiple Viewers can be logged in simultaneously +- **Database isolation**: Each instance uses its own `memos.db` under its respective state directory + +### Viewer Team Sharing Panel + +The **Settings → Team Sharing** panel provides a complete management interface: + +- **Hub mode**: Team name, member count, active members, pending approvals, admin controls (approve/reject/promote/demote/remove) +- **Client mode**: Connection status, team info, leave team button, notification feed +- **Setup guide cards**: Always visible — choose "Host a Team" or "Join a Team" with step-by-step instructions +- **Real-time notifications**: Role changes, resource sharing events, Hub status changes For the full end-user workflow, see [`HUB-SHARING-GUIDE.md`](./HUB-SHARING-GUIDE.md). 
diff --git a/apps/memos-local-openclaw/docs/index.html b/apps/memos-local-openclaw/docs/index.html index 3c8d401e2..9073c0fa3 100644 --- a/apps/memos-local-openclaw/docs/index.html +++ b/apps/memos-local-openclaw/docs/index.html @@ -156,6 +156,7 @@ 快速开始Quick Start 记忆迁移Migration API + 团队共享Sharing 配置Config @@ -189,6 +190,13 @@ + @@ -216,6 +224,7 @@

    MemOS

    💰

    分级模型Tiered Models

    Embedding/摘要/技能可独立配置不同模型。Each pipeline configurable with different models.

    🤝

    多智能体协同Multi-Agent

    记忆隔离 + 公共记忆 + 技能共享,多 Agent 协同进化。Memory isolation + public memory + skill sharing for collective evolution.

    🦐

    原生记忆导入Native Memory Import

    一键迁移 OpenClaw 内置记忆,智能去重、断点续传、实时进度。One-click migration from OpenClaw built-in memories with smart dedup, resume, and real-time progress.

    +
    👥

    团队共享中心Team Sharing Hub

    Hub-Client 架构,跨实例共享记忆/任务/技能。审批流程、角色管理、实时通知、端口自动推导。Hub-Client architecture for cross-instance sharing. Approval flow, role management, real-time notifications, auto port derivation.

    🔗

    LLM 智能降级LLM Fallback Chain

    技能模型 → 摘要模型 → OpenClaw 原生模型三级自动降级,零手动干预。Skill model → summarizer → OpenClaw native model, auto-fallback with zero manual intervention.

    ✏️

    任务/技能 CRUDTask & Skill CRUD

    列表卡片直接编辑、删除、重试技能生成、切换可见性。Edit, delete, retry skill gen, toggle visibility — all from list cards.

    @@ -451,6 +460,86 @@

    Viewer HTTP

    +
    +

    👥 团队共享中心👥 Team Sharing Hub

    +

    Team Sharing 将多个 OpenClaw 实例连接为协作网络。一个实例作为 Hub(团队服务端),其他实例作为 Client 连接。私有数据始终留在本地,仅明确共享的任务、记忆和技能对团队可见。Team Sharing connects multiple OpenClaw instances into a collaborative network. One instance serves as the Hub (team server) while others connect as Clients. Private data stays local — only explicitly shared tasks, memories, and skills are visible to the team.

    + +
    +
    🏗️

    Hub-Client

    一个 Hub 存储共享数据,客户端按需查询。角色可动态切换。One Hub stores shared data; clients query on demand. Roles switchable dynamically.

    +
    🔌

    端口自动推导Auto Port

    Hub 端口自动从网关端口推导(+11),冲突时重试最多 3 次。Hub port auto-derived from gateway port (+11); retries up to 3 times on conflict.

    +

    审批流程Approval Flow

    新成员提交加入申请,管理员审批后方可访问团队数据。New members submit join requests; admin approval required before accessing team data.

    +
    🔔

    实时通知Notifications

    角色变更、资源共享/移除、Hub 上下线等事件即时通知。Instant notifications for role changes, resource sharing/removal, Hub status changes.

    +
    + +

    配置Setup

    +

    启动 Hub(团队服务端)Start a Hub (Team Server)

    +
    {
    +  "sharing": {
    +    "enabled": true,
    +    "role": "hub",
    +    "hub": {
    +      "teamName": "My Team",
    +      "teamToken": "${MEMOS_TEAM_TOKEN}"
    +      // port auto-derived from gateway; set explicitly only if needed
    +    }
    +  }
    +}json
    + +

    加入 Hub(客户端)Join a Hub (Client)

    +
    {
    +  "sharing": {
    +    "enabled": true,
    +    "role": "client",
    +    "client": {
    +      "hubAddress": "192.168.1.100:18800"
    +    }
    +  }
    +}json
    +
    也可以通过 Viewer → 设置 → 团队共享 面板直接配置,无需手动编辑 JSON。You can also configure sharing through the Viewer → Settings → Team Sharing panel without editing JSON.
    + +

    管理员功能Admin Features

    + + + + + + + +
    功能Feature说明Description
    审批加入Approve/Reject审批或拒绝待审用户Approve or reject pending members
    提升/降级Promote/Demote提升成员为管理员或降级为普通成员;被操作用户收到通知Promote members to admin or demote to regular member; affected users receive notifications
    移除成员Remove Member移除团队成员(带确认弹窗,不可移除自己)Remove team members (with confirmation, self-removal prevented)
    团队概览Team Overview查看团队名称、总成员数、活跃成员数View team name, total members, active member count
    Hub 关闭通知Shutdown Notify关闭 Hub 时自动通知所有客户端All clients notified automatically when Hub shuts down
    + +

    多实例部署Multi-Instance Deployment

    +

    同一台机器上可运行多个 OpenClaw 实例(如个人 + 工作),端口和数据完全隔离:Run multiple OpenClaw instances on the same machine (e.g., personal + work) with full isolation:

    + + + + + + +
    资源Resource隔离方式Isolation示例Example
    Viewer自动推导端口Auto-derived port18799 / 19011
    HubgatewayPort + 1118800 / 19012
Cookie基于端口的唯一名称Port-based unique namememos_token_18799 / memos_token_19011
    Database独立 state 目录Separate state dir~/.openclaw/memos-local/ / ~/oc-work/memos-local/
    + +

    通知事件Notification Events

    + + + + + + + + +
    事件Event接收方Recipient
    role_promoted被提升的用户Promoted user
    role_demoted被降级的用户Demoted user
    resource_shared团队成员Team members
    resource_removed资源所有者Resource owner
    hub_shutdown所有客户端All clients
    member_joined / left管理员Admin
    + +

    团队共享 API 工具Team Sharing API Tools

    + + + + + + + +
    Tool说明Description
    task_share / task_unshare将任务推送到 / 移除出团队Push task to / remove from team
    skill_publish / skill_unpublish发布 / 取消发布技能到团队Publish / unpublish skill to team
    network_memory_detail获取团队记忆完整内容Fetch full team memory content
    network_skill_pull拉取团队技能到本地Pull team skill bundle locally
    network_team_info查看当前团队连接状态Show current team connection state
    +
    完整的团队共享工作流请参阅 HUB-SHARING-GUIDE.mdFor the complete team sharing workflow, see HUB-SHARING-GUIDE.md.
    +
    +

    多智能体协同Multi-Agent Collaboration

    MemOS 原生支持多 Agent 场景。每个 Agent 的记忆和任务通过 owner 字段隔离(格式 agent:{agentId}),检索时自动过滤为当前 Agent + public。MemOS natively supports multi-agent scenarios. Each agent's memories and tasks are isolated via an owner field (agent:{agentId}); retrieval automatically filters to current agent + public.

    diff --git a/apps/memos-local-openclaw/openclaw.plugin.json b/apps/memos-local-openclaw/openclaw.plugin.json index 8f74c33c8..0477d2036 100644 --- a/apps/memos-local-openclaw/openclaw.plugin.json +++ b/apps/memos-local-openclaw/openclaw.plugin.json @@ -1,7 +1,7 @@ { "id": "memos-local-openclaw-plugin", "name": "MemOS Local Memory", - "description": "Full-write local conversation memory with hybrid search (RRF + MMR + recency). Provides memory_search, memory_get, task_summary, memory_timeline, memory_viewer for layered retrieval.", + "description": "Full-write local conversation memory with hybrid search (RRF + MMR + recency), task summarization, skill evolution, and team sharing (Hub-Client). Provides memory_search, memory_get, task_summary, skill_search, task_share, network_skill_pull, network_team_info, memory_viewer for layered retrieval and team collaboration.", "kind": "memory", "version": "0.1.12", "skills": [ diff --git a/apps/memos-local-openclaw/www/docs/index.html b/apps/memos-local-openclaw/www/docs/index.html index 3c8d401e2..9073c0fa3 100644 --- a/apps/memos-local-openclaw/www/docs/index.html +++ b/apps/memos-local-openclaw/www/docs/index.html @@ -156,6 +156,7 @@ 快速开始Quick Start 记忆迁移Migration API + 团队共享Sharing 配置Config @@ -189,6 +190,13 @@ + @@ -216,6 +224,7 @@

    MemOS

    💰

    分级模型Tiered Models

    Embedding/摘要/技能可独立配置不同模型。Each pipeline configurable with different models.

    🤝

    多智能体协同Multi-Agent

    记忆隔离 + 公共记忆 + 技能共享,多 Agent 协同进化。Memory isolation + public memory + skill sharing for collective evolution.

    🦐

    原生记忆导入Native Memory Import

    一键迁移 OpenClaw 内置记忆,智能去重、断点续传、实时进度。One-click migration from OpenClaw built-in memories with smart dedup, resume, and real-time progress.

    +
    👥

    团队共享中心Team Sharing Hub

    Hub-Client 架构,跨实例共享记忆/任务/技能。审批流程、角色管理、实时通知、端口自动推导。Hub-Client architecture for cross-instance sharing. Approval flow, role management, real-time notifications, auto port derivation.

    🔗

    LLM 智能降级LLM Fallback Chain

    技能模型 → 摘要模型 → OpenClaw 原生模型三级自动降级,零手动干预。Skill model → summarizer → OpenClaw native model, auto-fallback with zero manual intervention.

    ✏️

    任务/技能 CRUDTask & Skill CRUD

    列表卡片直接编辑、删除、重试技能生成、切换可见性。Edit, delete, retry skill gen, toggle visibility — all from list cards.

    @@ -451,6 +460,86 @@

    Viewer HTTP

    +
    +

    👥 团队共享中心👥 Team Sharing Hub

    +

    Team Sharing 将多个 OpenClaw 实例连接为协作网络。一个实例作为 Hub(团队服务端),其他实例作为 Client 连接。私有数据始终留在本地,仅明确共享的任务、记忆和技能对团队可见。Team Sharing connects multiple OpenClaw instances into a collaborative network. One instance serves as the Hub (team server) while others connect as Clients. Private data stays local — only explicitly shared tasks, memories, and skills are visible to the team.

    + +
    +
    🏗️

    Hub-Client

    一个 Hub 存储共享数据,客户端按需查询。角色可动态切换。One Hub stores shared data; clients query on demand. Roles switchable dynamically.

    +
    🔌

    端口自动推导Auto Port

    Hub 端口自动从网关端口推导(+11),冲突时重试最多 3 次。Hub port auto-derived from gateway port (+11); retries up to 3 times on conflict.

    +

    审批流程Approval Flow

    新成员提交加入申请,管理员审批后方可访问团队数据。New members submit join requests; admin approval required before accessing team data.

    +
    🔔

    实时通知Notifications

    角色变更、资源共享/移除、Hub 上下线等事件即时通知。Instant notifications for role changes, resource sharing/removal, Hub status changes.

    +
    + +

    配置Setup

    +

    启动 Hub(团队服务端)Start a Hub (Team Server)

    +
    {
    +  "sharing": {
    +    "enabled": true,
    +    "role": "hub",
    +    "hub": {
    +      "teamName": "My Team",
    +      "teamToken": "${MEMOS_TEAM_TOKEN}"
    +      // port auto-derived from gateway; set explicitly only if needed
    +    }
    +  }
    +}json
    + +

    加入 Hub(客户端)Join a Hub (Client)

    +
    {
    +  "sharing": {
    +    "enabled": true,
    +    "role": "client",
    +    "client": {
    +      "hubAddress": "192.168.1.100:18800"
    +    }
    +  }
    +}json
    +
    也可以通过 Viewer → 设置 → 团队共享 面板直接配置,无需手动编辑 JSON。You can also configure sharing through the Viewer → Settings → Team Sharing panel without editing JSON.
    + +

    管理员功能Admin Features

    + + + + + + + +
    功能Feature说明Description
    审批加入Approve/Reject审批或拒绝待审用户Approve or reject pending members
    提升/降级Promote/Demote提升成员为管理员或降级为普通成员;被操作用户收到通知Promote members to admin or demote to regular member; affected users receive notifications
    移除成员Remove Member移除团队成员(带确认弹窗,不可移除自己)Remove team members (with confirmation, self-removal prevented)
    团队概览Team Overview查看团队名称、总成员数、活跃成员数View team name, total members, active member count
    Hub 关闭通知Shutdown Notify关闭 Hub 时自动通知所有客户端All clients notified automatically when Hub shuts down
    + +

    多实例部署Multi-Instance Deployment

    +

    同一台机器上可运行多个 OpenClaw 实例(如个人 + 工作),端口和数据完全隔离:Run multiple OpenClaw instances on the same machine (e.g., personal + work) with full isolation:

    + + + + + + +
    资源Resource隔离方式Isolation示例Example
    Viewer自动推导端口Auto-derived port18799 / 19011
    HubgatewayPort + 1118800 / 19012
    Cookie基于端口的唯一名称Port-based unique namememos_session_18799 / memos_session_19011
    Database独立 state 目录Separate state dir~/.openclaw/memos-local/ / ~/oc-work/memos-local/
    + +

    通知事件Notification Events

    + + + + + + + + +
    事件Event接收方Recipient
    role_promoted被提升的用户Promoted user
    role_demoted被降级的用户Demoted user
    resource_shared团队成员Team members
    resource_removed资源所有者Resource owner
    hub_shutdown所有客户端All clients
    member_joined / left管理员Admin
    + +

    团队共享 API 工具Team Sharing API Tools

    + + + + + + + +
    Tool说明Description
    task_share / task_unshare将任务推送到 / 移除出团队Push task to / remove from team
    skill_publish / skill_unpublish发布 / 取消发布技能到团队Publish / unpublish skill to team
    network_memory_detail获取团队记忆完整内容Fetch full team memory content
    network_skill_pull拉取团队技能到本地Pull team skill bundle locally
    network_team_info查看当前团队连接状态Show current team connection state
    +
    完整的团队共享工作流请参阅 HUB-SHARING-GUIDE.mdFor the complete team sharing workflow, see HUB-SHARING-GUIDE.md.
    +
    +

    多智能体协同Multi-Agent Collaboration

    MemOS 原生支持多 Agent 场景。每个 Agent 的记忆和任务通过 owner 字段隔离(格式 agent:{agentId}),检索时自动过滤为当前 Agent + public。MemOS natively supports multi-agent scenarios. Each agent's memories and tasks are isolated via an owner field (agent:{agentId}); retrieval automatically filters to current agent + public.

    diff --git a/apps/memos-local-openclaw/www/index.html b/apps/memos-local-openclaw/www/index.html index 37defcca4..6c5482c55 100644 --- a/apps/memos-local-openclaw/www/index.html +++ b/apps/memos-local-openclaw/www/index.html @@ -325,6 +325,7 @@ 能力Features 架构Architecture 快速开始Get Started + 团队共享Team Sharing 记忆迁移Migration 文档Docs @@ -396,7 +397,7 @@

    没有记忆的 Agent,每次都
    🧠

    全量可视化管理Full Visualization

    内置管理面板,记忆 / 任务 / 技能完全透明可控。Built-in web dashboard — memories, tasks, and skills fully transparent and controllable.

    任务总结与技能进化Task Summary & Skill Evolution

    碎片对话自动归纳为结构化任务,再提炼为可复用技能并持续升级。从「记住」到「学会」,同一个坑不踩两次。Fragmented conversations auto-organized into structured tasks, then distilled into reusable skills that evolve over time. From "remembering" to "mastering" — never repeat the same mistake twice.

    💰

    分级模型 · 省钱Tiered Models

    Embedding 轻量、摘要中等、技能高质量——按需分配,大幅省钱。Lightweight, mid-tier, and high-quality models layered by purpose — maximum performance at minimum cost.

    -
    🤝

    多智能体协同Multi-Agent Collaboration

    记忆隔离 + 公共记忆 + 技能共享。多个 Agent 各有私域记忆,又能共享知识与技能,协同进化。Memory isolation + public memory + skill sharing. Each agent has private memories while sharing knowledge and skills for collective evolution.

    +
    🤝

    团队共享 & 多智能体协同Team Sharing & Multi-Agent

    Hub-Client 架构跨实例共享记忆、任务与技能。记忆隔离 + 公共记忆 + 团队审批 + 实时通知,多 Agent 协同进化。Hub-Client architecture for cross-instance sharing of memories, tasks, and skills. Memory isolation + public memory + team approval + real-time notifications for collaborative evolution.

    🦞

    OpenClaw 原生记忆导入Native Memory Import

    一键迁移 OpenClaw 内置记忆,智能去重、断点续传、实时进度。你过往的记忆不会丢失,再续前缘。One-click migration from OpenClaw built-in memories. Smart dedup, resume anytime, real-time progress. Your past memories, never lost.

    @@ -705,7 +706,7 @@

    适配你的技术栈
    -

    12 个智能工具12 Smart Tools

    +

    17 个智能工具17 Smart Tools

    🧠

    auto_recall

    每轮自动回忆Auto recall each turn

    🔍

    memory_search

    记忆检索Memory search

    @@ -719,6 +720,89 @@

    适配你的技术栈
    🌍

    skill_publish

    公开技能Publish skill

    🔒

    skill_unpublish

    取消公开Unpublish skill

    🌐

    memory_viewer

    管理面板Dashboard

    +
    📤

    task_share

    共享任务Share task

    +
    📥

    task_unshare

    取消共享Unshare task

    +
    🔗

    network_memory_detail

    团队记忆详情Team memory detail

    +
    ⬇️

    network_skill_pull

    拉取团队技能Pull team skill

    +
    👥

    network_team_info

    团队信息Team info

    +
    +

    + + +
    + + +
    +
    +
    +
    + 👥 + 团队共享中心Team Sharing Hub +
    +

    多实例协作 —
    让团队的 Agent 共同进化
    Multi-Instance Collaboration —
    Your Team's Agents Evolve Together

    +

    Hub-Client 架构,一个服务端存储共享数据,客户端按需查询。私有数据始终留在本地,只有明确共享的内容才对团队可见。端口自动推导,多实例无冲突。Hub-Client architecture — one Hub stores shared data while clients query on demand. Private data stays local; only explicitly shared content is visible to the team. Auto-derived ports, zero conflicts for multi-instance setups.

    +
    + +
    +
    +
    +

    Hub-Client 协作架构Hub-Client Architecture

    +

    一台机器作为 Hub 服务端管理团队数据,其他实例作为 Client 连接。加入需要管理员审批,角色可随时切换。支持同一台机器运行多个 OpenClaw 实例(如个人 + 工作),端口和数据完全隔离。One machine serves as the Hub managing team data while others connect as Clients. Joining requires admin approval, and roles can be switched anytime. Supports multiple OpenClaw instances on the same machine (e.g., personal + work) with full port and data isolation.

    +
    审批流程Approval Flow端口自动推导Auto Port角色切换Role Switch多实例隔离Multi-Instance
    +
    +
    +
    +
    Hub-Client Architecture
    +
    Hub (Personal):  gateway:18789  hub:18800
    +  teamName: "My Team"   members: 3
    +  ✓ admin: tangbo (online)
    +  ✓ member: alice (online)
    +  ⏳ pending: bob
    +
    +Client (Work):   gateway:19001  hub:19012
    +  hubAddress: "192.168.1.100:18800"
    +  ✓ connected as: tangbo-work
    +
    +// Port auto-derived: gatewayPort + 11
    +// Retry on EADDRINUSE (up to 3 ports)
    +
    +
    +
    + +
    +
    +

    管理员团队管理Admin Team Management

    +

    管理员通过 Viewer 审批加入请求、提升/降级成员角色、移除成员。角色变更自动通知被操作用户。Hub 关闭时所有客户端收到下线通知。完善的退出确认和状态清理机制。Admins approve join requests, promote/demote members, and remove users through the Viewer. Role changes trigger automatic notifications. Hub shutdown alerts all clients. Complete leave confirmation and state cleanup mechanisms.

    +
    角色通知Role Notify资源通知Resource Notify退出确认Leave ConfirmHub 下线提醒Hub Shutdown
    +
    +
    +
    +
    Admin Controls
    +
    Admin Panel:
    +  approve(bob)       ✓ approved
    +  promote(alice)     → 🔔 role_promoted
    +  demote(alice)      → 🔔 role_demoted
    +  remove(bob)        → confirm → ✓ removed
    +  remove(self)       → ✗ blocked
    +
    +Hub Shutdown:
    +  → 📢 hub_shutdown → all clients
    +
    +Client Leave:
    +  → confirm → notify hub → cleanup → 
    +
    +
    +
    +
    + +
    +

    🔄 团队共享支持的操作🔄 Team Sharing Operations

    +
    +
    🔍
    跨域检索Scoped Search

    Local / Group / All 三级搜索范围,本地 + 团队记忆混合召回。Local / Group / All search scopes with hybrid local + team recall.

    +
    📤
    任务共享Task Sharing

    将本地任务及记忆推送到团队,随时取消共享。Push local tasks and memories to team, unshare anytime.

    +
    技能发布与拉取Skill Publish & Pull

    发布技能到团队,其他成员拉取完整 bundle 到本地离线使用。Publish skills to team; members pull full bundles locally for offline use.

    +
    🛡️
    优雅降级Graceful Degradation

    Hub 离线时本地记忆和技能正常运作,搜索自动降级为本地结果。Local memory and skills work normally when Hub is offline; search auto-falls back to local results.

    +
    @@ -888,7 +972,7 @@

    📊 Viewer 管理📊 Vi

    让你的 OpenClaw
    越用越聪明
    Give Your OpenClaw
    Lasting Intelligence

    -

    完全本地化 · 全量可视化 · 任务与技能自进化 · 多智能体协同 · 记忆迁移100% local · Full dashboard · Task & skill evolution · Multi-agent collaboration · Memory migration

    +

    完全本地化 · 全量可视化 · 任务与技能自进化 · 团队共享中心 · 多智能体协同 · 记忆迁移100% local · Full dashboard · Task & skill evolution · Team sharing hub · Multi-agent collaboration · Memory migration

    立即安装 →Get Started → 查看文档Docs From 6c2ba451a00cdfa0090ee6d9d537702a8876c0ce Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Sun, 22 Mar 2026 19:45:16 +0800 Subject: [PATCH 71/85] =?UTF-8?q?docs(memos-local):=20sync=20install=20ins?= =?UTF-8?q?tructions=20with=20openmem.net=20=E2=80=94=20dual-platform=20ta?= =?UTF-8?q?bs=20&=20one-liner=20install?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - www/index.html: add macOS/Linux + Windows tab switcher for hero terminal and Quick Start install section; update install description to match openmem.net - README.md: simplify install to one-liner curl/powershell commands matching openmem.net; add Windows PowerShell install; update homepage links Made-with: Cursor --- apps/memos-local-openclaw/README.md | 28 ++++++--------- apps/memos-local-openclaw/www/index.html | 45 ++++++++++++++++++------ 2 files changed, 46 insertions(+), 27 deletions(-) diff --git a/apps/memos-local-openclaw/README.md b/apps/memos-local-openclaw/README.md index 41d99b504..f2a9df64f 100644 --- a/apps/memos-local-openclaw/README.md +++ b/apps/memos-local-openclaw/README.md @@ -9,7 +9,7 @@ Persistent local conversation memory for [OpenClaw](https://github.com/nicepkg/o **Full-write | Hybrid Search | Task Summarization & Skill Evolution | Team Sharing | Memory Viewer** -> **Homepage:** 🌐 [Homepage](https://memos-claw.openmem.net) · 📖 [Documentation](https://memos-claw.openmem.net/docs/index.html) · 📦 [NPM](https://www.npmjs.com/package/@memtensor/memos-local-openclaw-plugin) +> 🌐 [Homepage](https://memos-claw.openmem.net) · 📖 [Documentation](https://memos-claw.openmem.net/docs/) · 📦 [NPM](https://www.npmjs.com/package/@memtensor/memos-local-openclaw-plugin) · 🛠 [Troubleshooting](https://memos-claw.openmem.net/docs/troubleshooting.html) ## Why MemOS @@ -91,35 +91,29 @@ Persistent local conversation memory for [OpenClaw](https://github.com/nicepkg/o ### 1. 
Install -**Step 0 — Prepare build environment (macOS / Linux):** +One command installs the plugin, all dependencies, and build tools automatically. Supports auto-upgrade to the latest version. -This plugin uses `better-sqlite3`, a native C/C++ module. On **macOS** and **Linux**, prebuilt binaries may not be available, so **install C++ build tools first** to ensure a smooth installation: +**macOS / Linux:** ```bash -# macOS -xcode-select --install - -# Linux (Ubuntu / Debian) -sudo apt install build-essential python3 +curl -fsSL https://cdn.memtensor.com.cn/memos-local-openclaw/install.sh | bash ``` -> **Windows users:** `better-sqlite3` ships prebuilt binaries for Windows + Node.js LTS, so you can usually skip this step and go directly to Step 1. If installation still fails, install [Visual Studio Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) (select "C++ build tools" workload). -> -> Already have build tools? Skip to Step 1. Not sure? Run the install command above — it's safe to re-run. -> -> **Still having issues?** See the [Troubleshooting](#troubleshooting) section, the [detailed troubleshooting guide](https://memtensor.github.io/MemOS/apps/memos-local-openclaw/docs/troubleshooting.html), or the [official better-sqlite3 troubleshooting docs](https://github.com/WiseLibs/better-sqlite3/blob/master/docs/troubleshooting.md). +**Windows (PowerShell):** -**Step 1 — Install the plugin:** +```powershell +powershell -c "irm https://cdn.memtensor.com.cn/memos-local-openclaw/install.ps1 | iex" +``` + +**Alternative — Install via OpenClaw CLI:** ```bash openclaw plugins install @memtensor/memos-local-openclaw-plugin ``` -The plugin is installed under `~/.openclaw/extensions/memos-local-openclaw-plugin` and registered as `memos-local-openclaw-plugin`. Dependencies and `better-sqlite3` native module are built automatically during installation. - > **Note:** The Memory Viewer starts only when the **OpenClaw gateway** is running. 
After install, **configure** `openclaw.json` (step 2) and **start the gateway** (step 3); the viewer will then be available at `http://127.0.0.1:18799`. > -> **Installation failed?** If `better-sqlite3` compilation fails during install, manually rebuild after ensuring build tools are installed: +> **Installation failed?** See the [Troubleshooting](#troubleshooting) section, the [detailed troubleshooting guide](https://memos-claw.openmem.net/docs/troubleshooting.html), or the [official better-sqlite3 troubleshooting docs](https://github.com/WiseLibs/better-sqlite3/blob/master/docs/troubleshooting.md). You can also try manually rebuilding the native module: > ```bash > cd ~/.openclaw/extensions/memos-local-openclaw-plugin && npm rebuild better-sqlite3 > ``` diff --git a/apps/memos-local-openclaw/www/index.html b/apps/memos-local-openclaw/www/index.html index 6c5482c55..37b15c616 100644 --- a/apps/memos-local-openclaw/www/index.html +++ b/apps/memos-local-openclaw/www/index.html @@ -132,6 +132,13 @@ .copy-btn:hover{border-color:var(--border-glow);background:rgba(0,229,255,.12);color:var(--text)} .copy-btn .copy-icon,.copy-btn .check-icon{width:14px;height:14px;display:block} .copy-btn .check-icon{display:none;color:var(--green)} +/* ── OS Tabs ── */ +.os-tabs{display:flex;gap:0;margin-bottom:0} +.os-tab{padding:8px 20px;font-size:12px;font-weight:600;color:var(--text-thr);cursor:pointer;border:none;background:none;border-bottom:2px solid transparent;transition:all .2s} +.os-tab.active{color:var(--cyan);border-bottom-color:var(--cyan)} +.os-tab:hover{color:var(--text-sec)} +.os-pane{display:none} +.os-pane.active{display:block} .copy-btn.copied{border-color:rgba(0,230,118,.45);background:rgba(0,230,118,.12);color:var(--green)} .copy-btn.copied .copy-icon{display:none} .copy-btn.copied .check-icon{display:block} @@ -373,10 +380,15 @@

    -
    macOS/Linux
    +
    -
    # One liner, Works everywhere. Installs everything.
    -
    $curl -fsSL https://cdn.memtensor.com.cn/memos-local-openclaw/install.sh | bash
    +
    # Works everywhere. Installs everything.# Works everywhere. Installs everything.
    +
    +
    $curl -fsSL https://cdn.memtensor.com.cn/memos-local-openclaw/install.sh | bash
    +
    +
    +
    >powershell -c "irm https://cdn.memtensor.com.cn/memos-local-openclaw/install.ps1 | iex"
    +
    @@ -583,23 +595,27 @@

    💡 为什么这套架构对 OpenClaw 至关重要

    60 秒上手Up and Running in 60 Seconds

    -

    npm 一键安装,两种配置方式任选。One-command install. Two configuration methods.

    +

    一行命令安装,两种配置方式任选。One-command install. Two configuration methods.

    1. 一键安装1. Install

    -

    macOS / Linux 用户建议先安装 C++ 编译工具(用于 better-sqlite3)。
    遇到安装问题?查看排查指南 →
    macOS / Linux users: install C++ build tools first (for better-sqlite3).
    Install issues? See troubleshooting guide →

    +

一行命令,自动安装所有依赖和编译工具,自动升级到最新版本。
    遇到安装问题?查看排查指南 →
    One command automatically installs all dependencies and build tools, and upgrades to the latest version.
    Install issues? See troubleshooting guide →

    -
    terminal
    -
    # Step 0: 安装编译工具 (macOS / Linux)
    -xcode-select --install        # macOS
    -# sudo apt install build-essential  # Linux
    +          
    +
    +
    # Step 1: 安装插件 & 启动
     
    -# Step 1: 安装插件 & 启动
     curl -fsSL https://cdn.memtensor.com.cn/memos-local-openclaw/install.sh | bash
    +
    +
    +
    # Step 1: Install plugin & start
    +
    +powershell -c "irm https://cdn.memtensor.com.cn/memos-local-openclaw/install.ps1 | iex"
    +
    @@ -1008,6 +1024,15 @@

    让你的 OpenClaw
    越用越聪明 document.querySelectorAll('.config-pane').forEach(function(p){p.classList.remove('active')}); btn.classList.add('active');document.getElementById(id).classList.add('active'); } +function switchOsTab(btn,group,os){ + var parent=btn.parentElement; + parent.querySelectorAll('.os-tab').forEach(function(t){t.classList.remove('active')}); + btn.classList.add('active'); + var container=parent.closest('.terminal,.code-block'); + container.querySelectorAll('.os-pane').forEach(function(p){p.classList.remove('active')}); + var pane=document.getElementById(group+'-'+os); + if(pane)pane.classList.add('active'); +} (function(){ function copyText(text){ if(navigator.clipboard&&navigator.clipboard.writeText){ From 593d54d8533e8bb4a96774eb9ce91370f279e9e2 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Sun, 22 Mar 2026 19:51:38 +0800 Subject: [PATCH 72/85] docs(memos-local): add realistic Hub-Client architecture SVG diagram to Team Sharing section Replace text-only code blocks with a full SVG architecture diagram showing: - Hub server with team members (online/offline/pending states) - Three Client instances (Alice connected, Bob pending, Charlie offline) - Data flow arrows (task_share, skill_pull) between instances - Shared data badges (tasks, memories, skills counts) - Per-client views (local private vs team shared data) - Port auto-derivation info box - OpenClaw mascot icons on each instance Made-with: Cursor --- apps/memos-local-openclaw/www/index.html | 173 +++++++++++++++++++---- 1 file changed, 147 insertions(+), 26 deletions(-) diff --git a/apps/memos-local-openclaw/www/index.html b/apps/memos-local-openclaw/www/index.html index 37b15c616..fc80f1369 100644 --- a/apps/memos-local-openclaw/www/index.html +++ b/apps/memos-local-openclaw/www/index.html @@ -759,6 +759,152 @@

    多实例协作 —
    让团队的 Agent Hub-Client 架构,一个服务端存储共享数据,客户端按需查询。私有数据始终留在本地,只有明确共享的内容才对团队可见。端口自动推导,多实例无冲突。Hub-Client architecture — one Hub stores shared data while clients query on demand. Private data stays local; only explicitly shared content is visible to the team. Auto-derived ports, zero conflicts for multi-instance setups.

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + Hub Server + :18800 + + 团队名称Team + Engineering Team + + Admin: tangboonline + Member: aliceonline + Member: charlieoffline + Pending: bob待审批pending + + + 📋 12 任务📋 12 Tasks + + 🧠 86 记忆🧠 86 Mems + + ⚡ 5 技能⚡ 5 Skills + + + + + + + + + + + + Client: Alice + :19001 + + 本地私有Local Private + 🧠 234 memories + ⚡ 3 skills + 团队可见Team Shared + 📋 task_share → hub + ⚡ skill_publish + + + 🔍 memory_search(scope: all) + + + 🔔 role_promoted → admin + + + + + + + + + + + + Client: Bob + :19201 + + + 等待管理员审批…Waiting for admin approval… + + + POST /api/v1/hub/join → pending + + + 可随时撤回申请 withdraw-pendingCan withdraw anytime withdraw-pending + + + + + + + + + + + + Client: Charlie + :19401 + + 本地私有Local Private + 🧠 158 memories + ⚡ 7 skills + 从团队拉取Pulled from Team + ⬇ network_skill_pull + 🔍 scope: group + + + ⚠ 离线 — 本地记忆正常工作⚠ offline — local memory works + + + 🔄 Hub 上线后自动重连🔄 Auto-reconnect when Hub is back + + + + + + ✓ 已连接✓ connected + + + + ⏳ 待审批⏳ pending + + + + ⚠ 离线⚠ offline + + + + task_share + + + + skill_pull + + + + 端口自动推导Auto Port Derivation + hub = gateway + 11 + EADDRINUSE → retry ×3 + +
    +
    +
    @@ -768,32 +914,7 @@

    Hub-Client 协作架构

    -
    Hub-Client Architecture
    -
    Hub (Personal):  gateway:18789  hub:18800
    -  teamName: "My Team"   members: 3
    -  ✓ admin: tangbo (online)
    -  ✓ member: alice (online)
    -  ⏳ pending: bob
    -
    -Client (Work):   gateway:19001  hub:19012
    -  hubAddress: "192.168.1.100:18800"
    -  ✓ connected as: tangbo-work
    -
    -// Port auto-derived: gatewayPort + 11
    -// Retry on EADDRINUSE (up to 3 ports)
    -
    -
    -
    - -
    -
    -

    管理员团队管理Admin Team Management

    -

    管理员通过 Viewer 审批加入请求、提升/降级成员角色、移除成员。角色变更自动通知被操作用户。Hub 关闭时所有客户端收到下线通知。完善的退出确认和状态清理机制。Admins approve join requests, promote/demote members, and remove users through the Viewer. Role changes trigger automatic notifications. Hub shutdown alerts all clients. Complete leave confirmation and state cleanup mechanisms.

    -
    角色通知Role Notify资源通知Resource Notify退出确认Leave ConfirmHub 下线提醒Hub Shutdown
    -
    -
    -
    -
    Admin Controls
    +
    Admin Controls & Notifications
    Admin Panel:
       approve(bob)       ✓ approved
       promote(alice)     → 🔔 role_promoted
    
    From b52152b9ba55959f207b1797e3e7c6c1fe8b3f30 Mon Sep 17 00:00:00 2001
    From: tangbo <1502220175@qq.com>
    Date: Sun, 22 Mar 2026 19:54:59 +0800
    Subject: [PATCH 73/85] docs(memos-local): add multi-OpenClaw collaboration
     visual to Hero section
    
    - Add animated SVG showing Hub + 3 Client instances with data flow particles
    - Each instance has OpenClaw mascot, role label, memory counts, and operations
    - Animated connection lines and flowing data particles between instances
    - "...N" indicator showing unlimited scalability
    - Update Hero tagline to emphasize multi-instance collaboration
    
    Made-with: Cursor
    ---
     apps/memos-local-openclaw/www/index.html | 127 ++++++++++++++++++++++-
     1 file changed, 124 insertions(+), 3 deletions(-)
    
    diff --git a/apps/memos-local-openclaw/www/index.html b/apps/memos-local-openclaw/www/index.html
    index fc80f1369..92072df3c 100644
    --- a/apps/memos-local-openclaw/www/index.html
    +++ b/apps/memos-local-openclaw/www/index.html
    @@ -366,10 +366,10 @@ 

    Give Your OpenClaw
    Lasting Intelligence

    - 为 OpenClaw 注入持久记忆与自进化技能
    完全本地化 全量可视化管理 分级模型极致省钱
    - Persistent memory and self-evolving skills for OpenClaw agents.
    100% local storage, full visualization dashboard, and tiered models for cost efficiency.
    + 为 OpenClaw 注入持久记忆与自进化技能
    多实例团队协作 完全本地化 全量可视化管理
    + Persistent memory, self-evolving skills, and multi-instance team collaboration for OpenClaw agents.
    100% local storage, full visualization, and team sharing.

    -

    把 MemOS 带进你的 OpenClawBring MemOS to your OpenClaw workflow

    +

    多个 OpenClaw 协同进化,越用越聪明Multiple OpenClaw agents evolve together, getting smarter over time

    + + +
    + + + + + + + + + + + + + + + + + + + + + Hub + 团队服务端 · 共享记忆/技能Team Server · Shared Memory/Skills + + + 🧠 86 + + 📋 12 + + ⚡ 5 + + + + + + + + + OpenClaw A + 前端开发 · 234 记忆Frontend · 234 Memories + + online + + + 📤 skill_publish + + + + + + + + + OpenClaw B + 后端开发 · 158 记忆Backend · 158 Memories + + online + + 📤 task_share + + + + + + + + + OpenClaw C + 测试工程 · 89 记忆QA/Testing · 89 Memories + + online + + ⬇ skill_pull + + + + …N + 更多实例More + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    From cf5011b692623bd637ab6846bf4df5271d19a96e Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Sun, 22 Mar 2026 20:03:30 +0800 Subject: [PATCH 74/85] docs(memos-local): move Hero collaboration diagram above install terminal & add architecture SVG to docs pages - Move multi-OpenClaw collaboration SVG from below install terminal to above it in Hero section - Add Hub-Client architecture diagram to docs/index.html and www/docs/index.html team sharing section - Use distinct gradient ID namespaces (hg*/ts*/dg*) to avoid SVG ID conflicts across pages Made-with: Cursor --- apps/memos-local-openclaw/docs/index.html | 62 +++++++ apps/memos-local-openclaw/www/docs/index.html | 62 +++++++ apps/memos-local-openclaw/www/index.html | 155 ++++++------------ 3 files changed, 173 insertions(+), 106 deletions(-) diff --git a/apps/memos-local-openclaw/docs/index.html b/apps/memos-local-openclaw/docs/index.html index 9073c0fa3..64ab7c33e 100644 --- a/apps/memos-local-openclaw/docs/index.html +++ b/apps/memos-local-openclaw/docs/index.html @@ -464,6 +464,68 @@

    Viewer HTTP

    👥 团队共享中心👥 Team Sharing Hub

    Team Sharing 将多个 OpenClaw 实例连接为协作网络。一个实例作为 Hub(团队服务端),其他实例作为 Client 连接。私有数据始终留在本地,仅明确共享的任务、记忆和技能对团队可见。Team Sharing connects multiple OpenClaw instances into a collaborative network. One instance serves as the Hub (team server) while others connect as Clients. Private data stays local — only explicitly shared tasks, memories, and skills are visible to the team.

    + +
    + + + + + + + + + + + + + + Hub + 团队服务端 · 共享记忆/技能Team Server · Shared Memory/Skills + 🧠 86 + 📋 12 + ⚡ 5 + + + + + OpenClaw A + 前端开发 · 234 记忆Frontend · 234 Memories + online + 📤 skill_publish + + + + + OpenClaw B + 后端开发 · 158 记忆Backend · 158 Memories + online + 📤 task_share + + + + + OpenClaw C + 测试工程 · 89 记忆QA/Testing · 89 Memories + online + ⬇ skill_pull + + + …N + 更多实例More + + + + + + + + + + + + +
    +
    🏗️

    Hub-Client

    一个 Hub 存储共享数据,客户端按需查询。角色可动态切换。One Hub stores shared data; clients query on demand. Roles switchable dynamically.

    🔌

    端口自动推导Auto Port

    Hub 端口自动从网关端口推导(+11),冲突时重试最多 3 次。Hub port auto-derived from gateway port (+11); retries up to 3 times on conflict.

    diff --git a/apps/memos-local-openclaw/www/docs/index.html b/apps/memos-local-openclaw/www/docs/index.html index 9073c0fa3..64ab7c33e 100644 --- a/apps/memos-local-openclaw/www/docs/index.html +++ b/apps/memos-local-openclaw/www/docs/index.html @@ -464,6 +464,68 @@

    Viewer HTTP

    👥 团队共享中心👥 Team Sharing Hub

    Team Sharing 将多个 OpenClaw 实例连接为协作网络。一个实例作为 Hub(团队服务端),其他实例作为 Client 连接。私有数据始终留在本地,仅明确共享的任务、记忆和技能对团队可见。Team Sharing connects multiple OpenClaw instances into a collaborative network. One instance serves as the Hub (team server) while others connect as Clients. Private data stays local — only explicitly shared tasks, memories, and skills are visible to the team.

    + +
    + + + + + + + + + + + + + + Hub + 团队服务端 · 共享记忆/技能Team Server · Shared Memory/Skills + 🧠 86 + 📋 12 + ⚡ 5 + + + + + OpenClaw A + 前端开发 · 234 记忆Frontend · 234 Memories + online + 📤 skill_publish + + + + + OpenClaw B + 后端开发 · 158 记忆Backend · 158 Memories + online + 📤 task_share + + + + + OpenClaw C + 测试工程 · 89 记忆QA/Testing · 89 Memories + online + ⬇ skill_pull + + + …N + 更多实例More + + + + + + + + + + + + +
    +
    -
    -
    -
    -
    -
    # Works everywhere. Installs everything.# Works everywhere. Installs everything.
    -
    -
    $curl -fsSL https://cdn.memtensor.com.cn/memos-local-openclaw/install.sh | bash
    -
    -
    -
    >powershell -c "irm https://cdn.memtensor.com.cn/memos-local-openclaw/install.ps1 | iex"
    -
    -
    -
    -
    - -
    +
    - - - - - - - - - + + Hub 团队服务端 · 共享记忆/技能Team Server · Shared Memory/Skills - - - 🧠 86 - - 📋 12 - - ⚡ 5 - - + 🧠 86 + 📋 12 + ⚡ 5 + - - - - - + + OpenClaw A 前端开发 · 234 记忆Frontend · 234 Memories - - online - - - 📤 skill_publish - - + online + 📤 skill_publish + - - - - - + + OpenClaw B 后端开发 · 158 记忆Backend · 158 Memories - - online - - 📤 task_share - - + online + 📤 task_share + - - - - - + + OpenClaw C 测试工程 · 89 记忆QA/Testing · 89 Memories - - online - - ⬇ skill_pull - - + online + ⬇ skill_pull + …N 更多实例More - - - - - - - - - - - - - - - + + + + - - - - - - - - - - - - - - - - - - - - - - - + + + + + + +
    + +
    +
    +
    +
    +
    # Works everywhere. Installs everything.# Works everywhere. Installs everything.
    +
    +
    $curl -fsSL https://cdn.memtensor.com.cn/memos-local-openclaw/install.sh | bash
    +
    +
    +
    >powershell -c "irm https://cdn.memtensor.com.cn/memos-local-openclaw/install.ps1 | iex"
    +
    +
    +
    +
    +
    From 3ed165a1b26303d8c07be4b39089be83d57f7da8 Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Sun, 22 Mar 2026 22:20:24 +0800 Subject: [PATCH 75/85] feat(memos-local): team_shared_chunks for Client team-share UI without hub_memories recall - Add team_shared_chunks table + upsert/get/delete; clear in deleteAll - Client: after Hub memories/share, persist metadata only; Hub role keeps upsertHubMemory - Viewer: merge team_shared_chunks into memory list sharing map; getHubMemoryForChunk reads both - Unshare paths delete team_shared_chunks; shareMemoryToHub/unshareMemoryFromHub aligned in index.ts - capture: strip MiniMax tags from captured text - hub: sync bootstrapAdminToken on rename (self + admin) - viewer html: admin preview/collapsed message fade mask 88% - chore: version 1.0.4-beta.19 Made-with: Cursor --- apps/memos-local-openclaw/index.ts | 6 ++- apps/memos-local-openclaw/package.json | 2 +- .../memos-local-openclaw/src/capture/index.ts | 5 +- apps/memos-local-openclaw/src/hub/server.ts | 8 +++ .../src/storage/sqlite.ts | 49 +++++++++++++++++++ apps/memos-local-openclaw/src/viewer/html.ts | 4 +- .../memos-local-openclaw/src/viewer/server.ts | 34 +++++++++++-- 7 files changed, 98 insertions(+), 10 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 77f1f6d3c..5af504640 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -388,8 +388,7 @@ const memosLocalPlugin = { const memoryId = response?.memoryId ?? `${chunk.id}-hub`; - // Only persist hub_memories locally in Hub mode where this DB owns the data. - // Client mode relies on the remote Hub for storage and search. + // Hub role: full hub_memories row for local recall/embeddings. Client: metadata only (team_shared_chunks) for UI. 
if (ctx.config.sharing?.role === "hub") { const now = Date.now(); const existing = store.getHubMemoryBySource(hubClient.userId, chunk.id); @@ -406,6 +405,8 @@ const memosLocalPlugin = { createdAt: existing?.createdAt ?? now, updatedAt: now, }); + } else if (ctx.config.sharing?.enabled && hubClient.userId) { + store.upsertTeamSharedChunk(chunk.id, { hubMemoryId: memoryId, visibility, groupId }); } return { memoryId, visibility, groupId }; @@ -425,6 +426,7 @@ const memosLocalPlugin = { body: JSON.stringify({ sourceChunkId: chunk.id }), }); store.deleteHubMemoryBySource(hubClient.userId, chunk.id); + store.deleteTeamSharedChunk(chunk.id); }; // ─── Tool: memory_search ─── diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 3a28c5942..21545ddf3 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.14", + "version": "1.0.4-beta.19", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", diff --git a/apps/memos-local-openclaw/src/capture/index.ts b/apps/memos-local-openclaw/src/capture/index.ts index 729a1036d..de128a80a 100644 --- a/apps/memos-local-openclaw/src/capture/index.ts +++ b/apps/memos-local-openclaw/src/capture/index.ts @@ -167,8 +167,11 @@ export function stripInboundMetadata(text: string): string { /** Strip … blocks emitted by DeepSeek-style reasoning models. */ const THINKING_TAG_RE = /][\s\S]*?<\/think>\s*/gi; +/** Unwrap tags from MiniMax-style models (keep content, strip tags). 
*/ +const FINAL_TAG_RE = /<\/?final\s*>/gi; + function stripThinkingTags(text: string): string { - return text.replace(THINKING_TAG_RE, ""); + return text.replace(THINKING_TAG_RE, "").replace(FINAL_TAG_RE, "").trim(); } function extractEnvelopeTimestamp(text: string): number | null { diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index fc4dc1fb1..57b647b0f 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -414,6 +414,10 @@ export class HubServer { ttlMs, ); this.userManager.approveUser(updated.id, newToken); + if (updated.id === this.authState.bootstrapAdminUserId) { + this.authState.bootstrapAdminToken = newToken; + this.saveAuthState(); + } this.opts.log.info(`Hub: user "${auth.userId}" renamed to "${newUsername}"`); return this.json(res, 200, { ok: true, username: newUsername, userToken: newToken }); } @@ -522,6 +526,10 @@ export class HubServer { const updated = this.opts.store.getHubUser(userId)!; const finalUser = { ...updated, username: newUsername }; this.opts.store.upsertHubUser(finalUser); + if (userId === this.authState.bootstrapAdminUserId) { + this.authState.bootstrapAdminToken = newToken; + this.saveAuthState(); + } this.opts.log.info(`Hub: admin "${auth.userId}" renamed user "${userId}" to "${newUsername}"`); return this.json(res, 200, { ok: true, username: newUsername }); } diff --git a/apps/memos-local-openclaw/src/storage/sqlite.ts b/apps/memos-local-openclaw/src/storage/sqlite.ts index c8ef00841..d196fb3b5 100644 --- a/apps/memos-local-openclaw/src/storage/sqlite.ts +++ b/apps/memos-local-openclaw/src/storage/sqlite.ts @@ -792,6 +792,15 @@ export class SqliteStore { shared_at INTEGER NOT NULL ); + -- Client: team share UI metadata only (no hub_memories row — avoids local FTS/embed recall duplication) + CREATE TABLE IF NOT EXISTS team_shared_chunks ( + chunk_id TEXT PRIMARY KEY REFERENCES chunks(id) ON DELETE CASCADE, + hub_memory_id 
TEXT NOT NULL DEFAULT '', + visibility TEXT NOT NULL DEFAULT 'public', + group_id TEXT, + shared_at INTEGER NOT NULL + ); + CREATE TABLE IF NOT EXISTS hub_users ( id TEXT PRIMARY KEY, username TEXT NOT NULL UNIQUE, @@ -1369,6 +1378,7 @@ export class SqliteStore { "skill_versions", "skills", "local_shared_memories", + "team_shared_chunks", "local_shared_tasks", "embeddings", "chunks", @@ -2355,6 +2365,45 @@ export class SqliteStore { return info.changes > 0; } + // ─── Team share metadata (Client role — UI only, not used for local recall / FTS) ─── + + upsertTeamSharedChunk( + chunkId: string, + row: { hubMemoryId?: string; visibility?: string; groupId?: string | null }, + ): void { + const now = Date.now(); + const vis = row.visibility === "group" ? "group" : "public"; + const gid = vis === "group" ? (row.groupId ?? null) : null; + this.db.prepare(` + INSERT INTO team_shared_chunks (chunk_id, hub_memory_id, visibility, group_id, shared_at) + VALUES (?, ?, ?, ?, ?) + ON CONFLICT(chunk_id) DO UPDATE SET + hub_memory_id = excluded.hub_memory_id, + visibility = excluded.visibility, + group_id = excluded.group_id, + shared_at = excluded.shared_at + `).run(chunkId, row.hubMemoryId ?? 
"", vis, gid, now); + } + + getTeamSharedChunk(chunkId: string): { chunkId: string; hubMemoryId: string; visibility: string; groupId: string | null; sharedAt: number } | null { + const r = this.db.prepare("SELECT chunk_id, hub_memory_id, visibility, group_id, shared_at FROM team_shared_chunks WHERE chunk_id = ?").get(chunkId) as { + chunk_id: string; hub_memory_id: string; visibility: string; group_id: string | null; shared_at: number; + } | undefined; + if (!r) return null; + return { + chunkId: r.chunk_id, + hubMemoryId: r.hub_memory_id, + visibility: r.visibility, + groupId: r.group_id, + sharedAt: r.shared_at, + }; + } + + deleteTeamSharedChunk(chunkId: string): boolean { + const info = this.db.prepare("DELETE FROM team_shared_chunks WHERE chunk_id = ?").run(chunkId); + return info.changes > 0; + } + // ─── Hub Notifications ─── insertHubNotification(n: { id: string; userId: string; type: string; resource: string; title: string; message?: string }): void { diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index 2b51313ce..d8daffb9f 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -287,7 +287,7 @@ input,textarea,select{font-family:inherit;font-size:inherit} .admin-card-tag.tag-version{background:rgba(139,92,246,.1);color:#8b5cf6} .admin-card-tag.tag-visibility{background:rgba(99,102,241,.08);color:var(--pri)} .admin-card-tag.tag-group{background:rgba(139,92,246,.08);color:#8b5cf6} -.admin-card-preview{font-size:12px;color:var(--text-sec);line-height:1.5;margin:8px 0;padding:10px 12px;background:rgba(99,102,241,.02);border-radius:10px;border:1px solid rgba(99,102,241,.08);max-height:120px;overflow:hidden;white-space:pre-wrap;word-break:break-all;position:relative;-webkit-mask-image:linear-gradient(to bottom,#000 70%,transparent 100%);mask-image:linear-gradient(to bottom,#000 70%,transparent 100%)} 
+.admin-card-preview{font-size:12px;color:var(--text-sec);line-height:1.5;margin:8px 0;padding:10px 12px;background:rgba(99,102,241,.02);border-radius:10px;border:1px solid rgba(99,102,241,.08);max-height:120px;overflow:hidden;white-space:pre-wrap;word-break:break-all;position:relative;-webkit-mask-image:linear-gradient(to bottom,#000 88%,transparent 100%);mask-image:linear-gradient(to bottom,#000 88%,transparent 100%)} .admin-card-actions{display:inline-flex;gap:6px;margin-left:auto;align-items:center;flex-shrink:0} .admin-card-time{font-size:11px;color:var(--text-muted)} .admin-card-detail{display:none;margin-top:0;padding:20px 24px 24px;border-top:1px dashed rgba(99,102,241,.12);background:linear-gradient(180deg,rgba(99,102,241,.02) 0%,transparent 60%);animation:adminDetailIn .25s ease} @@ -322,7 +322,7 @@ input,textarea,select{font-family:inherit;font-size:inherit} .adm-msg-side.assistant .adm-msg-role{color:var(--green)} .adm-msg-time{font-size:9px;color:var(--text-muted)} .adm-msg-body{flex:1;min-width:0;padding:12px 16px;font-size:13px;line-height:1.75;color:var(--text);white-space:pre-wrap;word-break:break-word} -.adm-msg-body.collapsed{max-height:120px;overflow:hidden;-webkit-mask-image:linear-gradient(180deg,#000 65%,transparent);mask-image:linear-gradient(180deg,#000 65%,transparent)} +.adm-msg-body.collapsed{max-height:120px;overflow:hidden;-webkit-mask-image:linear-gradient(180deg,#000 88%,transparent);mask-image:linear-gradient(180deg,#000 88%,transparent)} .adm-msg-toggle{display:none;padding:0 16px 8px;font-size:11px;color:var(--pri);cursor:pointer;transition:color .15s} .adm-msg-toggle:hover{color:var(--pri-dark)} .admin-card-expand-btn{font-size:12px;color:var(--pri);cursor:pointer;background:none;border:none;padding:2px 6px;font-family:inherit} diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index 98a9179c2..c4afaaba8 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ 
b/apps/memos-local-openclaw/src/viewer/server.ts @@ -492,6 +492,12 @@ export class ViewerServer { const placeholders = chunkIds.map(() => "?").join(","); const sharedRows = db.prepare(`SELECT source_chunk_id, visibility, group_id FROM hub_memories WHERE source_chunk_id IN (${placeholders})`).all(...chunkIds) as Array<{ source_chunk_id: string; visibility: string; group_id: string | null }>; for (const r of sharedRows) sharingMap.set(r.source_chunk_id, r); + const teamMetaRows = db.prepare(`SELECT chunk_id, visibility, group_id FROM team_shared_chunks WHERE chunk_id IN (${placeholders})`).all(...chunkIds) as Array<{ chunk_id: string; visibility: string; group_id: string | null }>; + for (const r of teamMetaRows) { + if (!sharingMap.has(r.chunk_id)) { + sharingMap.set(r.chunk_id, { visibility: r.visibility, group_id: r.group_id }); + } + } const localRows = db.prepare(`SELECT chunk_id, original_owner, shared_at FROM local_shared_memories WHERE chunk_id IN (${placeholders})`).all(...chunkIds) as Array<{ chunk_id: string; original_owner: string; shared_at: number }>; for (const r of localRows) localShareMap.set(r.chunk_id, r); } catch { @@ -1252,15 +1258,19 @@ export class ViewerServer { body: JSON.stringify({ memory: { sourceChunkId: refreshedChunk.id, role: refreshedChunk.role, content: refreshedChunk.content, summary: refreshedChunk.summary, kind: refreshedChunk.kind, groupId: null, visibility: "public" } }), }); if (!isLocalShared) this.store.markMemorySharedLocally(chunkId); - if (hubClient.userId && this.ctx?.config?.sharing?.role === "hub") { + const memoryId = String((response as any)?.memoryId ?? ""); + const isHubRole = this.ctx?.config?.sharing?.role === "hub"; + if (hubClient.userId && isHubRole) { const existing = this.store.getHubMemoryBySource(hubClient.userId, chunkId); this.store.upsertHubMemory({ - id: (response as any)?.memoryId ?? existing?.id ?? 
crypto.randomUUID(), + id: memoryId || existing?.id || crypto.randomUUID(), sourceChunkId: chunkId, sourceUserId: hubClient.userId, role: refreshedChunk.role, content: refreshedChunk.content, summary: refreshedChunk.summary ?? "", kind: refreshedChunk.kind, groupId: null, visibility: "public", createdAt: existing?.createdAt ?? Date.now(), updatedAt: Date.now(), }); + } else if (hubClient.userId) { + this.store.upsertTeamSharedChunk(chunkId, { hubMemoryId: memoryId, visibility: "public", groupId: null }); } hubSynced = true; } else { @@ -1274,6 +1284,7 @@ export class ViewerServer { method: "POST", body: JSON.stringify({ sourceChunkId: chunkId }), }); if (hubClient.userId) this.store.deleteHubMemoryBySource(hubClient.userId, chunkId); + this.store.deleteTeamSharedChunk(chunkId); hubSynced = true; } catch (err) { this.log.warn(`Failed to unshare memory from team: ${err}`); } } @@ -1286,6 +1297,7 @@ export class ViewerServer { method: "POST", body: JSON.stringify({ sourceChunkId: chunkId }), }); if (hubClient.userId) this.store.deleteHubMemoryBySource(hubClient.userId, chunkId); + this.store.deleteTeamSharedChunk(chunkId); hubSynced = true; } catch (err) { this.log.warn(`Failed to unshare memory from team: ${err}`); } } @@ -1495,7 +1507,17 @@ export class ViewerServer { private getHubMemoryForChunk(chunkId: string): any { const db = (this.store as any).db; - return db.prepare("SELECT * FROM hub_memories WHERE source_chunk_id = ? LIMIT 1").get(chunkId); + const hub = db.prepare("SELECT * FROM hub_memories WHERE source_chunk_id = ? LIMIT 1").get(chunkId); + if (hub) return hub; + const ts = this.store.getTeamSharedChunk(chunkId); + if (ts) { + return { + source_chunk_id: chunkId, + visibility: ts.visibility, + group_id: ts.groupId, + }; + } + return undefined; } private getHubTaskForLocal(taskId: string): any { @@ -2105,11 +2127,12 @@ export class ViewerServer { }, }), }); + const mid = String((response as any)?.memoryId ?? 
""); if (hubClient.userId && this.ctx?.config?.sharing?.role === "hub") { const now = Date.now(); const existing = this.store.getHubMemoryBySource(hubClient.userId, chunk.id); this.store.upsertHubMemory({ - id: (response as any)?.memoryId ?? existing?.id ?? crypto.randomUUID(), + id: mid || existing?.id || crypto.randomUUID(), sourceChunkId: chunk.id, sourceUserId: hubClient.userId, role: chunk.role, @@ -2121,6 +2144,8 @@ export class ViewerServer { createdAt: existing?.createdAt ?? now, updatedAt: now, }); + } else if (hubClient.userId) { + this.store.upsertTeamSharedChunk(chunk.id, { hubMemoryId: mid, visibility, groupId }); } this.jsonResponse(res, { ok: true, chunkId, visibility, response }); } catch (err) { @@ -2142,6 +2167,7 @@ export class ViewerServer { }); const hubUserId = hubClient.userId; if (hubUserId) this.store.deleteHubMemoryBySource(hubUserId, chunkId); + this.store.deleteTeamSharedChunk(chunkId); this.jsonResponse(res, { ok: true, chunkId }); } catch (err) { this.jsonResponse(res, { ok: false, error: String(err) }); From 5701ac44f43ef43bf92004f9ec07fd8518fba9ea Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Sun, 22 Mar 2026 22:40:26 +0800 Subject: [PATCH 76/85] fix(memos-local): sync client team share badge after admin removes memory on Hub - Hub: resource_removed notification includes JSON message with memoryId + sourceChunkId - Viewer: POST /api/sharing/sync-hub-removal clears team_shared_chunks + local hub_memories row - html: sync from notifications before non-silent loadMemories Made-with: Cursor --- apps/memos-local-openclaw/package.json | 2 +- apps/memos-local-openclaw/src/hub/server.ts | 13 ++++++++++- apps/memos-local-openclaw/src/viewer/html.ts | 20 +++++++++++++++++ .../memos-local-openclaw/src/viewer/server.ts | 22 +++++++++++++++++++ 4 files changed, 55 insertions(+), 2 deletions(-) diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 21545ddf3..4fbc4a473 100644 --- 
a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.19", + "version": "1.0.4-beta.20", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", diff --git a/apps/memos-local-openclaw/src/hub/server.ts b/apps/memos-local-openclaw/src/hub/server.ts index 57b647b0f..3b8a44026 100644 --- a/apps/memos-local-openclaw/src/hub/server.ts +++ b/apps/memos-local-openclaw/src/hub/server.ts @@ -950,7 +950,18 @@ export class HubServer { const deleted = this.opts.store.deleteHubMemoryById(memoryId); if (!deleted) return this.json(res, 404, { error: "not_found" }); if (memInfo) { - this.opts.store.insertHubNotification({ id: randomUUID(), userId: memInfo.sourceUserId, type: "resource_removed", resource: "memory", title: memInfo.summary || memInfo.id }); + const payload = JSON.stringify({ + memoryId, + sourceChunkId: memInfo.sourceChunkId, + }); + this.opts.store.insertHubNotification({ + id: randomUUID(), + userId: memInfo.sourceUserId, + type: "resource_removed", + resource: "memory", + title: memInfo.summary || memInfo.id, + message: payload, + }); } return this.json(res, 200, { ok: true }); } diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index d8daffb9f..6a098c938 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -7764,11 +7764,31 @@ function getFilterParams(){ return p; } +/** Hub admin removed a shared memory — clear local team_shared_chunks so badges match (notifications carry sourceChunkId in message). 
*/ +async function syncTeamShareRemovedFromNotifications(){ + try{ + var r=await fetch('/api/sharing/notifications'); + var d=await r.json(); + var list=d.notifications||[]; + for(var i=0;i { + try { + const parsed = JSON.parse(body || "{}"); + const sourceChunkId = String(parsed.sourceChunkId || ""); + if (!sourceChunkId) return this.jsonResponse(res, { ok: false, error: "missing_source_chunk_id" }, 400); + this.store.deleteTeamSharedChunk(sourceChunkId); + try { + const hubClient = await this.resolveHubClientAware(); + if (hubClient.userId) { + this.store.deleteHubMemoryBySource(hubClient.userId, sourceChunkId); + } + } catch { /* ignore */ } + this.jsonResponse(res, { ok: true, sourceChunkId }); + } catch (e) { + this.jsonResponse(res, { ok: false, error: String(e) }, 500); + } + }); + } + private handleNotifSSE(req: http.IncomingMessage, res: http.ServerResponse): void { res.writeHead(200, { "Content-Type": "text/event-stream", From a94f574116f664f938b8a07b9f37cbff23a38749 Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Sun, 22 Mar 2026 22:40:36 +0800 Subject: [PATCH 77/85] fix(memos-local): sync team share on SSE notif update when on memories view Made-with: Cursor --- apps/memos-local-openclaw/src/viewer/html.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index 6a098c938..da827c497 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -7419,6 +7419,9 @@ function connectNotifSSE(){ _notifUnread=d.unreadCount||0; renderNotifBadge(); if(_notifUnread>prev&&_notifPanelOpen) loadNotifications(); + if(_notifUnread>prev&&_activeView==='memories'&&memorySearchScope!=='hub'){ + syncTeamShareRemovedFromNotifications().then(function(){ loadMemories(currentPage,true); }); + } } if(d.type==='cleared'){ _notifUnread=0;_notifCache=[]; From c191f2fbdbb461b9abb2d3c8d4a632add4c2c1ed Mon Sep 17 00:00:00 2001 From: 
jiachengzhen Date: Sun, 22 Mar 2026 22:44:02 +0800 Subject: [PATCH 78/85] fix(memos-local): sync-hub-removal only clears team_shared_chunks (badge), never hub_memories/recall Made-with: Cursor --- apps/memos-local-openclaw/src/viewer/html.ts | 2 +- apps/memos-local-openclaw/src/viewer/server.ts | 10 ++-------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index da827c497..e13b80e79 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -7767,7 +7767,7 @@ function getFilterParams(){ return p; } -/** Hub admin removed a shared memory — clear local team_shared_chunks so badges match (notifications carry sourceChunkId in message). */ +/** Hub admin removed a shared memory — badge-only: clear team_shared_chunks (never touches chunks/embeddings/hub_memories recall data). */ async function syncTeamShareRemovedFromNotifications(){ try{ var r=await fetch('/api/sharing/notifications'); diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index 299e7a687..a985ff3d5 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -2482,20 +2482,14 @@ export class ViewerServer { }); } - /** When Hub admin removes a shared memory, clear local team_shared_chunks / hub_memories mirror so the client list shows correct scope. */ + /** Badge-only: clear Client team-share UI metadata when Hub admin removes that memory. Does NOT touch chunks, embeddings, or hub_memories (recall paths). 
*/ private handleSyncHubRemoval(req: http.IncomingMessage, res: http.ServerResponse): void { - this.readBody(req, async (body) => { + this.readBody(req, (body) => { try { const parsed = JSON.parse(body || "{}"); const sourceChunkId = String(parsed.sourceChunkId || ""); if (!sourceChunkId) return this.jsonResponse(res, { ok: false, error: "missing_source_chunk_id" }, 400); this.store.deleteTeamSharedChunk(sourceChunkId); - try { - const hubClient = await this.resolveHubClientAware(); - if (hubClient.userId) { - this.store.deleteHubMemoryBySource(hubClient.userId, sourceChunkId); - } - } catch { /* ignore */ } this.jsonResponse(res, { ok: true, sourceChunkId }); } catch (e) { this.jsonResponse(res, { ok: false, error: String(e) }, 500); From 8d018abd7236b74778aa3b427d2db496f3f5fc0e Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Sun, 22 Mar 2026 23:10:18 +0800 Subject: [PATCH 79/85] fix(memos-local): skip stale hub-removal notifications after re-share When a memory is re-shared to team after an admin removal, old resource_removed notifications would incorrectly clear the new team_shared_chunks badge on every page refresh. Now handleSyncHubRemoval compares the notification's memoryId against the current hub_memory_id and skips deletion when they differ (stale notification). Bump to 1.0.4-beta.21. 
Made-with: Cursor --- apps/memos-local-openclaw/package.json | 2 +- apps/memos-local-openclaw/src/viewer/html.ts | 2 +- apps/memos-local-openclaw/src/viewer/server.ts | 10 ++++++++++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 4fbc4a473..5c0dec10f 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.20", + "version": "1.0.4-beta.21", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", diff --git a/apps/memos-local-openclaw/src/viewer/html.ts b/apps/memos-local-openclaw/src/viewer/html.ts index e13b80e79..ec0523deb 100644 --- a/apps/memos-local-openclaw/src/viewer/html.ts +++ b/apps/memos-local-openclaw/src/viewer/html.ts @@ -7779,7 +7779,7 @@ async function syncTeamShareRemovedFromNotifications(){ try{ var meta=JSON.parse(n.message); if(meta.sourceChunkId){ - await fetch('/api/sharing/sync-hub-removal',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({sourceChunkId:meta.sourceChunkId})}); + await fetch('/api/sharing/sync-hub-removal',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify({sourceChunkId:meta.sourceChunkId,memoryId:meta.memoryId||''})}); } }catch(e){} } diff --git a/apps/memos-local-openclaw/src/viewer/server.ts b/apps/memos-local-openclaw/src/viewer/server.ts index a985ff3d5..cf40b7f7d 100644 --- a/apps/memos-local-openclaw/src/viewer/server.ts +++ b/apps/memos-local-openclaw/src/viewer/server.ts @@ -2488,7 +2488,17 @@ export class ViewerServer { try { const parsed = JSON.parse(body || "{}"); const sourceChunkId = String(parsed.sourceChunkId || ""); + const memoryIdFromNotif = parsed.memoryId != null && parsed.memoryId !== "" ? 
String(parsed.memoryId) : ""; if (!sourceChunkId) return this.jsonResponse(res, { ok: false, error: "missing_source_chunk_id" }, 400); + // Admin removal notifications stay in the feed; if the user re-shared, team_shared_chunks has a new hub_memory_id. + // Only clear the badge when this notification refers to the same Hub row we still track (or no id — legacy). + if (memoryIdFromNotif) { + const current = this.store.getTeamSharedChunk(sourceChunkId); + const curId = current?.hubMemoryId ? String(current.hubMemoryId) : ""; + if (curId && curId !== memoryIdFromNotif) { + return this.jsonResponse(res, { ok: true, sourceChunkId, skipped: true, reason: "stale_notification_re_shared" }); + } + } this.store.deleteTeamSharedChunk(sourceChunkId); this.jsonResponse(res, { ok: true, sourceChunkId }); } catch (e) { From e0af079bd94af8c7a9eccb829598e13343f1457d Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Mon, 23 Mar 2026 00:17:16 +0800 Subject: [PATCH 80/85] =?UTF-8?q?ci:=20fix=20darwin-x64=20prebuild=20?= =?UTF-8?q?=E2=80=94=20use=20macos-15=20+=20Rosetta?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit macos-13 runner is deprecated. Use macos-15 (ARM64) and rebuild better-sqlite3 under Rosetta 2 via arch -x86_64 to produce the x64 native binary. 
Made-with: Cursor --- .github/workflows/openclaw-plugin-publish.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/openclaw-plugin-publish.yml b/.github/workflows/openclaw-plugin-publish.yml index 16820439d..5c79e5ad5 100644 --- a/.github/workflows/openclaw-plugin-publish.yml +++ b/.github/workflows/openclaw-plugin-publish.yml @@ -25,7 +25,7 @@ jobs: include: - os: macos-14 platform: darwin-arm64 - - os: macos-13 + - os: macos-15 platform: darwin-x64 - os: ubuntu-latest platform: linux-x64 @@ -42,6 +42,11 @@ jobs: - name: Install dependencies run: npm install + - name: Rebuild for x64 under Rosetta (darwin-x64 only) + if: matrix.platform == 'darwin-x64' + run: | + arch -x86_64 npm rebuild better-sqlite3 + - name: Collect prebuild shell: bash run: | From 89af2d871d1d30fdc2c4c6d4c19f4826cfa24d79 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 22 Mar 2026 16:31:25 +0000 Subject: [PATCH 81/85] release: openclaw-plugin v1.0.4 --- apps/memos-local-openclaw/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 5c0dec10f..3a2bfa15d 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4-beta.21", + "version": "1.0.4", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", From 20ceda16c34de9a152f8491841c33dbf80a6dbdb Mon Sep 17 00:00:00 2001 From: jiachengzhen Date: Mon, 23 Mar 2026 01:12:14 +0800 Subject: [PATCH 82/85] fix(memos-local): resolve telemetry credentials path in ESM mode The plugin runs as ESM (type: module) but telemetry.ts relied on __dirname which is undefined in ESM. Credentials file existed on disk but was never found, silently disabling all telemetry since day one. 
Now accepts pluginDir from index.ts (resolved via import.meta.url) and uses it as primary search path for telemetry.credentials.json. Made-with: Cursor --- apps/memos-local-openclaw/index.ts | 2 +- apps/memos-local-openclaw/src/telemetry.ts | 43 +++++++++++++--------- 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/apps/memos-local-openclaw/index.ts b/apps/memos-local-openclaw/index.ts index 5af504640..b9ca99552 100644 --- a/apps/memos-local-openclaw/index.ts +++ b/apps/memos-local-openclaw/index.ts @@ -225,7 +225,7 @@ const memosLocalPlugin = { const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, "package.json"), "utf-8")); pluginVersion = pkg.version ?? pluginVersion; } catch {} - const telemetry = new Telemetry(ctx.config.telemetry ?? {}, stateDir, pluginVersion, ctx.log); + const telemetry = new Telemetry(ctx.config.telemetry ?? {}, stateDir, pluginVersion, ctx.log, pluginDir); // Install bundled memory-guide skill so OpenClaw loads it (write from embedded content so it works regardless of deploy layout) const workspaceSkillsDir = path.join(workspaceDir, "skills"); diff --git a/apps/memos-local-openclaw/src/telemetry.ts b/apps/memos-local-openclaw/src/telemetry.ts index f688260d5..c11484e9c 100644 --- a/apps/memos-local-openclaw/src/telemetry.ts +++ b/apps/memos-local-openclaw/src/telemetry.ts @@ -18,7 +18,7 @@ export interface TelemetryConfig { enabled?: boolean; } -function loadTelemetryCredentials(): { endpoint: string; pid: string; env: string } { +function loadTelemetryCredentials(pluginDir?: string): { endpoint: string; pid: string; env: string } { if (process.env.MEMOS_ARMS_ENDPOINT) { return { endpoint: process.env.MEMOS_ARMS_ENDPOINT, @@ -26,20 +26,19 @@ function loadTelemetryCredentials(): { endpoint: string; pid: string; env: strin env: process.env.MEMOS_ARMS_ENV ?? 
"prod", }; } - try { - const credPath = path.resolve(__dirname, "..", "telemetry.credentials.json"); - const raw = fs.readFileSync(credPath, "utf-8"); - const creds = JSON.parse(raw); - if (creds.endpoint) return { endpoint: creds.endpoint, pid: creds.pid ?? "", env: creds.env ?? "prod" }; - } catch {} + const bases = pluginDir ? [pluginDir, path.join(pluginDir, "src")] : []; + if (typeof __dirname === "string") bases.push(path.resolve(__dirname, ".."), __dirname); + const candidates = bases.map(b => path.join(b, "telemetry.credentials.json")); + for (const credPath of candidates) { + try { + const raw = fs.readFileSync(credPath, "utf-8"); + const creds = JSON.parse(raw); + if (creds.endpoint) return { endpoint: creds.endpoint, pid: creds.pid ?? "", env: creds.env ?? "prod" }; + } catch {} + } return { endpoint: "", pid: "", env: "prod" }; } -const _creds = loadTelemetryCredentials(); -const ARMS_ENDPOINT = _creds.endpoint; -const ARMS_PID = _creds.pid; -const ARMS_ENV = _creds.env; - const FLUSH_AT = 10; const FLUSH_INTERVAL_MS = 30_000; const SEND_TIMEOUT_MS = 30_000; @@ -67,8 +66,11 @@ export class Telemetry { private flushTimer: ReturnType | null = null; private sessionId: string; private firstSeenDate: string; + private armsEndpoint: string; + private armsPid: string; + private armsEnv: string; - constructor(config: TelemetryConfig, stateDir: string, pluginVersion: string, log: Logger) { + constructor(config: TelemetryConfig, stateDir: string, pluginVersion: string, log: Logger, pluginDir?: string) { this.log = log; this.pluginVersion = pluginVersion; this.enabled = config.enabled !== false; @@ -76,10 +78,15 @@ export class Telemetry { this.firstSeenDate = this.loadOrCreateFirstSeen(stateDir); this.sessionId = this.loadOrCreateSessionId(stateDir); - if (!this.enabled || !ARMS_ENDPOINT) { + const creds = loadTelemetryCredentials(pluginDir); + this.armsEndpoint = creds.endpoint; + this.armsPid = creds.pid; + this.armsEnv = creds.env; + + if (!this.enabled || 
!this.armsEndpoint) { this.enabled = false; this.log.debug( - !ARMS_ENDPOINT + !this.armsEndpoint ? "Telemetry disabled (no credentials configured)" : "Telemetry disabled (opt-out)", ); @@ -192,8 +199,8 @@ export class Telemetry { private buildPayload(events: ArmsEvent[]): Record { return { app: { - id: ARMS_PID, - env: ARMS_ENV, + id: this.armsPid, + env: this.armsEnv, version: this.pluginVersion, type: "node", }, @@ -212,7 +219,7 @@ export class Telemetry { const payload = this.buildPayload(batch); try { - const resp = await fetch(ARMS_ENDPOINT, { + const resp = await fetch(this.armsEndpoint, { method: "POST", headers: { "Content-Type": "text/plain" }, body: JSON.stringify(payload), From 1e5344a6a8d630b12c59ea1c4e2fda024a10fb49 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 22 Mar 2026 17:14:41 +0000 Subject: [PATCH 83/85] release: openclaw-plugin v1.0.5 --- apps/memos-local-openclaw/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/memos-local-openclaw/package.json b/apps/memos-local-openclaw/package.json index 3a2bfa15d..20f2a11b5 100644 --- a/apps/memos-local-openclaw/package.json +++ b/apps/memos-local-openclaw/package.json @@ -1,6 +1,6 @@ { "name": "@memtensor/memos-local-openclaw-plugin", - "version": "1.0.4", + "version": "1.0.5", "description": "MemOS Local memory plugin for OpenClaw — full-write, hybrid-recall, progressive retrieval", "type": "module", "main": "index.ts", From 1afd3f576ae3efe877f9f203bae01dd88f21066a Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Mon, 23 Mar 2026 15:22:59 +0800 Subject: [PATCH 84/85] Remove redundant docs. 
--- .../2026-03-08-v4-fast-track-completion.md | 237 ------------ ...3-08-v4-hub-sharing-implementation-plan.md | 344 ------------------ 2 files changed, 581 deletions(-) delete mode 100644 docs/plans/2026-03-08-v4-fast-track-completion.md delete mode 100644 docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md diff --git a/docs/plans/2026-03-08-v4-fast-track-completion.md b/docs/plans/2026-03-08-v4-fast-track-completion.md deleted file mode 100644 index f05f0258e..000000000 --- a/docs/plans/2026-03-08-v4-fast-track-completion.md +++ /dev/null @@ -1,237 +0,0 @@ -# V4 Hub Sharing Fast-Track Completion Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Ship a complete first usable version of v4 hub-spoke sharing as fast as possible, while preserving the already-working MVP memory-sharing path and finishing the remaining product-completeness items. - -**Architecture:** Keep the current mainline focused on the Hub memory-sharing critical path, then close remaining gaps in descending product value: detail retrieval, team info/onboarding polish, skill sync, Viewer UI, and final integration tests. Use parallel work whenever tasks have disjoint write scopes and do not block the mainline. - -**Tech Stack:** TypeScript, `better-sqlite3`, Hub HTTP server, local `RecallEngine`, Viewer server/UI, Vitest, OpenClaw plugin lifecycle. 
- ---- - -## Current State - -### Already completed on `codex/v4-hub-sharing` -- `T1` sharing config/types foundation -- `T2` hub/client schema + store helpers -- `T3` hub service skeleton + auth bootstrap -- `T5` minimal hub search + task share endpoints -- `T6` minimal client connector -- `T7` minimal local + hub memory search -- `T8` minimal `task_share` / `task_unshare` - -### Completed on side branch/worktree -- `T4` openclaw fallback guards on `codex/t4-openclaw-fallback` - -### Immediate cleanup items before next feature work -- Main branch currently has uncommitted changes in: - - `apps/memos-local-openclaw/src/tools/memory-search.ts` - - `apps/memos-local-openclaw/tests/integration.test.ts` -- There is an untracked stray path to clean: - - `apps/memos-local-openclaw/~/` - -These should be resolved before starting the next implementation batch. - -## Remaining Work (All still required) - -### Product-critical gaps -- Add `network_memory_detail` tool wired to `/api/v1/hub/memory-detail` -- Add `network_team_info` tool wired to `/api/v1/hub/me` (+ group list if available) -- Finish `T10` skill publish/pull via Hub -- Merge `T4` branch after quick sanity verification - -### Product-completeness gaps -- Minimal but usable Viewer/UI for hub/client state (`T12` MVP slice) -- Full tool registration completeness (`T11` finish) -- Final integrated tests + README update (`T13`) - -## Fastest Completion Strategy - -```text -Phase A — Stabilize current core (serial, short) - A0 Clean current working tree - A1 Land or discard leftover uncommitted T7 edits - A2 Merge T4 worktree branch into mainline - -Phase B — Finish missing user-facing MVP links (mainline + parallel) - B1 Mainline: network_memory_detail + network_team_info - B2 Parallel: T10 skill sync via Hub - -Phase C — Product completeness (parallel) - C1 Mainline: T11 tool registration completion - C2 Parallel: T12 minimal usable Viewer UX - -Phase D — Hardening (serial) - D1 T13 end-to-end smoke + focused 
integration suite - D2 README / setup docs / release sanity check -``` - -## Parallelization Rules - -### Must stay on the mainline critical path -- `A0` cleanup -- `A1` settle current dirty changes -- `B1` `network_memory_detail` + `network_team_info` -- `C1` final tool registration -- `D1` final smoke/integration verification - -### Should run in parallel when possible -- `T4` merge prep / sanity verification -- `T10` skill publish/pull via Hub -- `T12` Viewer/UI minimal admin & client state screens - -### Why these are parallel-safe -- `T10` mostly touches skill/Hub endpoints + client skill flows -- `T12` mostly touches `src/viewer/server.ts`, `src/viewer/html.ts` -- `B1` mostly touches tool wiring and client helper paths - -## Recommended Execution Order - -```text -Now -├─ A0 Clean worktree state -├─ A1 Decide whether dirty T7 edits are keep/amend/discard -├─ A2 Merge `codex/t4-openclaw-fallback` -│ -├─ B1 Mainline: add `network_memory_detail` -│ └─ then add `network_team_info` -│ -├─ B2 Parallel: T10 skill publish/pull via Hub -│ -├─ C1 Mainline: finalize tool registration -│ -├─ C2 Parallel: minimal Viewer/client/hub UI -│ -└─ D1/D2 Final test + docs + release pass -``` - -## Exact Next Task Recommendations - -### Task A0: Clean current branch state -**Why first:** Prevent accidental overwrite/confusion before more parallel work. - -**Actions:** -- Inspect current diffs in: - - `apps/memos-local-openclaw/src/tools/memory-search.ts` - - `apps/memos-local-openclaw/tests/integration.test.ts` -- Decide whether they belong to `T7` and should be committed/amended, or discarded -- Remove stray path `apps/memos-local-openclaw/~/` - -### Task A2: Merge T4 worktree branch -**Why now:** It is already implemented and does not block the current core, but the branch divergence should not grow. 
- -**Actions:** -- Compare `codex/t4-openclaw-fallback` against mainline -- Merge or cherry-pick: - - `69b96c0 feat(memos-local): add openclaw fallback guards` - - later viewer-gating follow-up if present in worktree -- Run targeted fallback tests + build on mainline - -### Task B1.1: Add `network_memory_detail` -**Why next:** Search already returns `remoteHitId`, so this closes the user-visible memory flow. - -**Files:** -- Modify: `apps/memos-local-openclaw/index.ts` -- Modify: `apps/memos-local-openclaw/src/client/hub.ts` -- Modify: `apps/memos-local-openclaw/tests/integration.test.ts` - -**MVP behavior:** -- Input: `remoteHitId`, optional `hubAddress`, optional `userToken` -- Resolve hub client using existing connector/helper fallback chain -- Call `/api/v1/hub/memory-detail` -- Return content/summary/source - -### Task B1.2: Add `network_team_info` -**Why immediately after:** Cheap, high-value visibility into connection/user/group context. - -**Files:** -- Modify: `apps/memos-local-openclaw/index.ts` -- Modify: `apps/memos-local-openclaw/src/client/connector.ts` -- Modify: `apps/memos-local-openclaw/tests/client-connector.test.ts` - -**MVP behavior:** -- Return connected/disconnected state -- Return current user identity/role -- Return groups if available from `/me` or local persisted state - -### Task B2: Finish T10 Hub skill sync -**Why parallel:** Important for completeness, but does not block memory-sharing MVP. - -**Files:** -- Modify: `apps/memos-local-openclaw/index.ts` -- Modify: `apps/memos-local-openclaw/src/skill/installer.ts` -- Create/modify: `apps/memos-local-openclaw/src/client/skill-sync.ts` -- Add tests in `apps/memos-local-openclaw/tests/integration.test.ts` - -**Minimum completion criteria:** -- `skill_publish(scope=group|public)` to Hub -- `network_skill_pull` from Hub -- bundle validation stays enforced - -### Task C1: Finish T11 tool registration -**Why after B1/T10:** Register only once core tools and skill tools exist. 
- -**Required tools to expose by end:** -- `task_share` -- `task_unshare` -- `network_memory_detail` -- `network_team_info` -- `network_skill_pull` - -### Task C2: Minimal T12 Viewer UI -**Why not earlier:** UI should follow working APIs. - -**MVP UI only:** -- Client: show Hub connected/disconnected + current user/role -- Search: local/group/all selector if not already present -- Hub/Admin: pending users list + approve action -- Shared result section rendering for hub hits - -### Task D1: Final T13 test pass -**Do not skip.** - -**Minimum smoke matrix:** -- Hub start/stop -- Join + approve -- Connect client -- `task_share` -- `memory_search(scope=group)` -- `network_memory_detail` -- `task_unshare` -- Hub-down fallback to local-only search -- skill publish/pull smoke if T10 lands - -## Fastest Team Split - -### Mainline owner -- A0/A1 cleanup -- A2 T4 merge -- B1 `network_memory_detail`, `network_team_info` -- C1 tool registration -- D1/D2 final smoke + docs - -### Parallel lane 1 -- T10 skill sync via Hub - -### Parallel lane 2 -- T12 minimal Viewer UI - -## “Tomorrow-ready” Definition - -A release is acceptable when all are true: -- Hub starts with a valid team token -- Admin bootstrap and approval flow work -- Client can connect and persist session -- Local task can be shared to Hub and searched back -- Hub hit can be opened with `network_memory_detail` -- Task can be unshared -- Local-only behavior still works if Hub is unavailable -- No obvious auth bypass in the happy-path MVP routes - -## Non-blocking defects to defer if time is short -- Advanced Viewer polish -- Rich group management UX -- Deep fallback/host integration beyond safe guards -- Skill version-management niceties beyond publish/pull happy path -- Exhaustive edge-case test coverage outside the smoke matrix diff --git a/docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md b/docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md deleted file mode 100644 index 1f02b252b..000000000 --- 
a/docs/plans/2026-03-08-v4-hub-sharing-implementation-plan.md +++ /dev/null @@ -1,344 +0,0 @@ -# V4 Hub Sharing Implementation Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Deliver the v4 Hub-Spoke memory and skill sharing architecture in safe, testable phases without blocking existing local memory behavior. - -**Architecture:** Keep the current local memory plugin behavior intact, then layer in a centralized Hub mode, a Client connector, Hub-side shared search/indexing, and group/public sharing flows. Treat the OpenClaw default-model fallback as a separate capability track so platform uncertainty does not block the Hub MVP. - -**Tech Stack:** TypeScript, `better-sqlite3`, existing local `RecallEngine`, HTTP server routes, FTS5, vector embeddings, Viewer UI, Vitest. - ---- - -## Delivery Strategy - -- **Execution style:** 3 major workstreams in parallel where possible -- **Critical path:** `T1 → T2/T3 → T5 → T6 → T7 → T9/T11 → T13` -- **Do not block MVP on:** advanced Viewer polish, full admin ergonomics, OpenClaw default-model fallback -- **MVP definition:** Hub up, user join/approve works, group/public task sharing works, local+Hub search works, Hub skill publish/pull works, Hub outage degrades to local-only - -## Recommended Staffing - -- **Track A — Platform/Core:** config, schema, Hub server, auth, search -- **Track B — Client Flows:** connector, sync, federated search, tools -- **Track C — UX/Viewer:** onboarding, admin approval UI, scope switch, shared result views - -## Phase Schedule (Recommended) - -```mermaid -gantt - title V4 Hub-Spoke Development Schedule - dateFormat YYYY-MM-DD - axisFormat %m/%d - - section Foundation - T1 Types & config :a1, 2026-03-09, 3d - - section Parallel Base - T2 Hub schema & store :a2, after a1, 4d - T3 Hub server skeleton & auth :a3, after a1, 4d - T4 Default model fallback spike :a4, after a1, 3d - - section Core Search Path - T5 Hub 
search/index pipeline :a5, after a2, 5d - T6 Client connector & onboarding :a6, after a3, 4d - T7 Local + Hub search integration :a7, after a5, 4d - - section Sharing Features - T8 Task share & incremental sync :a8, after a6, 4d - T9 Search tool adaptation :a9, after a7, 3d - T10 Skill publish/pull via Hub :a10, after a6, 4d - - section Productization - T11 Tool registration & plugin wiring :a11, after a8, 3d - T12 Viewer/admin UX :a12, after a8, 5d - - section Hardening - T13 Integration tests & docs :a13, after a11, 5d -``` - -## Milestones - -| Milestone | Exit Criteria | Target | -|---|---|---| -| M1 Foundation Ready | Config resolves hub/client mode; schema and server skeleton compile | End of Week 1 | -| M2 Search Closed Loop | Hub can ingest shared task data and return filtered search results | Mid Week 2 | -| M3 Client Closed Loop | Client can join team, authenticate, search local + Hub, and degrade on Hub outage | End of Week 2 | -| M4 Sharing Closed Loop | Task share/unshare and skill publish/pull complete end-to-end | Mid Week 3 | -| M5 Product Ready | Viewer/admin flows, tests, docs, fallback behavior verified | End of Week 3 | - -## Task Graph - -```text -T1 Types & Config -├─ T2 Hub Schema & Store -├─ T3 Hub Server Skeleton & Auth -├─ T4 Default Model Fallback Spike -│ -├─ T5 Hub Search/Index Pipeline <- T2 + T3 -├─ T6 Client Connector & Onboarding <- T3 (+ T4 optional for fallback hookup) -├─ T7 Local + Hub Search Integration <- T5 + T6 -│ -├─ T8 Task Share & Incremental Sync <- T5 + T6 -├─ T9 Search Tool Adaptation <- T7 -├─ T10 Skill Publish/Pull via Hub <- T5 + T6 -│ -├─ T11 Tool Registration & Wiring <- T8 + T9 + T10 -├─ T12 Viewer/Admin UX <- T8 + T9 + T10 -└─ T13 Integration Tests & Docs <- T11 + T12 -``` - -## Work Breakdown - -### Task 1: Types and configuration foundation - -**Files:** -- Create: `apps/memos-local-openclaw/src/sharing/types.ts` -- Modify: `apps/memos-local-openclaw/src/types.ts` -- Modify: 
`apps/memos-local-openclaw/src/config.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Add Hub/Client mode config types -- Define `HubSearchHit`, `NetworkSearchResult`, `UserInfo`, `GroupInfo`, `SkillBundle` -- Define fallback capability flags instead of assuming OpenClaw APIs always exist - -**Done when:** -- Types compile -- Config parsing supports hub/client branches cleanly -- Existing local-only config remains backward compatible - -### Task 2: Hub schema and store layer - -**Files:** -- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` -- Test: `apps/memos-local-openclaw/tests/storage.test.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Add `hub_users`, `hub_groups`, `hub_group_members`, `hub_tasks`, `hub_chunks`, `hub_embeddings`, `hub_skills` -- Add uniqueness constraints for source IDs -- Add CRUD helpers for user approval, group membership, shared task/skill upsert, shared delete - -**Done when:** -- Repeated share requests are idempotent -- Group membership queries are fast and test-covered -- Existing local tables remain backward compatible - -### Task 3: Hub server skeleton and auth - -**Files:** -- Create: `apps/memos-local-openclaw/src/hub/server.ts` -- Create: `apps/memos-local-openclaw/src/hub/auth.ts` -- Create: `apps/memos-local-openclaw/src/hub/user-manager.ts` -- Modify: `apps/memos-local-openclaw/index.ts` -- Test: `apps/memos-local-openclaw/tests/plugin-impl-access.test.ts` - -**Deliverables:** -- Start/stop Hub HTTP server in hub mode -- Implement team-token join flow and JWT user-token verification -- Register `/hub/info`, `/hub/join`, `/hub/me`, `/hub/admin/*` skeleton routes -- Add rate limiting middleware - -**Done when:** -- Admin can bootstrap team -- Pending user can join and wait for approval -- Approved user receives valid token and blocked user is rejected - -### Task 4: Default-model fallback spike - -**Files:** -- Modify: 
`apps/memos-local-openclaw/src/embedding/index.ts` -- Modify: `apps/memos-local-openclaw/src/ingest/providers/index.ts` -- Modify: `apps/memos-local-openclaw/src/types.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Add an `openclaw` provider abstraction if host capabilities exist -- Detect host capability instead of assuming `api.embed()` / `api.complete()` -- Preserve current local/heuristic fallback as final safety net - -**Done when:** -- No explicit provider still works -- Missing host capability does not break startup -- Fallback chain is logged and testable - -### Task 5: Hub search and indexing pipeline - -**Files:** -- Create: `apps/memos-local-openclaw/src/hub/search.ts` -- Modify: `apps/memos-local-openclaw/src/hub/server.ts` -- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` -- Test: `apps/memos-local-openclaw/tests/recall.test.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Receive shared task/chunk payloads from clients -- Re-embed and FTS-index all shared chunks on Hub -- Filter by requester user groups and `public` -- Return `remoteHitId`-based Hub search results - -**Done when:** -- A shared task becomes searchable on Hub -- A user cannot see data from groups they do not belong to -- `memory-detail` honors permissions and hit expiry - -### Task 6: Client connector and onboarding - -**Files:** -- Create: `apps/memos-local-openclaw/src/client/connector.ts` -- Modify: `apps/memos-local-openclaw/src/viewer/server.ts` -- Modify: `apps/memos-local-openclaw/src/viewer/html.ts` -- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` -- Test: `apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts` - -**Deliverables:** -- Persist Hub connection state in `client_hub_connection` -- Implement join-team and create-team state machine -- Add connection health and reconnect handling -- Expose waiting-approved / active / rejected states to Viewer - -**Done 
when:** -- Fresh install can choose create-team or join-team -- Client survives Hub restart and reconnects -- Rejected client is visibly blocked from Hub actions - -### Task 7: Local + Hub search integration - -**Files:** -- Create: `apps/memos-local-openclaw/src/client/federated-search.ts` -- Modify: `apps/memos-local-openclaw/src/recall/engine.ts` -- Modify: `apps/memos-local-openclaw/src/types.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Execute local search and Hub search in parallel for `group/all` -- Return local and Hub results in separate sections -- Degrade to local-only when Hub is unavailable - -**Done when:** -- `scope=local` is unchanged -- `scope=group/all` returns stable two-section results -- Hub outage does not break the tool - -### Task 8: Task share and incremental sync - -**Files:** -- Create: `apps/memos-local-openclaw/src/client/sync.ts` -- Modify: `apps/memos-local-openclaw/index.ts` -- Modify: `apps/memos-local-openclaw/src/storage/sqlite.ts` -- Test: `apps/memos-local-openclaw/tests/task-processor.test.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Implement `task_share` and `task_unshare` -- Push full task on first share, then incremental chunks on `agent_end` -- Track sync cursor or last-pushed chunk for idempotent uploads - -**Done when:** -- Shared task appears on Hub -- New chunks for shared task are pushed once -- Unshare removes data from Hub and stops future push - -### Task 9: Search tool adaptation - -**Files:** -- Modify: `apps/memos-local-openclaw/src/tools/memory-search.ts` -- Modify: `apps/memos-local-openclaw/index.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Add `scope: local | group | all` to `memory_search` -- Add Hub-aware formatting to `skill_search` -- Preserve current local tool UX for existing users - -**Done when:** -- Existing prompts still work unchanged -- New Hub scopes return 
intelligible, separable outputs - -### Task 10: Skill publish and pull via Hub - -**Files:** -- Modify: `apps/memos-local-openclaw/src/skill/installer.ts` -- Create: `apps/memos-local-openclaw/src/client/skill-sync.ts` -- Modify: `apps/memos-local-openclaw/index.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Publish full skill bundle to Hub with group/public scope -- Pull bundle from Hub with client-side safety validation -- Store provenance for pulled skills - -**Done when:** -- Group member can publish and another group member can pull -- Unauthorized user cannot pull group-restricted skill -- Malformed bundle is rejected atomically - -### Task 11: Tool registration and plugin wiring - -**Files:** -- Modify: `apps/memos-local-openclaw/index.ts` -- Test: `apps/memos-local-openclaw/tests/plugin-impl-access.test.ts` - -**Deliverables:** -- Register `task_share`, `task_unshare`, `network_memory_detail`, `network_skill_pull`, `network_team_info` -- Start Hub services in hub mode and connector in client mode -- Keep local-only mode intact - -**Done when:** -- Tool list changes by mode as intended -- Startup/shutdown lifecycle remains clean - -### Task 12: Viewer and admin UX - -**Files:** -- Modify: `apps/memos-local-openclaw/src/viewer/server.ts` -- Modify: `apps/memos-local-openclaw/src/viewer/html.ts` -- Test: `apps/memos-local-openclaw/tests/integration.test.ts` - -**Deliverables:** -- Hub-side admin approval and group management -- Client-side connection status and scope switch -- Shared search results with owner/group metadata -- Skill browser and pull actions - -**Done when:** -- Admin can approve users and manage groups in Viewer -- Client can see its state and accessible Hub content clearly - -### Task 13: Integration tests and docs - -**Files:** -- Modify: `apps/memos-local-openclaw/tests/integration.test.ts` -- Modify: `apps/memos-local-openclaw/tests/storage.test.ts` -- Modify: 
`apps/memos-local-openclaw/tests/shutdown-lifecycle.test.ts` -- Modify: `apps/memos-local-openclaw/README.md` - -**Deliverables:** -- End-to-end tests for join, approve, group isolation, task share, incremental sync, skill pull, Hub outage fallback, fallback model behavior -- README updates for hub/client setup and default model behavior - -**Done when:** -- MVP flow is test-covered end-to-end -- README is sufficient for a fresh teammate to run Hub and join as client - -## Release Recommendation - -- **MVP Cut:** T1–T11 complete, T12 basic UI only, T13 essential integration tests only -- **Post-MVP Cut:** advanced admin UX, analytics, team-token rotation UX polish, richer Hub browsing -- **Spike Before Coding:** verify whether OpenClaw host truly exposes embedding/completion APIs; if not, keep local/heuristic fallback as the documented default fallback path - -## Suggested Calendar - -| Week | Focus | Primary Owners | -|---|---|---| -| Week 1 | Foundation + Hub base (`T1-T4`) | Core + Platform | -| Week 2 | Hub search + connector + combined search (`T5-T7`) | Core + Client | -| Week 3 | Share/pull flows + tools + basic UI (`T8-T12`) | Client + UX | -| Week 4 | Hardening, test expansion, docs (`T13`) | Whole team | - -## Critical Path Notes - -- `T4` must not block Hub MVP unless OpenClaw fallback is a release requirement -- `T12` should not block API completion; ship a minimal admin UI first -- `T13` should prioritize permission isolation and outage fallback before UI polish From 9fac874b08bf01071b93ce17d6a6ab57178bd447 Mon Sep 17 00:00:00 2001 From: tangbo <1502220175@qq.com> Date: Mon, 23 Mar 2026 15:42:15 +0800 Subject: [PATCH 85/85] fix(memos-local-openclaw): remove accidental runtime-path skill copy from repo - Delete the duplicated memos-memory-guide file under the faux ~/.openclaw runtime path - Ignore ~/.openclaw-style generated content inside the plugin directory to avoid future accidental commits Made-with: Cursor --- apps/memos-local-openclaw/.gitignore 
| 1 + .../skills/memos-memory-guide/SKILL.md | 211 ------------------ 2 files changed, 1 insertion(+), 211 deletions(-) delete mode 100644 apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md diff --git a/apps/memos-local-openclaw/.gitignore b/apps/memos-local-openclaw/.gitignore index 2fe5cd4d5..d0bfd2a76 100644 --- a/apps/memos-local-openclaw/.gitignore +++ b/apps/memos-local-openclaw/.gitignore @@ -32,3 +32,4 @@ telemetry.credentials.json *.sqlite *.sqlite-journal *.db +/~/.openclaw/ diff --git a/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md b/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md deleted file mode 100644 index c7897bb49..000000000 --- a/apps/memos-local-openclaw/~/.openclaw/workspace/skills/memos-memory-guide/SKILL.md +++ /dev/null @@ -1,211 +0,0 @@ ---- -name: memos-memory-guide -description: "Use the MemOS Local memory system to search and use the user's past conversations. Use this skill whenever the user refers to past chats, their own preferences or history, or when you need to answer from prior context. When auto-recall returns nothing (long or unclear user query), generate your own short search query and call memory_search. Available tools: memory_search, memory_get, memory_write_public, memory_share, memory_unshare, task_summary, skill_get, skill_search, skill_install, skill_publish, skill_unpublish, network_memory_detail, network_skill_pull, network_team_info, memory_timeline, memory_viewer." ---- - -# MemOS Local Memory — Agent Guide - -This skill describes how to use the MemOS memory tools so you can reliably search and use the user's long-term conversation history, query team-shared data, share tasks, and discover or pull reusable skills. - -Two sharing planes exist and must not be confused: - -- **Local agent sharing:** visible to agents in the same OpenClaw workspace only. 
-- **Team sharing:** visible to teammates through the configured team server. - -## How memory is provided each turn - -- **Automatic recall (hook):** At the start of each turn, the system runs a memory search using the user's current message and injects relevant past memories into your context. You do not need to call any tool for that. -- **When that is not enough:** If the user's message is very long, vague, or the automatic search returns **no memories**, you should **generate your own short, focused query** and call `memory_search` yourself. -- **Memory isolation:** Each agent can only see its own local private memories and local `public` memories. Team-shared data only appears when you search with `scope="group"` or `scope="all"`. - -## Tools — what they do and when to call - -### memory_search - -- **What it does:** Search long-term conversation memory for past conversations, user preferences, decisions, and experiences. Returns relevant excerpts with `chunkId` and optionally `task_id`. Only returns memories belonging to the current agent or marked as public. -- **When to call:** - - The automatic recall did not run or returned nothing. - - The user's query is long or unclear — **generate a short query yourself** and call `memory_search(query="...")`. - - You need to search with a different angle (e.g. filter by `role='user'`). -- **Parameters:** - - `query` (string, **required**) — Natural language search query. - - `scope` (string, optional) — `'local'` (default) for current agent + local shared memories, or `'group'` / `'all'` to include team-shared memories. - - `maxResults` (number, optional) — Increase when the first search is too narrow. - - `minScore` (number, optional) — Lower slightly if recall is too strict. - - `role` (string, optional) — Filter local results by `'user'`, `'assistant'`, `'tool'`, or `'system'`. - -### memory_get - -- **What it does:** Get the full original text of a memory chunk. Use to verify exact details from a search hit. 
-- **When to call:** A `memory_search` hit looks relevant but you need to see the complete original content, not just the summary/excerpt. -- **Parameters:** - - `chunkId` (string, **required**) — The chunkId from a search hit. - - `maxChars` (number, optional) — Max characters to return (default 4000, max 12000). - -### memory_write_public - -- **What it does:** Create a brand new local shared memory. These memories are visible to all agents in the same OpenClaw workspace during `memory_search`. This does **not** publish anything to the team server. -- **When to call:** In multi-agent or collaborative scenarios, when you want to create a new persistent shared note from scratch (e.g. shared decisions, conventions, configurations, workflows). Do not use it if you already have a specific memory chunk to expose. -- **Parameters:** - - `content` (string, **required**) — The content to write to local shared memory. - - `summary` (string, optional) — Short summary of the content. - -### memory_share - -- **What it does:** Share an existing memory either with local OpenClaw agents, to the team, or to both. -- **When to call:** You already have a useful memory chunk and want to expose it beyond the current agent. -- **Do not use when:** You are creating a new shared note from scratch. In that case use `memory_write_public`. -- **Parameters:** - - `chunkId` (string, **required**) — Existing memory chunk ID. - - `target` (string, optional) — `'agents'` (default), `'hub'`, or `'both'`. - - `visibility` (string, optional) — Team visibility when target includes team: `'public'` (default) or `'group'`. - - `groupId` (string, optional) — Optional team group ID when `visibility='group'`. - -### memory_unshare - -- **What it does:** Remove an existing memory from local agent sharing, team sharing, or both. -- **When to call:** A memory should no longer be visible outside the current agent or should be removed from the team. 
-- **Parameters:** - - `chunkId` (string, **required**) — Existing memory chunk ID. - - `target` (string, optional) — `'agents'`, `'hub'`, or `'all'` (default). - - `privateOwner` (string, optional) — Rare fallback only for older public memories that have no recorded original owner. - -### task_summary - -- **What it does:** Get the detailed summary of a complete task: title, status, narrative summary, and related skills. Use when `memory_search` returns a hit with a `task_id` and you need the full story. Preserves critical information: URLs, file paths, commands, error codes, step-by-step instructions. -- **When to call:** A `memory_search` hit included a `task_id` and you need the full context of that task. -- **Parameters:** - - `taskId` (string, **required**) — The task_id from a memory_search hit. - -### skill_get - -- **What it does:** Retrieve a proven skill (experience guide) by `skillId` or by `taskId`. If you pass a `taskId`, the system will find the associated skill automatically. -- **When to call:** A search hit has a `task_id` and the task has a "how to do this again" guide. Use this to follow the same approach or reuse steps. -- **Parameters:** - - `skillId` (string, optional) — Direct skill ID. - - `taskId` (string, optional) — Task ID — will look up the skill linked to this task. - - At least one of `skillId` or `taskId` must be provided. - -### skill_search - -- **What it does:** Search available skills by natural language. Searches your own skills, local shared skills, or both. It can also include team skills. -- **When to call:** The current task requires a capability or guide you don't have. Use `skill_search` to find one first; after finding it, use `skill_get` to read it, then `skill_install` to load it for future turns. -- **Parameters:** - - `query` (string, **required**) — Natural language description of the needed skill. 
- - `scope` (string, optional) — `'mix'` (default, self + local shared), `'self'`, `'public'` (local shared only), or `'group'` / `'all'` to include team results. - -### skill_install - -- **What it does:** Install a learned skill into the agent workspace so it becomes permanently available. After installation, the skill will be loaded automatically in future sessions. -- **When to call:** After `skill_get` when the skill is useful for ongoing use. -- **Parameters:** - - `skillId` (string, **required**) — The skill ID to install. - -### skill_publish - -- **What it does:** Share a skill with local agents, or publish it to the team. -- **When to call:** You have a useful skill that other agents or your team could benefit from. -- **Parameters:** - - `skillId` (string, **required**) — The skill ID to publish. - - `target` (string, optional) — `'agents'` (default) or `'hub'`. - - `visibility` (string, optional) — When `target='hub'`, use `'public'` (default) or `'group'`. - - `groupId` (string, optional) — Optional team group ID when `target='hub'` and `visibility='group'`. - - `scope` (string, optional) — Backward-compatible alias for old calls. Prefer `target` + `visibility` in new calls. - -### skill_unpublish - -- **What it does:** Stop local agent sharing, remove a team-published copy, or do both. -- **When to call:** You want to stop sharing a previously published skill. -- **Parameters:** - - `skillId` (string, **required**) — The skill ID to unpublish. - - `target` (string, optional) — `'agents'` (default), `'hub'`, or `'all'`. - -### network_memory_detail - -- **What it does:** Fetches the full content behind a team search hit. -- **When to call:** A `memory_search` result came from the team and you need the full shared memory content. -- **Parameters:** `remoteHitId`. - -### task_share / task_unshare - -- **What they do:** Share a local task to the team, or remove it later. 
-- **When to call:** A task is valuable to your group or to the whole team and should be discoverable via shared search. -- **Parameters:** `taskId`, plus sharing visibility/scope when required. - -### network_skill_pull - -- **What it does:** Pulls a team-shared skill bundle down into local storage. -- **When to call:** `skill_search` found a useful team skill and you want to use it locally or offline. -- **Parameters:** `skillId`. - -### network_team_info - -- **What it does:** Returns current team server connection information, user, role, and groups. -- **When to call:** You need to confirm whether team sharing is configured or which groups the current client belongs to. -- **Call this first before:** `memory_share(... target='hub'|'both')`, `memory_unshare(... target='hub'|'all')`, `task_share`, `task_unshare`, `skill_publish(... target='hub')`, `skill_unpublish(... target='hub'|'all')`, or `network_skill_pull`. -- **Parameters:** none. - -### memory_timeline - -- **What it does:** Expand context around a memory search hit. Pass the `chunkId` from a search result to read the surrounding conversation messages. -- **When to call:** A `memory_search` hit is relevant but you need the surrounding dialogue. -- **Parameters:** - - `chunkId` (string, **required**) — The chunkId from a memory_search hit. - - `window` (number, optional) — Context window ±N messages, default 2. - -### memory_viewer - -- **What it does:** Show the MemOS Memory Viewer URL. Call this when the user asks how to view, browse, manage, or check their memories. Returns the URL the user can open in their browser. -- **When to call:** The user asks where to see or manage their memories. -- **Parameters:** None. - -## Quick decision flow - -1. **No memories in context or auto-recall reported nothing** - → Call `memory_search(query="...")` with a **self-generated short query**. - -2. **Need to see the full original text of a search hit** - → Call `memory_get(chunkId="...")`. - -3. 
**Search returned hits with `task_id` and you need full context** - → Call `task_summary(taskId="...")`. - -4. **Task has an experience guide you want to follow** - → Call `skill_get(taskId="...")` or `skill_get(skillId="...")`. Optionally `skill_install(skillId="...")` for future use. - -5. **You need the exact surrounding conversation of a hit** - → Call `memory_timeline(chunkId="...")`. - -6. **You need a capability/guide that you don't have** - → Call `skill_search(query="...", scope="mix")` to discover available skills. - -7. **You have new shared knowledge useful to all local agents** - → Call `memory_write_public(content="...")`. - -8. **You already have an existing memory chunk and want to expose or hide it** - → Call `memory_share(chunkId="...", target="agents|hub|both")` or `memory_unshare(chunkId="...", target="agents|hub|all")`. - -9. **You are about to do anything team-sharing-related** - → Call `network_team_info()` first if team server availability is uncertain. - -10. **You want to share/stop sharing a skill with local agents or team** - → Prefer `skill_publish(skillId="...", target="agents|hub", visibility=...)` and `skill_unpublish(skillId="...", target="agents|hub|all")`. - -11. **User asks where to see or manage their memories** - → Call `memory_viewer()` and share the URL. - -## Writing good search queries - -- Prefer **short, focused** queries (a few words or one clear question). -- Use **concrete terms**: names, topics, tools, or decisions. -- If the user's message is long, **derive one or two sub-queries** rather than pasting the whole message. -- Use `role='user'` when you specifically want to find what the user said. - -## Memory ownership and agent isolation - -Each memory is tagged with an `owner` (e.g. `agent:main`, `agent:sales-bot`). This is handled **automatically** — you do not need to pass any owner parameter. 
- -- **Your memories:** All tools (`memory_search`, `memory_get`, `memory_timeline`) automatically scope queries to your agent's own memories. -- **Local shared memories:** Memories marked as local shared are visible to all agents in the same OpenClaw workspace. Use `memory_write_public` to create them, or `memory_share(target='agents')` to expose an existing chunk. -- **Cross-agent isolation:** You cannot see memories owned by other agents (unless they are public). -- **How it works:** The system identifies your agent ID from the OpenClaw runtime context and applies owner filtering automatically on every search, recall, and retrieval.