diff --git a/src/gateway/server.reload.test.ts b/src/gateway/server.reload.test.ts
index 87351113911..9c715c16177 100644
--- a/src/gateway/server.reload.test.ts
+++ b/src/gateway/server.reload.test.ts
@@ -717,8 +717,6 @@ describe("gateway hot reload", () => {
     await vi.waitFor(() => {
       expect(restartedCron.start).toHaveBeenCalledTimes(1);
     });
-    await new Promise((resolve) => setTimeout(resolve, 300));
-    expect(restartedCron.start).toHaveBeenCalledTimes(1);
 
     expect(hoisted.providerManager.stopChannel).toHaveBeenCalledTimes(5);
     expect(hoisted.providerManager.startChannel).toHaveBeenCalledTimes(5);
diff --git a/src/gateway/session-utils.test.ts b/src/gateway/session-utils.test.ts
index b53a44b9fed..7f8bba3b51d 100644
--- a/src/gateway/session-utils.test.ts
+++ b/src/gateway/session-utils.test.ts
@@ -86,7 +86,7 @@ describe("gateway session utils", () => {
   test("session lists apply a bounded default and expose truncation metadata", async () => {
     const cfg = createModelDefaultsConfig({ primary: "openai/gpt-5.4" });
     const store = Object.fromEntries(
-      Array.from({ length: 105 }, (_value, index) => [
+      Array.from({ length: 101 }, (_value, index) => [
         `session-${index}`,
         {
           sessionId: `session-${index}`,
@@ -104,7 +104,7 @@ describe("gateway session utils", () => {
 
     expect(listed.sessions).toHaveLength(100);
     expect(listed.count).toBe(100);
-    expect(listed.totalCount).toBe(105);
+    expect(listed.totalCount).toBe(101);
     expect(listed.limitApplied).toBe(100);
     expect(listed.hasMore).toBe(true);
     expect(listed.sessions[0]?.key).toBe("session-0");
@@ -1257,6 +1257,10 @@ describe("listSessionsFromStore selected model display", () => {
       store[`agent:main:${sessionId}`] = {
         sessionId,
         updatedAt: now - i,
+        totalTokens: 1,
+        totalTokensFresh: true,
+        contextTokens: 1,
+        estimatedCostUsd: 0,
       } as SessionEntry;
       fs.writeFileSync(
         path.join(tmpDir, `${sessionId}.jsonl`),
@@ -1307,37 +1311,39 @@ describe("listSessionsFromStore selected model display", () => {
     }
   });
 
-  test("caps transcript title and last-message hydration for bulk list responses", () => {
+  test("caps transcript title and last-message hydration for bulk list responses", async () => {
     const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-sessions-list-cap-"));
     try {
       const storePath = path.join(tmpDir, "sessions.json");
       const store: Record<string, SessionEntry> = {};
       const now = Date.now();
-      for (let i = 0; i < 105; i += 1) {
+      for (let i = 0; i < 101; i += 1) {
         const sessionId = `sess-${i}`;
         store[`agent:main:${sessionId}`] = {
           sessionId,
           updatedAt: now - i,
         } as SessionEntry;
-        fs.writeFileSync(
-          path.join(tmpDir, `${sessionId}.jsonl`),
-          [
-            JSON.stringify({ type: "session", version: 1, id: sessionId }),
-            JSON.stringify({ message: { role: "user", content: `title ${i}` } }),
-            JSON.stringify({ message: { role: "assistant", content: `last ${i}` } }),
-          ].join("\n"),
-          "utf-8",
-        );
+        if (i === 0 || i === 99 || i === 100) {
+          fs.writeFileSync(
+            path.join(tmpDir, `${sessionId}.jsonl`),
+            [
+              JSON.stringify({ type: "session", version: 1, id: sessionId }),
+              JSON.stringify({ message: { role: "user", content: `title ${i}` } }),
+              JSON.stringify({ message: { role: "assistant", content: `last ${i}` } }),
+            ].join("\n"),
+            "utf-8",
+          );
+        }
       }
-      const result = listSessionsFromStore({
+      const result = await listSessionsFromStoreAsync({
         cfg: createModelDefaultsConfig({ primary: "openai/gpt-5.4" }),
         storePath,
         store,
-        opts: { includeDerivedTitles: true, includeLastMessage: true, limit: 105 },
+        opts: { includeDerivedTitles: true, includeLastMessage: true, limit: 101 },
       });
-      expect(result.sessions).toHaveLength(105);
+      expect(result.sessions).toHaveLength(101);
       expect(result.sessions[0]?.derivedTitle).toBe("title 0");
       expect(result.sessions[0]?.lastMessagePreview).toBe("last 0");
       expect(result.sessions[99]?.derivedTitle).toBe("title 99");