test: share sanitize session usage helpers

This commit is contained in:
Peter Steinberger 2026-03-13 23:59:46 +00:00
parent 91b9c47dad
commit 903cb0679d
1 changed file with 62 additions and 74 deletions

View File

@ -177,6 +177,14 @@ describe("sanitizeSessionHistory", () => {
AgentMessage & { usage?: unknown; content?: unknown } AgentMessage & { usage?: unknown; content?: unknown }
>; >;
// Shared helper: sanitize a message list via the OpenAI path (Google API
// detection forced off) and return the lone assistant message, if any,
// widened so tests can inspect its `usage` snapshot.
const getSingleAssistantUsage = async (messages: AgentMessage[]) => {
  vi.mocked(mockedHelpers.isGoogleModelApi).mockReturnValue(false);
  const sanitized = await sanitizeOpenAIHistory(messages);
  const assistant = sanitized.find((entry) => entry.role === "assistant");
  return assistant as (AgentMessage & { usage?: unknown }) | undefined;
};
beforeEach(async () => { beforeEach(async () => {
testTimestamp = 1; testTimestamp = 1;
const harness = await loadSanitizeSessionHistoryWithCleanMocks(); const harness = await loadSanitizeSessionHistoryWithCleanMocks();
@ -358,43 +366,33 @@ describe("sanitizeSessionHistory", () => {
}); });
it("adds a zeroed assistant usage snapshot when usage is missing", async () => { it("adds a zeroed assistant usage snapshot when usage is missing", async () => {
vi.mocked(mockedHelpers.isGoogleModelApi).mockReturnValue(false); const assistant = await getSingleAssistantUsage(
castAgentMessages([
const messages = castAgentMessages([ { role: "user", content: "question" },
{ role: "user", content: "question" }, {
{ role: "assistant",
role: "assistant", content: [{ type: "text", text: "answer without usage" }],
content: [{ type: "text", text: "answer without usage" }], },
}, ]),
]); );
const result = await sanitizeOpenAIHistory(messages);
const assistant = result.find((message) => message.role === "assistant") as
| (AgentMessage & { usage?: unknown })
| undefined;
expect(assistant?.usage).toEqual(makeZeroUsageSnapshot()); expect(assistant?.usage).toEqual(makeZeroUsageSnapshot());
}); });
it("normalizes mixed partial assistant usage fields to numeric totals", async () => { it("normalizes mixed partial assistant usage fields to numeric totals", async () => {
vi.mocked(mockedHelpers.isGoogleModelApi).mockReturnValue(false); const assistant = await getSingleAssistantUsage(
castAgentMessages([
const messages = castAgentMessages([ { role: "user", content: "question" },
{ role: "user", content: "question" }, {
{ role: "assistant",
role: "assistant", content: [{ type: "text", text: "answer with partial usage" }],
content: [{ type: "text", text: "answer with partial usage" }], usage: {
usage: { output: 3,
output: 3, cache_read_input_tokens: 9,
cache_read_input_tokens: 9, },
}, },
}, ]),
]); );
const result = await sanitizeOpenAIHistory(messages);
const assistant = result.find((message) => message.role === "assistant") as
| (AgentMessage & { usage?: unknown })
| undefined;
expect(assistant?.usage).toEqual({ expect(assistant?.usage).toEqual({
input: 0, input: 0,
@ -406,31 +404,26 @@ describe("sanitizeSessionHistory", () => {
}); });
it("preserves existing usage cost while normalizing token fields", async () => { it("preserves existing usage cost while normalizing token fields", async () => {
vi.mocked(mockedHelpers.isGoogleModelApi).mockReturnValue(false); const assistant = await getSingleAssistantUsage(
castAgentMessages([
const messages = castAgentMessages([ { role: "user", content: "question" },
{ role: "user", content: "question" }, {
{ role: "assistant",
role: "assistant", content: [{ type: "text", text: "answer with partial usage and cost" }],
content: [{ type: "text", text: "answer with partial usage and cost" }], usage: {
usage: { output: 3,
output: 3, cache_read_input_tokens: 9,
cache_read_input_tokens: 9, cost: {
cost: { input: 1.25,
input: 1.25, output: 2.5,
output: 2.5, cacheRead: 0.25,
cacheRead: 0.25, cacheWrite: 0,
cacheWrite: 0, total: 4,
total: 4, },
}, },
}, },
}, ]),
]); );
const result = await sanitizeOpenAIHistory(messages);
const assistant = result.find((message) => message.role === "assistant") as
| (AgentMessage & { usage?: unknown })
| undefined;
expect(assistant?.usage).toEqual({ expect(assistant?.usage).toEqual({
...makeZeroUsageSnapshot(), ...makeZeroUsageSnapshot(),
@ -450,27 +443,22 @@ describe("sanitizeSessionHistory", () => {
}); });
it("preserves unknown cost when token fields already match", async () => { it("preserves unknown cost when token fields already match", async () => {
vi.mocked(mockedHelpers.isGoogleModelApi).mockReturnValue(false); const assistant = await getSingleAssistantUsage(
castAgentMessages([
const messages = castAgentMessages([ { role: "user", content: "question" },
{ role: "user", content: "question" }, {
{ role: "assistant",
role: "assistant", content: [{ type: "text", text: "answer with complete numeric usage but no cost" }],
content: [{ type: "text", text: "answer with complete numeric usage but no cost" }], usage: {
usage: { input: 1,
input: 1, output: 2,
output: 2, cacheRead: 3,
cacheRead: 3, cacheWrite: 4,
cacheWrite: 4, totalTokens: 10,
totalTokens: 10, },
}, },
}, ]),
]); );
const result = await sanitizeOpenAIHistory(messages);
const assistant = result.find((message) => message.role === "assistant") as
| (AgentMessage & { usage?: unknown })
| undefined;
expect(assistant?.usage).toEqual({ expect(assistant?.usage).toEqual({
input: 1, input: 1,