Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,15 @@ exports[`runNonInteractive > should emit appropriate error event in streaming JS
"{"type":"init","timestamp":"<TIMESTAMP>","session_id":"test-session-id","model":"test-model"}
{"type":"message","timestamp":"<TIMESTAMP>","role":"user","content":"Loop test"}
{"type":"error","timestamp":"<TIMESTAMP>","severity":"warning","message":"Loop detected, stopping execution"}
{"type":"result","timestamp":"<TIMESTAMP>","status":"success","stats":{"total_tokens":0,"input_tokens":0,"output_tokens":0,"cached":0,"input":0,"duration_ms":<DURATION>,"tool_calls":0}}
{"type":"result","timestamp":"<TIMESTAMP>","status":"success","stats":{"total_tokens":0,"input_tokens":0,"output_tokens":0,"cached":0,"input":0,"duration_ms":<DURATION>,"tool_calls":0,"models":{}}}
"
`;

exports[`runNonInteractive > should emit appropriate error event in streaming JSON mode: 'max session turns' 1`] = `
"{"type":"init","timestamp":"<TIMESTAMP>","session_id":"test-session-id","model":"test-model"}
{"type":"message","timestamp":"<TIMESTAMP>","role":"user","content":"Max turns test"}
{"type":"error","timestamp":"<TIMESTAMP>","severity":"error","message":"Maximum session turns exceeded"}
{"type":"result","timestamp":"<TIMESTAMP>","status":"success","stats":{"total_tokens":0,"input_tokens":0,"output_tokens":0,"cached":0,"input":0,"duration_ms":<DURATION>,"tool_calls":0}}
{"type":"result","timestamp":"<TIMESTAMP>","status":"success","stats":{"total_tokens":0,"input_tokens":0,"output_tokens":0,"cached":0,"input":0,"duration_ms":<DURATION>,"tool_calls":0,"models":{}}}
"
`;

Expand All @@ -23,7 +23,7 @@ exports[`runNonInteractive > should emit appropriate events for streaming JSON o
{"type":"tool_use","timestamp":"<TIMESTAMP>","tool_name":"testTool","tool_id":"tool-1","parameters":{"arg1":"value1"}}
{"type":"tool_result","timestamp":"<TIMESTAMP>","tool_id":"tool-1","status":"success","output":"Tool executed successfully"}
{"type":"message","timestamp":"<TIMESTAMP>","role":"assistant","content":"Final answer","delta":true}
{"type":"result","timestamp":"<TIMESTAMP>","status":"success","stats":{"total_tokens":0,"input_tokens":0,"output_tokens":0,"cached":0,"input":0,"duration_ms":<DURATION>,"tool_calls":0}}
{"type":"result","timestamp":"<TIMESTAMP>","status":"success","stats":{"total_tokens":0,"input_tokens":0,"output_tokens":0,"cached":0,"input":0,"duration_ms":<DURATION>,"tool_calls":0,"models":{}}}
"
`;

Expand Down
1 change: 1 addition & 0 deletions packages/cli/src/utils/errors.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
input: 0,
duration_ms: 0,
tool_calls: 0,
models: {},
}),
})),
uiTelemetryService: {
Expand Down
39 changes: 39 additions & 0 deletions packages/core/src/output/stream-json-formatter.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,7 @@ describe('StreamJsonFormatter', () => {
input: 50,
duration_ms: 1200,
tool_calls: 2,
models: {},
},
};

Expand All @@ -180,6 +181,7 @@ describe('StreamJsonFormatter', () => {
input: 50,
duration_ms: 1200,
tool_calls: 0,
models: {},
},
};

Expand Down Expand Up @@ -304,6 +306,15 @@ describe('StreamJsonFormatter', () => {
input: 50,
duration_ms: 1200,
tool_calls: 2,
models: {
'gemini-2.0-flash': {
total_tokens: 80,
input_tokens: 50,
output_tokens: 30,
cached: 0,
input: 50,
},
},
});
});

Expand Down Expand Up @@ -347,6 +358,22 @@ describe('StreamJsonFormatter', () => {
input: 150,
duration_ms: 3000,
tool_calls: 5,
models: {
'gemini-pro': {
total_tokens: 80,
input_tokens: 50,
output_tokens: 30,
cached: 0,
input: 50,
},
'gemini-ultra': {
total_tokens: 170,
input_tokens: 100,
output_tokens: 70,
cached: 0,
input: 100,
},
},
});
});

Expand Down Expand Up @@ -376,6 +403,15 @@ describe('StreamJsonFormatter', () => {
input: 20,
duration_ms: 1200,
tool_calls: 0,
models: {
'gemini-pro': {
total_tokens: 80,
input_tokens: 50,
output_tokens: 30,
cached: 30,
input: 20,
},
},
});
});

Expand All @@ -392,6 +428,7 @@ describe('StreamJsonFormatter', () => {
input: 0,
duration_ms: 100,
tool_calls: 0,
models: {},
});
});

Expand Down Expand Up @@ -521,6 +558,7 @@ describe('StreamJsonFormatter', () => {
input: 0,
duration_ms: 0,
tool_calls: 0,
models: {},
},
} as ResultEvent,
];
Expand All @@ -544,6 +582,7 @@ describe('StreamJsonFormatter', () => {
input: 50,
duration_ms: 1200,
tool_calls: 2,
models: {},
},
};

Expand Down
50 changes: 35 additions & 15 deletions packages/core/src/output/stream-json-formatter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,11 @@
* SPDX-License-Identifier: Apache-2.0
*/

import type { JsonStreamEvent, StreamStats } from './types.js';
import type {
JsonStreamEvent,
ModelStreamStats,
StreamStats,
} from './types.js';
import type { SessionMetrics } from '../telemetry/uiTelemetry.js';

/**
Expand All @@ -31,7 +35,7 @@ export class StreamJsonFormatter {

/**
* Converts SessionMetrics to simplified StreamStats format.
* Aggregates token counts across all models.
* Includes per-model token breakdowns and aggregated totals.
* @param metrics - The session metrics from telemetry
* @param durationMs - The session duration in milliseconds
* @returns Simplified stats for streaming output
Expand All @@ -40,20 +44,35 @@ export class StreamJsonFormatter {
metrics: SessionMetrics,
durationMs: number,
): StreamStats {
let totalTokens = 0;
let inputTokens = 0;
let outputTokens = 0;
let cached = 0;
let input = 0;
const { totalTokens, inputTokens, outputTokens, cached, input, models } =
Object.entries(metrics.models).reduce(
(acc, [modelName, modelMetrics]) => {
const modelStats: ModelStreamStats = {
total_tokens: modelMetrics.tokens.total,
input_tokens: modelMetrics.tokens.prompt,
output_tokens: modelMetrics.tokens.candidates,
cached: modelMetrics.tokens.cached,
input: modelMetrics.tokens.input,
};

// Aggregate token counts across all models
for (const modelMetrics of Object.values(metrics.models)) {
totalTokens += modelMetrics.tokens.total;
inputTokens += modelMetrics.tokens.prompt;
outputTokens += modelMetrics.tokens.candidates;
cached += modelMetrics.tokens.cached;
input += modelMetrics.tokens.input;
}
acc.models[modelName] = modelStats;
acc.totalTokens += modelStats.total_tokens;
acc.inputTokens += modelStats.input_tokens;
acc.outputTokens += modelStats.output_tokens;
acc.cached += modelStats.cached;
acc.input += modelStats.input;

return acc;
},
{
totalTokens: 0,
inputTokens: 0,
outputTokens: 0,
cached: 0,
input: 0,
models: {} as Record<string, ModelStreamStats>,
},
);

return {
total_tokens: totalTokens,
Expand All @@ -63,6 +82,7 @@ export class StreamJsonFormatter {
input,
duration_ms: durationMs,
tool_calls: metrics.tools.totalCalls,
models,
};
}
}
9 changes: 9 additions & 0 deletions packages/core/src/output/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,14 @@ export interface ErrorEvent extends BaseJsonStreamEvent {
message: string;
}

/**
 * Per-model token statistics included in streaming JSON result events.
 *
 * Populated from a model's telemetry token counters (total, prompt,
 * candidates, cached, input) when converting session metrics to
 * stream stats; one entry is emitted per model under `StreamStats.models`.
 */
export interface ModelStreamStats {
  /** Total tokens consumed by this model. */
  total_tokens: number;
  /** Prompt (input-side) tokens, from the model's `tokens.prompt` counter. */
  input_tokens: number;
  /** Candidate (output-side) tokens, from the model's `tokens.candidates` counter. */
  output_tokens: number;
  /** Tokens served from cache, from the model's `tokens.cached` counter. */
  cached: number;
  /** Tokens counted by the model's `tokens.input` counter — presumably non-cached input; distinct from `input_tokens` (prompt). TODO confirm semantics. */
  input: number;
}

export interface StreamStats {
total_tokens: number;
input_tokens: number;
Expand All @@ -86,6 +94,7 @@ export interface StreamStats {
input: number;
duration_ms: number;
tool_calls: number;
models: Record<string, ModelStreamStats>;
}

export interface ResultEvent extends BaseJsonStreamEvent {
Expand Down
Loading