Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ Only write entries that are worth mentioning to users.

## Unreleased

- Core: Pass session ID as `user_id` metadata to Anthropic API

## 1.17.0 (2026-03-03)

- Core: Add `/export` command to export current session context (messages, metadata) to a Markdown file, and `/import` command to import context from a file or another session ID into the current session
Expand Down
2 changes: 2 additions & 0 deletions docs/en/release-notes/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ This page documents the changes in each Kimi Code CLI release.

## Unreleased

- Core: Pass session ID as `user_id` metadata to Anthropic API

## 1.17.0 (2026-03-03)

- Core: Add `/export` command to export current session context (messages, metadata) to a Markdown file, and `/import` command to import context from a file or another session ID into the current session
Expand Down
2 changes: 2 additions & 0 deletions docs/zh/release-notes/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@

## 未发布

- Core:将会话 ID 作为 `user_id` 元数据传递给 Anthropic API

## 1.17.0 (2026-03-03)

- Core:新增 `/export` 命令,支持将当前会话上下文(消息、元数据)导出为 Markdown 文件;新增 `/import` 命令,支持从文件或其他会话 ID 导入上下文到当前会话
Expand Down
2 changes: 2 additions & 0 deletions packages/kosong/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

## Unreleased

- Anthropic: Support optional `metadata` parameter in `Anthropic` chat provider for passing metadata (e.g., `user_id`) to the API

## 0.43.0 (2026-02-24)

- Add `RetryableChatProvider` protocol for providers that can recover from retryable transport errors
Expand Down
4 changes: 4 additions & 0 deletions packages/kosong/src/kosong/contrib/chat_provider/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
MessageDeltaUsage,
MessageParam,
MessageStartEvent,
MetadataParam,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawMessageStreamEvent,
Expand Down Expand Up @@ -127,12 +128,14 @@ def __init__(
tool_message_conversion: ToolMessageConversion | None = None,
# Must provide a max_tokens. Can be overridden by .with_generation_kwargs()
default_max_tokens: int,
metadata: MetadataParam | None = None,
**client_kwargs: Any,
):
self._model = model
self._stream = stream
self._client = AsyncAnthropic(api_key=api_key, base_url=base_url, **client_kwargs)
self._tool_message_conversion: ToolMessageConversion | None = tool_message_conversion
self._metadata = metadata
self._generation_kwargs: Anthropic.GenerationKwargs = {
"max_tokens": default_max_tokens,
"beta_features": ["interleaved-thinking-2025-05-14"],
Expand Down Expand Up @@ -222,6 +225,7 @@ async def generate(
tools=tools_,
stream=self._stream,
extra_headers=extra_headers,
metadata=self._metadata if self._metadata is not None else omit,
**generation_kwargs,
Comment on lines 225 to 229
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New behavior: when metadata is set it will be forwarded into the Anthropic messages.create request, but the snapshot tests for the Anthropic provider don’t currently assert that metadata is included/omitted correctly. Add a test case that constructs Anthropic(metadata={"user_id": "..."}), calls generate(), and asserts the request body includes the expected metadata (and optionally another case where metadata is omitted when None).

Copilot uses AI. Check for mistakes.
)
return AnthropicStreamedMessage(response)
Expand Down
35 changes: 35 additions & 0 deletions packages/kosong/tests/api_snapshot_tests/test_anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -542,6 +542,41 @@ async def test_anthropic_opus_46_thinking_off():
assert body["thinking"] == snapshot({"type": "disabled"})


async def test_anthropic_metadata():
    """When a metadata dict is supplied, it must appear in the outgoing request body."""
    with respx.mock(base_url="https://api.anthropic.com") as router:
        # Stub the messages endpoint with a canned successful response.
        router.post("/v1/messages").mock(return_value=Response(200, json=make_anthropic_response()))
        chat_provider = Anthropic(
            model="claude-sonnet-4-20250514",
            api_key="test-key",
            default_max_tokens=1024,
            stream=False,
            metadata={"user_id": "test-session-id"},
        )
        msg_stream = await chat_provider.generate("", [], [Message(role="user", content="Hi")])
        # Drain the stream so the HTTP call is fully executed.
        async for _chunk in msg_stream:
            pass
        payload = json.loads(router.calls.last.request.content.decode())
        assert payload["metadata"] == snapshot({"user_id": "test-session-id"})


async def test_anthropic_metadata_omitted_when_none():
    """Without a metadata argument, the request body must carry no `metadata` key."""
    with respx.mock(base_url="https://api.anthropic.com") as router:
        # Stub the messages endpoint with a canned successful response.
        router.post("/v1/messages").mock(return_value=Response(200, json=make_anthropic_response()))
        chat_provider = Anthropic(
            model="claude-sonnet-4-20250514",
            api_key="test-key",
            default_max_tokens=1024,
            stream=False,
        )
        msg_stream = await chat_provider.generate("", [], [Message(role="user", content="Hi")])
        # Drain the stream so the HTTP call is fully executed.
        async for _chunk in msg_stream:
            pass
        payload = json.loads(router.calls.last.request.content.decode())
        assert "metadata" not in payload


async def test_anthropic_opus_46_thinking_effort_property():
"""thinking_effort should return 'high' for adaptive thinking config."""
provider = Anthropic(
Expand Down
1 change: 1 addition & 0 deletions src/kimi_cli/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,7 @@ def create_llm(
base_url=provider.base_url,
api_key=resolved_api_key,
default_max_tokens=50000,
metadata={"user_id": session_id} if session_id else None,
)
Comment on lines 169 to 173
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

create_llm now conditionally passes Anthropic metadata derived from session_id, but tests/core/test_create_llm.py currently has no coverage for the Anthropic branch (or the session_id→metadata wiring). Add a unit test that calls create_llm(..., session_id="...") with an anthropic provider and asserts the constructed provider carries the expected metadata (or that its outgoing request includes it, if that’s the chosen assertion style).

Copilot uses AI. Check for mistakes.
case "google_genai" | "gemini":
from kosong.contrib.chat_provider.google_genai import GoogleGenAI
Expand Down
40 changes: 40 additions & 0 deletions tests/core/test_create_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,46 @@ def test_create_llm_echo_provider():
assert llm.max_context_size == 1234


def test_create_llm_anthropic_with_session_id():
    """A session_id passed to create_llm is wired into the Anthropic provider's metadata."""
    from kosong.contrib.chat_provider.anthropic import Anthropic

    provider_cfg = LLMProvider(
        type="anthropic",
        base_url="https://api.anthropic.com",
        api_key=SecretStr("test-key"),
    )
    model_cfg = LLMModel(
        provider="anthropic",
        model="claude-sonnet-4-20250514",
        max_context_size=200000,
    )

    llm = create_llm(provider_cfg, model_cfg, session_id="sess-abc-123")

    assert llm is not None
    assert isinstance(llm.chat_provider, Anthropic)
    # Inspect the private attribute directly: the session ID must land as user_id.
    assert llm.chat_provider._metadata == snapshot({"user_id": "sess-abc-123"})


def test_create_llm_anthropic_without_session_id():
    """Without a session_id, the Anthropic provider is built with metadata set to None."""
    from kosong.contrib.chat_provider.anthropic import Anthropic

    provider_cfg = LLMProvider(
        type="anthropic",
        base_url="https://api.anthropic.com",
        api_key=SecretStr("test-key"),
    )
    model_cfg = LLMModel(
        provider="anthropic",
        model="claude-sonnet-4-20250514",
        max_context_size=200000,
    )

    llm = create_llm(provider_cfg, model_cfg)

    assert llm is not None
    assert isinstance(llm.chat_provider, Anthropic)
    # No session ID means no metadata is attached to the provider.
    assert llm.chat_provider._metadata is None


def test_create_llm_requires_base_url_for_kimi():
provider = LLMProvider(type="kimi", base_url="", api_key=SecretStr("test-key"))
model = LLMModel(provider="kimi", model="kimi-base", max_context_size=4096)
Expand Down
Loading