22 changes: 12 additions & 10 deletions py/plugins/anthropic/src/genkit/plugins/anthropic/models.py
@@ -75,10 +75,10 @@ async def generate(self, request: GenerateRequest, ctx: ActionRunContext | None
 
         if streaming:
             response = await self._generate_streaming(params, ctx)
-            content = []
         else:
             response = await self.client.messages.create(**params)
-            content = self._to_genkit_content(response.content)
+
+        content = self._to_genkit_content(response.content)
 
         response_message = Message(role=Role.MODEL, content=content)
         basic_usage = get_basic_usage_stats(input_=request.messages, response=response_message)
@@ -155,7 +155,7 @@ async def _generate_streaming(self, params: dict[str, Any], ctx: ActionRunContext
                     GenerateResponseChunk(
                         role=Role.MODEL,
                         index=0,
-                        content=[TextPart(text=chunk.delta.text)],
+                        content=[Part(root=TextPart(text=chunk.delta.text))],
                     )
                 )
         return await stream.get_final_message()
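Note: the `root=` keyword in these changes suggests that Genkit's `Part` is a Pydantic RootModel-style union wrapper rather than a base class of `TextPart`, which is why a bare `TextPart` can no longer be passed where a `Part` is expected. A minimal, self-contained sketch of that assumed pattern (illustrative only; the real Genkit type definitions differ):

# Illustrative sketch of the assumed RootModel pattern; not Genkit's actual code.
from pydantic import BaseModel, RootModel


class TextPart(BaseModel):
    text: str


class ToolRequestPart(BaseModel):
    tool_request: dict


class Part(RootModel[TextPart | ToolRequestPart]):
    """Union wrapper: the concrete part lives under .root."""


part = Part(root=TextPart(text='hello'))
assert isinstance(part.root, TextPart)
assert part.root.text == 'hello'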
@@ -223,15 +223,17 @@ def _to_genkit_content(self, content_blocks: list) -> list[Part]:
         parts = []
         for block in content_blocks:
             if block.type == 'text':
-                parts.append(TextPart(text=block.text))
+                parts.append(Part(root=TextPart(text=block.text)))
             elif block.type == 'tool_use':
                 parts.append(
-                    ToolRequestPart(
-                        tool_request={
-                            'ref': block.id,
-                            'name': block.name,
-                            'input': block.input,
-                        }
+                    Part(
+                        root=ToolRequestPart(
+                            tool_request={
+                                'ref': block.id,
+                                'name': block.name,
+                                'input': block.input,
+                            }
+                        )
                     )
                 )
         return parts
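For orientation, here is a small hypothetical helper (not in the PR) showing how code downstream of `_to_genkit_content` would read the wrapped parts back out via `.root`; the `from genkit.types import ...` path is an assumption and may differ from the plugin's actual imports:

# Hypothetical helper, not part of the diff. Assumes Part, TextPart and
# ToolRequestPart are importable from genkit.types (adjust to the real path),
# and that tool_request validates into an object with a .name attribute,
# as the dict keys ('ref', 'name', 'input') in the diff suggest.
from genkit.types import Part, TextPart, ToolRequestPart


def summarize_parts(parts: list[Part]) -> tuple[str, list[str]]:
    """Return (concatenated text, names of requested tools) from Genkit parts."""
    texts: list[str] = []
    tool_names: list[str] = []
    for part in parts:
        inner = part.root  # unwrap the union wrapper to the concrete part type
        if isinstance(inner, TextPart):
            texts.append(inner.text)
        elif isinstance(inner, ToolRequestPart):
            tool_names.append(inner.tool_request.name)
    return ''.join(texts), tool_names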
13 changes: 11 additions & 2 deletions py/plugins/anthropic/tests/test_models.py
@@ -180,9 +180,10 @@ def test_to_anthropic_messages():
 class MockStreamManager:
     """Mock stream manager for testing streaming."""
 
-    def __init__(self, chunks):
+    def __init__(self, chunks, final_content=None):
         self.chunks = chunks
         self.final_message = MagicMock()
+        self.final_message.content = final_content if final_content else []
         self.final_message.usage = MagicMock(input_tokens=10, output_tokens=20)
         self.final_message.stop_reason = 'end_turn'
 
@@ -217,7 +218,8 @@ async def test_streaming_generation():
         MagicMock(type='content_block_delta', delta=MagicMock(text='!')),
     ]
 
-    mock_stream = MockStreamManager(chunks)
+    final_content = [MagicMock(type='text', text='Hello world!')]
+    mock_stream = MockStreamManager(chunks, final_content=final_content)
     mock_client.messages.stream.return_value = mock_stream
 
     model = AnthropicModel(model_name='claude-sonnet-4', client=mock_client)
@@ -248,3 +250,10 @@ def send_chunk(chunk: GenerateResponseChunk):
 
     assert response.usage.input_tokens == 10
     assert response.usage.output_tokens == 20
+
+    # Verify final response content is populated
+    assert len(response.message.content) == 1
+    final_part = response.message.content[0]
+    assert isinstance(final_part, Part)
+    assert isinstance(final_part.root, TextPart)
+    assert final_part.root.text == 'Hello world!'
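As a possible follow-up (not part of this PR), the same test could also assert that the streamed chunks themselves carry wrapped parts. A hedged sketch, assuming the test collects every chunk passed to `send_chunk` in a list named `collected_chunks` (the actual variable name in the test may differ):

# Hypothetical extra assertions; `collected_chunks` is an assumed name for the
# list the test's send_chunk callback appends to.
assert all(
    isinstance(part, Part) and isinstance(part.root, TextPart)
    for chunk in collected_chunks
    for part in chunk.content
)
streamed_text = ''.join(part.root.text for chunk in collected_chunks for part in chunk.content)
assert streamed_text == 'Hello world!'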