Skip to content

Anthropic plugin

The genkit-plugin-anthropic package provides access to Anthropic’s Claude models through the Genkit framework.

Terminal window
uv add genkit-plugin-anthropic
from genkit import Genkit
from genkit.plugins.anthropic import Anthropic, anthropic_name

# Register the Anthropic plugin and pick a default model for all generate calls.
ai = Genkit(
    plugins=[Anthropic()],
    model=anthropic_name('claude-3-5-haiku'),
)

Set the ANTHROPIC_API_KEY environment variable or pass it directly:

# Explicit API key (overrides the ANTHROPIC_API_KEY environment variable).
ai = Genkit(
    plugins=[Anthropic(api_key='your-api-key')],
)
Supported models include:

  • claude-3-5-haiku - Fast and efficient
  • claude-sonnet-4 - Balanced performance
  • claude-sonnet-4-5 - Latest, with thinking/reasoning support
response = await ai.generate(
prompt='Explain quantum computing in simple terms.',
)
print(response.text)
from pydantic import BaseModel, Field
class Character(BaseModel):
name: str = Field(description='Character name')
backstory: str = Field(description='Character backstory')
abilities: list[str] = Field(description='List of abilities')
response = await ai.generate(
prompt='Generate a fantasy RPG character',
output_schema=Character,
)
print(response.output) # Character instance

Claude Sonnet 4.5 and later models support extended thinking:

response = await ai.generate(
model=anthropic_name('claude-sonnet-4-5'),
prompt='Solve this logic puzzle step by step...',
config={
'thinking': {'type': 'enabled', 'budget_tokens': 1024},
'max_output_tokens': 4096,
},
)
print(response.text)
from pydantic import BaseModel, Field
class WeatherInput(BaseModel):
location: str = Field(description='City name')
@ai.tool()
def get_weather(input: WeatherInput) -> str:
"""Get current weather for a location."""
return f'72°F and sunny in {input.location}'
response = await ai.generate(
prompt='What is the weather in San Francisco?',
tools=['get_weather'],
)
print(response.text)
from genkit import ActionRunContext


@ai.flow()
async def streaming_story(topic: str, ctx: ActionRunContext) -> str:
    """Stream a short story chunk-by-chunk, then return the full text.

    Args:
        topic: Subject of the story.
        ctx: Flow run context used to forward streamed chunks to the caller.

    Returns:
        The complete generated story text.
    """
    stream_response = ai.generate_stream(
        prompt=f'Write a short story about {topic}',
    )
    # Forward each partial chunk as it arrives.
    async for chunk in stream_response.stream:
        ctx.send_chunk(chunk.text)
    # Await the aggregated final response once streaming completes.
    return (await stream_response.response).text
from genkit import Part, TextPart, MediaPart, Media
response = await ai.generate(
prompt=[
Part(root=TextPart(text='Describe this image')),
Part(root=MediaPart(media=Media(
url='https://example.com/image.jpg',
content_type='image/jpeg',
))),
],
)
print(response.text)
from genkit import ModelConfig
response = await ai.generate(
prompt='Your prompt here',
config=ModelConfig(
temperature=0.7,
max_output_tokens=1000,
),
)