Commit 57f4f6f
Changed files (6):
  lib/net/llm/anthropic.rb
  spec/fixtures/Net_Llm_Anthropic/_messages/with_streaming/yields_SSE_events_to_the_block.yml
  spec/fixtures/Net_Llm_Anthropic/_messages/with_system_prompt/includes_system_in_request.yml
  spec/fixtures/Net_Llm_Anthropic/_messages/with_tools/POST_/v1/messages_with_tools.yml
  spec/fixtures/Net_Llm_Anthropic/_messages/without_streaming/POST_/v1/messages.yml
  spec/net/llm/anthropic_spec.rb
lib/net/llm/anthropic.rb
@@ -5,7 +5,7 @@ module Net
class Anthropic
attr_reader :api_key, :model, :http
- def initialize(api_key:, model: "claude-3-5-sonnet-20241022", http: Net::Llm.http)
+ def initialize(api_key:, model: "claude-sonnet-4-20250514", http: Net::Llm.http)
@api_key = api_key
@model = model
@http = http
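For reference, a usage sketch of this client, pieced together from the specs and recorded fixtures below (the return shapes and the block-based streaming behavior are taken from those tests; the require path and env var handling are assumptions, not part of this diff):

# Usage sketch based on the specs and fixtures in this commit; not part of the diff itself.
# The require path is assumed from the lib/ layout.
require "net/llm"

client = Net::Llm::Anthropic.new(api_key: ENV.fetch("ANTHROPIC_API_KEY"))

# Non-streaming: returns the parsed /v1/messages response hash.
result = client.messages([{ role: "user", content: "Hello" }])
puts result["content"][0]["text"]

# Streaming: passing a block sends stream: true and yields each parsed SSE event.
client.messages([{ role: "user", content: "Hello" }]) do |event|
  print event.dig("delta", "text") if event["type"] == "content_block_delta"
end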
spec/fixtures/Net_Llm_Anthropic/_messages/with_streaming/yields_SSE_events_to_the_block.yml
@@ -0,0 +1,71 @@
+---
+http_interactions:
+- request:
+ method: post
+ uri: https://api.anthropic.com/v1/messages
+ body:
+ encoding: UTF-8
+ string: '{"model":"claude-sonnet-4-20250514","max_tokens":1024,"messages":[{"role":"user","content":"Hello"}],"stream":true}'
+ headers:
+ Accept:
+ - application/json
+ Content-Type:
+ - application/json
+ X-Api-Key:
+ - "<ANTHROPIC_API_KEY>"
+ Accept-Encoding:
+ - gzip;q=1.0,deflate;q=0.6,identity;q=0.3
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Content-Type:
+ - text/event-stream; charset=utf-8
+ Transfer-Encoding:
+ - chunked
+ Connection:
+ - keep-alive
+ Cache-Control:
+ - no-cache
+ body:
+ encoding: UTF-8
+ string: |+
+ event: message_start
+ data: {"type":"message_start","message":{"model":"claude-sonnet-4-20250514","id":"msg_01WXdwr9yUcs9pzoYfhZ2d4Q","type":"message","role":"assistant","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":3,"service_tier":"standard"}} }
+
+ event: content_block_start
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} }
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello! It"} }
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"'s nice to meet you. How"} }
+
+ event: ping
+ data: {"type": "ping"}
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" are"} }
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" you doing today?"} }
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Is"} }
+
+ event: content_block_delta
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" there anything I can help you with?"} }
+
+ event: content_block_stop
+ data: {"type":"content_block_stop","index":0 }
+
+ event: message_delta
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":27} }
+
+ event: message_stop
+ data: {"type":"message_stop" }
+
+ recorded_at: Wed, 07 Jan 2026 18:42:07 GMT
+recorded_with: VCR 6.3.1
spec/fixtures/Net_Llm_Anthropic/_messages/with_system_prompt/includes_system_in_request.yml
@@ -0,0 +1,33 @@
+---
+http_interactions:
+- request:
+ method: post
+ uri: https://api.anthropic.com/v1/messages
+ body:
+ encoding: UTF-8
+ string: '{"model":"claude-sonnet-4-20250514","max_tokens":1024,"messages":[{"role":"user","content":"Hello"}],"stream":false,"system":"You are a helpful assistant."}'
+ headers:
+ Accept:
+ - application/json
+ Content-Type:
+ - application/json
+ X-Api-Key:
+ - "<ANTHROPIC_API_KEY>"
+ Accept-Encoding:
+ - gzip;q=1.0,deflate;q=0.6,identity;q=0.3
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Content-Type:
+ - application/json
+ Transfer-Encoding:
+ - chunked
+ Connection:
+ - keep-alive
+ body:
+ encoding: ASCII-8BIT
+ string: '{"model":"claude-sonnet-4-20250514","id":"msg_01H4j7QDNkiHvcQ68xEZszrZ","type":"message","role":"assistant","content":[{"type":"text","text":"Hello! How can I help you today?"}],"stop_reason":"end_turn","stop_sequence":null,"usage":{"input_tokens":14,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":12,"service_tier":"standard"}}'
+ recorded_at: Wed, 07 Jan 2026 18:42:10 GMT
+recorded_with: VCR 6.3.1
spec/fixtures/Net_Llm_Anthropic/_messages/with_tools/POST_/v1/messages_with_tools.yml
@@ -0,0 +1,31 @@
+---
+http_interactions:
+- request:
+ method: post
+ uri: https://api.anthropic.com/v1/messages
+ body:
+ encoding: UTF-8
+ string: '{"model":"claude-sonnet-4-20250514","max_tokens":1024,"messages":[{"role":"user","content":"What is the weather in Tokyo?"}],"stream":false,"tools":[{"name":"get_weather","description":"Get weather for a city","input_schema":{"type":"object","properties":{"city":{"type":"string"}},"required":["city"]}}]}'
+ headers:
+ Accept:
+ - application/json
+ Content-Type:
+ - application/json
+ X-Api-Key:
+ - "<ANTHROPIC_API_KEY>"
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Content-Type:
+ - application/json
+ Transfer-Encoding:
+ - chunked
+ Connection:
+ - keep-alive
+ body:
+ encoding: ASCII-8BIT
+ string: '{"model":"claude-sonnet-4-20250514","id":"msg_01JH9rAobgF5ViZHes8SHoJY","type":"message","role":"assistant","content":[{"type":"text","text":"I''ll get the current weather information for Tokyo."},{"type":"tool_use","id":"toolu_01LdH7dwMAqg6TnNwdw9RP4L","name":"get_weather","input":{"city":"Tokyo"}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":380,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":64,"service_tier":"standard"}}'
+ recorded_at: Wed, 07 Jan 2026 18:42:12 GMT
+recorded_with: VCR 6.3.1
spec/fixtures/Net_Llm_Anthropic/_messages/without_streaming/POST_/v1/messages.yml
@@ -0,0 +1,31 @@
+---
+http_interactions:
+- request:
+ method: post
+ uri: https://api.anthropic.com/v1/messages
+ body:
+ encoding: UTF-8
+ string: '{"model":"claude-sonnet-4-20250514","max_tokens":1024,"messages":[{"role":"user","content":"Hello"}],"stream":false}'
+ headers:
+ Accept:
+ - application/json
+ Content-Type:
+ - application/json
+ X-Api-Key:
+ - "<ANTHROPIC_API_KEY>"
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Content-Type:
+ - application/json
+ Transfer-Encoding:
+ - chunked
+ Connection:
+ - keep-alive
+ body:
+ encoding: ASCII-8BIT
+ string: '{"model":"claude-sonnet-4-20250514","id":"msg_01Hn3tV1qZeLqGqaBX7Z8xR8","type":"message","role":"assistant","content":[{"type":"text","text":"Hello! How are you doing today? Is there anything I can help you with?"}],"stop_reason":"end_turn","stop_sequence":null,"usage":{"input_tokens":8,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":20,"service_tier":"standard"}}'
+ recorded_at: Wed, 07 Jan 2026 18:42:05 GMT
+recorded_with: VCR 6.3.1
spec/net/llm/anthropic_spec.rb
@@ -1,115 +1,88 @@
# frozen_string_literal: true
RSpec.describe Net::Llm::Anthropic do
- let(:api_key) { "test-key" }
- let(:client) { described_class.new(api_key: api_key) }
+ subject(:client) do
+ described_class.new(
+ api_key: ENV.fetch("ANTHROPIC_API_KEY", "test-key")
+ )
+ end
describe "#initialize" do
it "sets default model" do
- expect(client.model).to eq("claude-3-5-sonnet-20241022")
+ expect(client.model).to eq("claude-sonnet-4-20250514")
end
it "allows custom model" do
- custom_client = described_class.new(api_key: api_key, model: "claude-3-opus-20240229")
- expect(custom_client.model).to eq("claude-3-opus-20240229")
+ custom = described_class.new(
+ api_key: ENV.fetch("ANTHROPIC_API_KEY", "test-key"),
+ model: "claude-3-opus-20240229"
+ )
+ expect(custom.model).to eq("claude-3-opus-20240229")
end
end
describe "#messages" do
let(:messages) { [{ role: "user", content: "Hello" }] }
- context "without streaming" do
- let(:response_body) do
- {
- id: "msg_123",
- type: "message",
- role: "assistant",
- content: [{ type: "text", text: "Hi there!" }],
- model: "claude-3-5-sonnet-20241022",
- stop_reason: "end_turn"
- }.to_json
- end
-
- it "makes a POST request to /v1/messages" do
- stub_request(:post, "https://api.anthropic.com/v1/messages")
- .with(
- headers: {
- "x-api-key" => api_key,
- "anthropic-version" => "2023-06-01",
- "Content-Type" => "application/json"
- },
- body: hash_including(
- model: "claude-3-5-sonnet-20241022",
- max_tokens: 1024,
- messages: messages,
- stream: false
- )
- )
- .to_return(status: 200, body: response_body)
-
+ context "without streaming", :vcr do
+ it "POST /v1/messages" do
result = client.messages(messages)
- expect(result["content"][0]["text"]).to eq("Hi there!")
+
+ expect(result["content"]).not_to be_empty
+ expect(result["role"]).to eq("assistant")
end
+ end
- it "includes system prompt when provided" do
- stub_request(:post, "https://api.anthropic.com/v1/messages")
- .with(body: hash_including(system: "You are helpful"))
- .to_return(status: 200, body: response_body)
+ context "with streaming", :vcr do
+ it "yields SSE events to the block" do
+ results = []
+ client.messages(messages) { |event| results << event }
- client.messages(messages, system: "You are helpful")
+ expect(results).not_to be_empty
+ expect(results.last["type"]).to eq("message_stop")
end
+ end
- it "includes tools when provided" do
- tools = [{ name: "get_weather", description: "Get weather" }]
+ context "with system prompt", :vcr do
+ it "includes system in request" do
+ result = client.messages(messages, system: "You are a helpful assistant.")
- stub_request(:post, "https://api.anthropic.com/v1/messages")
- .with(body: hash_including(tools: tools))
- .to_return(status: 200, body: response_body)
+ expect(result["content"]).not_to be_empty
+ end
+ end
- client.messages(messages, tools: tools)
+ context "with tools", :vcr do
+ let(:tools) do
+ [{
+ name: "get_weather",
+ description: "Get weather for a city",
+ input_schema: {
+ type: "object",
+ properties: { city: { type: "string" } },
+ required: ["city"]
+ }
+ }]
end
- it "allows custom max_tokens" do
- stub_request(:post, "https://api.anthropic.com/v1/messages")
- .with(body: hash_including(max_tokens: 2048))
- .to_return(status: 200, body: response_body)
+ it "POST /v1/messages with tools" do
+ messages = [{ role: "user", content: "What is the weather in Tokyo?" }]
+ result = client.messages(messages, tools: tools)
- client.messages(messages, max_tokens: 2048)
+ expect(result["content"]).not_to be_empty
+ expect(result["stop_reason"]).to eq("tool_use")
end
+ end
- it "raises on HTTP error" do
+ context "error handling" do
+ it "returns error hash on HTTP failure" do
+ bad_client = described_class.new(api_key: "invalid-key")
stub_request(:post, "https://api.anthropic.com/v1/messages")
.to_return(status: 401, body: "Unauthorized")
- result = client.messages(messages)
+ result = bad_client.messages(messages)
expect(result["code"]).to eq("401")
expect(result["body"]).to eq("Unauthorized")
end
end
-
- context "with streaming" do
- it "yields SSE events to the block" do
- sse_events = [
- "event: message_start\ndata: {\"type\":\"message_start\"}\n\n",
- "event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"delta\":{\"type\":\"text_delta\",\"text\":\"H\"}}\n\n",
- "event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"delta\":{\"type\":\"text_delta\",\"text\":\"i\"}}\n\n",
- "event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n"
- ]
- response_body = sse_events.join
-
- stub_request(:post, "https://api.anthropic.com/v1/messages")
- .with(body: hash_including(stream: true))
- .to_return(status: 200, body: response_body)
-
- results = []
- client.messages(messages) { |event| results << event }
-
- expect(results.size).to eq(4)
- expect(results[0]["type"]).to eq("message_start")
- expect(results[1]["delta"]["text"]).to eq("H")
- expect(results[2]["delta"]["text"]).to eq("i")
- expect(results[3]["type"]).to eq("message_stop")
- end
- end
end
end
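The specs above rely on RSpec's :vcr metadata and cassettes stored under spec/fixtures, which implies a VCR setup roughly like the following in the spec helper. This is a minimal sketch, assuming a spec_helper.rb that is not shown in this commit; the filtered placeholder matches the "<ANTHROPIC_API_KEY>" string recorded in the cassettes.

# Assumed VCR configuration for the :vcr-tagged examples above; the actual
# spec_helper.rb is not part of this diff.
require "vcr"
require "webmock/rspec"

VCR.configure do |config|
  config.cassette_library_dir = "spec/fixtures"   # matches the fixture paths in this commit
  config.hook_into :webmock
  config.configure_rspec_metadata!                # enables the :vcr example tag
  config.filter_sensitive_data("<ANTHROPIC_API_KEY>") { ENV["ANTHROPIC_API_KEY"] }
end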