Commit f7ae62d

mo khan <mo@mokhan.ca>
2025-10-15 15:53:25
feat: add tool/function calling for ollama provider tag: v0.4.0
1 parent f690e76
lib/net/llm/ollama.rb
@@ -11,9 +11,10 @@ module Net
         @http = http
       end
 
-      def chat(messages, &block)
+      def chat(messages, tools = [], &block)
         url = build_url("/api/chat")
         payload = { model: model, messages: messages, stream: block_given? }
+        payload[:tools] = tools unless tools.empty?
 
         if block_given?
           stream_request(url, payload, &block)
lib/net/llm/version.rb
@@ -2,6 +2,6 @@
 
 module Net
   module Llm
-    VERSION = "0.3.1"
+    VERSION = "0.4.0"
   end
 end
spec/net/llm/ollama_spec.rb
@@ -61,6 +61,52 @@ RSpec.describe Net::Llm::Ollama do
         expect(result["code"]).to eq("500")
         expect(result["body"]).to eq("Server error")
       end
+
+      it "includes tools in request when provided" do
+        tools = [{
+          type: "function",
+          function: {
+            name: "get_weather",
+            description: "Get weather for a city",
+            parameters: {
+              type: "object",
+              properties: {
+                city: { type: "string" }
+              },
+              required: ["city"]
+            }
+          }
+        }]
+        response_body = { message: { content: "Weather data", tool_calls: [] }, done: true }.to_json
+
+        stub_request(:post, "http://localhost:11434/api/chat")
+          .with(
+            body: hash_including(
+              model: "llama2",
+              messages: messages,
+              tools: tools,
+              stream: false
+            )
+          )
+          .to_return(status: 200, body: response_body)
+
+        result = client.chat(messages, tools)
+        expect(result["message"]["content"]).to eq("Weather data")
+      end
+
+      it "omits tools from request when empty array provided" do
+        stub_request(:post, "http://localhost:11434/api/chat")
+          .with { |req|
+            body = JSON.parse(req.body)
+            body["model"] == "llama2" &&
+            body["stream"] == false &&
+            !body.key?("tools")
+          }
+          .to_return(status: 200, body: { message: { content: "Hi" }, done: true }.to_json)
+
+        result = client.chat(messages, [])
+        expect(result["message"]["content"]).to eq("Hi")
+      end
     end
 
     context "with streaming" do
@@ -84,6 +130,44 @@ RSpec.describe Net::Llm::Ollama do
         expect(results[1]["message"]["content"]).to eq("i")
         expect(results[2]["done"]).to eq(true)
       end
+
+      it "includes tools in streaming request when provided" do
+        tools = [{
+          type: "function",
+          function: {
+            name: "get_weather",
+            description: "Get weather for a city",
+            parameters: {
+              type: "object",
+              properties: {
+                city: { type: "string" }
+              }
+            }
+          }
+        }]
+        chunks = [
+          { message: { content: "Checking weather" }, done: false }.to_json,
+          { message: { content: "" }, done: true }.to_json
+        ]
+        response_body = chunks.join("\n") + "\n"
+
+        stub_request(:post, "http://localhost:11434/api/chat")
+          .with(
+            body: hash_including(
+              model: "llama2",
+              messages: messages,
+              tools: tools,
+              stream: true
+            )
+          )
+          .to_return(status: 200, body: response_body)
+
+        results = []
+        client.chat(messages, tools) { |chunk| results << chunk }
+
+        expect(results.size).to eq(2)
+        expect(results[0]["message"]["content"]).to eq("Checking weather")
+      end
     end
   end
 
CHANGELOG.md
@@ -1,5 +1,16 @@
 ## [Unreleased]
 
+## [0.4.0] - 2025-10-15
+### Added
+- Added tool/function calling support to Ollama provider
+- Ollama `chat` method now accepts an optional `tools` parameter matching the OpenAI signature
+- Tools work in both streaming and non-streaming modes
+- Added comprehensive test coverage for tool functionality
+
+### Changed
+- Updated README with Ollama tools example
+- Updated API coverage documentation
+
 ## [0.3.1] - 2025-10-08
 ### Fixed
 - Added missing net-hippie runtime dependency to gemspec
Gemfile.lock
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    net-llm (0.3.1)
+    net-llm (0.4.0)
       json (~> 2.0)
       net-hippie (~> 1.0)
       uri (~> 1.0)
README.md
@@ -87,6 +87,29 @@ response = client.chat(messages)
 puts response['message']['content']
 ```
 
+#### With Tools
+
+```ruby
+tools = [
+  {
+    type: 'function',
+    function: {
+      name: 'get_weather',
+      description: 'Get current weather',
+      parameters: {
+        type: 'object',
+        properties: {
+          location: { type: 'string' }
+        },
+        required: ['location']
+      }
+    }
+  }
+]
+
+response = client.chat(messages, tools)
+```
+
 #### Streaming
 
 ```ruby
@@ -195,7 +218,7 @@ Streaming methods still raise exceptions on HTTP errors.
 - `/v1/embeddings`
 
 ### Ollama
-- `/api/chat` (with streaming)
+- `/api/chat` (with streaming and tools)
 - `/api/generate` (with streaming)
 - `/api/embed`
 - `/api/tags`