Commit d87a8b6
Changed files (2)
lib/elelem/agent.rb
@@ -74,6 +74,21 @@ module Elelem
""
end
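+ # Convert accumulated tool calls into the chat API's function-call shape.
+ # OpenAI expects the arguments serialized as a JSON string; the other
+ # clients receive the parsed hash as-is.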
+ def format_tool_calls_for_api(tool_calls)
+ tool_calls.map do |tc|
+ args = openai_client? ? JSON.dump(tc[:arguments]) : tc[:arguments]
+ {
+ id: tc[:id],
+ type: "function",
+ function: { name: tc[:name], arguments: args }
+ }
+ end
+ end
+
+ def openai_client?
+ client.is_a?(Net::Llm::OpenAI)
+ end
+
def execute_turn(messages, tools:)
turn_context = []
@@ -82,28 +97,30 @@ module Elelem
tool_calls = []
print "Thinking> "
- client.chat(messages + turn_context, tools) do |chunk|
- msg = chunk["message"]
- if msg
- print msg["thinking"] if msg["thinking"]
- content += msg["content"] if msg["content"]
-
- tool_calls += msg["tool_calls"] if msg["tool_calls"]
+ client.fetch(messages + turn_context, tools) do |chunk|
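+ # :delta chunks stream incremental thinking/content; the final :complete
+ # chunk carries the full content plus any tool calls.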
+ case chunk[:type]
+ when :delta
+ print chunk[:thinking] if chunk[:thinking]
+ content += chunk[:content] if chunk[:content]
+ when :complete
+ content = chunk[:content] if chunk[:content]
+ tool_calls = chunk[:tool_calls] || []
end
end
- puts "\nAssistant> #{content}" unless content.empty?
- turn_context << { role: "assistant", content: content, tool_calls: tool_calls }.compact
+ puts "\nAssistant> #{content}" unless content.to_s.empty?
+ api_tool_calls = tool_calls.any? ? format_tool_calls_for_api(tool_calls) : nil
+ turn_context << { role: "assistant", content: content, tool_calls: api_tool_calls }.compact
if tool_calls.any?
tool_calls.each do |call|
- name = call.dig("function", "name")
- args = call.dig("function", "arguments")
+ name = call[:name]
+ args = call[:arguments]
puts "\nTool> #{name}(#{args})"
result = toolbox.run_tool(name, args)
puts format_tool_call_result(result)
- turn_context << { role: "tool", content: JSON.dump(result) }
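+ # Carry the tool_call_id so the result can be matched to the call that produced it.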
+ turn_context << { role: "tool", tool_call_id: call[:id], content: JSON.dump(result) }
end
tool_calls = []
lib/elelem/application.rb
@@ -2,27 +2,40 @@
module Elelem
class Application < Thor
+ PROVIDERS = %w[ollama anthropic openai vertex-ai].freeze
+
desc "chat", "Start the REPL"
- method_option :host,
- aliases: "--host",
+ method_option :provider,
+ aliases: "-p",
type: :string,
- desc: "Ollama host",
- default: ENV.fetch("OLLAMA_HOST", "localhost:11434")
+ desc: "LLM provider (#{PROVIDERS.join(', ')})",
+ default: ENV.fetch("ELELEM_PROVIDER", "ollama")
method_option :model,
- aliases: "--model",
+ aliases: "-m",
type: :string,
- desc: "Ollama model",
- default: ENV.fetch("OLLAMA_MODEL", "gpt-oss")
+ desc: "Model name (uses provider default if not specified)"
def chat(*)
- client = Net::Llm::Ollama.new(
- host: options[:host],
- model: options[:model],
- )
- say "Agent (#{options[:model]})", :green
+ client = build_client
+ say "Agent (#{options[:provider]}/#{client.model})", :green
agent = Agent.new(client, Toolbox.new)
agent.repl
end
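+ # Build the Net::Llm client for the chosen provider, passing the model
+ # only when given so each client can fall back to its own default.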
+ no_commands do
+ def build_client
+ model_opts = options[:model] ? { model: options[:model] } : {}
+
+ case options[:provider]
+ when "ollama" then Net::Llm::Ollama.new(**model_opts)
+ when "anthropic" then Net::Llm::Anthropic.new(**model_opts)
+ when "openai" then Net::Llm::OpenAI.new(**model_opts)
+ when "vertex-ai" then Net::Llm::VertexAI.new(**model_opts)
+ else
+ raise Error, "Unknown provider: #{options[:provider]}. Use: #{PROVIDERS.join(', ')}"
+ end
+ end
+ end
+
desc "files", "Generate CXML of the files"
def files
puts '<documents>'