Commit 4ee5d35
Changed files (3)
exe/elelem
@@ -2,99 +2,83 @@
# frozen_string_literal: true
require "elelem"
+require "optparse"
Signal.trap("INT") { exit 1 }
-MODELS = {
- "ollama" => "gpt-oss:latest",
- "anthropic" => "claude-opus-4-5-20250514",
- "vertex" => "claude-opus-4-5@20251101",
- "openai" => "gpt-4o",
-}.freeze
+class App
+ MODELS = {
+ "ollama" => "gpt-oss:latest",
+ "anthropic" => "claude-opus-4-5-20250514",
+ "vertex" => "claude-opus-4-5@20251101",
+ "openai" => "gpt-4o"
+ }.freeze
-PROVIDERS = {
- "ollama" => -> (model) { Elelem::Net::Ollama.new(model: model || MODELS["ollama"], host: ENV.fetch("OLLAMA_HOST", "localhost:11434")) },
- "anthropic" => -> (model) { Elelem::Net::Claude.anthropic(model: model || MODELS["anthropic"], api_key: ENV.fetch("ANTHROPIC_API_KEY")) },
- "vertex" => -> (model) { Elelem::Net::Claude.vertex(model: model || MODELS["vertex"], project: ENV.fetch("GOOGLE_CLOUD_PROJECT"), region: ENV.fetch("GOOGLE_CLOUD_REGION", "us-east5")) },
- "openai" => -> (model) { Elelem::Net::OpenAI.new(model: model || MODELS["openai"], api_key: ENV.fetch("OPENAI_API_KEY")) }
-}.freeze
+ PROVIDERS = {
+ "ollama" => ->(model) { Elelem::Net::Ollama.new(model: model, host: ENV.fetch("OLLAMA_HOST", "localhost:11434")) },
+ "anthropic" => ->(model) { Elelem::Net::Claude.anthropic(model: model, api_key: ENV.fetch("ANTHROPIC_API_KEY")) },
+ "vertex" => ->(model) { Elelem::Net::Claude.vertex(model: model, project: ENV.fetch("GOOGLE_CLOUD_PROJECT"), region: ENV.fetch("GOOGLE_CLOUD_REGION", "us-east5")) },
+ "openai" => ->(model) { Elelem::Net::OpenAI.new(model: model, api_key: ENV.fetch("OPENAI_API_KEY")) }
+ }.freeze
-def parse_args(args)
- opts = { provider: "ollama", model: nil, command: nil, args: [] }
+ def initialize(args)
+ @provider = "ollama"
+ @model = nil
+ @args = parse(args)
+ end
- while args.any?
- case args.first
- when "-p", "--provider"
- args.shift
- opts[:provider] = args.shift
- when "-m", "--model"
- args.shift
- opts[:model] = args.shift
- when "chat", "ask", "pipe", "files", "init", "help"
- opts[:command] = args.shift
- else
- opts[:args] << args.shift
- end
+  # Dispatch through an explicit whitelist instead of rescuing NoMethodError:
+  # the rescue misreports a NoMethodError raised *inside* a command (e.g. a typo
+  # in Elelem.ask) as "Unknown command", and bare `send` on user input would also
+  # let `elelem client` / `elelem parse` invoke private internals.
+  def run
+    command = (@args.shift || "chat").tr("-", "_")
+    abort "Unknown command: #{command}" unless %w[chat ask pipe files help].include?(command)
+    send(command)
   end
- opts[:command] ||= "chat"
- opts
-end
+ private
-def help
- puts <<~HELP
- elelem - Ruby coding agent
+ def parse(args)
+ @parser = OptionParser.new do |o|
+ o.banner = "Usage: elelem [command] [options] [args]"
+ o.separator "\nCommands: chat, ask, pipe, files, help"
+ o.separator "\nOptions:"
+ o.on("-p", "--provider NAME", "ollama, anthropic, vertex, openai") { |p| @provider = p }
+ o.on("-m", "--model NAME", "Override default model") { |m| @model = m }
+ o.on("-h", "--help") { puts o; exit }
+ end
+ @parser.parse!(args)
+ end
- Usage: elelem [command] [options] [args]
+ def help
+ puts @parser
+ end
- Commands:
- chat Interactive REPL (default)
- ask <question> One-shot query
- pipe <prompt> Process stdin with prompt
- files Output files as XML (stdin or git ls-files)
- init Create .elelem/plugins/ directory
+  # Fail with a friendly message for an unknown --provider value instead of
+  # letting Hash#fetch raise a raw KeyError with a backtrace.
+  def client
+    factory = PROVIDERS.fetch(@provider) { abort "Unknown provider: #{@provider}" }
+    factory.call(@model || MODELS.fetch(@provider))
+  end
- Options:
- -p, --provider ollama, anthropic, vertex, openai (default: ollama)
- -m, --model Override default model for provider
+ def chat = Elelem.start(client)
- Examples:
- elelem # Ollama chat
- elelem -p anthropic # Anthropic chat
- elelem ask "what is 2+2" # One-shot with Ollama
- elelem -p vertex ask "explain this" # One-shot with Vertex
- echo "code" | elelem pipe "review" # Pipe stdin
- HELP
-end
+ def ask
+ abort "Usage: elelem ask <question>" if @args.empty?
+ puts Elelem.ask(client, @args.join(" "))
+ end
-opts = parse_args(ARGV.dup)
+ def pipe
+ abort "Usage: elelem pipe <prompt>" if @args.empty?
+ puts Elelem.pipe(client, $stdin.read, @args.join(" "))
+ end
-case opts[:command]
-when "help"
- help
-when "chat"
- client = PROVIDERS.fetch(opts[:provider]).call(opts[:model])
- Elelem.start(client)
-when "ask"
- prompt = opts[:args].join(" ")
- abort "Usage: elelem ask <question>" if prompt.empty?
- client = PROVIDERS.fetch(opts[:provider]).call(opts[:model])
- puts Elelem.ask(client, prompt)
-when "pipe"
- instruction = opts[:args].join(" ")
- abort "Usage: elelem pipe <prompt>" if instruction.empty?
- client = PROVIDERS.fetch(opts[:provider]).call(opts[:model])
- puts Elelem.pipe(client, $stdin.read, instruction)
-when "files"
- files = $stdin.stat.pipe? ? $stdin.readlines : `git ls-files`.lines
- puts "<documents>"
- files.each_with_index do |line, i|
- path = line.strip
- next if path.empty? || !File.file?(path)
- puts %Q{<document index="#{i + 1}">}
- puts %Q{<source>#{path}</source>}
- puts %Q{<content><![CDATA[#{File.read(path)}]]></content>}
- puts "</document>"
+ def files
+ files = $stdin.stat.pipe? ? $stdin.readlines : `git ls-files`.lines
+ puts "<documents>"
+ files.each_with_index do |line, i|
+ path = line.strip
+ next if path.empty? || !File.file?(path)
+      # Split-escape any literal "]]>" so file contents cannot terminate the CDATA section early.
+      content = File.read(path).gsub("]]>", "]]]]><![CDATA[>")
+      puts %Q{<document index="#{i + 1}"><source>#{path}</source><content><![CDATA[#{content}]]></content></document>}
+ end
+ puts "</documents>"
end
- puts "</documents>"
end
+
+App.new(ARGV).run
elelem.gemspec
@@ -48,6 +48,7 @@ Gem::Specification.new do |spec|
spec.add_dependency "json", "~> 2.0"
spec.add_dependency "net-hippie", "~> 1.0"
spec.add_dependency "open3", "~> 0.1"
+ spec.add_dependency "optparse", "~> 0.1"
spec.add_dependency "pathname", "~> 0.1"
spec.add_dependency "reline", "~> 0.6"
spec.add_dependency "stringio", "~> 3.0"
Gemfile.lock
@@ -7,6 +7,7 @@ PATH
json (~> 2.0)
net-hippie (~> 1.0)
open3 (~> 0.1)
+ optparse (~> 0.1)
pathname (~> 0.1)
reline (~> 0.6)
stringio (~> 3.0)
@@ -38,6 +39,7 @@ GEM
uri (>= 0.11.1)
open3 (0.2.1)
openssl (3.3.2)
+ optparse (0.8.1)
pathname (0.4.0)
pp (0.6.3)
prettyprint