Commit 8558ee3
Changed files (5)
lib/elelem/version.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
module Elelem
- VERSION = "0.4.2"
+ VERSION = "0.5.0"
end
CHANGELOG.md
@@ -1,5 +1,18 @@
## [Unreleased]
+## [0.5.0] - 2026-01-07
+
+### Added
+- Multi-provider support: Ollama, Anthropic, OpenAI, and VertexAI
+- `--provider` CLI option to select LLM provider (default: ollama)
+- `--model` CLI option to override default model
+- Tool aliases (`bash` also accepts `exec`, `shell`, `command`, `terminal`, `run`)
+- Thinking text output for models that support extended thinking
+
+### Changed
+- Requires net-llm >= 0.5.0 with unified fetch interface
+- Updated gem description to reflect multi-provider support
+
## [0.4.2] - 2025-12-01
### Changed
elelem.gemspec
@@ -8,25 +8,17 @@ Gem::Specification.new do |spec|
spec.authors = ["mo khan"]
spec.email = ["mo@mokhan.ca"]
- spec.summary = "A REPL for Ollama."
- spec.description = "A REPL for Ollama."
- spec.homepage = "https://github.com/xlgmokha/elelem"
+ spec.summary = "A minimal coding agent for LLMs."
+ spec.description = "A minimal coding agent supporting Ollama, Anthropic, OpenAI, and VertexAI."
+ spec.homepage = "https://src.mokhan.ca/xlgmokha/elelem"
spec.license = "MIT"
spec.required_ruby_version = ">= 3.4.0"
spec.required_rubygems_version = ">= 3.3.11"
spec.metadata["allowed_push_host"] = "https://rubygems.org"
spec.metadata["homepage_uri"] = spec.homepage
- spec.metadata["source_code_uri"] = "https://github.com/xlgmokha/elelem"
- spec.metadata["changelog_uri"] = "https://github.com/xlgmokha/elelem/blob/main/CHANGELOG.md"
+ spec.metadata["source_code_uri"] = "https://src.mokhan.ca/xlgmokha/elelem"
+ spec.metadata["changelog_uri"] = "https://src.mokhan.ca/xlgmokha/elelem/blob/main/CHANGELOG.md.html"
- # Specify which files should be added to the gem when it is released.
- # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
- # gemspec = File.basename(__FILE__)
- # spec.files = IO.popen(%w[git ls-files -z], chdir: __dir__, err: IO::NULL) do |ls|
- # ls.readlines("\x0", chomp: true).reject do |f|
- # (f == gemspec) || f.start_with?(*%w[bin/ test/ spec/ features/ .git Gemfile])
- # end
- # end
spec.files = [
"CHANGELOG.md",
"LICENSE.txt",
@@ -46,16 +38,16 @@ Gem::Specification.new do |spec|
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
- spec.add_dependency "erb"
- spec.add_dependency "fileutils"
- spec.add_dependency "json"
- spec.add_dependency "json-schema"
- spec.add_dependency "logger"
- spec.add_dependency "net-llm"
- spec.add_dependency "open3"
- spec.add_dependency "pathname"
- spec.add_dependency "reline"
- spec.add_dependency "set"
- spec.add_dependency "thor"
- spec.add_dependency "timeout"
+ spec.add_dependency "erb", "~> 6.0"
+ spec.add_dependency "fileutils", "~> 1.0"
+ spec.add_dependency "json", "~> 2.0"
+ spec.add_dependency "json-schema", "~> 6.0"
+ spec.add_dependency "logger", "~> 1.0"
+ spec.add_dependency "net-llm", ">= 0.5.0"
+ spec.add_dependency "open3", "~> 0.1"
+ spec.add_dependency "pathname", "~> 0.1"
+ spec.add_dependency "reline", "~> 0.6"
+ spec.add_dependency "set", "~> 1.0"
+ spec.add_dependency "thor", "~> 1.0"
+ spec.add_dependency "timeout", "~> 0.1"
end
Gemfile.lock
@@ -1,19 +1,19 @@
PATH
remote: .
specs:
- elelem (0.4.2)
- erb
- fileutils
- json
- json-schema
- logger
- net-llm
- open3
- pathname
- reline
- set
- thor
- timeout
+ elelem (0.5.0)
+ erb (~> 6.0)
+ fileutils (~> 1.0)
+ json (~> 2.0)
+ json-schema (~> 6.0)
+ logger (~> 1.0)
+ net-llm (>= 0.5.0)
+ open3 (~> 0.1)
+ pathname (~> 0.1)
+ reline (~> 0.6)
+ set (~> 1.0)
+ thor (~> 1.0)
+ timeout (~> 0.1)
GEM
remote: https://rubygems.org/
@@ -24,7 +24,7 @@ GEM
bigdecimal (3.2.2)
date (3.4.1)
diff-lcs (1.6.2)
- erb (5.0.2)
+ erb (6.0.1)
fileutils (1.8.0)
io-console (0.8.1)
irb (1.15.2)
@@ -44,7 +44,7 @@ GEM
openssl (~> 3.0)
net-http (0.6.0)
uri
- net-llm (0.4.0)
+ net-llm (0.5.0)
json (~> 2.0)
net-hippie (~> 1.0)
uri (~> 1.0)
README.md
@@ -63,7 +63,7 @@ gem install elelem
## Usage
-Start an interactive chat session with an Ollama model:
+Start an interactive chat session:
```bash
elelem chat
@@ -71,20 +71,36 @@ elelem chat
### Options
-* `--host` – Ollama host (default: `localhost:11434`).
-* `--model` – Ollama model (default: `gpt-oss`).
-* `--token` – Authentication token.
+* `--provider` – LLM provider: `ollama`, `anthropic`, `openai`, or `vertex-ai` (default: `ollama`).
+* `--model` – Override the default model for the selected provider.
### Examples
```bash
-# Default model
+# Default (Ollama)
elelem chat
-# Specific model and host
-elelem chat --model llama2 --host remote-host:11434
+# Anthropic Claude
+ANTHROPIC_API_KEY=sk-... elelem chat --provider anthropic
+
+# OpenAI
+OPENAI_API_KEY=sk-... elelem chat --provider openai
+
+# VertexAI (uses gcloud ADC)
+elelem chat --provider vertex-ai --model claude-sonnet-4@20250514
```
+### Provider Configuration
+
+Each provider reads its configuration from environment variables:
+
+| Provider | Environment Variables |
+|-------------|---------------------------------------------------|
+| ollama | `OLLAMA_HOST` (default: localhost:11434) |
+| anthropic | `ANTHROPIC_API_KEY` |
+| openai | `OPENAI_API_KEY`, `OPENAI_BASE_URL` |
+| vertex-ai | `GOOGLE_CLOUD_PROJECT`, `GOOGLE_CLOUD_REGION` |
+
## Mode System
The agent exposes seven built‑in tools. You can switch which ones are
@@ -148,8 +164,7 @@ arguments as a hash.
## Contributing
-Feel free to open issues or pull requests. The repository follows the
-GitHub Flow.
+Send me an email. For instructions see https://git-send-email.io/.
## License