Comparing changes
v0.1.0
→
v0.1.1
21 commits
16 files changed
Commits
Changed files (16)
exe/elelem
@@ -3,7 +3,7 @@
require "elelem"
-Signal.trap('INT') do
+Signal.trap("INT") do
exit(1)
end
lib/elelem/agent.rb
@@ -2,86 +2,47 @@
module Elelem
class Agent
- attr_reader :configuration, :conversation, :tools
+ attr_reader :api, :conversation, :logger
def initialize(configuration)
+ @api = configuration.api
@configuration = configuration
@conversation = configuration.conversation
- @tools = configuration.tools
+ @logger = configuration.logger
+ transition_to(Idle.new)
end
def repl
loop do
- print "\n> "
- user = STDIN.gets&.chomp
- break if user.nil? || user.empty? || user == 'exit'
- process_input(user)
- puts("\u001b[32mDone!\u001b[0m")
+ current_state.run(self)
end
end
- private
-
- def process_input(text)
- conversation.add(role: 'user', content: text)
-
- done = false
- loop do
- call_api(conversation.history) do |chunk|
- debug_print(chunk)
-
- response = JSON.parse(chunk)
- done = response['done']
- message = response['message'] || {}
-
- if message['thinking']
- print("\u001b[90m#{message['thinking']}\u001b[0m")
- elsif message['tool_calls']&.any?
- message['tool_calls'].each do |t|
- conversation.add(role: 'tool', content: tools.execute(t))
- end
- done = false
- elsif message['content'].to_s.strip
- print message['content'].to_s.strip
- else
- raise chunk.inspect
- end
- end
-
- break if done
- end
+ def transition_to(next_state)
+ logger.debug("Transition to: #{next_state.class.name}")
+ @current_state = next_state
end
- def call_api(messages)
- body = {
- messages: messages,
- model: configuration.model,
- stream: true,
- keep_alive: '5m',
- options: { temperature: 0.1 },
- tools: tools.to_h
- }
- json_body = body.to_json
- debug_print(json_body)
-
- req = Net::HTTP::Post.new(configuration.uri)
- req['Content-Type'] = 'application/json'
- req.body = json_body
- req['Authorization'] = "Bearer #{configuration.token}" if configuration.token
+ def prompt(message)
+ configuration.tui.prompt(message)
+ end
- configuration.http.request(req) do |response|
- raise response.inspect unless response.code == "200"
+ def say(message, colour: :default, newline: false)
+ configuration.tui.say(message, colour: colour, newline: newline)
+ end
- response.read_body do |chunk|
- debug_print(chunk)
- yield(chunk) if block_given?
- $stdout.flush
- end
- end
+ def execute(tool_call)
+ logger.debug("Execute: #{tool_call}")
+ configuration.tools.execute(tool_call)
end
- def debug_print(body = nil)
- configuration.logger.debug(body) if configuration.debug && body
+ def quit
+ logger.debug("Exiting...")
+ exit
end
+
+ private
+
+ attr_reader :configuration, :current_state
end
end
lib/elelem/api.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Elelem
+  # Thin HTTP client for the Ollama chat endpoint. Builds the request from
+  # the shared Configuration and streams the raw response body back to the
+  # caller chunk by chunk.
+  class Api
+    attr_reader :configuration
+
+    # configuration supplies uri, model, token, tools, logger and the
+    # Net::HTTP instance used to perform the request.
+    def initialize(configuration)
+      @configuration = configuration
+    end
+
+    # POSTs `messages` (the conversation history) to the chat endpoint and
+    # yields each raw streamed chunk (expected to be a JSON document) to the
+    # given block.
+    #
+    # NOTE(review): `yield` is unguarded, so calling #chat without a block
+    # raises LocalJumpError. Fine for the current single caller; guard with
+    # `block_given?` if this becomes a public API.
+    def chat(messages)
+      body = {
+        messages: messages,
+        model: configuration.model,
+        stream: true,
+        keep_alive: "5m",
+        options: { temperature: 0.1 },
+        tools: configuration.tools.to_h
+      }
+      # Log the full request body (pretty-printed) for debugging.
+      configuration.logger.debug(JSON.pretty_generate(body))
+      json_body = body.to_json
+
+      req = Net::HTTP::Post.new(configuration.uri)
+      req["Content-Type"] = "application/json"
+      req.body = json_body
+      # Token is optional: only send Authorization when one is configured.
+      req["Authorization"] = "Bearer #{configuration.token}" if configuration.token
+
+      configuration.http.request(req) do |response|
+        # NOTE(review): only exactly "200" is accepted; any other 2xx status
+        # is treated as a failure. Confirm that is intentional.
+        raise response.inspect unless response.code == "200"
+
+        response.read_body do |chunk|
+          yield(chunk)
+        end
+      end
+    end
+  end
+end
lib/elelem/application.rb
@@ -2,21 +2,40 @@
module Elelem
class Application < Thor
- desc 'chat', 'Start the REPL'
- method_option :help, aliases: '-h', type: :boolean, desc: 'Display usage information'
- method_option :host, aliases: '--host', type: :string, desc: 'Ollama host', default: ENV.fetch('OLLAMA_HOST', 'localhost:11434')
- method_option :model, aliases: '--model', type: :string, desc: 'Ollama model', default: ENV.fetch('OLLAMA_MODEL', 'gpt-oss')
- method_option :token, aliases: '--token', type: :string, desc: 'Ollama token', default: ENV.fetch('OLLAMA_API_KEY', nil)
- method_option :debug, aliases: '--debug', type: :boolean, desc: 'Debug mode', default: false
+ desc "chat", "Start the REPL"
+ method_option :help,
+ aliases: "-h",
+ type: :boolean,
+ desc: "Display usage information"
+ method_option :host,
+ aliases: "--host",
+ type: :string,
+ desc: "Ollama host",
+ default: ENV.fetch("OLLAMA_HOST", "localhost:11434")
+ method_option :model,
+ aliases: "--model",
+ type: :string,
+ desc: "Ollama model",
+ default: ENV.fetch("OLLAMA_MODEL", "gpt-oss")
+ method_option :token,
+ aliases: "--token",
+ type: :string,
+ desc: "Ollama token",
+ default: ENV.fetch("OLLAMA_API_KEY", nil)
+ method_option :debug,
+ aliases: "--debug",
+ type: :boolean,
+ desc: "Debug mode",
+ default: false
def chat(*)
if options[:help]
- invoke :help, ['chat']
+ invoke :help, ["chat"]
else
configuration = Configuration.new(
host: options[:host],
model: options[:model],
token: options[:token],
- debug: options[:debug],
+ debug: options[:debug]
)
say "Ollama Agent (#{configuration.model})", :green
say "Tools:\n #{configuration.tools.banner}", :green
@@ -26,7 +45,7 @@ module Elelem
end
end
- desc 'version', 'spandx version'
+  desc "version", "elelem version"
def version
-    puts "v#{Spandx::VERSION}"
+    puts "v#{Elelem::VERSION}"
end
lib/elelem/configuration.rb
@@ -18,11 +18,17 @@ module Elelem
end
end
+ def tui
+ @tui ||= TUI.new($stdin, $stdout)
+ end
+
+ def api
+ @api ||= Api.new(self)
+ end
+
def logger
- @logger ||= begin
- Logger.new(debug ? $stderr : "/dev/null").tap do |logger|
- logger.formatter = ->(_, _, _, msg) { msg }
- end
+ @logger ||= Logger.new(debug ? "elelem.log" : "/dev/null").tap do |logger|
+ logger.formatter = ->(_, _, _, message) { message.strip + "\n" }
end
end
@@ -41,7 +47,7 @@ module Elelem
private
def scheme
- host.match?(/\A(?:localhost|127\.0\.0\.1|0\.0\.0\.0)(:\d+)?\z/) ? 'http' : 'https'
+ host.match?(/\A(?:localhost|127\.0\.0\.1|0\.0\.0\.0)(:\d+)?\z/) ? "http" : "https"
end
end
end
lib/elelem/conversation.rb
@@ -4,14 +4,14 @@ module Elelem
class Conversation
SYSTEM_MESSAGE = <<~SYS
You are ChatGPT, a helpful assistant with reasoning capabilities.
- Current date: #{Time.now.strftime('%Y-%m-%d')}.
+ Current date: #{Time.now.strftime("%Y-%m-%d")}.
System info: `uname -a` output: #{`uname -a`.strip}
Reasoning: high
SYS
- ROLES = ['system', 'user', 'tool'].freeze
+ ROLES = [:system, :assistant, :user, :tool].freeze
- def initialize(items = [{ role: 'system', content: SYSTEM_MESSAGE }])
+ def initialize(items = [{ role: "system", content: SYSTEM_MESSAGE }])
@items = items
end
@@ -20,10 +20,16 @@ module Elelem
end
# :TODO truncate conversation history
- def add(role: 'user', content: '')
+ def add(role: :user, content: "")
+ role = role.to_sym
raise "unknown role: #{role}" unless ROLES.include?(role)
+ return if content.empty?
- @items << { role: role, content: content }
+ if @items.last && @items.last[:role] == role
+ @items.last[:content] += content
+ else
+ @items.push({ role: role, content: content })
+ end
end
end
end
lib/elelem/state.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+module Elelem
+  # Top-level REPL state: prompts the user, records their input in the
+  # conversation, then hands control to Working. Empty input, "exit", or
+  # EOF (nil from gets) terminates the agent via Agent#quit.
+  class Idle
+    def run(agent)
+      agent.logger.debug("Idling...")
+      input = agent.prompt("\n> ")
+      agent.quit if input.nil? || input.empty? || input == "exit"
+
+      agent.conversation.add(role: :user, content: input)
+      agent.transition_to(Working.new)
+    end
+  end
+
+  # Top-level REPL state: drives one or more streaming chat calls until the
+  # model produces a final (non-tool) response. Internally it runs a small
+  # per-chunk state machine (Waiting/Thinking/Executing/Talking) keyed on
+  # which field of the streamed message is populated.
+  class Working
+    # Base class for the per-chunk sub-states; holds the agent reference.
+    class State
+      attr_reader :agent
+
+      def initialize(agent)
+        @agent = agent
+      end
+
+      # Short class name (e.g. "Waiting") used in debug log lines.
+      def display_name
+        self.class.name.split("::").last
+      end
+    end
+
+    # Dispatch state: inspects the message and routes to the matching
+    # sub-state, or returns nil when no recognised field is present
+    # (which ends the Working#run loop below).
+    class Waiting < State
+      def process(message)
+        # NOTE(review): this initial assignment is a dead store — every
+        # branch below reassigns `state`. Harmless, but could be removed.
+        state = self
+
+        if message["thinking"] && !message["thinking"].empty?
+          state = Thinking.new(agent)
+        elsif message["tool_calls"]&.any?
+          state = Executing.new(agent)
+        elsif message["content"] && !message["content"].empty?
+          state = Talking.new(agent)
+        else
+          state = nil
+        end
+
+        state&.process(message)
+      end
+    end
+
+    # Streams the model's chain-of-thought to the terminal in gray until a
+    # chunk arrives without "thinking", then re-dispatches via Waiting.
+    class Thinking < State
+      def process(message)
+        # NOTE(review): the `&.` is redundant — the left operand of && has
+        # already established message["thinking"] is truthy.
+        if message["thinking"] && !message["thinking"]&.empty?
+          agent.say(message["thinking"], colour: :gray, newline: false)
+          self
+        else
+          agent.say("", newline: true)
+          Waiting.new(agent).process(message)
+        end
+      end
+    end
+
+    # Executes each requested tool call and appends its output to the
+    # conversation as a :tool message, then returns to Waiting.
+    class Executing < State
+      def process(message)
+        if message["tool_calls"]&.any?
+          message["tool_calls"].each do |tool_call|
+            agent.conversation.add(role: :tool, content: agent.execute(tool_call))
+          end
+        end
+
+        Waiting.new(agent)
+      end
+    end
+
+    # Streams the model's visible answer to the terminal and records it in
+    # the conversation under the role reported by the message (presumably
+    # "assistant" — TODO confirm against the Ollama response schema).
+    class Talking < State
+      def process(message)
+        # NOTE(review): redundant `&.` here too, as in Thinking#process.
+        if message["content"] && !message["content"]&.empty?
+          agent.conversation.add(role: message["role"], content: message["content"])
+          agent.say(message["content"], colour: :default, newline: false)
+          self
+        else
+          agent.say("", newline: true)
+          Waiting.new(agent).process(message)
+        end
+      end
+    end
+
+    # Repeatedly calls the chat API, feeding each parsed chunk through the
+    # current sub-state. Loops again after tool execution (last history
+    # entry is :tool) so the model can consume the tool output; stops when
+    # the stream reports done with a final non-tool message, or when a
+    # chunk matches no sub-state (state is nil). Always returns to Idle.
+    def run(agent)
+      agent.logger.debug("Working...")
+      state = Waiting.new(agent)
+      done = false
+
+      loop do
+        agent.api.chat(agent.conversation.history) do |chunk|
+          response = JSON.parse(chunk)
+          message = normalize(response["message"] || {})
+          done = response["done"]
+
+          agent.logger.debug("#{state.display_name}: #{message}")
+          state = state.process(message)
+        end
+
+        break if state.nil?
+        break if done && agent.conversation.history.last[:role] != :tool
+      end
+
+      agent.transition_to(Idle.new)
+    end
+
+    private
+
+    # Drops keys whose values are empty so sub-state checks stay simple.
+    # NOTE(review): assumes every value responds to #empty? — a boolean or
+    # numeric field in the message would raise NoMethodError. Confirm the
+    # Ollama "message" object only ever carries strings/arrays here.
+    def normalize(message)
+      message.reject { |_key, value| value.empty? }
+    end
+  end
+end
lib/elelem/tools.rb
@@ -4,38 +4,23 @@ module Elelem
class Tools
DEFAULT_TOOLS = [
{
- type: 'function',
+ type: "function",
function: {
- name: 'execute_command',
- description: 'Execute a shell command.',
+ name: "execute_command",
+ description: "Execute a shell command.",
parameters: {
- type: 'object',
- properties: { command: { type: 'string' } },
- required: ['command']
+ type: "object",
+ properties: {
+ command: { type: "string" },
+ },
+ required: ["command"]
}
},
- handler: -> (args) {
- stdout, stderr, _status = Open3.capture3('/bin/sh', '-c', args['command'])
+ handler: lambda { |args|
+ stdout, stderr, _status = Open3.capture3("/bin/sh", "-c", args["command"])
stdout + stderr
}
},
- {
- type: 'function',
- function: {
- name: 'ask_user',
- description: 'Ask the user to answer a question.',
- parameters: {
- type: 'object',
- properties: { question: { type: 'string' } },
- required: ['question']
- }
- },
- handler: ->(args) {
- puts("\u001b[35m#{args['question']}\u001b[0m")
- print "> "
- STDIN.gets&.chomp
- }
- }
]
def initialize(tools = DEFAULT_TOOLS)
@@ -52,13 +37,13 @@ module Elelem
end
def execute(tool_call)
- name = tool_call.dig('function', 'name')
- args = tool_call.dig('function', 'arguments')
+ name = tool_call.dig("function", "name")
+ args = tool_call.dig("function", "arguments")
tool = @tools.find do |tool|
tool.dig(:function, :name) == name
end
- tool.fetch(:handler).call(args)
+ tool&.fetch(:handler)&.call(args)
end
def to_h
lib/elelem/tui.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Elelem
+  # Minimal terminal UI wrapper around a pair of IO streams. Injectable
+  # stdin/stdout make the agent's I/O testable without touching the real
+  # terminal.
+  class TUI
+    attr_reader :stdin, :stdout
+
+    def initialize(stdin = $stdin, stdout = $stdout)
+      @stdin = stdin
+      @stdout = stdout
+    end
+
+    # Prints `message` (no trailing newline) and reads one line of input.
+    # Returns nil on EOF (e.g. Ctrl+D), otherwise the line without its
+    # trailing newline.
+    def prompt(message)
+      say(message)
+      stdin.gets&.chomp
+    end
+
+    # Writes `message`, optionally colourised and/or newline-terminated.
+    # Flushes after every write so streamed tokens appear immediately.
+    def say(message, colour: :default, newline: false)
+      formatted_message = colourize(message, colour: colour)
+      if newline
+        stdout.puts(formatted_message)
+      else
+        stdout.print(formatted_message)
+      end
+      stdout.flush
+    end
+
+    private
+
+    # Wraps text in ANSI escape codes. Only :gray (bright black, SGR 90)
+    # is currently supported; any other colour returns the text unchanged.
+    def colourize(text, colour: :default)
+      case colour
+      when :gray
+        "\e[90m#{text}\e[0m"
+      else
+        text
+      end
+    end
+  end
+end
lib/elelem/version.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
module Elelem
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
end
lib/elelem.rb
@@ -8,10 +8,13 @@ require "thor"
require "uri"
require_relative "elelem/agent"
+require_relative "elelem/api"
require_relative "elelem/application"
require_relative "elelem/configuration"
require_relative "elelem/conversation"
+require_relative "elelem/state"
require_relative "elelem/tools"
+require_relative "elelem/tui"
require_relative "elelem/version"
module Elelem
.gitignore
@@ -12,6 +12,7 @@
*.a
mkmf.log
target/
+*.log
# rspec failure tracking
.rspec_status
CHANGELOG.md
@@ -1,5 +1,12 @@
## [Unreleased]
+## [0.1.1] - 2025-08-12
+
+### Fixed
+- Fixed infinite loop bug after tool execution - loop now continues until assistant provides final response
+- Fixed conversation history accumulating streaming chunks as separate entries - now properly combines same-role consecutive messages
+- Improved state machine logging with better debug output
+
## [0.1.0] - 2025-08-08
- Initial release
elelem.gemspec
@@ -33,10 +33,10 @@ Gem::Specification.new do |spec|
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
- spec.add_dependency 'json'
- spec.add_dependency 'logger'
- spec.add_dependency 'net-http'
- spec.add_dependency 'open3'
- spec.add_dependency 'thor'
- spec.add_dependency 'uri'
+ spec.add_dependency "json"
+ spec.add_dependency "logger"
+ spec.add_dependency "net-http"
+ spec.add_dependency "open3"
+ spec.add_dependency "thor"
+ spec.add_dependency "uri"
end
Gemfile.lock
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
- elelem (0.1.0)
+ elelem (0.1.1)
json
logger
net-http
README.md
@@ -1,28 +1,56 @@
# Elelem
-TODO: Delete this and the text below, and describe your gem
-
-Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/elelem`. To experiment with that code, run `bin/console` for an interactive prompt.
+Elelem is an interactive REPL (Read-Eval-Print Loop) for Ollama that provides a command-line chat interface for communicating with AI models. It features tool calling capabilities, streaming responses, and a clean state machine architecture.
## Installation
-TODO: Replace `UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG` with your gem name right after releasing it to RubyGems.org. Please do not do it earlier due to security reasons. Alternatively, replace this section with instructions to install your gem from git if you don't plan to release to RubyGems.org.
-
Install the gem and add to the application's Gemfile by executing:
```bash
-bundle add UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG
+bundle add elelem
```
If bundler is not being used to manage dependencies, install the gem by executing:
```bash
-gem install UPDATE_WITH_YOUR_GEM_NAME_IMMEDIATELY_AFTER_RELEASE_TO_RUBYGEMS_ORG
+gem install elelem
```
## Usage
-TODO: Write usage instructions here
+Start an interactive chat session with an Ollama model:
+
+```bash
+elelem chat
+```
+
+### Options
+
+- `--host`: Specify Ollama host (default: localhost:11434)
+- `--model`: Specify Ollama model (default: gpt-oss, currently only tested with gpt-oss)
+- `--token`: Provide authentication token
+- `--debug`: Enable debug logging
+
+### Examples
+
+```bash
+# Chat with default model
+elelem chat
+
+# Chat with specific model and host
+elelem chat --model llama2 --host remote-host:11434
+
+# Enable debug mode
+elelem chat --debug
+```
+
+### Features
+
+- **Interactive REPL**: Clean command-line interface for chatting
+- **Tool Execution**: Execute shell commands when requested by the AI
+- **Streaming Responses**: Real-time streaming of AI responses
+- **State Machine**: Robust state management for different interaction modes
+- **Conversation History**: Maintains context across the session
## Development
@@ -30,6 +58,100 @@ After checking out the repo, run `bin/setup` to install dependencies. Then, run
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+REPL State Diagram
+
+```
+ ┌─────────────────┐
+ │ START/INIT │
+ └─────────┬───────┘
+ │
+ v
+ ┌─────────────────┐
+ ┌────▶│ IDLE (Prompt) │◄────┐
+ │ │ Shows "> " │ │
+ │ └─────────┬───────┘ │
+ │ │ │
+ │ │ User input │
+ │ v │
+ │ ┌─────────────────┐ │
+ │ │ PROCESSING │ │
+ │ │ INPUT │ │
+ │ └─────────┬───────┘ │
+ │ │ │
+ │ │ API call │
+ │ v │
+ │ ┌─────────────────┐ │
+ │ │ STREAMING │ │
+ │ ┌──▶│ RESPONSE │─────┤
+ │ │ └─────────┬───────┘ │
+ │ │ │ │ done=true
+ │ │ │ Parse chunk │
+ │ │ v │
+ │ │ ┌─────────────────┐ │
+ │ │ │ MESSAGE TYPE │ │
+ │ │ │ ROUTING │ │
+ │ │ └─────┬─┬─┬───────┘ │
+ │ │ │ │ │ │
+ ┌────────┴─┴─────────┘ │ └─────────────┴──────────┐
+ │ │ │
+ v v v
+ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐
+ │ THINKING │ │ TOOL │ │ CONTENT │
+ │ STATE │ │ EXECUTION │ │ OUTPUT │
+ │ │ │ STATE │ │ STATE │
+ └─────────────┘ └─────┬───────┘ └─────────────┘
+ │ │ │
+ │ │ done=false │
+ └───────────────────┼──────────────────────────┘
+ │
+ v
+ ┌─────────────────┐
+ │ CONTINUE │
+ │ STREAMING │
+ └─────────────────┘
+ │
+ └─────────────────┐
+ │
+ ┌─────────────────┐ │
+ │ ERROR STATE │ │
+ │ (Exception) │ │
+ └─────────────────┘ │
+ ▲ │
+ │ Invalid response │
+ └────────────────────────────┘
+
+ EXIT CONDITIONS:
+ ┌─────────────────────────┐
+ │ • User enters "" │
+ │ • User enters "exit" │
+ │ • EOF (Ctrl+D) │
+ │ • nil input │
+ └─────────────────────────┘
+ │
+ v
+ ┌─────────────────────────┐
+ │ TERMINATE │
+ └─────────────────────────┘
+```
+
+Key Transitions:
+
+1. IDLE → PROCESSING: User enters any non-empty, non-"exit" input
+2. PROCESSING → STREAMING: API call initiated to Ollama
+3. STREAMING → MESSAGE ROUTING: Each chunk received is parsed
+4. MESSAGE ROUTING → States: Based on message content:
+ - thinking → THINKING STATE
+ - tool_calls → TOOL EXECUTION STATE
+ - content → CONTENT OUTPUT STATE
+ - Invalid format → ERROR STATE
+5. All States → IDLE: When done=true from API response
+6. TOOL EXECUTION → STREAMING: Sets done=false to continue conversation
+7. Any State → TERMINATE: On exit conditions
+
+The REPL operates as a continuous loop where the primary flow is IDLE → PROCESSING → STREAMING →
+back to IDLE, with the streaming phase potentially cycling through multiple message types before
+completion.
+
## Contributing
Bug reports and pull requests are welcome on GitHub at https://github.com/xlgmokha/elelem.