Commit e5de3d4
Changed files (1)
cmd
del
cmd/del/main.go
@@ -1846,22 +1846,8 @@ func (d *Del) streamResponseChunks(ctx context.Context, text string) {
func (d *Del) buildOllamaTools() []api.Tool {
var tools []api.Tool
- // === TEMPORARY: SINGLE TOOL FOR DEBUGGING ===
- // Test with just one simple tool to see if that works
-
- // list_dir tool only
- listDirFunc := api.ToolFunction{
- Name: "list_dir",
- Description: "List directory contents",
- }
- listDirFunc.Parameters.Type = "object"
- listDirFunc.Parameters.Required = []string{}
- listDirFunc.Parameters.Properties = make(map[string]struct {
- Type api.PropertyType `json:"type"`
- Items any `json:"items,omitempty"`
- Description string `json:"description"`
- Enum []any `json:"enum,omitempty"`
- })
+ // === TESTING: MEMORY TOOLS ONLY FOR STABILITY ===
+ // Test with just memory tools to see if that's more stable
// Helper function to create property
makeProperty := func(propType string, description string) struct {
@@ -1881,14 +1867,7 @@ func (d *Del) buildOllamaTools() []api.Tool {
}
}
- listDirFunc.Parameters.Properties["path"] = makeProperty("string", "Path to the directory to list (defaults to current directory)")
-
- tools = append(tools, api.Tool{
- Type: "function",
- Function: listDirFunc,
- })
-
- // Add memory tools for testing
+ // Memory tools only
// remember tool
rememberFunc := api.ToolFunction{
Name: "remember",
@@ -1909,7 +1888,9 @@ func (d *Del) buildOllamaTools() []api.Tool {
Function: rememberFunc,
})
- // recall tool
+ return tools
+
+ // recall tool (temporarily disabled)
recallFunc := api.ToolFunction{
Name: "recall",
Description: "Retrieve information from persistent memory",
@@ -2623,11 +2604,50 @@ func (d *Del) processMessage(ctx context.Context, userInput string) {
// Add all tool results to history
d.chatHistory = append(d.chatHistory, toolResults...)
- // === TEMPORARY: SKIP FINAL AI RESPONSE TO FIX HANGING ===
- // The final AI response generation is causing hangs
- // For now, just show that tool execution completed
- d.updateThinking("✅ Skipping final response generation...")
- fullResponse = "✅ Tool execution completed successfully."
+ // Get final AI response after tool execution with simplified history
+ d.updateThinking("🧠 Generating final response...")
+
+ // Create simplified chat history for final response (avoid complex tool structures)
+ simplifiedHistory := []api.Message{
+ {Role: "user", Content: userInput},
+ }
+
+ // Add a summary of tool execution results instead of raw tool data
+ var toolSummary strings.Builder
+ toolSummary.WriteString("I executed the following tools:\n")
+ for _, toolCall := range toolCalls {
+ toolSummary.WriteString(fmt.Sprintf("- %s: completed successfully\n", toolCall.Function.Name))
+ }
+ toolSummary.WriteString("\nPlease provide a helpful response based on the tool execution.")
+
+ simplifiedHistory = append(simplifiedHistory, api.Message{
+ Role: "assistant",
+ Content: toolSummary.String(),
+ })
+
+ finalCtx, finalCancel := context.WithTimeout(ctx, 15*time.Second) // Reduced timeout
+ defer finalCancel()
+
+ var finalResponse string
+ err = d.client.Chat(finalCtx, &api.ChatRequest{
+ Model: d.model,
+ Messages: simplifiedHistory,
+ // Don't include tools in final response to avoid infinite loops
+ }, func(resp api.ChatResponse) error {
+ finalResponse += resp.Message.Content
+ return nil
+ })
+
+ if err == nil && strings.TrimSpace(finalResponse) != "" {
+ d.chatHistory = append(d.chatHistory, api.Message{Role: "assistant", Content: finalResponse})
+ fullResponse = finalResponse
+ } else {
+ // If final response fails or is empty, provide a helpful fallback
+ if err != nil {
d.updateThinking(fmt.Sprintf("⚠️ Final response failed: %v", err))
+ }
+ fullResponse = "✅ Tool execution completed successfully."
+ }
}
d.stopThinking()