Documentation
Index
- Constants
- Variables
- func AfterLog()
- func AppendMemoryTools(tools []string) []string
- func AppendSearchTools(tools []string) []string
- func AppendSkillTools(tools []string) []string
- func AppendSubagentTools(tools []string) []string
- func AvailableEmbeddingTool(toolName string) bool
- func AvailableMemoryTool(toolName string) bool
- func AvailableSearchTool(toolName string) bool
- func AvailableSkillTool(toolName string) bool
- func AvailableSubagentTool(toolName string) bool
- func BeforeLog()
- func BuildAnthropicMessages(messages []UniversalMessage) []anthropic.MessageParam
- func BuildGeminiMessages(messages []UniversalMessage) []*gemini.Content
- func BuildOpenAIMessages(messages []UniversalMessage) []openai.ChatCompletionMessage
- func BuildOpenChatMessages(messages []UniversalMessage) []*model.ChatCompletionMessage
- func CallAgent(op *AgentOptions) error
- func CheckIfImageFromBytes(data []byte) (bool, string, error)
- func CheckIfImageFromPath(filePath string) (bool, string, error)
- func ClearEmptyConvosAsync()
- func ClearTokenCache()
- func Contains(list []string, item string) bool
- func ConvertMessages(data []byte, sourceProvider, targetProvider string) ([]byte, error)
- func Debugf(format string, args ...interface{})
- func Debugln(args ...interface{})
- func DetectAnthropicKeyMessage(msg *anthropic.MessageParam) bool
- func DetectGeminiKeyMessage(msg *gemini.Content) bool
- func DetectMessageProvider(path string) string
- func DetectMessageProviderByContent(input []byte) string
- func DetectMessageProviderFromLine(line []byte) string
- func DetectModelProvider(endPoint string, modelName string) string
- func DetectOpenAIKeyMessage(msg *openai.ChatCompletionMessage) bool
- func DisableAgentMemory(capabilities []string) []string
- func DisableAgentSkills(capabilities []string) []string
- func DisableCodeExecution()
- func DisableMCPServers(capabilities []string) []string
- func DisableMarkdown(capabilities []string) []string
- func DisableSubAgents(capabilities []string) []string
- func DisableTokenUsage(capabilities []string) []string
- func DisableWebSearch(capabilities []string) []string
- func EnableAgentMemory(capabilities []string) []string
- func EnableAgentSkills(capabilities []string) []string
- func EnableCodeExecution()
- func EnableMCPServers(capabilities []string) []string
- func EnableMarkdown(capabilities []string) []string
- func EnableSubAgents(capabilities []string) []string
- func EnableTokenUsage(capabilities []string) []string
- func EnableWebSearch(capabilities []string) []string
- func EndWithNewline(s string) bool
- func Errorf(format string, args ...interface{})
- func Errorln(args ...interface{})
- func EstimateAnthropicMessageTokens(msg anthropic.MessageParam) int
- func EstimateAnthropicMessagesTokens(messages []anthropic.MessageParam) int
- func EstimateAnthropicToolTokens(tools []anthropic.ToolUnionParam) int
- func EstimateGeminiMessageTokens(msg *genai.Content) int
- func EstimateGeminiMessagesTokens(messages []*genai.Content) int
- func EstimateGeminiToolTokens(tools []*genai.Tool) int
- func EstimateJSONTokens(data interface{}) int
- func EstimateOpenAIMessageTokens(msg openai.ChatCompletionMessage) int
- func EstimateOpenAIMessagesTokens(messages []openai.ChatCompletionMessage) int
- func EstimateOpenAIToolTokens(tools []openai.Tool) int
- func EstimateOpenChatMessageTokens(msg *openchat.ChatCompletionMessage) int
- func EstimateOpenChatMessagesTokens(messages []*openchat.ChatCompletionMessage) int
- func EstimateOpenChatToolTokens(tools []*openchat.Tool) int
- func EstimateTokens(text string) int
- func ExtractTextFromURL(url string, config *ExtractorConfig) ([]string, error)
- func ExtractThinkTags(content string) (thinking, cleaned string)
- func FetchProcess(urls []string) []string
- func FilterOpenToolArguments(argsMap map[string]interface{}, ignoreKeys []string) map[string]interface{}
- func FindConvosByIndex(idx string) (string, error)
- func FormatMinutesSeconds(d time.Duration) string
- func GenerateTempFileName() string
- func GetAllEmbeddingTools() []string
- func GetAllMemoryTools() []string
- func GetAllOpenTools() []string
- func GetAllSearchTools() []string
- func GetAllSkillTools() []string
- func GetAllSubagentTools() []string
- func GetAnthropicMessageKey(msg anthropic.MessageParam) string
- func GetConvoDir() string
- func GetCurrentTokenCount(messages []openai.ChatCompletionMessage) int
- func GetCurrentTokenCountGemini(messages []*genai.Content) int
- func GetCurrentTokenCountOpenChat(messages []*model.ChatCompletionMessage) int
- func GetDefaultSearchEngineName() string
- func GetFileContent(filePath string) (string, error)
- func GetFilePath(dir string, filename string) string
- func GetGeminiMessageKey(msg *genai.Content) string
- func GetLogger() *log.Logger
- func GetMIMEType(filePath string) string
- func GetMIMETypeByContent(data []byte) string
- func GetNoneSearchEngineName() string
- func GetOpenAIMessageKey(msg openai.ChatCompletionMessage) string
- func GetOpenChatMessageKey(msg *model.ChatCompletionMessage) string
- func GetSanitizeTitle(title string) string
- func GetStringValue(data map[string]interface{}, key string) string
- func GetUserConfigDir() string
- func HasContent(s *string) bool
- func Infof(format string, args ...interface{})
- func Infoln(args ...interface{})
- func InitLogger()
- func IsAgentMemoryEnabled(capabilities []string) bool
- func IsAgentSkillsEnabled(capabilities []string) bool
- func IsAudioMIMEType(mimeType string) bool
- func IsAvailableMCPTool(toolName string, client *MCPClient) bool
- func IsAvailableOpenTool(toolName string) bool
- func IsCodeExecutionEnabled() bool
- func IsExcelMIMEType(mimeType string) bool
- func IsImageMIMEType(mimeType string) bool
- func IsMCPServersEnabled(capabilities []string) bool
- func IsMarkdownEnabled(capabilities []string) bool
- func IsModelGemini3(modelName string) bool
- func IsPDFMIMEType(mimeType string) bool
- func IsStdinPipe(source string) bool
- func IsSubAgentsEnabled(capabilities []string) bool
- func IsSwitchAgentError(err error) bool
- func IsTextMIMEType(mimeType string) bool
- func IsTokenUsageEnabled(capabilities []string) bool
- func IsUnknownMIMEType(mimeType string) bool
- func IsUserCancelError(err error) bool
- func IsVideoMIMEType(mimeType string) bool
- func IsWebSearchEnabled(capabilities []string) bool
- func MakeUserSubDir(subparts ...string) string
- func NewLogger() *log.Logger
- func Ptr[T any](t T) *T
- func RemoveMemoryTools(tools []string) []string
- func RemoveSearchTools(tools []string) []string
- func RemoveSkillTools(tools []string) []string
- func RemoveSubagentTools(tools []string) []string
- func RenderAnthropicConversationLog(input []byte) string
- func RenderGeminiConversationLog(input []byte) string
- func RenderOpenAIConversationLog(input []byte) string
- func Successf(format string, args ...interface{})
- func Successln(args ...interface{})
- func TruncateString(s string, maxLen int) string
- func Warnf(format string, args ...interface{})
- func Warnln(args ...interface{})
- type Agent
- func (ag *Agent) CompleteReasoning()
- func (ag *Agent) Error(text string)
- func (ag *Agent) GenerateAnthropicStream() error
- func (ag *Agent) GenerateGeminiStream() error
- func (ag *Agent) GenerateOpenAIStream() error
- func (ag *Agent) GenerateOpenChatStream() error
- func (ag *Agent) SortAnthropicMessagesByOrder() error
- func (ag *Agent) SortGeminiMessagesByOrder() error
- func (ag *Agent) SortOpenAIMessagesByOrder() error
- func (ag *Agent) SortOpenChatMessagesByOrder() error
- func (ag *Agent) StartIndicator(text string)
- func (ag *Agent) StartReasoning()
- func (ag *Agent) StopIndicator()
- func (ag *Agent) Warn(text string)
- func (ag *Agent) WriteDiffConfirm(text string)
- func (ag *Agent) WriteEnd()
- func (ag *Agent) WriteFunctionCall(text string)
- func (ag *Agent) WriteFunctionCallOver()
- func (ag *Agent) WriteMarkdown()
- func (ag *Agent) WriteReasoning(text string)
- func (ag *Agent) WriteText(text string)
- func (ag *Agent) WriteUsage()
- type AgentOptions
- type AgentRunner
- type Anthropic
- type AnthropicConversation
- func (c *AnthropicConversation) Clear() error
- func (c *AnthropicConversation) GetMessages() interface{}
- func (c *AnthropicConversation) Load() error
- func (c *AnthropicConversation) MarshalMessages(messages []anthropic.MessageParam) []byte
- func (c *AnthropicConversation) Push(messages ...interface{}) error
- func (c *AnthropicConversation) Save() error
- func (c *AnthropicConversation) SetMessages(messages interface{})
- type AtRefProcessor
- func (p *AtRefProcessor) AddExcludePattern(pattern string)
- func (p *AtRefProcessor) ParseAtReferences(text string) []AtReference
- func (p *AtRefProcessor) ProcessReferences(text string, references []AtReference) (string, error)
- func (p *AtRefProcessor) ProcessText(text string) (string, error)
- func (p *AtRefProcessor) SetMaxDirItems(count int)
- func (p *AtRefProcessor) SetMaxFileSize(size int64)
- type AtReference
- type BaseConversation
- func (c *BaseConversation) Clear() error
- func (c *BaseConversation) GetMessages() interface{}
- func (c *BaseConversation) GetPath() string
- func (c *BaseConversation) Load() error
- func (c *BaseConversation) Open(title string) error
- func (c *BaseConversation) Push(messages ...interface{})
- func (c *BaseConversation) Save() error
- func (c *BaseConversation) SetMessages(messages interface{})
- func (c *BaseConversation) SetPath(title string)
- type ContextManager
- func (cm *ContextManager) PrepareAnthropicMessages(messages []anthropic.MessageParam, systemPrompt string, ...) ([]anthropic.MessageParam, bool)
- func (cm *ContextManager) PrepareGeminiMessages(messages []*genai.Content, systemPrompt string, tools []*genai.Tool) ([]*genai.Content, bool)
- func (cm *ContextManager) PrepareOpenAIMessages(messages []openai.ChatCompletionMessage, tools []openai.Tool) ([]openai.ChatCompletionMessage, bool)
- func (cm *ContextManager) PrepareOpenChatMessages(messages []*model.ChatCompletionMessage, tools []*model.Tool) ([]*model.ChatCompletionMessage, bool)
- type ConversationManager
- type ConvoMeta
- type ExtractorConfig
- type FileData
- type GeminiAgent
- func (ga *GeminiAgent) GeminiActivateSkillToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiCloseDiffConfirm()
- func (ga *GeminiAgent) GeminiCopyToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiCreateDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiDeleteDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiDeleteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiEditFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiGetStateToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiListAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiListDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiListMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiListStateToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiMCPToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiMoveToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiReadFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiReadMultipleFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiSaveMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiSearchFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiSearchTextInFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiSetStateToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiShellToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiShowDiffConfirm(diff string)
- func (ga *GeminiAgent) GeminiSpawnSubAgentsToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiSwitchAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiWebFetchToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ga *GeminiAgent) GeminiWriteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- type GeminiConversation
- func (g *GeminiConversation) Clear() error
- func (g *GeminiConversation) GetMessages() interface{}
- func (g *GeminiConversation) Load() error
- func (g *GeminiConversation) MarshalMessages(messages []*genai.Content) []byte
- func (g *GeminiConversation) Push(messages ...interface{}) error
- func (g *GeminiConversation) Save() error
- func (g *GeminiConversation) SetMessages(messages interface{})
- type MCPClient
- func (mc *MCPClient) AddHttpServer(name string, url string, headers map[string]string) error
- func (mc *MCPClient) AddSseServer(name string, url string, headers map[string]string) error
- func (mc *MCPClient) AddStdServer(name string, cmd string, env map[string]string, cwd string, args ...string) error
- func (mc *MCPClient) CallTool(toolName string, args map[string]any) (*MCPToolResponse, error)
- func (mc *MCPClient) Close()
- func (mc *MCPClient) FindTool(toolName string) *MCPSession
- func (mc *MCPClient) GetAllServers() []*MCPServer
- func (mc *MCPClient) GetPrompts(session *MCPSession) (*[]MCPPrompt, error)
- func (mc *MCPClient) GetResources(session *MCPSession) (*[]MCPResource, error)
- func (mc *MCPClient) GetTools(session *MCPSession) (*[]MCPTool, error)
- func (mc *MCPClient) Init(servers map[string]*data.MCPServer, option MCPLoadOption) error
- type MCPLoadOption
- type MCPPrompt
- type MCPResource
- type MCPServer
- type MCPSession
- type MCPTool
- type MCPToolResponse
- type MCPToolResponseType
- type Markdown
- type ModelInfo
- type ModelLimits
- type OpenAI
- type OpenAIConversation
- func (c *OpenAIConversation) Clear() error
- func (c *OpenAIConversation) GetMessages() interface{}
- func (c *OpenAIConversation) Load() error
- func (c *OpenAIConversation) MarshalMessages(messages []openai.ChatCompletionMessage) []byte
- func (c *OpenAIConversation) Push(messages ...interface{}) error
- func (c *OpenAIConversation) Save() error
- func (c *OpenAIConversation) SetMessages(messages interface{})
- type OpenChat
- type OpenChatConversation
- func (c *OpenChatConversation) Clear() error
- func (c *OpenChatConversation) GetMessages() interface{}
- func (c *OpenChatConversation) Load() error
- func (c *OpenChatConversation) MarshalMessages(messages []*model.ChatCompletionMessage) []byte
- func (c *OpenChatConversation) Push(messages ...interface{}) error
- func (c *OpenChatConversation) Save() error
- func (c *OpenChatConversation) SetMessages(messages interface{})
- type OpenFunctionDefinition
- type OpenProcessor
- func (op *OpenProcessor) AnthropicActivateSkillToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicCopyToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicCreateDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicDeleteDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicDeleteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicEditFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicGetStateToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicListAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicListDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicListMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicListStateToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicMCPToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicMoveToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicReadFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicReadMultipleFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSaveMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSearchFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSearchTextInFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSetStateToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicShellToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSpawnSubAgentsToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSwitchAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicWebFetchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicWebSearchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicWriteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) OpenAIActivateSkillToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAICopyToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAICreateDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIDeleteDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIDeleteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIEditFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIGetStateToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIListAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIListDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIListMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIListStateToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIMCPToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIMoveToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIReadFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIReadMultipleFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISaveMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISearchFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISearchTextInFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISetStateToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIShellToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISpawnSubAgentsToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISwitchAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIWebFetchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIWebSearchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIWriteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatActivateSkillToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatCopyToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatCreateDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatDeleteDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatDeleteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatEditFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatGetStateToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatListAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatListDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatListMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatListStateToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatMCPToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatMoveToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatReadFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatReadMultipleFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSaveMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSearchFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSearchTextInFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSetStateToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatShellToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSpawnSubAgentsToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSwitchAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatWebFetchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatWebSearchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatWriteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- type OpenTool
- type SearchEngine
- func (s *SearchEngine) BingSearch(query string) (map[string]any, error)
- func (s *SearchEngine) GoogleSearch(query string) (map[string]any, error)
- func (s *SearchEngine) NoneSearch(query string) (map[string]any, error)
- func (s *SearchEngine) RetrieveQueries(queries []string) string
- func (s *SearchEngine) RetrieveReferences(references []map[string]any) string
- func (s *SearchEngine) SerpAPISearch(query string, engine string) (map[string]any, error)
- func (s *SearchEngine) TavilySearch(query string) (map[string]any, error)
- type SkillManager
- func (sm *SkillManager) ActivateSkill(name string) (string, string, string, error)
- func (sm *SkillManager) CreateTestSkill(rootPath string) (string, error)
- func (sm *SkillManager) GenerateFileTree(dir string) (string, error)
- func (sm *SkillManager) GetAvailableSkills() string
- func (sm *SkillManager) GetAvailableSkillsMetadata() []data.SkillMetadata
- func (sm *SkillManager) LoadMetadata() error
- type StatusStack
- func (s *StatusStack) ChangeTo(proc chan<- StreamNotify, notify StreamNotify, proceed <-chan bool)
- func (s *StatusStack) Clear()
- func (s *StatusStack) Debug()
- func (s *StatusStack) IsEmpty() bool
- func (s *StatusStack) IsTop(status StreamStatus) bool
- func (s *StatusStack) Peek() StreamStatus
- func (s *StatusStack) Pop() StreamStatus
- func (s *StatusStack) Push(status StreamStatus)
- func (s *StatusStack) Size() int
- type StreamData
- type StreamDataType
- type StreamNotify
- type StreamStatus
- type SubAgentExecutor
- func (e *SubAgentExecutor) Cancel(taskID string) error
- func (e *SubAgentExecutor) CancelAll()
- func (e *SubAgentExecutor) Clear()
- func (e *SubAgentExecutor) ClearAll()
- func (e *SubAgentExecutor) Execute(timeout time.Duration) []SubAgentResult
- func (e *SubAgentExecutor) FormatProgress() string
- func (e *SubAgentExecutor) FormatSummary(results []SubAgentResult) string
- func (e *SubAgentExecutor) GetAllProgress() []SubAgentResult
- func (e *SubAgentExecutor) GetProgress(taskID string) *SubAgentResult
- func (e *SubAgentExecutor) GetProgressStatistics() (int, int)
- func (e *SubAgentExecutor) Submit(task *SubAgentTask) string
- func (e *SubAgentExecutor) SubmitBatch(tasks []*SubAgentTask) []string
- type SubAgentResult
- type SubAgentStatus
- type SubAgentTask
- type SwitchAgentError
- type TavilyError
- type TavilyErrorDetail
- type TavilyResponse
- type TavilyResult
- type ThinkingLevel
- func (t ThinkingLevel) Display() string
- func (t ThinkingLevel) IsEnabled() bool
- func (t ThinkingLevel) String() string
- func (t ThinkingLevel) ToAnthropicParams() anthropic.ThinkingConfigParamUnion
- func (t ThinkingLevel) ToGeminiConfig(modelName string) *genai.ThinkingConfig
- func (t ThinkingLevel) ToOpenAIReasoningEffort() string
- func (t ThinkingLevel) ToOpenChatParams() (*model.Thinking, *model.ReasoningEffort)
- type TokenCache
- func (tc *TokenCache) Clear()
- func (tc *TokenCache) Get(key string) (int, bool)
- func (tc *TokenCache) GetOrComputeAnthropicTokens(msg anthropic.MessageParam) int
- func (tc *TokenCache) GetOrComputeGeminiTokens(msg *genai.Content) int
- func (tc *TokenCache) GetOrComputeOpenAITokens(msg openai.ChatCompletionMessage) int
- func (tc *TokenCache) GetOrComputeOpenChatTokens(msg *model.ChatCompletionMessage) int
- func (tc *TokenCache) Set(key string, count int)
- func (tc *TokenCache) Size() int
- func (tc *TokenCache) Stats() (hits, misses int64, size int)
- type TokenUsage
- type ToolType
- type TruncationStrategy
- type UniversalMessage
- func ParseAnthropicMessages(messages []anthropic.MessageParam) []UniversalMessage
- func ParseGeminiMessages(messages []*gemini.Content) []UniversalMessage
- func ParseOpenAIMessages(messages []openai.ChatCompletionMessage) []UniversalMessage
- func ParseOpenChatMessages(messages []*model.ChatCompletionMessage) []UniversalMessage
- type UniversalRole
- type UserCancelError
- type WorkflowManager
- func (wm *WorkflowManager) CreateWorkflow(name, description, content string) error
- func (wm *WorkflowManager) GetCommands() map[string]string
- func (wm *WorkflowManager) GetWorkflowByName(name string) (string, string, error)
- func (wm *WorkflowManager) GetWorkflowNames() []string
- func (wm *WorkflowManager) IsReservedCommand(name string) bool
- func (wm *WorkflowManager) LoadMetadata(reservedCommands map[string]string) error
- func (wm *WorkflowManager) RemoveWorkflow(name string) error
- func (wm *WorkflowManager) RenameWorkflow(oldName, newName string) error
- func (wm *WorkflowManager) UpdateWorkflow(name, description, content string) error
Constants
const ( CapabilityMCPServers = "mcp_servers" CapabilityAgentSkills = "agent_skills" CapabilityAgentMemory = "agent_memory" CapabilityTokenUsage = "token_usage" CapabilityMarkdown = "markdown_output" CapabilitySubAgents = "sub_agents" CapabilityWebSearch = "web_search" )
const ( UserCancelCommon = "[Operation Cancelled]" UserCancelReasonUnknown = "Unknown" UserCancelReasonTimeout = "Timeout" UserCancelReasonDeny = "User denied execution." UserCancelReasonCancel = "User canceled execution." )
const ( // Model types ModelProviderGemini string = "gemini" // for google gemini models ModelProviderOpenAI string = "openai" ModelProviderOpenAICompatible string = "openai-compatible" ModelProviderAnthropic string = "anthropic" // for anthropic models (official sdk) ModelProviderUnknown string = "unknown" )
const ( TavilyUrl = "https://api.tavily.com/search" GoogleSearchEngine = "google" BingSearchEngine = "bing" TavilySearchEngine = "tavily" NoneSearchEngine = "none" )
const ( CharsPerTokenEnglish = 4.0 // Average for English text CharsPerTokenChinese = 2.5 // Tuned: 3 bytes/char / 2.5 = 1.2 tokens/char (balanced) CharsPerTokenJapanese = 2.0 // 3 bytes / 2.0 = 1.5 tokens/char CharsPerTokenKorean = 2.0 // 3 bytes / 2.0 = 1.5 tokens/char CharsPerTokenCode = 3.5 // Tuned: Code is dense. 3.5 chars/token. CharsPerTokenJSON = 3.7 // JSON: Typically 3.5-4 characters per token. Tuned: 3.7 chars/token. CharsPerTokenDefault = 4.0 // Default fallback MessageOverheadTokens = 3 // Standard overhead per message (<|start|>role and <|end|>) ToolCallOverhead = 24 // Reduced from 100 to 24 (closer to reality for JSON overhead) // Media Token Costs (Heuristics) // 1MB = 1000 tokens TokenCostImageDefault = 1000 // Safe upper bound average for high-res images (OpenAI high detail is ~1105, low is 85) TokenCostImageGemini = 1000 // Fixed cost for Gemini images <= 384px (often tiled, but 258 is the base unit) // Video/Audio Heuristics (Tokens per MB - heavily estimated as we don't have duration) // Assumptions: // - Video: 2Mbps (.25MB/s). 1MB = 4s. Gemini Video: 263 tokens/s. 4s * 263 = 1052 tokens. // - Audio: 128kbps (16KB/s). 1MB = 64s. Gemini Audio: 32 tokens/s. 64s * 32 = 2048 tokens. TokenCostVideoPerMBGemini = 1000 TokenCostVideoPerMBOpenChat = 1000 // For base64 encoded video TokenCostAudioPerMBGemini = 2000 )
Token estimation constants These are refined based on modern tokenizer behavior (cl100k_base, qwen, etc.):
- English: ~4 chars/token (ASCII)
- Chinese: ~0.6-2.0 tokens/char (Qwen is efficient, OpenAI is 2.0). Due to the different tokenization methods used by different models, the conversion ratios can vary. We use 2.5 bytes/token => ~1.2 tokens/char as a balanced estimate.
- Japanese/Korean: ~1.5 tokens/char. 3 bytes/char / 2.0 => 1.5 tokens/char.
- Tool Calls: JSON structure overhead is small (~20 tokens), not 100.
const ( ToolTypeFunction ToolType = "function" // Tool Names ToolShell = "shell" ToolReadFile = "read_file" ToolWriteFile = "write_file" ToolEditFile = "edit_file" ToolDeleteFile = "delete_file" ToolCreateDirectory = "create_directory" ToolListDirectory = "list_directory" ToolDeleteDirectory = "delete_directory" ToolMove = "move" ToolCopy = "copy" ToolSearchFiles = "search_files" ToolSearchTextInFile = "search_text_in_file" ToolReadMultipleFiles = "read_multiple_files" ToolWebFetch = "web_fetch" ToolSwitchAgent = "switch_agent" ToolWebSearch = "web_search" ToolActivateSkill = "activate_skill" ToolListMemory = "list_memory" ToolSaveMemory = "save_memory" ToolListAgent = "list_agent" ToolSpawnSubAgents = "spawn_subagents" ToolGetState = "get_state" ToolSetState = "set_state" ToolListState = "list_state" )
const ( // ToolRespConfirmShell is the template for the response to the user before executing a command. ToolRespConfirmShell = "```\n%s\n```\n%s" // ToolRespShellOutput is the template for the response to the user after executing a command. ToolRespShellOutput = `shell executed: %s Status: %s %s` ToolUserConfirmPrompt = "Do you want to proceed?" // ToolRespConfirmEditFile is the template for the response to the user before modifying a file, including the diff. ToolRespDiscardEditFile = "Based on your request, the OPERATION is CANCELLED: " + "Cancel edit file: %s\n" + "The user has explicitly declined to apply these file edits. The file will remain unchanged. Do not proceed with any file modifications or ask for further confirmation without explicit new user instruction." )
const ( DefaultShellTimeout = 60 * time.Second MaxFileSize = 20 * 1024 * 1024 // 20MB )
Tool robustness constants
const ( CachedTokensInPrompt = true CachedTokensNotInPrompt = false )
const DefaultMaxCacheSize = 10000
DefaultMaxCacheSize is the default maximum number of entries in the cache
const (
MaxWorkersParalleled = 5
)
Variables ¶
var ( // RoleColors for message roles (initialized in init) RoleColors map[string]string // ContentTypeColors for special content (initialized in init) ContentTypeColors map[string]string )
var DefaultLimitsLegacy = ModelLimits{
ContextWindow: 32000,
MaxOutputTokens: 4096,
}
DefaultLimitsLegacy is the fallback for unknown models.
var DefaultLimitsModern = ModelLimits{
ContextWindow: 128000,
MaxOutputTokens: 8192,
}
var DefaultModelLimits = map[string]ModelLimits{}/* 152 elements not displayed */
DefaultModelLimits is the registry of known model limits. Context window values must be from official documentation or verified by tests
var ExecutorPath string
ExecutorPath is the path to the executable to run for filtering. Defaults to os.Executable(). Can be overridden for testing.
Functions ¶
func AppendMemoryTools ¶ added in v1.13.18
AppendMemoryTools appends memory tools to the given tools slice if they are not already present.
func AppendSearchTools ¶ added in v1.14.1
AppendSearchTools appends search tools to the given tools slice if they are not already present.
func AppendSkillTools ¶ added in v1.13.18
AppendSkillTools appends skill tools to the given tools slice if they are not already present.
func AppendSubagentTools ¶ added in v1.13.18
AppendSubagentTools appends subagent tools to the given tools slice if they are not already present.
func AvailableEmbeddingTool ¶ added in v1.9.2
AvailableEmbeddingTool checks if a tool is available in the embedding tools.
func AvailableMemoryTool ¶ added in v1.13.18
AvailableMemoryTool checks if a tool is available in the memory tools.
func AvailableSearchTool ¶ added in v1.9.12
AvailableSearchTool checks if a tool is available in the search tools.
func AvailableSkillTool ¶ added in v1.13.18
AvailableSkillTool checks if a tool is available in the skill tools.
func AvailableSubagentTool ¶ added in v1.13.18
AvailableSubagentTool checks if a tool is available in the subagent tools.
func BuildAnthropicMessages ¶ added in v1.13.10
func BuildAnthropicMessages(messages []UniversalMessage) []anthropic.MessageParam
BuildAnthropicMessages converts universal messages to Anthropic format. Handles: System role is inlined into the first user message. Preserves: OfText, OfThinking blocks
func BuildGeminiMessages ¶ added in v1.13.10
func BuildGeminiMessages(messages []UniversalMessage) []*gemini.Content
BuildGeminiMessages converts universal messages to Gemini format. Handles: System role is inlined into the first user message. Preserves: Parts with Text, Thought Maps: "assistant" → "model"
func BuildOpenAIMessages ¶ added in v1.13.10
func BuildOpenAIMessages(messages []UniversalMessage) []openai.ChatCompletionMessage
BuildOpenAIMessages converts universal messages to OpenAI format. Preserves: system role, Content, ReasoningContent
func BuildOpenChatMessages ¶ added in v1.13.10
func BuildOpenChatMessages(messages []UniversalMessage) []*model.ChatCompletionMessage
BuildOpenChatMessages converts universal messages to OpenChat (Volcengine) format. Preserves: system role, Content, ReasoningContent
func CallAgent ¶ added in v1.9.4
func CallAgent(op *AgentOptions) error
func CheckIfImageFromPath ¶
CheckIfImageFromPath attempts to decode a file as an image.
func ClearEmptyConvosAsync ¶ added in v1.14.9
func ClearEmptyConvosAsync()
ClearEmptyConvosAsync clears all empty conversations in background
func ClearTokenCache ¶ added in v1.12.14
func ClearTokenCache()
ClearTokenCache clears the global token cache (useful for testing)
func ConvertMessages ¶ added in v1.13.10
ConvertMessages parses source provider data and builds target provider messages. Returns the converted data encoded as JSON.
Supported source/target providers: - ModelProviderOpenAI - ModelProviderOpenAICompatible (OpenChat) - ModelProviderAnthropic - ModelProviderGemini
func DetectAnthropicKeyMessage ¶ added in v1.13.10
func DetectAnthropicKeyMessage(msg *anthropic.MessageParam) bool
Detects if a message is definitely an Anthropic message
func DetectGeminiKeyMessage ¶ added in v1.13.10
Detects if a message is definitely a Gemini message
func DetectMessageProvider ¶ added in v1.5.1
Detects the conversation provider based on message format using a scanner. This is more efficient for large files as it doesn't read the entire file into memory.
func DetectMessageProviderByContent ¶ added in v1.14.14
* Detects the conversation provider based on message format. * Supports both JSONL (preferred) and legacy JSON array formats.
func DetectMessageProviderFromLine ¶ added in v1.14.14
* Helper function to detect provider from a single JSONL line or message object
func DetectModelProvider ¶ added in v1.6.0
DetectModelProvider detects the model provider based on endpoint and model name. It first checks the endpoint domain, then falls back to model name patterns. This dual detection handles Chinese models hosted on US platforms (AWS, CoreWeave, etc.)
func DetectOpenAIKeyMessage ¶ added in v1.13.10
func DetectOpenAIKeyMessage(msg *openai.ChatCompletionMessage) bool
Detects if a message is definitely an OpenAI message
func DisableAgentMemory ¶ added in v1.13.18
func DisableAgentSkills ¶ added in v1.13.18
func DisableCodeExecution ¶ added in v1.7.1
func DisableCodeExecution()
func DisableMCPServers ¶ added in v1.13.18
func DisableMarkdown ¶ added in v1.13.18
func DisableSubAgents ¶ added in v1.13.18
func DisableTokenUsage ¶ added in v1.13.18
func DisableWebSearch ¶ added in v1.14.1
func EnableAgentMemory ¶ added in v1.13.18
func EnableAgentSkills ¶ added in v1.13.18
func EnableCodeExecution ¶ added in v1.7.1
func EnableCodeExecution()
func EnableMCPServers ¶ added in v1.13.18
func EnableMarkdown ¶ added in v1.13.18
func EnableSubAgents ¶ added in v1.13.18
func EnableTokenUsage ¶ added in v1.13.18
func EnableWebSearch ¶ added in v1.14.1
func EndWithNewline ¶ added in v1.9.7
func EstimateAnthropicMessageTokens ¶ added in v1.13.5
func EstimateAnthropicMessageTokens(msg anthropic.MessageParam) int
EstimateAnthropicMessageTokens estimates tokens for an Anthropic message.
func EstimateAnthropicMessagesTokens ¶ added in v1.13.5
func EstimateAnthropicMessagesTokens(messages []anthropic.MessageParam) int
EstimateAnthropicMessagesTokens estimates total tokens for a slice of Anthropic messages.
func EstimateAnthropicToolTokens ¶ added in v1.13.5
func EstimateAnthropicToolTokens(tools []anthropic.ToolUnionParam) int
EstimateAnthropicToolTokens estimates tokens for a slice of Anthropic tools.
func EstimateGeminiMessageTokens ¶ added in v1.12.14
EstimateGeminiMessageTokens estimates tokens for a Gemini content message.
func EstimateGeminiMessagesTokens ¶ added in v1.12.14
EstimateGeminiMessagesTokens estimates total tokens for a slice of Gemini messages.
func EstimateGeminiToolTokens ¶ added in v1.12.14
EstimateGeminiToolTokens estimates tokens for a slice of Gemini tools
func EstimateJSONTokens ¶ added in v1.12.14
func EstimateJSONTokens(data interface{}) int
EstimateJSONTokens estimates tokens for arbitrary JSON data. Useful for estimating tool results or complex structured content.
func EstimateOpenAIMessageTokens ¶ added in v1.12.14
func EstimateOpenAIMessageTokens(msg openai.ChatCompletionMessage) int
EstimateOpenAIMessageTokens estimates tokens for an OpenAI chat message. This accounts for role tokens, content, and tool calls.
func EstimateOpenAIMessagesTokens ¶ added in v1.12.14
func EstimateOpenAIMessagesTokens(messages []openai.ChatCompletionMessage) int
EstimateOpenAIMessagesTokens estimates total tokens for a slice of OpenAI messages.
func EstimateOpenAIToolTokens ¶ added in v1.12.14
EstimateOpenAIToolTokens estimates tokens for a slice of OpenAI tools
func EstimateOpenChatMessageTokens ¶ added in v1.12.14
func EstimateOpenChatMessageTokens(msg *openchat.ChatCompletionMessage) int
EstimateOpenChatMessageTokens estimates tokens for an OpenChat (Volcengine) message.
func EstimateOpenChatMessagesTokens ¶ added in v1.12.14
func EstimateOpenChatMessagesTokens(messages []*openchat.ChatCompletionMessage) int
EstimateOpenChatMessagesTokens estimates total tokens for a slice of OpenChat messages.
func EstimateOpenChatToolTokens ¶ added in v1.12.14
EstimateOpenChatToolTokens estimates tokens for a slice of OpenChat tools
func EstimateTokens ¶ added in v1.12.14
EstimateTokens provides fast character-based estimation for text. This is approximately 90% accurate compared to tiktoken.
func ExtractTextFromURL ¶ added in v1.6.14
func ExtractTextFromURL(url string, config *ExtractorConfig) ([]string, error)
ExtractTextFromURL fetches a URL and extracts the main text content Automatically detects content type and routes to appropriate handler: - text/plain, text/markdown: returns content directly - application/pdf: extracts text using PDF reader - text/html: parses and extracts text with boilerplate removal
func ExtractThinkTags ¶ added in v1.12.11
ExtractThinkTags extracts thinking content from <think>...</think> tags. Some providers (like MiniMax, some Qwen endpoints) embed reasoning content in <think> tags within the regular content field instead of using a separate reasoning_content field.
Returns:
- thinking: the extracted thinking content (empty if no tags found)
- cleaned: the content with <think> tags removed
func FetchProcess ¶ added in v1.6.14
func FilterOpenToolArguments ¶ added in v1.14.2
func FindConvosByIndex ¶ added in v1.10.6
FindConvosByIndex finds a conversation by index. If the index is out of range, it returns an error. If the index is valid, it returns the conversation name.
func FormatMinutesSeconds ¶ added in v1.10.1
func GenerateTempFileName ¶ added in v1.10.9
func GenerateTempFileName() string
func GetAllEmbeddingTools ¶ added in v1.9.2
func GetAllEmbeddingTools() []string
func GetAllMemoryTools ¶ added in v1.13.18
func GetAllMemoryTools() []string
func GetAllOpenTools ¶ added in v1.14.2
func GetAllOpenTools() []string
func GetAllSearchTools ¶ added in v1.9.12
func GetAllSearchTools() []string
func GetAllSkillTools ¶ added in v1.13.18
func GetAllSkillTools() []string
func GetAllSubagentTools ¶ added in v1.13.18
func GetAllSubagentTools() []string
func GetAnthropicMessageKey ¶ added in v1.13.5
func GetAnthropicMessageKey(msg anthropic.MessageParam) string
GetAnthropicMessageKey generates a cache key for an Anthropic message.
func GetConvoDir ¶ added in v1.6.10
func GetConvoDir() string
func GetCurrentTokenCount ¶ added in v1.12.14
func GetCurrentTokenCount(messages []openai.ChatCompletionMessage) int
GetCurrentTokenCount returns the current token count for OpenAI messages
func GetCurrentTokenCountGemini ¶ added in v1.12.14
GetCurrentTokenCountGemini returns the current token count for Gemini messages
func GetCurrentTokenCountOpenChat ¶ added in v1.12.14
func GetCurrentTokenCountOpenChat(messages []*model.ChatCompletionMessage) int
GetCurrentTokenCountOpenChat returns the current token count for OpenChat messages
func GetDefaultSearchEngineName ¶ added in v1.6.0
func GetDefaultSearchEngineName() string
func GetFileContent ¶ added in v1.10.0
func GetFilePath ¶ added in v1.5.1
func GetGeminiMessageKey ¶ added in v1.12.14
GetGeminiMessageKey generates a cache key for a Gemini message.
func GetMIMEType ¶ added in v1.4.0
func GetMIMETypeByContent ¶ added in v1.4.0
func GetNoneSearchEngineName ¶ added in v1.6.2
func GetNoneSearchEngineName() string
func GetOpenAIMessageKey ¶ added in v1.12.14
func GetOpenAIMessageKey(msg openai.ChatCompletionMessage) string
GetOpenAIMessageKey generates a cache key for an OpenAI message by JSON marshaling. This captures ALL fields (Content, ReasoningContent, ToolCalls, MultiContent, etc.) ensuring different messages never produce the same key.
func GetOpenChatMessageKey ¶ added in v1.12.14
func GetOpenChatMessageKey(msg *model.ChatCompletionMessage) string
GetOpenChatMessageKey generates a cache key for an OpenChat (Volcengine) message.
func GetSanitizeTitle ¶ added in v1.5.1
func GetStringValue ¶ added in v1.6.2
Helper function to safely extract string values
func GetUserConfigDir ¶ added in v1.5.1
func GetUserConfigDir() string
func HasContent ¶ added in v1.8.1
func InitLogger ¶ added in v1.2.0
func InitLogger()
func IsAgentMemoryEnabled ¶ added in v1.13.18
* Agent Memory
func IsAgentSkillsEnabled ¶ added in v1.13.18
* Agent Skills
func IsAudioMIMEType ¶ added in v1.7.1
func IsAvailableMCPTool ¶ added in v1.14.2
IsAvailableMCPTool checks if a tool is available in the MCP tools.
func IsAvailableOpenTool ¶ added in v1.14.2
IsAvailableOpenTool checks if a tool is available for the current agent. It checks if the tool is available in the embedding tools, search tools, skill tools, memory tools, subagent tools, or MCP tools.
func IsCodeExecutionEnabled ¶ added in v1.7.1
func IsCodeExecutionEnabled() bool
func IsExcelMIMEType ¶ added in v1.4.0
func IsImageMIMEType ¶ added in v1.4.0
func IsMCPServersEnabled ¶ added in v1.13.18
* MCP Servers
func IsMarkdownEnabled ¶ added in v1.13.18
* Markdown
func IsModelGemini3 ¶ added in v1.13.5
IsModelGemini3 checks if the model name is a Gemini 3 model
func IsPDFMIMEType ¶ added in v1.4.0
func IsStdinPipe ¶ added in v1.4.0
func IsSubAgentsEnabled ¶ added in v1.13.18
* Sub Agents
func IsSwitchAgentError ¶ added in v1.13.10
func IsTextMIMEType ¶ added in v1.4.0
func IsTokenUsageEnabled ¶ added in v1.13.18
* Token Usage
func IsUnknownMIMEType ¶ added in v1.4.0
func IsUserCancelError ¶ added in v1.14.13
func IsVideoMIMEType ¶ added in v1.13.1
func IsWebSearchEnabled ¶ added in v1.14.1
* Web Search
func MakeUserSubDir ¶ added in v1.5.1
func RemoveMemoryTools ¶ added in v1.13.18
RemoveMemoryTools removes memory tools from the given tools slice.
func RemoveSearchTools ¶ added in v1.14.1
RemoveSearchTools removes search tools from the given tools slice.
func RemoveSkillTools ¶ added in v1.13.18
RemoveSkillTools removes skill tools from the given tools slice.
func RemoveSubagentTools ¶ added in v1.13.18
RemoveSubagentTools removes subagent tools from the given tools slice.
func RenderAnthropicConversationLog ¶ added in v1.13.6
RenderAnthropicConversationLog returns a string summary of Anthropic conversation (JSONL or JSON array format)
func RenderGeminiConversationLog ¶ added in v1.13.6
RenderGeminiConversationLog returns a string summary of Gemini conversation (JSONL or JSON array format)
func RenderOpenAIConversationLog ¶ added in v1.13.6
RenderOpenAIConversationLog returns a string summary of OpenAI conversation (JSONL or JSON array format)
func TruncateString ¶ added in v1.5.1
Helper function to truncate strings with ellipsis
Types ¶
type Agent ¶ added in v1.9.4
type Agent struct {
Model *ModelInfo
SystemPrompt string
UserPrompt string
Files []*FileData // Attachment files
NotifyChan chan<- StreamNotify // Sub Channel to send notifications
DataChan chan<- StreamData // Sub Channel to receive streamed text data
ProceedChan <-chan bool // Sub Channel to receive proceed signal
SearchEngine *SearchEngine // Search engine name
ToolsUse data.ToolsUse // Use tools
EnabledTools []string // List of enabled embedding tools
UseCodeTool bool // Use code tool
ThinkingLevel ThinkingLevel // Thinking level: off, low, medium, high
MCPClient *MCPClient // MCP client for MCP tools
MaxRecursions int // Maximum number of recursions for model calls
Markdown *Markdown // Markdown renderer
TokenUsage *TokenUsage // Token usage metainfo
Std *ui.StdRenderer // Standard renderer
OutputFile *ui.FileRenderer // File renderer
Status StatusStack // Stack to manage streaming status
Convo ConversationManager // Conversation manager
LastWrittenData string // Last written data
// Sub-agent orchestration
AgentName string // Current agent name for metadata tracking
Verbose bool // Whether verbose output mode is enabled
}
func (*Agent) CompleteReasoning ¶ added in v1.9.7
func (ag *Agent) CompleteReasoning()
func (*Agent) GenerateAnthropicStream ¶ added in v1.13.5
GenerateAnthropicStream generates a streaming response using Anthropic API
func (*Agent) GenerateGeminiStream ¶ added in v1.13.14
func (*Agent) GenerateOpenAIStream ¶ added in v1.10.4
GenerateOpenAIStream generates a streaming response using OpenAI API
func (*Agent) GenerateOpenChatStream ¶ added in v1.9.4
In the current OpenChat API, we can't use cached tokens. The context API and response API are not available in the current Go library.
func (*Agent) SortAnthropicMessagesByOrder ¶ added in v1.13.5
func (*Agent) SortGeminiMessagesByOrder ¶ added in v1.14.13
func (*Agent) SortOpenAIMessagesByOrder ¶ added in v1.12.20
* Sort the messages by order * 1. System Prompt -- always at the top * 2. History Prompts * - User Prompt * - Assistant Prompt
func (*Agent) SortOpenChatMessagesByOrder ¶ added in v1.12.20
* Sort the messages by order * 1. System Prompt -- always at the top * 2. History Prompts * - User Prompt * - Assistant Prompt
func (*Agent) StartIndicator ¶ added in v1.9.7
func (*Agent) StartReasoning ¶ added in v1.9.7
func (ag *Agent) StartReasoning()
StartReasoning notifies the user and logs to file that the agent has started thinking. It writes a status message to both Std and OutputFile if they are available.
func (*Agent) StopIndicator ¶ added in v1.9.7
func (ag *Agent) StopIndicator()
func (*Agent) WriteDiffConfirm ¶ added in v1.11.10
func (*Agent) WriteFunctionCall ¶ added in v1.9.7
func (*Agent) WriteFunctionCallOver ¶ added in v1.14.8
func (ag *Agent) WriteFunctionCallOver()
func (*Agent) WriteMarkdown ¶ added in v1.9.7
func (ag *Agent) WriteMarkdown()
func (*Agent) WriteReasoning ¶ added in v1.9.7
WriteReasoning writes the provided reasoning text to both the standard output and an output file, applying specific formatting to each if they are available.
func (*Agent) WriteText ¶ added in v1.9.7
WriteText writes the given text to the Agent's Std, Markdown, and OutputFile writers if they are set.
func (*Agent) WriteUsage ¶ added in v1.9.7
func (ag *Agent) WriteUsage()
type AgentOptions ¶ added in v1.9.7
type AgentOptions struct {
Prompt string
SysPrompt string
Files []*FileData
ModelInfo *data.Model
MaxRecursions int
ThinkingLevel string
EnabledTools []string // List of enabled embedding tools
Capabilities []string // List of enabled capabilities
YoloMode bool // Whether to automatically approve tools
OutputFile string
QuietMode bool
ConvoName string
MCPConfig map[string]*data.MCPServer
// Sub-agent orchestration fields
AgentName string // Name of the agent running this task
}
type AgentRunner ¶ added in v1.13.14
type AgentRunner func(*AgentOptions) error
AgentRunner defines the function signature for executing an agent
type Anthropic ¶ added in v1.13.5
type Anthropic struct {
// contains filtered or unexported fields
}
type AnthropicConversation ¶ added in v1.13.5
type AnthropicConversation struct {
BaseConversation
Messages []anthropic.MessageParam
}
AnthropicConversation represents a conversation using Anthropic format
func (*AnthropicConversation) Clear ¶ added in v1.13.5
func (c *AnthropicConversation) Clear() error
Clear removes all messages from the conversation
func (*AnthropicConversation) GetMessages ¶ added in v1.13.5
func (c *AnthropicConversation) GetMessages() interface{}
func (*AnthropicConversation) Load ¶ added in v1.13.5
func (c *AnthropicConversation) Load() error
Load retrieves the conversation from disk (JSONL format).
func (*AnthropicConversation) MarshalMessages ¶ added in v1.14.14
func (c *AnthropicConversation) MarshalMessages(messages []anthropic.MessageParam) []byte
func (*AnthropicConversation) Push ¶ added in v1.13.5
func (c *AnthropicConversation) Push(messages ...interface{}) error
Push adds multiple messages to the conversation (high performance). Uses append mode for incremental saves using JSONL format (one message per line).
func (*AnthropicConversation) Save ¶ added in v1.13.5
func (c *AnthropicConversation) Save() error
Save persists the conversation to disk using JSONL format (one message per line).
func (*AnthropicConversation) SetMessages ¶ added in v1.13.5
func (c *AnthropicConversation) SetMessages(messages interface{})
type AtRefProcessor ¶ added in v1.12.9
type AtRefProcessor struct {
// contains filtered or unexported fields
}
AtRefProcessor handles @ reference processing
func NewAtRefProcessor ¶ added in v1.12.9
func NewAtRefProcessor() *AtRefProcessor
NewAtRefProcessor creates a new @ reference processor
func (*AtRefProcessor) AddExcludePattern ¶ added in v1.12.9
func (p *AtRefProcessor) AddExcludePattern(pattern string)
AddExcludePattern adds a pattern to exclude from directory listings
func (*AtRefProcessor) ParseAtReferences ¶ added in v1.12.9
func (p *AtRefProcessor) ParseAtReferences(text string) []AtReference
ParseAtReferences finds all @ references in the given text
func (*AtRefProcessor) ProcessReferences ¶ added in v1.12.9
func (p *AtRefProcessor) ProcessReferences(text string, references []AtReference) (string, error)
ProcessReferences processes all @ references and returns augmented text
func (*AtRefProcessor) ProcessText ¶ added in v1.12.9
func (p *AtRefProcessor) ProcessText(text string) (string, error)
ProcessText processes text containing @ references and returns augmented text
func (*AtRefProcessor) SetMaxDirItems ¶ added in v1.12.9
func (p *AtRefProcessor) SetMaxDirItems(count int)
SetMaxDirItems sets the maximum number of directory items to list
func (*AtRefProcessor) SetMaxFileSize ¶ added in v1.12.9
func (p *AtRefProcessor) SetMaxFileSize(size int64)
SetMaxFileSize sets the maximum file size to include
type AtReference ¶ added in v1.12.9
type AtReference struct {
Original string // Original @ reference text (e.g., "@main.go")
Path string // Resolved file/directory path
}
AtReference represents a single @ reference found in text
type BaseConversation ¶ added in v1.6.0
BaseConversation holds common fields and methods for all conversation types
func (*BaseConversation) Clear ¶ added in v1.6.0
func (c *BaseConversation) Clear() error
func (*BaseConversation) GetMessages ¶ added in v1.10.6
func (c *BaseConversation) GetMessages() interface{}
func (*BaseConversation) GetPath ¶ added in v1.6.2
func (c *BaseConversation) GetPath() string
func (*BaseConversation) Load ¶ added in v1.10.6
func (c *BaseConversation) Load() error
func (*BaseConversation) Open ¶ added in v1.10.6
func (c *BaseConversation) Open(title string) error
Open initializes the conversation with the provided title, resolving an index to the actual conversation name if necessary. It resets the messages, sanitizes the conversation name for the path, and sets the internal path accordingly. Returns an error if the title cannot be resolved.
func (*BaseConversation) Push ¶ added in v1.10.6
func (c *BaseConversation) Push(messages ...interface{})
func (*BaseConversation) Save ¶ added in v1.10.6
func (c *BaseConversation) Save() error
func (*BaseConversation) SetMessages ¶ added in v1.10.6
func (c *BaseConversation) SetMessages(messages interface{})
func (*BaseConversation) SetPath ¶ added in v1.6.0
func (c *BaseConversation) SetPath(title string)
SetPath sets the file path for saving the conversation
type ContextManager ¶ added in v1.12.14
type ContextManager struct {
MaxInputTokens int // Maximum input tokens allowed
MaxOutputTokens int // Maximum output tokens allowed (new field for Anthropic)
Strategy TruncationStrategy // Strategy for handling overflow
BufferPercent float64 // Safety buffer (0.0-1.0)
}
ContextManager handles context window limits for LLM conversations
func NewContextManager ¶ added in v1.12.14
func NewContextManager(limits ModelLimits, strategy TruncationStrategy) *ContextManager
NewContextManager creates a context manager with the given model limits
func NewContextManagerForModel ¶ added in v1.12.14
func NewContextManagerForModel(modelName string, strategy TruncationStrategy) *ContextManager
NewContextManagerForModel creates a context manager by looking up the model name
func (*ContextManager) PrepareAnthropicMessages ¶ added in v1.13.5
func (cm *ContextManager) PrepareAnthropicMessages(messages []anthropic.MessageParam, systemPrompt string, tools []anthropic.ToolUnionParam) ([]anthropic.MessageParam, bool)
PrepareAnthropicMessages processes messages to fit within context window limits.
func (*ContextManager) PrepareGeminiMessages ¶ added in v1.12.14
func (cm *ContextManager) PrepareGeminiMessages(messages []*genai.Content, systemPrompt string, tools []*genai.Tool) ([]*genai.Content, bool)
PrepareGeminiMessages processes messages to fit within context window limits.
func (*ContextManager) PrepareOpenAIMessages ¶ added in v1.12.14
func (cm *ContextManager) PrepareOpenAIMessages(messages []openai.ChatCompletionMessage, tools []openai.Tool) ([]openai.ChatCompletionMessage, bool)
PrepareOpenAIMessages processes messages to fit within context window limits. Returns the processed messages and a boolean indicating if truncation occurred.
func (*ContextManager) PrepareOpenChatMessages ¶ added in v1.12.14
func (cm *ContextManager) PrepareOpenChatMessages(messages []*model.ChatCompletionMessage, tools []*model.Tool) ([]*model.ChatCompletionMessage, bool)
PrepareOpenChatMessages processes messages to fit within context window limits for OpenChat format.
type ConversationManager ¶ added in v1.6.0
type ConversationManager interface {
SetPath(title string)
GetPath() string
Load() error
Save() error
Open(title string) error
Clear() error
Push(messages ...interface{}) error
GetMessages() interface{}
SetMessages(messages interface{})
}
ConversationManager is an interface for handling conversation history
func ConstructConversationManager ¶ added in v1.10.6
func ConstructConversationManager(convoName string, provider string) (ConversationManager, error)
type ConvoMeta ¶ added in v1.6.10
func ListSortedConvos ¶ added in v1.6.10
ListSortedConvos returns a slice of convoMeta sorted by modTime descending ListSortedConvos(dir, false, false) // Fast - no file reads ListSortedConvos(dir, true, false) // Fast - only metadata ListSortedConvos(dir, false, true) // Slow - reads all files for provider
type ExtractorConfig ¶ added in v1.6.14
type ExtractorConfig struct {
UserAgent string
HeaderAccept string
Timeout time.Duration
MinTextLength int
BoilerplateIDs []string
BoilerplateClasses []string
}
Configuration options for the text extractor
type FileData ¶ added in v1.4.0
type FileData struct {
// contains filtered or unexported fields
}
func NewFileData ¶ added in v1.4.0
type GeminiAgent ¶ added in v1.13.14
type GeminiAgent struct {
// With *Agent embedded, GeminiAgent automatically has access to all of Agent's fields and methods.
*Agent // Embedded pointer to Agent
// contains filtered or unexported fields
}
func (*GeminiAgent) GeminiActivateSkillToolCall ¶ added in v1.13.18
func (ga *GeminiAgent) GeminiActivateSkillToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiCloseDiffConfirm ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiCloseDiffConfirm()
Diff close func
func (*GeminiAgent) GeminiCopyToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiCopyToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiCreateDirectoryToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiCreateDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiDeleteDirectoryToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiDeleteDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiDeleteFileToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiDeleteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiEditFileToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiEditFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiGetStateToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiGetStateToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiListAgentToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiListAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiListDirectoryToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiListDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiListMemoryToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiListMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiListStateToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiListStateToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiMCPToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiMCPToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiMoveToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiMoveToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiReadFileToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiReadFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiReadMultipleFilesToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiReadMultipleFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiSaveMemoryToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiSaveMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiSearchFilesToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiSearchFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiSearchTextInFileToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiSearchTextInFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiSetStateToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiSetStateToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiShellToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiShellToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiShowDiffConfirm ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiShowDiffConfirm(diff string)
Diff confirm func
func (*GeminiAgent) GeminiSpawnSubAgentsToolCall ¶ added in v1.14.3
func (ga *GeminiAgent) GeminiSpawnSubAgentsToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiSwitchAgentToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiSwitchAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiWebFetchToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiWebFetchToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*GeminiAgent) GeminiWriteFileToolCall ¶ added in v1.13.14
func (ga *GeminiAgent) GeminiWriteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
type GeminiConversation ¶ added in v1.6.0
type GeminiConversation struct {
BaseConversation
Messages []*genai.Content
}
- Google Gemini Conversation
GeminiConversation manages conversations for Google's Gemini model
func (*GeminiConversation) Clear ¶ added in v1.13.14
func (g *GeminiConversation) Clear() error
Clear removes all messages from the conversation
func (*GeminiConversation) GetMessages ¶ added in v1.13.14
func (g *GeminiConversation) GetMessages() interface{}
func (*GeminiConversation) Load ¶ added in v1.6.0
func (g *GeminiConversation) Load() error
Load retrieves the Gemini conversation from disk (JSONL format).
func (*GeminiConversation) MarshalMessages ¶ added in v1.14.14
func (g *GeminiConversation) MarshalMessages(messages []*genai.Content) []byte
func (*GeminiConversation) Push ¶ added in v1.13.14
func (g *GeminiConversation) Push(messages ...interface{}) error
Push adds multiple content items to the history (high performance). Uses append-mode for incremental saves using JSONL format (one message per line).
func (*GeminiConversation) Save ¶ added in v1.6.0
func (g *GeminiConversation) Save() error
Save persists the Gemini conversation to disk using JSONL format (one message per line).
func (*GeminiConversation) SetMessages ¶ added in v1.13.14
func (g *GeminiConversation) SetMessages(messages interface{})
type MCPClient ¶ added in v1.11.4
type MCPClient struct {
// contains filtered or unexported fields
}
func GetMCPClient ¶ added in v1.11.4
func GetMCPClient() *MCPClient
func (*MCPClient) AddHttpServer ¶ added in v1.11.4
func (*MCPClient) AddSseServer ¶ added in v1.11.4
func (*MCPClient) AddStdServer ¶ added in v1.11.4
func (*MCPClient) FindTool ¶ added in v1.11.4
func (mc *MCPClient) FindTool(toolName string) *MCPSession
func (*MCPClient) GetAllServers ¶ added in v1.11.4
Returns a map grouping tools by MCP server session name, with each session containing a slice of its available tools.
func (*MCPClient) GetPrompts ¶ added in v1.11.8
func (mc *MCPClient) GetPrompts(session *MCPSession) (*[]MCPPrompt, error)
func (*MCPClient) GetResources ¶ added in v1.11.8
func (mc *MCPClient) GetResources(session *MCPSession) (*[]MCPResource, error)
func (*MCPClient) GetTools ¶ added in v1.11.4
func (mc *MCPClient) GetTools(session *MCPSession) (*[]MCPTool, error)
type MCPLoadOption ¶ added in v1.11.8
type MCPResource ¶ added in v1.11.8
type MCPServer ¶ added in v1.11.4
type MCPServer struct {
Name string
Allowed bool
Tools *[]MCPTool
Resources *[]MCPResource
Prompts *[]MCPPrompt
}
type MCPSession ¶ added in v1.11.4
type MCPSession struct {
// contains filtered or unexported fields
}
type MCPToolResponse ¶ added in v1.11.6
type MCPToolResponse struct {
Types []MCPToolResponseType
Contents []string
}
type MCPToolResponseType ¶ added in v1.11.6
type MCPToolResponseType string
const ( MCPResponseText MCPToolResponseType = "text" MCPResponseImage MCPToolResponseType = "image" MCPResponseAudio MCPToolResponseType = "audio" )
type Markdown ¶ added in v1.9.7
type Markdown struct {
// contains filtered or unexported fields
}
func NewMarkdown ¶ added in v1.9.7
func NewMarkdown() *Markdown
NewMarkdown creates a new instance of Markdown
type ModelLimits ¶ added in v1.12.14
type ModelLimits struct {
ContextWindow int // Total context window in tokens
MaxOutputTokens int // Maximum output tokens allowed
}
ModelLimits contains context window configuration for a model
func GetModelLimits ¶ added in v1.12.14
func GetModelLimits(modelName string) ModelLimits
GetModelLimits retrieves the limits for a given model name. It performs exact match first, then pattern matching, then returns defaults.
func (ModelLimits) MaxInputTokens ¶ added in v1.12.14
func (ml ModelLimits) MaxInputTokens(bufferPercent float64) int
MaxInputTokens calculates the maximum input tokens with a safety buffer. The buffer ensures there's always room for the model's response.
type OpenAI ¶ added in v1.10.4
type OpenAI struct {
// contains filtered or unexported fields
}
OpenAI manages the state of an ongoing conversation with an AI assistant
type OpenAIConversation ¶ added in v1.10.4
type OpenAIConversation struct {
BaseConversation
Messages []openai.ChatCompletionMessage
}
OpenAIConversation represents a conversation using OpenAI format
func (*OpenAIConversation) Clear ¶ added in v1.10.4
func (c *OpenAIConversation) Clear() error
Clear removes all messages from the conversation
func (*OpenAIConversation) GetMessages ¶ added in v1.10.6
func (c *OpenAIConversation) GetMessages() interface{}
func (*OpenAIConversation) Load ¶ added in v1.10.4
func (c *OpenAIConversation) Load() error
Load retrieves the conversation from disk (JSONL format).
func (*OpenAIConversation) MarshalMessages ¶ added in v1.14.14
func (c *OpenAIConversation) MarshalMessages(messages []openai.ChatCompletionMessage) []byte
func (*OpenAIConversation) Push ¶ added in v1.10.6
func (c *OpenAIConversation) Push(messages ...interface{}) error
Push adds multiple messages to the conversation (high performance). Uses append-mode for incremental saves using JSONL format (one message per line).
func (*OpenAIConversation) Save ¶ added in v1.10.4
func (c *OpenAIConversation) Save() error
Save persists the conversation to disk using JSONL format (one message per line).
func (*OpenAIConversation) SetMessages ¶ added in v1.10.6
func (c *OpenAIConversation) SetMessages(messages interface{})
type OpenChat ¶ added in v1.5.1
type OpenChat struct {
// contains filtered or unexported fields
}
OpenChat manages the state of an ongoing conversation with an AI assistant
type OpenChatConversation ¶ added in v1.6.0
type OpenChatConversation struct {
BaseConversation
Messages []*model.ChatCompletionMessage
}
OpenChatConversation manages conversations for Volcengine model
func (*OpenChatConversation) Clear ¶ added in v1.10.6
func (c *OpenChatConversation) Clear() error
Clear removes all messages from the conversation
func (*OpenChatConversation) GetMessages ¶ added in v1.10.6
func (c *OpenChatConversation) GetMessages() interface{}
func (*OpenChatConversation) Load ¶ added in v1.6.0
func (c *OpenChatConversation) Load() error
Load retrieves the conversation from disk (JSONL format).
func (*OpenChatConversation) MarshalMessages ¶ added in v1.14.14
func (c *OpenChatConversation) MarshalMessages(messages []*model.ChatCompletionMessage) []byte
func (*OpenChatConversation) Push ¶ added in v1.10.6
func (c *OpenChatConversation) Push(messages ...interface{}) error
Push adds multiple messages to the conversation (high performance). Uses append-mode for incremental saves using JSONL format (one message per line).
func (*OpenChatConversation) Save ¶ added in v1.6.0
func (c *OpenChatConversation) Save() error
Save persists the conversation to disk using JSONL format (one message per line).
func (*OpenChatConversation) SetMessages ¶ added in v1.10.6
func (c *OpenChatConversation) SetMessages(messages interface{})
type OpenFunctionDefinition ¶ added in v1.10.4
type OpenFunctionDefinition struct {
Name string
Description string
Parameters map[string]interface{}
}
OpenFunctionDefinition is a generic function definition that is not tied to any specific model.
type OpenProcessor ¶ added in v1.10.4
type OpenProcessor struct {
// contains filtered or unexported fields
}
OpenProcessor is the main processor for OpenAI-like models and their tools implementation. It manages the context, notifications, data streaming, and tool usage; it handles queries and references, and maintains the status stack.
func (*OpenProcessor) AnthropicActivateSkillToolCall ¶ added in v1.13.18
func (op *OpenProcessor) AnthropicActivateSkillToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicCopyToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicCopyToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicCreateDirectoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicCreateDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicDeleteDirectoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicDeleteDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicDeleteFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicDeleteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicEditFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicEditFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicGetStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) AnthropicGetStateToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicListAgentToolCall ¶ added in v1.13.14
func (op *OpenProcessor) AnthropicListAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicListDirectoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicListDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicListMemoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicListMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicListStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) AnthropicListStateToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicMCPToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicMCPToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicMoveToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicMoveToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicReadFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicReadFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicReadMultipleFilesToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicReadMultipleFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSaveMemoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicSaveMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSearchFilesToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicSearchFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSearchTextInFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicSearchTextInFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSetStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) AnthropicSetStateToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicShellToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicShellToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
Anthropic tool implementations (wrapper functions)
func (*OpenProcessor) AnthropicSpawnSubAgentsToolCall ¶ added in v1.14.3
func (op *OpenProcessor) AnthropicSpawnSubAgentsToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSwitchAgentToolCall ¶ added in v1.13.10
func (op *OpenProcessor) AnthropicSwitchAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicWebFetchToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicWebFetchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicWebSearchToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicWebSearchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicWriteFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicWriteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) OpenAIActivateSkillToolCall ¶ added in v1.13.18
func (op *OpenProcessor) OpenAIActivateSkillToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAICopyToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAICopyToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAICreateDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAICreateDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIDeleteDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIDeleteDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIDeleteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIDeleteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIEditFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIEditFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIGetStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenAIGetStateToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIListAgentToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenAIListAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIListDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIListDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIListMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenAIListMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIListStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenAIListStateToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIMCPToolCall ¶ added in v1.11.4
func (op *OpenProcessor) OpenAIMCPToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIMoveToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIMoveToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIReadFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIReadFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIReadMultipleFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIReadMultipleFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISaveMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenAISaveMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISearchFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAISearchFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISearchTextInFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAISearchTextInFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISetStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenAISetStateToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIShellToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIShellToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
OpenAI tool implementations (wrapper functions)
func (*OpenProcessor) OpenAISpawnSubAgentsToolCall ¶ added in v1.14.3
func (op *OpenProcessor) OpenAISpawnSubAgentsToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISwitchAgentToolCall ¶ added in v1.13.10
func (op *OpenProcessor) OpenAISwitchAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIWebFetchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIWebFetchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIWebSearchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIWebSearchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIWriteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIWriteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatActivateSkillToolCall ¶ added in v1.13.18
func (op *OpenProcessor) OpenChatActivateSkillToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatCopyToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatCopyToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatCreateDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatCreateDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatDeleteDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatDeleteDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatDeleteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatDeleteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatEditFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatEditFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatGetStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenChatGetStateToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatListAgentToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenChatListAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatListDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatListDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatListMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenChatListMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatListStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenChatListStateToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatMCPToolCall ¶ added in v1.11.4
func (op *OpenProcessor) OpenChatMCPToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatMoveToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatMoveToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatReadFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatReadFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
OpenChat tool implementations (wrapper functions)
func (*OpenProcessor) OpenChatReadMultipleFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatReadMultipleFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSaveMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenChatSaveMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSearchFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatSearchFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSearchTextInFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatSearchTextInFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSetStateToolCall ¶ added in v1.13.14
func (op *OpenProcessor) OpenChatSetStateToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatShellToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatShellToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSpawnSubAgentsToolCall ¶ added in v1.14.3
func (op *OpenProcessor) OpenChatSpawnSubAgentsToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSwitchAgentToolCall ¶ added in v1.13.10
func (op *OpenProcessor) OpenChatSwitchAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatWebFetchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatWebFetchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatWebSearchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatWebSearchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatWriteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatWriteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
type OpenTool ¶ added in v1.10.4
type OpenTool struct {
Type ToolType
Function *OpenFunctionDefinition
}
OpenTool is a generic tool definition that is not tied to any specific model.
func GetOpenToolsFiltered ¶ added in v1.14.2
GetOpenToolsFiltered returns tools filtered by the allowed list. If allowedTools is nil or empty, returns all tools. Unknown tool names are gracefully ignored.
func MCPToolsToOpenTool ¶ added in v1.11.4
MCPToolsToOpenTool converts an MCPTools struct to an OpenTool with proper JSON schema
func (*OpenTool) ToAnthropicTool ¶ added in v1.13.5
func (ot *OpenTool) ToAnthropicTool() anthropic.ToolUnionParam
ToAnthropicTool converts an OpenTool to an anthropic.ToolUnionParam
func (*OpenTool) ToGeminiFunctions ¶ added in v1.10.4
func (ot *OpenTool) ToGeminiFunctions() *genai.FunctionDeclaration
ToGeminiFunctions converts an OpenTool to a genai.FunctionDeclaration
func (*OpenTool) ToOpenAITool ¶ added in v1.10.4
ToOpenAITool converts an OpenTool to an openai.Tool
func (*OpenTool) ToOpenChatTool ¶ added in v1.10.4
ToOpenChatTool converts an OpenTool to a model.Tool
type SearchEngine ¶ added in v1.9.4
type SearchEngine struct {
UseSearch bool
Name string
ApiKey string
CxKey string
MaxReferences int
// DeepDive indicates how many links to fetch content from
// If 0, it defaults to a small number (e.g. 3) for efficiency.
DeepDive int
}
func (*SearchEngine) BingSearch ¶ added in v1.9.4
func (s *SearchEngine) BingSearch(query string) (map[string]any, error)
--- Simulation of Bing Search ---
func (*SearchEngine) GoogleSearch ¶ added in v1.9.4
func (s *SearchEngine) GoogleSearch(query string) (map[string]any, error)
Alternative approach with explicit conversions for protocol buffer compatibility
func (*SearchEngine) NoneSearch ¶ added in v1.9.4
func (s *SearchEngine) NoneSearch(query string) (map[string]any, error)
func (*SearchEngine) RetrieveQueries ¶ added in v1.9.4
func (s *SearchEngine) RetrieveQueries(queries []string) string
func (*SearchEngine) RetrieveReferences ¶ added in v1.9.4
func (s *SearchEngine) RetrieveReferences(references []map[string]any) string
func (*SearchEngine) SerpAPISearch ¶ added in v1.9.4
func (*SearchEngine) TavilySearch ¶ added in v1.9.4
func (s *SearchEngine) TavilySearch(query string) (map[string]any, error)
type SkillManager ¶ added in v1.13.18
type SkillManager struct {
// contains filtered or unexported fields
}
SkillManager handles skill operations
func GetSkillManager ¶ added in v1.13.18
func GetSkillManager() *SkillManager
GetSkillManager returns the singleton instance of SkillManager
func NewSkillManager ¶ added in v1.13.18
func NewSkillManager() *SkillManager
NewSkillManager creates a new SkillManager
func (*SkillManager) ActivateSkill ¶ added in v1.13.18
ActivateSkill activates a skill by name and returns its instructions, description, and available resources.
func (*SkillManager) CreateTestSkill ¶ added in v1.13.18
func (sm *SkillManager) CreateTestSkill(rootPath string) (string, error)
CreateTestSkill creates a temporary test skill for verification
func (*SkillManager) GenerateFileTree ¶ added in v1.13.18
func (sm *SkillManager) GenerateFileTree(dir string) (string, error)
GenerateFileTree generates a professional tree representation of the skill directory utilizing Unicode box-drawing characters for enhanced structural clarity.
func (*SkillManager) GetAvailableSkills ¶ added in v1.13.18
func (sm *SkillManager) GetAvailableSkills() string
GetAvailableSkills returns the XML string for system prompt injection Skills that are disabled in settings.json are excluded from the output.
func (*SkillManager) GetAvailableSkillsMetadata ¶ added in v1.14.21
func (sm *SkillManager) GetAvailableSkillsMetadata() []data.SkillMetadata
GetAvailableSkillsMetadata returns the list of available skills
func (*SkillManager) LoadMetadata ¶ added in v1.13.18
func (sm *SkillManager) LoadMetadata() error
LoadMetadata scans and loads skill metadata
type StatusStack ¶ added in v1.9.2
type StatusStack struct {
// contains filtered or unexported fields
}
StatusStack is a stack data structure for managing states.
func (*StatusStack) ChangeTo ¶ added in v1.9.2
func (s *StatusStack) ChangeTo( proc chan<- StreamNotify, notify StreamNotify, proceed <-chan bool)
func (*StatusStack) Clear ¶ added in v1.9.2
func (s *StatusStack) Clear()
func (*StatusStack) Debug ¶ added in v1.9.2
func (s *StatusStack) Debug()
func (*StatusStack) IsEmpty ¶ added in v1.9.2
func (s *StatusStack) IsEmpty() bool
func (*StatusStack) IsTop ¶ added in v1.9.2
func (s *StatusStack) IsTop(status StreamStatus) bool
func (*StatusStack) Peek ¶ added in v1.9.2
func (s *StatusStack) Peek() StreamStatus
Peek returns the state from the top of the stack without removing it. If the stack is empty, it returns the zero-value status (StatusUnknown; the previously documented "StateNormal" is not defined in this package).
func (*StatusStack) Pop ¶ added in v1.9.2
func (s *StatusStack) Pop() StreamStatus
Pop removes and returns the state from the top of the stack. If the stack is empty, it returns the zero-value status (StatusUnknown; the previously documented "StateNormal" is not defined in this package).
func (*StatusStack) Push ¶ added in v1.9.2
func (s *StatusStack) Push(status StreamStatus)
Push adds a state to the top of the stack.
func (*StatusStack) Size ¶ added in v1.9.2
func (s *StatusStack) Size() int
type StreamData ¶ added in v1.9.2
type StreamData struct {
Text string
Type StreamDataType
}
type StreamDataType ¶ added in v1.9.2
type StreamDataType int
const ( DataTypeUnknown StreamDataType = iota DataTypeNormal // 1 DataTypeReasoning // 2 DataTypeFinished // 3 )
type StreamNotify ¶
type StreamNotify struct {
Status StreamStatus
Data string // For text content or error messages
Extra interface{} // For additional metadata (e.g., switch instruction)
}
type StreamStatus ¶
type StreamStatus int
const ( StatusUnknown StreamStatus = iota StatusProcessing StatusStarted StatusFinished StatusWarn StatusError StatusReasoning StatusReasoningOver StatusFunctionCalling StatusFunctionCallingOver StatusDiffConfirm StatusDiffConfirmOver StatusSwitchAgent StatusUserCancel )
type SubAgentExecutor ¶ added in v1.13.14
type SubAgentExecutor struct {
// contains filtered or unexported fields
}
SubAgentExecutor manages sub-agent lifecycle and execution
func NewSubAgentExecutor ¶ added in v1.13.14
func NewSubAgentExecutor(state *data.SharedState, maxWorkers int) *SubAgentExecutor
NewSubAgentExecutor creates a new SubAgentExecutor
func (*SubAgentExecutor) Cancel ¶ added in v1.13.14
func (e *SubAgentExecutor) Cancel(taskID string) error
Cancel cancels a running task
func (*SubAgentExecutor) CancelAll ¶ added in v1.13.14
func (e *SubAgentExecutor) CancelAll()
CancelAll cancels all running tasks
func (*SubAgentExecutor) Clear ¶ added in v1.13.14
func (e *SubAgentExecutor) Clear()
Clear removes all completed/failed/cancelled tasks
func (*SubAgentExecutor) ClearAll ¶ added in v1.13.14
func (e *SubAgentExecutor) ClearAll()
ClearAll removes all tasks
func (*SubAgentExecutor) Execute ¶ added in v1.13.14
func (e *SubAgentExecutor) Execute(timeout time.Duration) []SubAgentResult
Execute runs all pending tasks and waits for completion. Uses DAG-based dependency resolution: tasks with input_keys wait for those dependencies.
func (*SubAgentExecutor) FormatProgress ¶ added in v1.13.14
func (e *SubAgentExecutor) FormatProgress() string
FormatProgress returns a formatted string of all task progress
func (*SubAgentExecutor) FormatSummary ¶ added in v1.13.14
func (e *SubAgentExecutor) FormatSummary(results []SubAgentResult) string
FormatSummary returns a brief summary of task execution
func (*SubAgentExecutor) GetAllProgress ¶ added in v1.13.14
func (e *SubAgentExecutor) GetAllProgress() []SubAgentResult
GetAllProgress returns progress for all tasks
func (*SubAgentExecutor) GetProgress ¶ added in v1.13.14
func (e *SubAgentExecutor) GetProgress(taskID string) *SubAgentResult
GetProgress returns the current result for a task
func (*SubAgentExecutor) GetProgressStatistics ¶ added in v1.13.14
func (e *SubAgentExecutor) GetProgressStatistics() (int, int)
GetProgressStatistics returns the number of done and total tasks
func (*SubAgentExecutor) Submit ¶ added in v1.13.14
func (e *SubAgentExecutor) Submit(task *SubAgentTask) string
Submit submits a single task for execution and returns the task ID
func (*SubAgentExecutor) SubmitBatch ¶ added in v1.13.14
func (e *SubAgentExecutor) SubmitBatch(tasks []*SubAgentTask) []string
SubmitBatch submits multiple tasks and returns their IDs
type SubAgentResult ¶ added in v1.13.14
type SubAgentResult struct {
TaskID string // Task ID
AgentName string // Agent that executed
Status SubAgentStatus // Execution status
Progress string // Human-readable progress description
OutputFile string // Path to detailed output
TaskKey string // Key where result was stored in SharedState
Error error // Error if failed
Duration time.Duration // Execution duration
StartTime time.Time // When execution started
EndTime time.Time // When execution ended
}
SubAgentResult represents the outcome of a sub-agent execution
type SubAgentStatus ¶ added in v1.13.14
type SubAgentStatus int
SubAgentStatus represents the execution status of a sub-agent task
const ( StatusPending SubAgentStatus = iota StatusRunning StatusCompleted StatusFailed StatusCancelled )
func (SubAgentStatus) String ¶ added in v1.13.14
func (s SubAgentStatus) String() string
type SubAgentTask ¶ added in v1.13.14
type SubAgentTask struct {
ID string // Unique task ID
AgentName string // Agent profile to use
Instruction string // Task instruction/prompt
TaskKey string // Key to store result in SharedState
InputKeys []string // Keys to read as input context (virtual files)
Wait bool // If true, wait for ALL prior tasks before starting
}
SubAgentTask represents a single sub-agent invocation request
type SwitchAgentError ¶ added in v1.13.10
func AsSwitchAgentError ¶ added in v1.14.21
func AsSwitchAgentError(err error) (SwitchAgentError, bool)
AsSwitchAgentError safely extracts a SwitchAgentError from an error, handling both value and pointer variants.
func (SwitchAgentError) Error ¶ added in v1.13.10
func (e SwitchAgentError) Error() string
type TavilyError ¶ added in v1.2.0
type TavilyError struct {
Detail TavilyErrorDetail `json:"detail"`
}
type TavilyErrorDetail ¶ added in v1.2.0
type TavilyErrorDetail struct {
Error string `json:"error"`
}
type TavilyResponse ¶ added in v1.2.0
type TavilyResponse struct {
Query string `json:"query"`
Answer string `json:"answer"`
Images []string `json:"images"`
Results []TavilyResult `json:"results"`
ResponseTime float32 `json:"response_time"` // e.g., 1.67
}
TavilyResponse represents the overall Tavily API response.
type TavilyResult ¶ added in v1.2.0
type TavilyResult struct {
Title string `json:"title"`
URL string `json:"url"`
Content string `json:"content"`
Score float64 `json:"score"`
RawContent *string `json:"raw_content"`
}
TavilyResult represents a single result in the Tavily API response.
type ThinkingLevel ¶ added in v1.13.7
type ThinkingLevel string
ThinkingLevel represents the unified thinking/reasoning level across providers. Maps to provider-specific configurations: - OpenAI: reasoning_effort ("low"/"medium"/"high") - OpenChat: model.Thinking + ReasoningEffort - Gemini 2.5: ThinkingBudget (token count, -1 for dynamic) - Gemini 3: ThinkingLevel ("LOW"/"MEDIUM"/"HIGH") - Anthropic: thinking.budget_tokens
const ( ThinkingLevelOff ThinkingLevel = "off" ThinkingLevelLow ThinkingLevel = "low" ThinkingLevelMedium ThinkingLevel = "medium" ThinkingLevelHigh ThinkingLevel = "high" )
func AllThinkingLevels ¶ added in v1.13.7
func AllThinkingLevels() []ThinkingLevel
AllThinkingLevels returns all valid thinking levels in order
func ParseThinkingLevel ¶ added in v1.13.7
func ParseThinkingLevel(s string) ThinkingLevel
ParseThinkingLevel normalizes user input to a valid ThinkingLevel. Supports backward compatibility with boolean values.
func (ThinkingLevel) Display ¶ added in v1.13.7
func (t ThinkingLevel) Display() string
Display returns a colorized display string for CLI output
func (ThinkingLevel) IsEnabled ¶ added in v1.13.7
func (t ThinkingLevel) IsEnabled() bool
IsEnabled returns true if thinking is enabled (not off)
func (ThinkingLevel) String ¶ added in v1.13.7
func (t ThinkingLevel) String() string
String returns the string representation
func (ThinkingLevel) ToAnthropicParams ¶ added in v1.13.7
func (t ThinkingLevel) ToAnthropicParams() anthropic.ThinkingConfigParamUnion
ToAnthropicParams returns the thinking budget tokens for Anthropic. Returns 0 for ThinkingLevelOff.
func (ThinkingLevel) ToGeminiConfig ¶ added in v1.13.7
func (t ThinkingLevel) ToGeminiConfig(modelName string) *genai.ThinkingConfig
ToGeminiConfig returns the Gemini ThinkingConfig based on model version. Gemini 3 uses ThinkingLevel, Gemini 2.5 uses ThinkingBudget.
func (ThinkingLevel) ToOpenAIReasoningEffort ¶ added in v1.13.7
func (t ThinkingLevel) ToOpenAIReasoningEffort() string
ToOpenAIReasoningEffort returns the OpenAI reasoning_effort parameter value. Returns empty string for ThinkingLevelOff (no param should be set).
func (ThinkingLevel) ToOpenChatParams ¶ added in v1.13.7
func (t ThinkingLevel) ToOpenChatParams() (*model.Thinking, *model.ReasoningEffort)
ToOpenChatParams returns the OpenChat model.Thinking and ReasoningEffort params.
type TokenCache ¶ added in v1.12.14
type TokenCache struct {
// contains filtered or unexported fields
}
TokenCache provides a thread-safe cache for storing token counts of LLM messages. It uses JSON-marshaled message content as keys to ensure correct uniqueness.
func GetGlobalTokenCache ¶ added in v1.12.14
func GetGlobalTokenCache() *TokenCache
GetGlobalTokenCache returns the global token cache instance
func NewTokenCache ¶ added in v1.12.14
func NewTokenCache(maxSize int) *TokenCache
NewTokenCache creates a new TokenCache with the specified maximum size
func (*TokenCache) Clear ¶ added in v1.12.14
func (tc *TokenCache) Clear()
Clear removes all entries from the cache
func (*TokenCache) Get ¶ added in v1.12.14
func (tc *TokenCache) Get(key string) (int, bool)
Get retrieves a cached token count for the given key. Returns the count and true if found, or 0 and false if not found.
func (*TokenCache) GetOrComputeAnthropicTokens ¶ added in v1.13.5
func (tc *TokenCache) GetOrComputeAnthropicTokens(msg anthropic.MessageParam) int
GetOrComputeAnthropicTokens retrieves cached tokens or computes and caches them.
func (*TokenCache) GetOrComputeGeminiTokens ¶ added in v1.12.14
func (tc *TokenCache) GetOrComputeGeminiTokens(msg *genai.Content) int
GetOrComputeGeminiTokens retrieves cached tokens or computes and caches them for Gemini.
func (*TokenCache) GetOrComputeOpenAITokens ¶ added in v1.12.14
func (tc *TokenCache) GetOrComputeOpenAITokens(msg openai.ChatCompletionMessage) int
GetOrComputeOpenAITokens retrieves cached tokens or computes and caches them.
func (*TokenCache) GetOrComputeOpenChatTokens ¶ added in v1.12.14
func (tc *TokenCache) GetOrComputeOpenChatTokens(msg *model.ChatCompletionMessage) int
GetOrComputeOpenChatTokens retrieves cached tokens or computes and caches them.
func (*TokenCache) Set ¶ added in v1.12.14
func (tc *TokenCache) Set(key string, count int)
Set stores a token count for the given key. If the cache is full, it evicts approximately half of the entries.
func (*TokenCache) Size ¶ added in v1.12.14
func (tc *TokenCache) Size() int
Size returns the current number of entries in the cache
func (*TokenCache) Stats ¶ added in v1.12.14
func (tc *TokenCache) Stats() (hits, misses int64, size int)
Stats returns cache statistics (hits, misses, size)
type TokenUsage ¶ added in v1.9.5
type TokenUsage struct {
InputTokens int
OutputTokens int
CachedTokens int
ThoughtTokens int
TotalTokens int
// For providers like Anthropic, cached tokens are not included in the prompt tokens
// OpenAI, OpenChat and Gemini all include cached tokens in the prompt tokens
CachedTokensInPrompt bool
}
func NewTokenUsage ¶ added in v1.9.7
func NewTokenUsage() *TokenUsage
func (*TokenUsage) RecordTokenUsage ¶ added in v1.9.5
func (tu *TokenUsage) RecordTokenUsage(input, output, cached, thought, total int)
func (*TokenUsage) Render ¶ added in v1.9.7
func (tu *TokenUsage) Render(render ui.Render)
type TruncationStrategy ¶ added in v1.12.14
type TruncationStrategy string
TruncationStrategy defines how to handle context overflow
const ( // StrategyTruncateOldest removes oldest messages first, preserving system prompt StrategyTruncateOldest TruncationStrategy = "truncate_oldest" // StrategySummarize replaces old context with a summary (future implementation) StrategySummarize TruncationStrategy = "summarize" // StrategyNone disables truncation - will fail if context exceeds limit StrategyNone TruncationStrategy = "none" // DefaultBufferPercent is the default safety buffer (80% of available space) DefaultBufferPercent = 0.80 )
type UniversalMessage ¶ added in v1.13.10
type UniversalMessage struct {
Role UniversalRole // "system", "user", "assistant"
Content string // Main text content
Reasoning string // Thinking/reasoning content (if any)
}
UniversalMessage is a provider-agnostic representation of a chat message. It extracts only the essential semantic content for cross-provider conversion. Key design decisions: 1. Only text content and reasoning are preserved. 2. Tool calls, tool responses, images, and other multimodal content are discarded. 3. Role normalization: "model" (Gemini) → "assistant".
func ParseAnthropicMessages ¶ added in v1.13.10
func ParseAnthropicMessages(messages []anthropic.MessageParam) []UniversalMessage
ParseAnthropicMessages converts Anthropic messages to universal format. Extracts: OfText blocks, OfThinking/OfRedactedThinking blocks Ignores: OfToolUse, OfToolResult, OfImage, OfDocument
func ParseGeminiMessages ¶ added in v1.13.10
func ParseGeminiMessages(messages []*gemini.Content) []UniversalMessage
ParseGeminiMessages converts Gemini messages to universal format. Extracts: Parts.Text, Parts.Thought Ignores: FunctionCall, FunctionResponse, InlineData Maps: "model" → "assistant"
func ParseOpenAIMessages ¶ added in v1.13.10
func ParseOpenAIMessages(messages []openai.ChatCompletionMessage) []UniversalMessage
ParseOpenAIMessages converts OpenAI messages to universal format. Extracts: Content, MultiContent[].Text, ReasoningContent Ignores: ToolCalls, FunctionCall, ImageURL
func ParseOpenChatMessages ¶ added in v1.13.10
func ParseOpenChatMessages(messages []*model.ChatCompletionMessage) []UniversalMessage
ParseOpenChatMessages converts OpenChat (Volcengine) messages to universal format.
type UniversalRole ¶ added in v1.13.10
type UniversalRole string
const ( UniversalRoleSystem UniversalRole = "system" UniversalRoleUser UniversalRole = "user" UniversalRoleAssistant UniversalRole = "assistant" )
func ConvertToUniversalRole ¶ added in v1.13.10
func ConvertToUniversalRole(role string) UniversalRole
func (UniversalRole) ConvertToAnthropic ¶ added in v1.13.10
func (r UniversalRole) ConvertToAnthropic() anthropic.MessageParamRole
func (UniversalRole) ConvertToGemini ¶ added in v1.13.10
func (r UniversalRole) ConvertToGemini() string
func (UniversalRole) ConvertToOpenAI ¶ added in v1.13.10
func (r UniversalRole) ConvertToOpenAI() string
func (UniversalRole) ConvertToOpenChat ¶ added in v1.13.10
func (r UniversalRole) ConvertToOpenChat() string
func (UniversalRole) String ¶ added in v1.13.10
func (r UniversalRole) String() string
type UserCancelError ¶ added in v1.14.13
type UserCancelError struct {
Reason string
}
func AsUserCancelError ¶ added in v1.14.21
func AsUserCancelError(err error) (UserCancelError, bool)
AsUserCancelError safely extracts a UserCancelError from an error, handling both value and pointer variants.
func (UserCancelError) Error ¶ added in v1.14.13
func (e UserCancelError) Error() string
Error implements [error].
type WorkflowManager ¶ added in v1.14.11
type WorkflowManager struct {
// contains filtered or unexported fields
}
WorkflowManager handles workflow operations
func GetWorkflowManager ¶ added in v1.14.11
func GetWorkflowManager() *WorkflowManager
GetWorkflowManager returns the singleton instance of WorkflowManager
func (*WorkflowManager) CreateWorkflow ¶ added in v1.14.11
func (wm *WorkflowManager) CreateWorkflow(name, description, content string) error
CreateWorkflow creates a new workflow file
func (*WorkflowManager) GetCommands ¶ added in v1.14.11
func (wm *WorkflowManager) GetCommands() map[string]string
GetCommands returns a map of command->description for chat suggestions
func (*WorkflowManager) GetWorkflowByName ¶ added in v1.14.11
func (wm *WorkflowManager) GetWorkflowByName(name string) (string, string, error)
GetWorkflowByName retrieves a workflow by its name (case-insensitive). Returns content, description, and error.
func (*WorkflowManager) GetWorkflowNames ¶ added in v1.14.11
func (wm *WorkflowManager) GetWorkflowNames() []string
GetWorkflowNames returns a sorted list of all available workflow names
func (*WorkflowManager) IsReservedCommand ¶ added in v1.14.11
func (wm *WorkflowManager) IsReservedCommand(name string) bool
IsReservedCommand checks if a command is reserved
func (*WorkflowManager) LoadMetadata ¶ added in v1.14.11
func (wm *WorkflowManager) LoadMetadata(reservedCommands map[string]string) error
LoadMetadata scans and loads workflow metadata
func (*WorkflowManager) RemoveWorkflow ¶ added in v1.14.11
func (wm *WorkflowManager) RemoveWorkflow(name string) error
RemoveWorkflow removes a workflow
func (*WorkflowManager) RenameWorkflow ¶ added in v1.14.11
func (wm *WorkflowManager) RenameWorkflow(oldName, newName string) error
RenameWorkflow renames a workflow
func (*WorkflowManager) UpdateWorkflow ¶ added in v1.14.11
func (wm *WorkflowManager) UpdateWorkflow(name, description, content string) error
UpdateWorkflow updates an existing workflow file
Source Files
¶
- agent.go
- agent_output.go
- atref.go
- cache.go
- capability.go
- coder.go
- context.go
- converter.go
- convo_anthropic.go
- convo_common.go
- convo_gemini.go
- convo_openai.go
- convo_openchat.go
- errors.go
- fetch.go
- files.go
- logger.go
- markdown.go
- mcp.go
- mcptools.go
- model_anthropic.go
- model_gemini.go
- model_openai.go
- model_openchat.go
- models.go
- package.go
- provider.go
- search.go
- serializer.go
- skills.go
- status.go
- subagent.go
- think.go
- tokenizer.go
- tools_anthropic.go
- tools_common.go
- tools_gemini.go
- tools_impl.go
- tools_openai.go
- tools_openchat.go
- usage.go
- utils.go
- workflow.go