diff --git a/cmd/picoclaw/internal/gateway/helpers.go b/cmd/picoclaw/internal/gateway/helpers.go
index a06625dc98..4610352c22 100644
--- a/cmd/picoclaw/internal/gateway/helpers.go
+++ b/cmd/picoclaw/internal/gateway/helpers.go
@@ -24,6 +24,7 @@ import (
"github.com/sipeed/picoclaw/pkg/providers"
"github.com/sipeed/picoclaw/pkg/state"
"github.com/sipeed/picoclaw/pkg/tools"
+ cron_tool "github.com/sipeed/picoclaw/pkg/tools/cron"
"github.com/sipeed/picoclaw/pkg/voice"
)
@@ -70,7 +71,8 @@ func gatewayCmd(debug bool) error {
})
// Setup cron tool and service
- execTimeout := time.Duration(cfg.Tools.Cron.ExecTimeoutMinutes) * time.Minute
+ cronCfg := cron_tool.GetCronConfig(cfg)
+ execTimeout := time.Duration(cronCfg.ExecTimeoutMinutes) * time.Minute
cronService := setupCronTool(
agentLoop,
msgBus,
@@ -232,14 +234,16 @@ func setupCronTool(
cronService := cron.NewCronService(cronStorePath, nil)
// Create and register CronTool
- cronTool := tools.NewCronTool(cronService, agentLoop, msgBus, workspace, restrict, execTimeout, cfg)
- agentLoop.RegisterTool(cronTool)
-
- // Set the onJob handler
- cronService.SetOnJob(func(job *cron.CronJob) (string, error) {
- result := cronTool.ExecuteJob(context.Background(), job)
- return result, nil
- })
-
+ cronToolCfg := cron_tool.GetCronConfig(cfg)
+ if cronToolCfg.Enabled {
+ cronTool := cron_tool.NewCronTool(cronService, agentLoop, msgBus, workspace, restrict, execTimeout, cfg.GetTool("cron"))
+ agentLoop.RegisterTool(cronTool)
+
+ // Set the onJob handler
+ cronService.SetOnJob(func(job *cron.CronJob) (string, error) {
+ result := cronTool.ExecuteJob(context.Background(), job)
+ return result, nil
+ })
+ }
return cronService
}
diff --git a/cmd/picoclaw/internal/skills/helpers.go b/cmd/picoclaw/internal/skills/helpers.go
index 439b81a4f2..724d6cbcaf 100644
--- a/cmd/picoclaw/internal/skills/helpers.go
+++ b/cmd/picoclaw/internal/skills/helpers.go
@@ -12,6 +12,7 @@ import (
"github.com/sipeed/picoclaw/cmd/picoclaw/internal"
"github.com/sipeed/picoclaw/pkg/config"
"github.com/sipeed/picoclaw/pkg/skills"
+ "github.com/sipeed/picoclaw/pkg/tools/find_skills"
"github.com/sipeed/picoclaw/pkg/utils"
)
@@ -62,10 +63,8 @@ func skillsInstallFromRegistry(cfg *config.Config, registryName, slug string) er
fmt.Printf("Installing skill '%s' from %s registry...\n", slug, registryName)
- registryMgr := skills.NewRegistryManagerFromConfig(skills.RegistryConfig{
- MaxConcurrentSearches: cfg.Tools.Skills.MaxConcurrentSearches,
- ClawHub: skills.ClawHubConfig(cfg.Tools.Skills.Registries.ClawHub),
- })
+ skillsCfg := find_skills.GetSkillsConfig(cfg)
+ registryMgr := skills.NewRegistryManagerFromConfig(skillsCfg)
registry := registryMgr.GetRegistry(registryName)
if registry == nil {
diff --git a/pkg/agent/instance.go b/pkg/agent/instance.go
index a6fd365c7e..1b20a2aacd 100644
--- a/pkg/agent/instance.go
+++ b/pkg/agent/instance.go
@@ -47,13 +47,7 @@ func NewAgentInstance(
fallbacks := resolveAgentFallbacks(agentCfg, defaults)
restrict := defaults.RestrictToWorkspace
- toolsRegistry := tools.NewToolRegistry()
- toolsRegistry.Register(tools.NewReadFileTool(workspace, restrict))
- toolsRegistry.Register(tools.NewWriteFileTool(workspace, restrict))
- toolsRegistry.Register(tools.NewListDirTool(workspace, restrict))
- toolsRegistry.Register(tools.NewExecToolWithConfig(workspace, restrict, cfg))
- toolsRegistry.Register(tools.NewEditFileTool(workspace, restrict))
- toolsRegistry.Register(tools.NewAppendFileTool(workspace, restrict))
+ toolsRegistry := tools.NewToolRegistry(cfg, workspace, restrict)
sessionsDir := filepath.Join(workspace, "sessions")
sessionsManager := session.NewSessionManager(sessionsDir)
diff --git a/pkg/agent/loop.go b/pkg/agent/loop.go
index 693f2227be..5717a910ed 100644
--- a/pkg/agent/loop.go
+++ b/pkg/agent/loop.go
@@ -23,9 +23,10 @@ import (
"github.com/sipeed/picoclaw/pkg/logger"
"github.com/sipeed/picoclaw/pkg/providers"
"github.com/sipeed/picoclaw/pkg/routing"
- "github.com/sipeed/picoclaw/pkg/skills"
"github.com/sipeed/picoclaw/pkg/state"
"github.com/sipeed/picoclaw/pkg/tools"
+ "github.com/sipeed/picoclaw/pkg/tools/message"
+ "github.com/sipeed/picoclaw/pkg/tools/subagent"
"github.com/sipeed/picoclaw/pkg/utils"
)
@@ -92,63 +93,31 @@ func registerSharedTools(
continue
}
- // Web tools
- if searchTool := tools.NewWebSearchTool(tools.WebSearchToolOptions{
- BraveAPIKey: cfg.Tools.Web.Brave.APIKey,
- BraveMaxResults: cfg.Tools.Web.Brave.MaxResults,
- BraveEnabled: cfg.Tools.Web.Brave.Enabled,
- TavilyAPIKey: cfg.Tools.Web.Tavily.APIKey,
- TavilyBaseURL: cfg.Tools.Web.Tavily.BaseURL,
- TavilyMaxResults: cfg.Tools.Web.Tavily.MaxResults,
- TavilyEnabled: cfg.Tools.Web.Tavily.Enabled,
- DuckDuckGoMaxResults: cfg.Tools.Web.DuckDuckGo.MaxResults,
- DuckDuckGoEnabled: cfg.Tools.Web.DuckDuckGo.Enabled,
- PerplexityAPIKey: cfg.Tools.Web.Perplexity.APIKey,
- PerplexityMaxResults: cfg.Tools.Web.Perplexity.MaxResults,
- PerplexityEnabled: cfg.Tools.Web.Perplexity.Enabled,
- Proxy: cfg.Tools.Web.Proxy,
- }); searchTool != nil {
- agent.Tools.Register(searchTool)
- }
- agent.Tools.Register(tools.NewWebFetchToolWithProxy(50000, cfg.Tools.Web.Proxy))
-
- // Hardware tools (I2C, SPI) - Linux only, returns error on other platforms
- agent.Tools.Register(tools.NewI2CTool())
- agent.Tools.Register(tools.NewSPITool())
-
// Message tool
- messageTool := tools.NewMessageTool()
- messageTool.SetSendCallback(func(channel, chatID, content string) error {
- msgBus.PublishOutbound(bus.OutboundMessage{
- Channel: channel,
- ChatID: chatID,
- Content: content,
+ if cfg.ToolEnabled("message") {
+ messageTool := message.NewMessageTool()
+ messageTool.SetSendCallback(func(channel, chatID, content string) error {
+ msgBus.PublishOutbound(bus.OutboundMessage{
+ Channel: channel,
+ ChatID: chatID,
+ Content: content,
+ })
+ return nil
})
- return nil
- })
- agent.Tools.Register(messageTool)
-
- // Skill discovery and installation tools
- registryMgr := skills.NewRegistryManagerFromConfig(skills.RegistryConfig{
- MaxConcurrentSearches: cfg.Tools.Skills.MaxConcurrentSearches,
- ClawHub: skills.ClawHubConfig(cfg.Tools.Skills.Registries.ClawHub),
- })
- searchCache := skills.NewSearchCache(
- cfg.Tools.Skills.SearchCache.MaxSize,
- time.Duration(cfg.Tools.Skills.SearchCache.TTLSeconds)*time.Second,
- )
- agent.Tools.Register(tools.NewFindSkillsTool(registryMgr, searchCache))
- agent.Tools.Register(tools.NewInstallSkillTool(registryMgr, agent.Workspace))
+ agent.Tools.Register(messageTool)
+ }
// Spawn tool with allowlist checker
- subagentManager := tools.NewSubagentManager(provider, agent.Model, agent.Workspace, msgBus)
- subagentManager.SetLLMOptions(agent.MaxTokens, agent.Temperature)
- spawnTool := tools.NewSpawnTool(subagentManager)
- currentAgentID := agentID
- spawnTool.SetAllowlistChecker(func(targetAgentID string) bool {
- return registry.CanSpawnSubagent(currentAgentID, targetAgentID)
- })
- agent.Tools.Register(spawnTool)
+ if cfg.ToolEnabled("spawn") {
+ subagentManager := subagent.NewSubagentManager(provider, agent.Model, agent.Workspace, msgBus)
+ subagentManager.SetLLMOptions(agent.MaxTokens, agent.Temperature)
+ spawnTool := subagent.NewSpawnTool(subagentManager)
+ currentAgentID := agentID
+ spawnTool.SetAllowlistChecker(func(targetAgentID string) bool {
+ return registry.CanSpawnSubagent(currentAgentID, targetAgentID)
+ })
+ agent.Tools.Register(spawnTool)
+ }
}
}
@@ -178,7 +147,7 @@ func (al *AgentLoop) Run(ctx context.Context) error {
defaultAgent := al.registry.GetDefaultAgent()
if defaultAgent != nil {
if tool, ok := defaultAgent.Tools.Get("message"); ok {
- if mt, ok := tool.(*tools.MessageTool); ok {
+ if mt, ok := tool.(*message.MessageTool); ok {
alreadySent = mt.HasSentInRound()
}
}
diff --git a/pkg/config/config.go b/pkg/config/config.go
index ca5803c35b..2d6ec7d1cf 100644
--- a/pkg/config/config.go
+++ b/pkg/config/config.go
@@ -55,7 +55,8 @@ type Config struct {
Providers ProvidersConfig `json:"providers,omitempty"`
ModelList []ModelConfig `json:"model_list"` // New model-centric provider configuration
Gateway GatewayConfig `json:"gateway"`
- Tools ToolsConfig `json:"tools"`
+ Tools ToolsConfig `json:"tools,omitempty"`
+ ToolList []ToolConfig `json:"tool_list"`
Heartbeat HeartbeatConfig `json:"heartbeat"`
Devices DevicesConfig `json:"devices"`
}
@@ -452,6 +453,7 @@ type PerplexityConfig struct {
}
type WebToolsConfig struct {
+ Enabled bool `json:"enabled" env:"PICOCLAW_TOOLS_WEB_ENABLED"`
Brave BraveConfig `json:"brave"`
Tavily TavilyConfig `json:"tavily"`
DuckDuckGo DuckDuckGoConfig `json:"duckduckgo"`
@@ -461,19 +463,55 @@ type WebToolsConfig struct {
Proxy string `json:"proxy,omitempty" env:"PICOCLAW_TOOLS_WEB_PROXY"`
}
-type CronToolsConfig struct {
- ExecTimeoutMinutes int `json:"exec_timeout_minutes" env:"PICOCLAW_TOOLS_CRON_EXEC_TIMEOUT_MINUTES"` // 0 means no timeout
+type CronToolConfig struct {
+ Enabled bool `json:"enabled" env:"PICOCLAW_TOOLS_CRON_ENABLED"`
+ ExecTimeoutMinutes int `json:"exec_timeout_minutes" env:"PICOCLAW_TOOLS_CRON_EXEC_TIMEOUT_MINUTES"` // 0 means no timeout
+}
+
+type ToolConfig struct {
+ Name string `json:"name" env:"PICOCLAW_TOOLS_{{.Name}}_ENABLED"` // Used for env var parsing, not required in JSON if using struct field names
+ Enabled bool `json:"enabled" env:"PICOCLAW_TOOLS_ENABLED"`
+ Extra map[string]any `json:"extra,omitempty"` // Catch-all for any additional tool-specific configs
}
type ExecConfig struct {
+ Enabled bool `json:"enabled" env:"PICOCLAW_TOOLS_EXEC_ENABLED"`
EnableDenyPatterns bool `json:"enable_deny_patterns" env:"PICOCLAW_TOOLS_EXEC_ENABLE_DENY_PATTERNS"`
CustomDenyPatterns []string `json:"custom_deny_patterns" env:"PICOCLAW_TOOLS_EXEC_CUSTOM_DENY_PATTERNS"`
}
type ToolsConfig struct {
- Web WebToolsConfig `json:"web"`
- Cron CronToolsConfig `json:"cron"`
- Exec ExecConfig `json:"exec"`
+ // Web tools
+ Web WebToolsConfig `json:"web"`
+
+ // Cron tools
+ Cron CronToolConfig `json:"cron"`
+
+ // File tools
+ ReadFile ToolConfig `json:"read_file" env:"PICOCLAW_TOOLS_READ_FILE_ENABLED"`
+ WriteFile ToolConfig `json:"write_file" env:"PICOCLAW_TOOLS_WRITE_FILE_ENABLED"`
+ EditFile ToolConfig `json:"edit_file" env:"PICOCLAW_TOOLS_EDIT_FILE_ENABLED"`
+ AppendFile ToolConfig `json:"append_file" env:"PICOCLAW_TOOLS_APPEND_FILE_ENABLED"`
+ ListDir ToolConfig `json:"list_dir" env:"PICOCLAW_TOOLS_LIST_DIR_ENABLED"`
+
+ // Exec tool
+ Exec ExecConfig `json:"exec"`
+
+ // Skills tools
+ FindSkills ToolConfig `json:"find_skills" env:"PICOCLAW_TOOLS_FIND_SKILLS_ENABLED"`
+ InstallSkill ToolConfig `json:"install_skill" env:"PICOCLAW_TOOLS_INSTALL_SKILL_ENABLED"`
+
+ // Subagent tools
+ Spawn ToolConfig `json:"spawn" env:"PICOCLAW_TOOLS_SPAWN_ENABLED"`
+
+ // Message tool
+ Message ToolConfig `json:"message" env:"PICOCLAW_TOOLS_MESSAGE_ENABLED"`
+
+ // Hardware tools
+ I2C ToolConfig `json:"i2c" env:"PICOCLAW_TOOLS_I2C_ENABLED"`
+ SPI ToolConfig `json:"spi" env:"PICOCLAW_TOOLS_SPI_ENABLED"`
+
+ // Skills configuration (registry, cache, etc.)
Skills SkillsToolsConfig `json:"skills"`
}
@@ -529,6 +567,15 @@ func LoadConfig(path string) (*Config, error) {
cfg.ModelList = nil
}
+ // Pre-scan: if user provides legacy "tools" config, we need to clear the default
+ // ToolList so migration can properly convert tools to tool_list
+ var raw map[string]any
+ if err := json.Unmarshal(data, &raw); err == nil {
+ if _, hasTools := raw["tools"]; hasTools {
+ cfg.ToolList = nil
+ }
+ }
+
if err := json.Unmarshal(data, cfg); err != nil {
return nil, err
}
@@ -542,6 +589,12 @@ func LoadConfig(path string) (*Config, error) {
cfg.ModelList = ConvertProvidersToModelList(cfg)
}
+ // Auto-migrate: convert legacy tools config to tool_list for backward compatibility
+ if len(cfg.ToolList) == 0 && cfg.hasToolsConfig() {
+ cfg.ToolList = ConvertToolsToToolList(cfg.Tools)
+ cfg.Tools = ToolsConfig{}
+ }
+
// Validate model_list for uniqueness and required fields
if err := cfg.ValidateModelList(); err != nil {
return nil, err
@@ -560,6 +613,109 @@ func SaveConfig(path string, cfg *Config) error {
return fileutil.WriteFileAtomic(path, data, 0o600)
}
+func (c *Config) hasToolsConfig() bool {
+	return c.Tools.Web.Enabled || c.Tools.Web.Proxy != "" || c.Tools.Web.Brave.APIKey != "" ||
+		c.Tools.Web.Tavily.APIKey != "" || c.Tools.Web.DuckDuckGo.Enabled || c.Tools.Web.Perplexity.APIKey != "" ||
+		c.Tools.Cron.Enabled || c.Tools.Cron.ExecTimeoutMinutes > 0 ||
+		c.Tools.ReadFile.Enabled || c.Tools.WriteFile.Enabled || c.Tools.EditFile.Enabled ||
+		c.Tools.AppendFile.Enabled || c.Tools.ListDir.Enabled ||
+		c.Tools.Exec.Enabled || c.Tools.Exec.EnableDenyPatterns || len(c.Tools.Exec.CustomDenyPatterns) > 0 ||
+		c.Tools.FindSkills.Enabled || c.Tools.InstallSkill.Enabled ||
+		c.Tools.Spawn.Enabled || c.Tools.Message.Enabled || c.Tools.I2C.Enabled || c.Tools.SPI.Enabled ||
+		c.Tools.Skills.MaxConcurrentSearches > 0
+}
+
+func ConvertToolsToToolList(tools ToolsConfig) []ToolConfig {
+ var toolList []ToolConfig
+
+ if tools.Web.Enabled || tools.Web.Proxy != "" || tools.Web.Brave.APIKey != "" ||
+ tools.Web.Tavily.APIKey != "" || tools.Web.DuckDuckGo.Enabled ||
+ tools.Web.Perplexity.APIKey != "" {
+ toolList = append(toolList, ToolConfig{Name: "web", Enabled: true, Extra: map[string]any{
+ "brave": tools.Web.Brave,
+ "tavily": tools.Web.Tavily,
+ "duckduckgo": tools.Web.DuckDuckGo,
+ "perplexity": tools.Web.Perplexity,
+ "proxy": tools.Web.Proxy,
+ }})
+ }
+
+	if tools.Cron.Enabled || tools.Cron.ExecTimeoutMinutes > 0 {
+ toolList = append(toolList, ToolConfig{Name: "cron", Enabled: true, Extra: map[string]any{
+ "exec_timeout_minutes": tools.Cron.ExecTimeoutMinutes,
+ }})
+ }
+
+ if tools.ReadFile.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "read-file", Enabled: true})
+ }
+ if tools.WriteFile.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "write-file", Enabled: true})
+ }
+ if tools.EditFile.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "edit-file", Enabled: true})
+ }
+ if tools.AppendFile.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "append-file", Enabled: true})
+ }
+ if tools.ListDir.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "list-dir", Enabled: true})
+ }
+
+	if tools.Exec.Enabled || tools.Exec.EnableDenyPatterns || len(tools.Exec.CustomDenyPatterns) > 0 {
+ toolList = append(toolList, ToolConfig{Name: "exec", Enabled: true, Extra: map[string]any{
+ "enable_deny_patterns": tools.Exec.EnableDenyPatterns,
+ "custom_deny_patterns": tools.Exec.CustomDenyPatterns,
+ }})
+ }
+
+ if tools.FindSkills.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "find-skills", Enabled: true})
+ }
+ if tools.InstallSkill.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "install-skill", Enabled: true})
+ }
+
+ if tools.Spawn.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "spawn", Enabled: true})
+ }
+
+ if tools.Message.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "message", Enabled: true})
+ }
+
+ if tools.I2C.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "i2c", Enabled: true})
+ }
+ if tools.SPI.Enabled {
+ toolList = append(toolList, ToolConfig{Name: "spi", Enabled: true})
+ }
+
+	if tools.Skills.MaxConcurrentSearches > 0 || tools.Skills.SearchCache.MaxSize > 0 || tools.Skills.Registries.ClawHub.BaseURL != "" {
+ toolList = append(toolList, ToolConfig{Name: "skills", Enabled: true, Extra: map[string]any{
+ "registries": tools.Skills.Registries,
+ "max_concurrent_searches": tools.Skills.MaxConcurrentSearches,
+ "search_cache": tools.Skills.SearchCache,
+ }})
+ }
+
+ return toolList
+}
+
+func (c *Config) GetTool(name string) *ToolConfig {
+ for i := range c.ToolList {
+ if c.ToolList[i].Name == name {
+ return &c.ToolList[i]
+ }
+ }
+ return nil
+}
+
+func (c *Config) ToolEnabled(name string) bool {
+ tc := c.GetTool(name)
+ return tc != nil && tc.Enabled
+}
+
func (c *Config) WorkspacePath() string {
return expandHome(c.Agents.Defaults.Workspace)
}
diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go
index bf56b7f34c..2018a456c2 100644
--- a/pkg/config/config_test.go
+++ b/pkg/config/config_test.go
@@ -284,22 +284,6 @@ func TestDefaultConfig_Channels(t *testing.T) {
}
}
-// TestDefaultConfig_WebTools verifies web tools config
-func TestDefaultConfig_WebTools(t *testing.T) {
- cfg := DefaultConfig()
-
- // Verify web tools defaults
- if cfg.Tools.Web.Brave.MaxResults != 5 {
- t.Error("Expected Brave MaxResults 5, got ", cfg.Tools.Web.Brave.MaxResults)
- }
- if cfg.Tools.Web.Brave.APIKey != "" {
- t.Error("Brave API key should be empty by default")
- }
- if cfg.Tools.Web.DuckDuckGo.MaxResults != 5 {
- t.Error("Expected DuckDuckGo MaxResults 5, got ", cfg.Tools.Web.DuckDuckGo.MaxResults)
- }
-}
-
func TestSaveConfig_FilePermissions(t *testing.T) {
if runtime.GOOS == "windows" {
t.Skip("file permission bits are not enforced on Windows")
@@ -393,27 +377,6 @@ func TestLoadConfig_OpenAIWebSearchCanBeDisabled(t *testing.T) {
}
}
-func TestLoadConfig_WebToolsProxy(t *testing.T) {
- tmpDir := t.TempDir()
- configPath := filepath.Join(tmpDir, "config.json")
- configJSON := `{
- "agents": {"defaults":{"workspace":"./workspace","model":"gpt4","max_tokens":8192,"max_tool_iterations":20}},
- "model_list": [{"model_name":"gpt4","model":"openai/gpt-5.2","api_key":"x"}],
- "tools": {"web":{"proxy":"http://127.0.0.1:7890"}}
-}`
- if err := os.WriteFile(configPath, []byte(configJSON), 0o600); err != nil {
- t.Fatalf("os.WriteFile() error: %v", err)
- }
-
- cfg, err := LoadConfig(configPath)
- if err != nil {
- t.Fatalf("LoadConfig() error: %v", err)
- }
- if cfg.Tools.Web.Proxy != "http://127.0.0.1:7890" {
- t.Fatalf("Tools.Web.Proxy = %q, want %q", cfg.Tools.Web.Proxy, "http://127.0.0.1:7890")
- }
-}
-
// TestDefaultConfig_DMScope verifies the default dm_scope value
func TestDefaultConfig_DMScope(t *testing.T) {
cfg := DefaultConfig()
diff --git a/pkg/config/defaults.go b/pkg/config/defaults.go
index cf799140d1..3ebfab0b54 100644
--- a/pkg/config/defaults.go
+++ b/pkg/config/defaults.go
@@ -275,44 +275,117 @@ func DefaultConfig() *Config {
Host: "127.0.0.1",
Port: 18790,
},
- Tools: ToolsConfig{
- Web: WebToolsConfig{
- Proxy: "",
- Brave: BraveConfig{
- Enabled: false,
- APIKey: "",
- MaxResults: 5,
+ ToolList: []ToolConfig{
+ {
+ Name: "read-file",
+ Enabled: true,
+ },
+ {
+ Name: "write-file",
+ Enabled: true,
+ },
+ {
+ Name: "edit-file",
+ Enabled: false,
+ },
+ {
+ Name: "append-file",
+ Enabled: false,
+ },
+ {
+ Name: "list-dir",
+ Enabled: false,
+ },
+ {
+ Name: "exec",
+ Enabled: true,
+ Extra: map[string]any{
+ "enable_deny_patterns": true,
+ "custom_deny_patterns": []string{},
},
- DuckDuckGo: DuckDuckGoConfig{
- Enabled: true,
- MaxResults: 5,
+ },
+ {
+ Name: "find-skills",
+ Enabled: true,
+ },
+ {
+ Name: "install-skill",
+ Enabled: true,
+ },
+ {
+ Name: "spawn",
+ Enabled: true,
+ },
+ {
+ Name: "message",
+ Enabled: true,
+ },
+ {
+ Name: "web",
+ Enabled: true,
+ Extra: map[string]any{
+ "brave": map[string]any{
+ "enabled": false,
+ "api_key": "",
+ "max_results": 5,
+ },
+ "tavily": map[string]any{
+ "enabled": false,
+ "api_key": "",
+ "max_results": 5,
+ },
+ "duckduckgo": map[string]any{
+ "enabled": true,
+ "max_results": 5,
+ },
+ "perplexity": map[string]any{
+ "enabled": false,
+ "api_key": "",
+ "max_results": 5,
+ },
+ "proxy": "",
},
- Perplexity: PerplexityConfig{
- Enabled: false,
- APIKey: "",
- MaxResults: 5,
+ },
+ {
+ Name: "cron",
+ Enabled: true,
+ Extra: map[string]any{
+ "exec_timeout_minutes": 5,
},
},
- Cron: CronToolsConfig{
- ExecTimeoutMinutes: 5,
+ {
+ Name: "i2c",
+ Enabled: false,
},
- Exec: ExecConfig{
- EnableDenyPatterns: true,
+ {
+ Name: "spi",
+ Enabled: false,
},
- Skills: SkillsToolsConfig{
- Registries: SkillsRegistriesConfig{
- ClawHub: ClawHubRegistryConfig{
- Enabled: true,
- BaseURL: "https://clawhub.ai",
+ {
+ Name: "skills",
+ Enabled: true,
+ Extra: map[string]any{
+ "registries": map[string]any{
+ "clawhub": map[string]any{
+ "enabled": true,
+ "base_url": "https://clawhub.ai",
+ "search_path": "/api/v1/search",
+ "skills_path": "/api/v1/skills",
+ "download_path": "/api/v1/download",
+ "timeout": 30,
+ "max_zip_size": 10485760,
+ "max_response_size": 5242880,
+ },
+ },
+ "max_concurrent_searches": 2,
+ "search_cache": map[string]any{
+ "max_size": 50,
+ "ttl_seconds": 300,
},
- },
- MaxConcurrentSearches: 2,
- SearchCache: SearchCacheConfig{
- MaxSize: 50,
- TTLSeconds: 300,
},
},
},
+ Tools: ToolsConfig{},
Heartbeat: HeartbeatConfig{
Enabled: true,
Interval: 30,
diff --git a/pkg/config/parse.go b/pkg/config/parse.go
new file mode 100644
index 0000000000..643897b20f
--- /dev/null
+++ b/pkg/config/parse.go
@@ -0,0 +1,88 @@
+package config
+
+func GetMap(data map[string]any, key string) (map[string]any, bool) {
+ v, ok := data[key]
+ if !ok {
+ return nil, false
+ }
+ m, ok := v.(map[string]any)
+ return m, ok
+}
+
+func GetString(data map[string]any, key string) (string, bool) {
+ v, ok := data[key]
+ if !ok {
+ return "", false
+ }
+ s, ok := v.(string)
+ return s, ok
+}
+
+func GetStringOrDefault(data map[string]any, key string, defaultVal string) string {
+ if v, ok := GetString(data, key); ok {
+ return v
+ }
+ return defaultVal
+}
+
+func GetFloat(data map[string]any, key string) (float64, bool) {
+ v, ok := data[key]
+ if !ok {
+ return 0, false
+ }
+ f, ok := v.(float64)
+ return f, ok
+}
+
+func GetBool(data map[string]any, key string) (bool, bool) {
+ v, ok := data[key]
+ if !ok {
+ return false, false
+ }
+ b, ok := v.(bool)
+ return b, ok
+}
+
+func GetBoolOrDefault(data map[string]any, key string, defaultVal bool) bool {
+ if v, ok := GetBool(data, key); ok {
+ return v
+ }
+ return defaultVal
+}
+
+func GetStringSlice(data map[string]any, key string) []string {
+ v, ok := data[key]
+ if !ok {
+ return []string{}
+ }
+ arr, ok := v.([]any)
+ if !ok {
+ return []string{}
+ }
+ result := make([]string, 0, len(arr))
+ for _, item := range arr {
+ if s, ok := item.(string); ok {
+ result = append(result, s)
+ }
+ }
+ return result
+}
+
+func GetInt(m map[string]any, key string) (int, bool) {
+ if v, ok := m[key]; ok {
+ switch n := v.(type) {
+ case float64:
+ return int(n), true
+ case int:
+ return n, true
+ }
+ }
+ return 0, false
+}
+
+func GetIntOrDefault(data map[string]any, key string, defaultVal int) int {
+ if v, ok := GetInt(data, key); ok {
+ return v
+ }
+ return defaultVal
+}
diff --git a/pkg/migrate/config.go b/pkg/migrate/config.go
index 869b398279..783f842328 100644
--- a/pkg/migrate/config.go
+++ b/pkg/migrate/config.go
@@ -71,37 +71,37 @@ func ConvertConfig(data map[string]any) (*config.Config, []string, error) {
cfg := config.DefaultConfig()
var warnings []string
- if agents, ok := getMap(data, "agents"); ok {
- if defaults, ok := getMap(agents, "defaults"); ok {
+ if agents, ok := config.GetMap(data, "agents"); ok {
+ if defaults, ok := config.GetMap(agents, "defaults"); ok {
// Prefer model_name, fallback to model for backward compatibility
- if v, ok := getString(defaults, "model_name"); ok {
+ if v, ok := config.GetString(defaults, "model_name"); ok {
cfg.Agents.Defaults.ModelName = v
- } else if v, ok := getString(defaults, "model"); ok {
+ } else if v, ok := config.GetString(defaults, "model"); ok {
cfg.Agents.Defaults.Model = v
}
- if v, ok := getFloat(defaults, "max_tokens"); ok {
+ if v, ok := config.GetFloat(defaults, "max_tokens"); ok {
cfg.Agents.Defaults.MaxTokens = int(v)
}
- if v, ok := getFloat(defaults, "temperature"); ok {
+ if v, ok := config.GetFloat(defaults, "temperature"); ok {
cfg.Agents.Defaults.Temperature = &v
}
- if v, ok := getFloat(defaults, "max_tool_iterations"); ok {
+ if v, ok := config.GetFloat(defaults, "max_tool_iterations"); ok {
cfg.Agents.Defaults.MaxToolIterations = int(v)
}
- if v, ok := getString(defaults, "workspace"); ok {
+ if v, ok := config.GetString(defaults, "workspace"); ok {
cfg.Agents.Defaults.Workspace = rewriteWorkspacePath(v)
}
}
}
- if providers, ok := getMap(data, "providers"); ok {
+ if providers, ok := config.GetMap(data, "providers"); ok {
for name, val := range providers {
pMap, ok := val.(map[string]any)
if !ok {
continue
}
- apiKey, _ := getString(pMap, "api_key")
- apiBase, _ := getString(pMap, "api_base")
+ apiKey, _ := config.GetString(pMap, "api_key")
+ apiBase, _ := config.GetString(pMap, "api_base")
if !supportedProviders[name] {
if apiKey != "" || apiBase != "" {
@@ -117,7 +117,7 @@ func ConvertConfig(data map[string]any) (*config.Config, []string, error) {
case "openai":
cfg.Providers.OpenAI = config.OpenAIProviderConfig{
ProviderConfig: pc,
- WebSearch: getBoolOrDefault(pMap, "web_search", true),
+ WebSearch: config.GetBoolOrDefault(pMap, "web_search", true),
}
case "openrouter":
cfg.Providers.OpenRouter = pc
@@ -133,7 +133,7 @@ func ConvertConfig(data map[string]any) (*config.Config, []string, error) {
}
}
- if channels, ok := getMap(data, "channels"); ok {
+ if channels, ok := config.GetMap(data, "channels"); ok {
for name, val := range channels {
cMap, ok := val.(map[string]any)
if !ok {
@@ -143,94 +143,94 @@ func ConvertConfig(data map[string]any) (*config.Config, []string, error) {
warnings = append(warnings, fmt.Sprintf("Channel '%s' not supported in PicoClaw, skipping", name))
continue
}
- enabled, _ := getBool(cMap, "enabled")
- allowFrom := getStringSlice(cMap, "allow_from")
+ enabled, _ := config.GetBool(cMap, "enabled")
+ allowFrom := config.GetStringSlice(cMap, "allow_from")
switch name {
case "telegram":
cfg.Channels.Telegram.Enabled = enabled
cfg.Channels.Telegram.AllowFrom = allowFrom
- if v, ok := getString(cMap, "token"); ok {
+ if v, ok := config.GetString(cMap, "token"); ok {
cfg.Channels.Telegram.Token = v
}
case "discord":
cfg.Channels.Discord.Enabled = enabled
cfg.Channels.Discord.AllowFrom = allowFrom
- if v, ok := getString(cMap, "token"); ok {
+ if v, ok := config.GetString(cMap, "token"); ok {
cfg.Channels.Discord.Token = v
}
case "whatsapp":
cfg.Channels.WhatsApp.Enabled = enabled
cfg.Channels.WhatsApp.AllowFrom = allowFrom
- if v, ok := getString(cMap, "bridge_url"); ok {
+ if v, ok := config.GetString(cMap, "bridge_url"); ok {
cfg.Channels.WhatsApp.BridgeURL = v
}
case "feishu":
cfg.Channels.Feishu.Enabled = enabled
cfg.Channels.Feishu.AllowFrom = allowFrom
- if v, ok := getString(cMap, "app_id"); ok {
+ if v, ok := config.GetString(cMap, "app_id"); ok {
cfg.Channels.Feishu.AppID = v
}
- if v, ok := getString(cMap, "app_secret"); ok {
+ if v, ok := config.GetString(cMap, "app_secret"); ok {
cfg.Channels.Feishu.AppSecret = v
}
- if v, ok := getString(cMap, "encrypt_key"); ok {
+ if v, ok := config.GetString(cMap, "encrypt_key"); ok {
cfg.Channels.Feishu.EncryptKey = v
}
- if v, ok := getString(cMap, "verification_token"); ok {
+ if v, ok := config.GetString(cMap, "verification_token"); ok {
cfg.Channels.Feishu.VerificationToken = v
}
case "qq":
cfg.Channels.QQ.Enabled = enabled
cfg.Channels.QQ.AllowFrom = allowFrom
- if v, ok := getString(cMap, "app_id"); ok {
+ if v, ok := config.GetString(cMap, "app_id"); ok {
cfg.Channels.QQ.AppID = v
}
- if v, ok := getString(cMap, "app_secret"); ok {
+ if v, ok := config.GetString(cMap, "app_secret"); ok {
cfg.Channels.QQ.AppSecret = v
}
case "dingtalk":
cfg.Channels.DingTalk.Enabled = enabled
cfg.Channels.DingTalk.AllowFrom = allowFrom
- if v, ok := getString(cMap, "client_id"); ok {
+ if v, ok := config.GetString(cMap, "client_id"); ok {
cfg.Channels.DingTalk.ClientID = v
}
- if v, ok := getString(cMap, "client_secret"); ok {
+ if v, ok := config.GetString(cMap, "client_secret"); ok {
cfg.Channels.DingTalk.ClientSecret = v
}
case "maixcam":
cfg.Channels.MaixCam.Enabled = enabled
cfg.Channels.MaixCam.AllowFrom = allowFrom
- if v, ok := getString(cMap, "host"); ok {
+ if v, ok := config.GetString(cMap, "host"); ok {
cfg.Channels.MaixCam.Host = v
}
- if v, ok := getFloat(cMap, "port"); ok {
+ if v, ok := config.GetFloat(cMap, "port"); ok {
cfg.Channels.MaixCam.Port = int(v)
}
}
}
}
- if gateway, ok := getMap(data, "gateway"); ok {
- if v, ok := getString(gateway, "host"); ok {
+ if gateway, ok := config.GetMap(data, "gateway"); ok {
+ if v, ok := config.GetString(gateway, "host"); ok {
cfg.Gateway.Host = v
}
- if v, ok := getFloat(gateway, "port"); ok {
+ if v, ok := config.GetFloat(gateway, "port"); ok {
cfg.Gateway.Port = int(v)
}
}
- if tools, ok := getMap(data, "tools"); ok {
- if web, ok := getMap(tools, "web"); ok {
+ if tools, ok := config.GetMap(data, "tools"); ok {
+ if web, ok := config.GetMap(tools, "web"); ok {
// Migrate old "search" config to "brave" if api_key is present
- if search, ok := getMap(web, "search"); ok {
- if v, ok := getString(search, "api_key"); ok {
+ if search, ok := config.GetMap(web, "search"); ok {
+ if v, ok := config.GetString(search, "api_key"); ok {
cfg.Tools.Web.Brave.APIKey = v
if v != "" {
cfg.Tools.Web.Brave.Enabled = true
}
}
- if v, ok := getFloat(search, "max_results"); ok {
+ if v, ok := config.GetFloat(search, "max_results"); ok {
cfg.Tools.Web.Brave.MaxResults = int(v)
cfg.Tools.Web.DuckDuckGo.MaxResults = int(v)
}
@@ -345,64 +345,3 @@ func rewriteWorkspacePath(path string) string {
path = strings.Replace(path, ".openclaw", ".picoclaw", 1)
return path
}
-
-func getMap(data map[string]any, key string) (map[string]any, bool) {
- v, ok := data[key]
- if !ok {
- return nil, false
- }
- m, ok := v.(map[string]any)
- return m, ok
-}
-
-func getString(data map[string]any, key string) (string, bool) {
- v, ok := data[key]
- if !ok {
- return "", false
- }
- s, ok := v.(string)
- return s, ok
-}
-
-func getFloat(data map[string]any, key string) (float64, bool) {
- v, ok := data[key]
- if !ok {
- return 0, false
- }
- f, ok := v.(float64)
- return f, ok
-}
-
-func getBool(data map[string]any, key string) (bool, bool) {
- v, ok := data[key]
- if !ok {
- return false, false
- }
- b, ok := v.(bool)
- return b, ok
-}
-
-func getBoolOrDefault(data map[string]any, key string, defaultVal bool) bool {
- if v, ok := getBool(data, key); ok {
- return v
- }
- return defaultVal
-}
-
-func getStringSlice(data map[string]any, key string) []string {
- v, ok := data[key]
- if !ok {
- return []string{}
- }
- arr, ok := v.([]any)
- if !ok {
- return []string{}
- }
- result := make([]string, 0, len(arr))
- for _, item := range arr {
- if s, ok := item.(string); ok {
- result = append(result, s)
- }
- }
- return result
-}
diff --git a/pkg/tools/append_file/append_file.go b/pkg/tools/append_file/append_file.go
new file mode 100644
index 0000000000..855869b8d4
--- /dev/null
+++ b/pkg/tools/append_file/append_file.go
@@ -0,0 +1,77 @@
+package append_file
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io/fs"
+
+ "github.com/sipeed/picoclaw/pkg/tools/common"
+)
+
+type AppendFileTool struct {
+ fs common.FileSystem
+}
+
+func NewAppendFileTool(workspace string, restrict bool) *AppendFileTool {
+ var fs common.FileSystem
+ if restrict {
+ fs = &common.SandboxFs{Workspace: workspace}
+ } else {
+ fs = &common.HostFs{}
+ }
+ return &AppendFileTool{fs: fs}
+}
+
+func (t *AppendFileTool) Name() string {
+ return "append_file"
+}
+
+func (t *AppendFileTool) Description() string {
+ return "Append content to the end of a file"
+}
+
+func (t *AppendFileTool) Parameters() map[string]any {
+ return map[string]any{
+ "type": "object",
+ "properties": map[string]any{
+ "path": map[string]any{
+ "type": "string",
+ "description": "The file path to append to",
+ },
+ "content": map[string]any{
+ "type": "string",
+ "description": "The content to append",
+ },
+ },
+ "required": []string{"path", "content"},
+ }
+}
+
+func (t *AppendFileTool) Execute(ctx context.Context, args map[string]any) *common.ToolResult {
+ path, ok := args["path"].(string)
+ if !ok {
+ return common.ErrorResult("path is required")
+ }
+
+ content, ok := args["content"].(string)
+ if !ok {
+ return common.ErrorResult("content is required")
+ }
+
+ if err := appendFile(t.fs, path, content); err != nil {
+ return common.ErrorResult(err.Error())
+ }
+ return common.SilentResult(fmt.Sprintf("Appended to %s", path))
+}
+
+// appendFile reads the existing content (if any) via sysFs, appends new content, and writes back.
+func appendFile(sysFs common.FileSystem, path, appendContent string) error {
+ content, err := sysFs.ReadFile(path)
+ if err != nil && !errors.Is(err, fs.ErrNotExist) {
+ return err
+ }
+
+ newContent := append(content, []byte(appendContent)...)
+ return sysFs.WriteFile(path, newContent)
+}
diff --git a/pkg/tools/append_file/append_file_test.go b/pkg/tools/append_file/append_file_test.go
new file mode 100644
index 0000000000..4e10aaede3
--- /dev/null
+++ b/pkg/tools/append_file/append_file_test.go
@@ -0,0 +1,103 @@
+package append_file
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestAppendFileTool_AppendToExisting verifies appending to an existing file
+func TestAppendFileTool_AppendToExisting(t *testing.T) {
+ tmpDir := t.TempDir()
+ testFile := filepath.Join(tmpDir, "test.txt")
+ // NOTE(review): WriteFile error is ignored; if seeding fails the
+ // assertions below would report a confusing failure — consider
+ // require.NoError here.
+ os.WriteFile(testFile, []byte("Hello World"), 0o644)
+
+ // restrict=true: the sandbox should still accept this path since it is
+ // inside tmpDir (the workspace).
+ tool := NewAppendFileTool(tmpDir, true)
+ ctx := context.Background()
+ args := map[string]any{
+ "path": testFile,
+ "content": "\nAppended text",
+ }
+
+ result := tool.Execute(ctx, args)
+
+ assert.False(t, result.IsError, "Expected success, got error: %s", result.ForLLM)
+ assert.True(t, result.Silent, "Expected Silent=true for AppendFile")
+
+ // Both the original and the appended text must be present.
+ content, err := os.ReadFile(testFile)
+ assert.NoError(t, err)
+ assert.Contains(t, string(content), "Appended text")
+ assert.Contains(t, string(content), "Hello World")
+}
+
+// TestAppendFileTool_AppendToNonExistent verifies appending to a non-existent file creates it
+func TestAppendFileTool_AppendToNonExistent(t *testing.T) {
+ tmpDir := t.TempDir()
+ testFile := filepath.Join(tmpDir, "newfile.txt")
+
+ tool := NewAppendFileTool(tmpDir, true)
+ ctx := context.Background()
+ args := map[string]any{
+ "path": testFile,
+ "content": "First content",
+ }
+
+ result := tool.Execute(ctx, args)
+
+ assert.False(t, result.IsError, "Expected success, got error: %s", result.ForLLM)
+
+ // The file should now exist and contain exactly the appended content,
+ // confirming fs.ErrNotExist is swallowed by appendFile.
+ content, err := os.ReadFile(testFile)
+ assert.NoError(t, err)
+ assert.Equal(t, "First content", string(content))
+}
+
+// TestAppendFileTool_MissingPath verifies error handling for missing path
+func TestAppendFileTool_MissingPath(t *testing.T) {
+ // Unrestricted tool; no filesystem access should happen because the
+ // argument validation fails first.
+ tool := NewAppendFileTool("", false)
+ ctx := context.Background()
+ args := map[string]any{
+ "content": "Some content",
+ }
+
+ result := tool.Execute(ctx, args)
+
+ assert.True(t, result.IsError)
+ assert.Contains(t, result.ForLLM, "path is required")
+}
+
+// TestAppendFileTool_MissingContent verifies error handling for missing content
+func TestAppendFileTool_MissingContent(t *testing.T) {
+ // Unrestricted tool; validation rejects the call before touching disk.
+ tool := NewAppendFileTool("", false)
+ ctx := context.Background()
+ args := map[string]any{
+ "path": "/tmp/test.txt",
+ }
+
+ result := tool.Execute(ctx, args)
+
+ assert.True(t, result.IsError)
+ assert.Contains(t, result.ForLLM, "content is required")
+}
+
+// TestAppendFileTool_RestrictedMode verifies access control
+func TestAppendFileTool_RestrictedMode(t *testing.T) {
+ tmpDir := t.TempDir()
+ testFile := filepath.Join(tmpDir, "test.txt")
+ // NOTE(review): testFile is created but never referenced by the
+ // assertions below — the seeded file appears to be dead setup; consider
+ // removing it or asserting it was left untouched.
+ os.WriteFile(testFile, []byte("Original"), 0o644)
+
+ tool := NewAppendFileTool(tmpDir, true)
+ ctx := context.Background()
+
+ // Try to append to a file outside the workspace
+ args := map[string]any{
+ "path": "/etc/passwd",
+ "content": "Malicious content",
+ }
+
+ result := tool.Execute(ctx, args)
+
+ // The sandbox (SandboxFs rooted at tmpDir) must refuse the escape.
+ assert.True(t, result.IsError)
+}
diff --git a/pkg/tools/filesystem.go b/pkg/tools/common/filesystem.go
similarity index 58%
rename from pkg/tools/filesystem.go
rename to pkg/tools/common/filesystem.go
index 03d461dcc6..7590db2769 100644
--- a/pkg/tools/filesystem.go
+++ b/pkg/tools/common/filesystem.go
@@ -1,7 +1,6 @@
-package tools
+package common
import (
- "context"
"fmt"
"io/fs"
"os"
@@ -13,7 +12,7 @@ import (
)
// validatePath ensures the given path is within the workspace if restrict is true.
-func validatePath(path, workspace string, restrict bool) (string, error) {
+func ValidatePath(path, workspace string, restrict bool) (string, error) {
if workspace == "" {
return path, fmt.Errorf("workspace is not defined")
}
@@ -83,183 +82,18 @@ func isWithinWorkspace(candidate, workspace string) bool {
return err == nil && filepath.IsLocal(rel)
}
-type ReadFileTool struct {
- fs fileSystem
-}
-
-func NewReadFileTool(workspace string, restrict bool) *ReadFileTool {
- var fs fileSystem
- if restrict {
- fs = &sandboxFs{workspace: workspace}
- } else {
- fs = &hostFs{}
- }
- return &ReadFileTool{fs: fs}
-}
-
-func (t *ReadFileTool) Name() string {
- return "read_file"
-}
-
-func (t *ReadFileTool) Description() string {
- return "Read the contents of a file"
-}
-
-func (t *ReadFileTool) Parameters() map[string]any {
- return map[string]any{
- "type": "object",
- "properties": map[string]any{
- "path": map[string]any{
- "type": "string",
- "description": "Path to the file to read",
- },
- },
- "required": []string{"path"},
- }
-}
-
-func (t *ReadFileTool) Execute(ctx context.Context, args map[string]any) *ToolResult {
- path, ok := args["path"].(string)
- if !ok {
- return ErrorResult("path is required")
- }
-
- content, err := t.fs.ReadFile(path)
- if err != nil {
- return ErrorResult(err.Error())
- }
- return NewToolResult(string(content))
-}
-
-type WriteFileTool struct {
- fs fileSystem
-}
-
-func NewWriteFileTool(workspace string, restrict bool) *WriteFileTool {
- var fs fileSystem
- if restrict {
- fs = &sandboxFs{workspace: workspace}
- } else {
- fs = &hostFs{}
- }
- return &WriteFileTool{fs: fs}
-}
-
-func (t *WriteFileTool) Name() string {
- return "write_file"
-}
-
-func (t *WriteFileTool) Description() string {
- return "Write content to a file"
-}
-
-func (t *WriteFileTool) Parameters() map[string]any {
- return map[string]any{
- "type": "object",
- "properties": map[string]any{
- "path": map[string]any{
- "type": "string",
- "description": "Path to the file to write",
- },
- "content": map[string]any{
- "type": "string",
- "description": "Content to write to the file",
- },
- },
- "required": []string{"path", "content"},
- }
-}
-
-func (t *WriteFileTool) Execute(ctx context.Context, args map[string]any) *ToolResult {
- path, ok := args["path"].(string)
- if !ok {
- return ErrorResult("path is required")
- }
-
- content, ok := args["content"].(string)
- if !ok {
- return ErrorResult("content is required")
- }
-
- if err := t.fs.WriteFile(path, []byte(content)); err != nil {
- return ErrorResult(err.Error())
- }
-
- return SilentResult(fmt.Sprintf("File written: %s", path))
-}
-
-type ListDirTool struct {
- fs fileSystem
-}
-
-func NewListDirTool(workspace string, restrict bool) *ListDirTool {
- var fs fileSystem
- if restrict {
- fs = &sandboxFs{workspace: workspace}
- } else {
- fs = &hostFs{}
- }
- return &ListDirTool{fs: fs}
-}
-
-func (t *ListDirTool) Name() string {
- return "list_dir"
-}
-
-func (t *ListDirTool) Description() string {
- return "List files and directories in a path"
-}
-
-func (t *ListDirTool) Parameters() map[string]any {
- return map[string]any{
- "type": "object",
- "properties": map[string]any{
- "path": map[string]any{
- "type": "string",
- "description": "Path to list",
- },
- },
- "required": []string{"path"},
- }
-}
-
-func (t *ListDirTool) Execute(ctx context.Context, args map[string]any) *ToolResult {
- path, ok := args["path"].(string)
- if !ok {
- path = "."
- }
-
- entries, err := t.fs.ReadDir(path)
- if err != nil {
- return ErrorResult(fmt.Sprintf("failed to read directory: %v", err))
- }
- return formatDirEntries(entries)
-}
-
-func formatDirEntries(entries []os.DirEntry) *ToolResult {
- var result strings.Builder
- for _, entry := range entries {
- if entry.IsDir() {
- result.WriteString("DIR: " + entry.Name() + "\n")
- } else {
- result.WriteString("FILE: " + entry.Name() + "\n")
- }
- }
- return NewToolResult(result.String())
-}
-
-// fileSystem abstracts reading, writing, and listing files, allowing both
+// FileSystem abstracts reading, writing, and listing files, allowing both
// unrestricted (host filesystem) and sandbox (os.Root) implementations to share the same polymorphic interface.
-type fileSystem interface {
+type FileSystem interface {
ReadFile(path string) ([]byte, error)
WriteFile(path string, data []byte) error
ReadDir(path string) ([]os.DirEntry, error)
}
-// hostFs is an unrestricted fileReadWriter that operates directly on the host filesystem.
-type hostFs struct{}
+// HostFs is an unrestricted fileReadWriter that operates directly on the host filesystem.
+type HostFs struct{}
-func (h *hostFs) ReadFile(path string) ([]byte, error) {
+func (h *HostFs) ReadFile(path string) ([]byte, error) {
content, err := os.ReadFile(path)
if err != nil {
if os.IsNotExist(err) {
@@ -273,33 +107,33 @@ func (h *hostFs) ReadFile(path string) ([]byte, error) {
return content, nil
}
-func (h *hostFs) ReadDir(path string) ([]os.DirEntry, error) {
+func (h *HostFs) ReadDir(path string) ([]os.DirEntry, error) {
return os.ReadDir(path)
}
-func (h *hostFs) WriteFile(path string, data []byte) error {
+func (h *HostFs) WriteFile(path string, data []byte) error {
// Use unified atomic write utility with explicit sync for flash storage reliability.
// Using 0o600 (owner read/write only) for secure default permissions.
return fileutil.WriteFileAtomic(path, data, 0o600)
}
-// sandboxFs is a sandboxed fileSystem that operates within a strictly defined workspace using os.Root.
-type sandboxFs struct {
- workspace string
+// SandboxFs is a sandboxed FileSystem that operates within a strictly defined workspace using os.Root.
+type SandboxFs struct {
+ Workspace string
}
-func (r *sandboxFs) execute(path string, fn func(root *os.Root, relPath string) error) error {
- if r.workspace == "" {
+func (r *SandboxFs) execute(path string, fn func(root *os.Root, relPath string) error) error {
+ if r.Workspace == "" {
return fmt.Errorf("workspace is not defined")
}
- root, err := os.OpenRoot(r.workspace)
+ root, err := os.OpenRoot(r.Workspace)
if err != nil {
return fmt.Errorf("failed to open workspace: %w", err)
}
defer root.Close()
- relPath, err := getSafeRelPath(r.workspace, path)
+ relPath, err := getSafeRelPath(r.Workspace, path)
if err != nil {
return err
}
@@ -307,7 +141,7 @@ func (r *sandboxFs) execute(path string, fn func(root *os.Root, relPath string)
return fn(root, relPath)
}
-func (r *sandboxFs) ReadFile(path string) ([]byte, error) {
+func (r *SandboxFs) ReadFile(path string) ([]byte, error) {
var content []byte
err := r.execute(path, func(root *os.Root, relPath string) error {
fileContent, err := root.ReadFile(relPath)
@@ -328,7 +162,7 @@ func (r *sandboxFs) ReadFile(path string) ([]byte, error) {
return content, err
}
-func (r *sandboxFs) WriteFile(path string, data []byte) error {
+func (r *SandboxFs) WriteFile(path string, data []byte) error {
return r.execute(path, func(root *os.Root, relPath string) error {
dir := filepath.Dir(relPath)
if dir != "." && dir != "/" {
@@ -381,7 +215,7 @@ func (r *sandboxFs) WriteFile(path string, data []byte) error {
})
}
-func (r *sandboxFs) ReadDir(path string) ([]os.DirEntry, error) {
+func (r *SandboxFs) ReadDir(path string) ([]os.DirEntry, error) {
var entries []os.DirEntry
err := r.execute(path, func(root *os.Root, relPath string) error {
dirEntries, err := fs.ReadDir(root.FS(), relPath)
diff --git a/pkg/tools/result.go b/pkg/tools/common/result.go
similarity index 99%
rename from pkg/tools/result.go
rename to pkg/tools/common/result.go
index b13055b1c4..71d2ff122b 100644
--- a/pkg/tools/result.go
+++ b/pkg/tools/common/result.go
@@ -1,4 +1,4 @@
-package tools
+package common
import "encoding/json"
diff --git a/pkg/tools/base.go b/pkg/tools/common/types.go
similarity index 89%
rename from pkg/tools/base.go
rename to pkg/tools/common/types.go
index 770d8cb047..5901f0cb75 100644
--- a/pkg/tools/base.go
+++ b/pkg/tools/common/types.go
@@ -1,6 +1,8 @@
-package tools
+package common
-import "context"
+import (
+ "context"
+)
// Tool is the interface that all tools must implement.
type Tool interface {
@@ -68,14 +70,3 @@ type AsyncTool interface {
// The callback will be called from a goroutine and should handle thread-safety if needed.
SetCallback(cb AsyncCallback)
}
-
-func ToolToSchema(tool Tool) map[string]any {
- return map[string]any{
- "type": "function",
- "function": map[string]any{
- "name": tool.Name(),
- "description": tool.Description(),
- "parameters": tool.Parameters(),
- },
- }
-}
diff --git a/pkg/tools/common/web.go b/pkg/tools/common/web.go
new file mode 100644
index 0000000000..d930d5599f
--- /dev/null
+++ b/pkg/tools/common/web.go
@@ -0,0 +1,64 @@
+package common
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strings"
+ "time"
+)
+
+const (
+ UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+)
+
+// Pre-compiled regexes for HTML text extraction
+var (
+ ReScript = regexp.MustCompile(``)
- reStyle = regexp.MustCompile(`