diff --git a/cmd/wfctl/multi_registry.go b/cmd/wfctl/multi_registry.go index b9f0b24a..d637234a 100644 --- a/cmd/wfctl/multi_registry.go +++ b/cmd/wfctl/multi_registry.go @@ -29,12 +29,12 @@ func NewMultiRegistry(cfg *RegistryConfig) *MultiRegistry { case "github": sources = append(sources, NewGitHubRegistrySource(sc)) case "static": - staticSrc, staticErr := NewStaticRegistrySource(sc) - if staticErr != nil { - fmt.Fprintf(os.Stderr, "warning: %v, skipping\n", staticErr) + src, err := NewStaticRegistrySource(sc) + if err != nil { + fmt.Fprintf(os.Stderr, "warning: %v, skipping\n", err) continue } - sources = append(sources, staticSrc) + sources = append(sources, src) default: // Skip unknown types fmt.Fprintf(os.Stderr, "warning: unknown registry type %q for %q, skipping\n", sc.Type, sc.Name) diff --git a/cmd/wfctl/plugin_install.go b/cmd/wfctl/plugin_install.go index 3c5f016d..f1fdd2de 100644 --- a/cmd/wfctl/plugin_install.go +++ b/cmd/wfctl/plugin_install.go @@ -81,19 +81,19 @@ func runPluginInstall(args []string) error { return err } - // Enforce mutual exclusivity: at most one of --url, --local, or positional args. - exclusiveCount := 0 + // Validate mutual exclusivity of install modes. + modes := 0 if *directURL != "" { - exclusiveCount++ + modes++ } if *localPath != "" { - exclusiveCount++ + modes++ } if fs.NArg() > 0 { - exclusiveCount++ + modes++ } - if exclusiveCount > 1 { - return fmt.Errorf("--url, --local, and are mutually exclusive; specify only one") + if modes > 1 { + return fmt.Errorf("specify only one of: name[@version], --url, or --local") } if *directURL != "" { @@ -165,13 +165,15 @@ func runPluginInstall(args []string) error { // Update .wfctl.yaml lockfile if name@version was provided. if _, ver := parseNameVersion(nameArg); ver != "" { - // Hash the installed binary (not the archive) so verifyInstalledChecksum matches. 
+ pluginName = normalizePluginName(pluginName) + binaryChecksum := "" binaryPath := filepath.Join(pluginDirVal, pluginName, pluginName) - sha, hashErr := hashFileSHA256(binaryPath) - if hashErr != nil { - fmt.Fprintf(os.Stderr, "warning: could not hash installed binary: %v\n", hashErr) + if cs, hashErr := hashFileSHA256(binaryPath); hashErr == nil { + binaryChecksum = cs + } else { + fmt.Fprintf(os.Stderr, "warning: could not hash binary %s: %v (lockfile will have no checksum)\n", binaryPath, hashErr) } - updateLockfileWithChecksum(pluginName, manifest.Version, manifest.Repository, sourceName, sha) + updateLockfileWithChecksum(pluginName, manifest.Version, manifest.Repository, sourceName, binaryChecksum) } return nil @@ -516,7 +518,7 @@ func installFromURL(url, pluginDir string) error { } if err := ensurePluginBinary(destDir, pluginName); err != nil { - return fmt.Errorf("normalize binary name: %w", err) + return fmt.Errorf("could not normalize binary name: %w", err) } // Validate the installed plugin (same checks as registry installs). @@ -536,6 +538,20 @@ func installFromURL(url, pluginDir string) error { return nil } +// hashFileSHA256 computes the SHA-256 hex digest of the file at path using streaming I/O. +func hashFileSHA256(path string) (string, error) { + f, err := os.Open(path) + if err != nil { + return "", fmt.Errorf("hash file %s: %w", path, err) + } + defer f.Close() + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return "", fmt.Errorf("hash file %s: %w", path, err) + } + return hex.EncodeToString(h.Sum(nil)), nil +} + // verifyInstalledChecksum reads the plugin binary and verifies its SHA-256 checksum. func verifyInstalledChecksum(pluginDir, pluginName, expectedSHA256 string) error { binaryPath := filepath.Join(pluginDir, pluginName) @@ -590,13 +606,11 @@ func installFromLocal(srcDir, pluginDir string) error { return err } - // Update lockfile with binary checksum for consistency with other install paths. 
- installedBinary := filepath.Join(destDir, pluginName) - sha, hashErr := hashFileSHA256(installedBinary) + binaryChecksum, hashErr := hashFileSHA256(filepath.Join(destDir, pluginName)) if hashErr != nil { - fmt.Fprintf(os.Stderr, "warning: could not hash installed binary: %v\n", hashErr) + fmt.Fprintf(os.Stderr, "warning: could not compute binary checksum: %v\n", hashErr) } - updateLockfileWithChecksum(pluginName, pj.Version, "", "", sha) + updateLockfileWithChecksum(pluginName, pj.Version, "", "", binaryChecksum) fmt.Printf("Installed %s v%s from %s to %s\n", pluginName, pj.Version, srcDir, destDir) return nil @@ -693,16 +707,6 @@ func parseGitHubRepoURL(repoURL string) (owner, repo string, err error) { return parts[1], repoName, nil } -// hashFileSHA256 returns the hex-encoded SHA-256 hash of the file at path. -func hashFileSHA256(path string) (string, error) { - data, err := os.ReadFile(path) - if err != nil { - return "", fmt.Errorf("hash file %s: %w", path, err) - } - h := sha256.Sum256(data) - return hex.EncodeToString(h[:]), nil -} - // extractTarGz decompresses and extracts a .tar.gz archive into destDir. // It guards against path traversal (zip-slip) attacks. 
func extractTarGz(data []byte, destDir string) error { diff --git a/cmd/wfctl/plugin_lockfile.go b/cmd/wfctl/plugin_lockfile.go index 95d28682..2569f34e 100644 --- a/cmd/wfctl/plugin_lockfile.go +++ b/cmd/wfctl/plugin_lockfile.go @@ -90,8 +90,10 @@ func installFromLockfile(pluginDir, cfgPath string) error { if entry.SHA256 != "" { pluginInstallDir := filepath.Join(pluginDir, name) if verifyErr := verifyInstalledChecksum(pluginInstallDir, name, entry.SHA256); verifyErr != nil { - fmt.Fprintf(os.Stderr, "CHECKSUM MISMATCH for %s: %v — removing plugin\n", name, verifyErr) - _ = os.RemoveAll(pluginInstallDir) + fmt.Fprintf(os.Stderr, "CHECKSUM MISMATCH for %s: %v\n", name, verifyErr) + if removeErr := os.RemoveAll(pluginInstallDir); removeErr != nil { + fmt.Fprintf(os.Stderr, "warning: could not remove plugin dir: %v\n", removeErr) + } failed = append(failed, name) continue } diff --git a/cmd/wfctl/registry_source.go b/cmd/wfctl/registry_source.go index bd11fe85..053bcf24 100644 --- a/cmd/wfctl/registry_source.go +++ b/cmd/wfctl/registry_source.go @@ -64,7 +64,7 @@ func (g *GitHubRegistrySource) ListPlugins() ([]string, error) { req.Header.Set("Authorization", "Bearer "+token) } - resp, err := http.DefaultClient.Do(req) + resp, err := registryHTTPClient.Do(req) if err != nil { return nil, fmt.Errorf("list registry plugins from %s: %w", g.name, err) } @@ -90,7 +90,11 @@ func (g *GitHubRegistrySource) FetchManifest(name string) (*RegistryManifest, er "https://raw.githubusercontent.com/%s/%s/%s/plugins/%s/manifest.json", g.owner, g.repo, g.branch, name, ) - resp, err := http.Get(url) //nolint:gosec // URL constructed from configured registry + req, err := http.NewRequest(http.MethodGet, url, nil) //nolint:gosec // URL constructed from configured registry + if err != nil { + return nil, fmt.Errorf("build request: %w", err) + } + resp, err := registryHTTPClient.Do(req) if err != nil { return nil, fmt.Errorf("fetch manifest for %q from %s: %w", name, g.name, err) } @@ -206,8 
+210,13 @@ func (s *StaticRegistrySource) SearchPlugins(query string) ([]PluginSearchResult strings.Contains(strings.ToLower(e.Name), q) || strings.Contains(strings.ToLower(e.Description), q) { results = append(results, PluginSearchResult{ - PluginSummary: PluginSummary(e), - Source: s.name, + PluginSummary: PluginSummary{ //nolint:staticcheck // S1016: explicit fields for clarity across struct tag boundaries + Name: e.Name, + Version: e.Version, + Description: e.Description, + Tier: e.Tier, + }, + Source: s.name, }) } } @@ -226,7 +235,8 @@ func (s *StaticRegistrySource) ListPlugins() ([]string, error) { return names, nil } -// registryHTTPClient is used for all registry HTTP requests with a reasonable timeout. +// registryHTTPClient is used for all registry HTTP requests (both GitHub and static +// sources) with a reasonable timeout to avoid hangs on network issues. var registryHTTPClient = &http.Client{Timeout: 30 * time.Second} // fetch performs an HTTP GET with optional auth token. diff --git a/config/config.go b/config/config.go index 724a0ee1..21cbd20d 100644 --- a/config/config.go +++ b/config/config.go @@ -419,6 +419,25 @@ func MergeApplicationConfig(appCfg *ApplicationConfig) (*WorkflowConfig, error) } combined.Modules = append(combined.Modules, wfCfg.Modules...) + + // Merge external plugin declarations — deduplicate by name (first definition wins). 
+ if wfCfg.Plugins != nil && len(wfCfg.Plugins.External) > 0 { + if combined.Plugins == nil { + combined.Plugins = &PluginsConfig{} + } + existingPlugins := make(map[string]struct{}, len(combined.Plugins.External)) + for _, ep := range combined.Plugins.External { + existingPlugins[ep.Name] = struct{}{} + } + for _, ep := range wfCfg.Plugins.External { + if _, exists := existingPlugins[ep.Name]; exists { + continue + } + combined.Plugins.External = append(combined.Plugins.External, ep) + existingPlugins[ep.Name] = struct{}{} + } + } + for k, v := range wfCfg.Workflows { if existing, exists := combined.Workflows[k]; exists { // If the existing value is nil (e.g. `http:` with no body in YAML), diff --git a/config/merge_test.go b/config/merge_test.go index 91fda4a1..8dc66a61 100644 --- a/config/merge_test.go +++ b/config/merge_test.go @@ -2,6 +2,7 @@ package config import ( "os" + "path/filepath" "reflect" "strings" "testing" @@ -533,3 +534,68 @@ func writeFileContent(path, content string) error { func contains(s, substr string) bool { return strings.Contains(s, substr) } + +func TestMergeApplicationConfig_PluginDedup(t *testing.T) { + dir := t.TempDir() + + // Workflow A declares plugin "foo" + wfA := filepath.Join(dir, "a.yaml") + if err := writeFileContent(wfA, ` +plugins: + external: + - name: foo + version: "1.0" + repository: "https://example.com/foo" +modules: [] +`); err != nil { + t.Fatalf("write a.yaml: %v", err) + } + + // Workflow B declares plugin "foo" (duplicate) and "bar" + wfB := filepath.Join(dir, "b.yaml") + if err := writeFileContent(wfB, ` +plugins: + external: + - name: foo + version: "2.0" + repository: "https://example.com/foo-v2" + - name: bar + version: "1.0" + repository: "https://example.com/bar" +modules: [] +`); err != nil { + t.Fatalf("write b.yaml: %v", err) + } + + appCfg := &ApplicationConfig{ + Application: ApplicationInfo{ + Workflows: []WorkflowRef{ + {File: wfA}, + {File: wfB}, + }, + }, + } + + cfg, err := 
MergeApplicationConfig(appCfg) + if err != nil { + t.Fatalf("MergeApplicationConfig: %v", err) + } + + if cfg.Plugins == nil { + t.Fatal("expected Plugins to be non-nil after merge") + } + if len(cfg.Plugins.External) != 2 { + t.Fatalf("expected 2 plugins (foo + bar), got %d", len(cfg.Plugins.External)) + } + + // First definition wins — foo should have version "1.0" from workflow A + var fooVer string + for _, p := range cfg.Plugins.External { + if p.Name == "foo" { + fooVer = p.Version + } + } + if fooVer != "1.0" { + t.Errorf("expected foo version 1.0 (first definition wins), got %s", fooVer) + } +} diff --git a/docs/PLUGIN_AUTHORING.md b/docs/PLUGIN_AUTHORING.md index b28eb759..69dd58c3 100644 --- a/docs/PLUGIN_AUTHORING.md +++ b/docs/PLUGIN_AUTHORING.md @@ -101,7 +101,17 @@ func (p *Provider) CreateModule(typeName, name string, config map[string]any) (s ## Plugin Manifest -The `plugin.json` declares what your plugin provides. The name should match what you passed to `wfctl plugin init`: +The `plugin.json` at the project root declares what your plugin provides. The `name` +field **must match the short name** you passed to `wfctl plugin init` (e.g. `my-plugin`). +This is the name used by the engine for plugin discovery, the `requires.plugins` dependency +check, and `wfctl plugin install`. + +> **Note:** The scaffolded `internal/provider.go` returns a manifest with the name prefixed +> as `workflow-plugin-` (e.g. `workflow-plugin-my-plugin`). That longer form is +> the canonical name used in the **public registry** (`workflow-registry`) and in release +> artifact URLs. When referencing your plugin in a workflow config's `requires.plugins` or +> `plugins.external`, use the same short name you put in `plugin.json` — the engine resolves +> both forms automatically. 
```json { diff --git a/module/pipeline_step_workflow_call.go b/module/pipeline_step_workflow_call.go index 9ceeca0f..ed48de3b 100644 --- a/module/pipeline_step_workflow_call.go +++ b/module/pipeline_step_workflow_call.go @@ -31,6 +31,7 @@ type WorkflowCallStep struct { name string workflow string // target pipeline name mode WorkflowCallMode // "sync" (default) or "async" + stopPipeline bool // if true, stop parent pipeline after this step completes inputMapping map[string]string outputMapping map[string]string timeout time.Duration @@ -86,6 +87,10 @@ func NewWorkflowCallStepFactory(lookup PipelineLookupFn) StepFactory { } } + if v, ok := cfg["stop_pipeline"].(bool); ok { + step.stopPipeline = v + } + return step, nil } } @@ -101,9 +106,13 @@ func (s *WorkflowCallStep) Execute(ctx context.Context, pc *PipelineContext) (*S if s.lookup == nil { return nil, fmt.Errorf("workflow_call step %q: no pipeline lookup function configured", s.name) } - target, ok := s.lookup(s.workflow) + workflowName, resolveErr := s.tmpl.Resolve(s.workflow, pc) + if resolveErr != nil { + return nil, fmt.Errorf("workflow_call step %q: failed to resolve workflow name %q: %w", s.name, s.workflow, resolveErr) + } + target, ok := s.lookup(workflowName) if !ok { - return nil, fmt.Errorf("workflow_call step %q: pipeline %q not found — ensure it is defined in the application config", s.name, s.workflow) + return nil, fmt.Errorf("workflow_call step %q: pipeline %q not found — ensure it is defined in the application config", s.name, workflowName) } // Build trigger data from input mapping or fall back to passing all current data @@ -130,7 +139,7 @@ func (s *WorkflowCallStep) Execute(ctx context.Context, pc *PipelineContext) (*S defer cancel() _, _ = target.Execute(asyncCtx, data) //nolint:errcheck }(ctx, triggerData) - return &StepResult{Output: map[string]any{"workflow": s.workflow, "mode": "async", "dispatched": true}}, nil + return &StepResult{Output: map[string]any{"workflow": workflowName, 
"mode": "async", "dispatched": true}, Stop: s.stopPipeline}, nil } // Sync mode: apply timeout and wait for result @@ -139,7 +148,7 @@ func (s *WorkflowCallStep) Execute(ctx context.Context, pc *PipelineContext) (*S childCtx, err := target.Execute(syncCtx, triggerData) if err != nil { - return nil, fmt.Errorf("workflow_call step %q: workflow %q failed: %w", s.name, s.workflow, err) + return nil, fmt.Errorf("workflow_call step %q: workflow %q failed: %w", s.name, workflowName, err) } // Map outputs back to parent context @@ -153,7 +162,7 @@ func (s *WorkflowCallStep) Execute(ctx context.Context, pc *PipelineContext) (*S output["result"] = childCtx.Current } - return &StepResult{Output: output}, nil + return &StepResult{Output: output, Stop: s.stopPipeline}, nil } // Ensure interface satisfaction at compile time. diff --git a/plugin/autofetch.go b/plugin/autofetch.go index d34f9701..5afdfd66 100644 --- a/plugin/autofetch.go +++ b/plugin/autofetch.go @@ -13,13 +13,24 @@ import ( // It shells out to wfctl for the actual download/install logic. // version is an optional semver constraint (e.g., ">=0.1.0" or "0.2.0"). func AutoFetchPlugin(pluginName, version, pluginDir string) error { + return autoFetchPlugin(pluginName, version, pluginDir, nil) +} + +// autoFetchPlugin is the internal implementation that accepts an optional structured +// logger. When logger is non-nil, status messages are emitted via slog instead of +// writing directly to stderr. +func autoFetchPlugin(pluginName, version, pluginDir string, logger *slog.Logger) error { // Check both pluginName and workflow-plugin- (or the short form // if pluginName already has the "workflow-plugin-" prefix). 
if isPluginInstalled(pluginName, pluginDir) { return nil } - fmt.Fprintf(os.Stderr, "[auto-fetch] Plugin %q not found locally, fetching from registry...\n", pluginName) + if logger != nil { + logger.Info("plugin not found locally, fetching from registry", "plugin", pluginName) + } else { + fmt.Fprintf(os.Stderr, "[auto-fetch] Plugin %q not found locally, fetching from registry...\n", pluginName) + } // Build install argument with version if specified. installArg := pluginName @@ -27,7 +38,12 @@ func AutoFetchPlugin(pluginName, version, pluginDir string) error { stripped, ok := stripVersionConstraint(version) if !ok { // Complex constraint (e.g. ">=0.1.0,<0.2.0") — install latest instead. - fmt.Fprintf(os.Stderr, "[auto-fetch] Version constraint %q is complex; installing latest version of %q\n", version, pluginName) + if logger != nil { + logger.Warn("version constraint is complex; installing latest version", + "plugin", pluginName, "constraint", version) + } else { + fmt.Fprintf(os.Stderr, "[auto-fetch] Version constraint %q is complex; installing latest version of %q\n", version, pluginName) + } stripped = "" } if stripped != "" { @@ -118,7 +134,20 @@ func AutoFetchDeclaredPlugins(decls []AutoFetchDecl, pluginDir string, logger *s return } - // Check wfctl availability once. + // Scan for at least one AutoFetch=true entry before checking wfctl availability. + // This avoids a misleading warning on startup when no plugins require auto-fetch. + hasAutoFetch := false + for _, d := range decls { + if d.AutoFetch { + hasAutoFetch = true + break + } + } + if !hasAutoFetch { + return + } + + // Check wfctl availability once — only needed when auto-fetch is actually requested. 
if _, err := exec.LookPath("wfctl"); err != nil { if logger != nil { logger.Warn("wfctl not found on PATH; skipping auto-fetch for declared plugins", @@ -134,7 +163,7 @@ func AutoFetchDeclaredPlugins(decls []AutoFetchDecl, pluginDir string, logger *s } // Record whether the plugin was already present before fetching. alreadyPresent := isPluginInstalled(d.Name, pluginDir) - if err := AutoFetchPlugin(d.Name, d.Version, pluginDir); err != nil { + if err := autoFetchPlugin(d.Name, d.Version, pluginDir, logger); err != nil { if logger != nil { logger.Warn("auto-fetch failed for plugin", "plugin", d.Name, "error", err) } diff --git a/plugin/autofetch_test.go b/plugin/autofetch_test.go index 419ea220..aa9d4d4e 100644 --- a/plugin/autofetch_test.go +++ b/plugin/autofetch_test.go @@ -81,11 +81,9 @@ func TestStripVersionConstraint(t *testing.T) { } } -// TestAutoFetchPlugin_CorrectArgs verifies that AutoFetchPlugin constructs the -// expected wfctl arguments. We do this by ensuring the function short-circuits -// when the plugin is already installed (not executing wfctl), which confirms -// the plugin.json check is evaluated before any exec.Command call. -func TestAutoFetchPlugin_CorrectArgs(t *testing.T) { +// TestAutoFetchPlugin_SkipsWhenExists verifies that AutoFetchPlugin returns nil +// immediately when the plugin is already installed, without invoking wfctl. 
+func TestAutoFetchPlugin_SkipsWhenExists(t *testing.T) { dir := t.TempDir() pluginDir := filepath.Join(dir, "plugins") pluginName := "test-plugin" diff --git a/plugin/integrity.go b/plugin/integrity.go index 2687bffd..ecdd1218 100644 --- a/plugin/integrity.go +++ b/plugin/integrity.go @@ -4,6 +4,7 @@ import ( "crypto/sha256" "encoding/hex" "fmt" + "io" "os" "path/filepath" "strings" @@ -49,13 +50,17 @@ func VerifyPluginIntegrity(pluginDir, pluginName string) error { } binaryPath := filepath.Join(pluginDir, pluginName, pluginName) - binaryData, err := os.ReadFile(binaryPath) + f, err := os.Open(binaryPath) if err != nil { - return fmt.Errorf("read plugin binary %s: %w", binaryPath, err) + return fmt.Errorf("open plugin binary %s: %w", binaryPath, err) } + defer f.Close() - h := sha256.Sum256(binaryData) - got := hex.EncodeToString(h[:]) + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return fmt.Errorf("hash plugin binary %s: %w", binaryPath, err) + } + got := hex.EncodeToString(h.Sum(nil)) if !strings.EqualFold(got, entry.SHA256) { return fmt.Errorf("plugin %q integrity check failed: binary checksum %s does not match lockfile %s", pluginName, got, entry.SHA256) } diff --git a/plugin/integrity_test.go b/plugin/integrity_test.go index ac3fcb30..34e1903c 100644 --- a/plugin/integrity_test.go +++ b/plugin/integrity_test.go @@ -57,6 +57,13 @@ func TestVerifyPluginIntegrity_UnreadableLockfile(t *testing.T) { t.Fatalf("write lockfile: %v", err) } + // POSIX permission bits are not enforced on all platforms (e.g. Windows, or + // when running as root). Verify the file is actually unreadable before + // asserting fail-closed behaviour. + if _, err := os.ReadFile(p); err == nil { + t.Skip("lockfile is readable despite 0000 permissions (platform or root); skipping test") + } + err := VerifyPluginIntegrity(filepath.Join(dir, "plugins"), "my-plugin") if err == nil { t.Error("expected error when lockfile is unreadable, got nil (fail-open)")