From 02711e5cff91595e0fba43f7d0f46c1690a9d7da Mon Sep 17 00:00:00 2001 From: Jon Langevin Date: Wed, 25 Feb 2026 02:14:51 -0500 Subject: [PATCH] feat(wfctl): add template validation, contract testing, and compat checking Adds three new wfctl commands to ensure templates remain valid across engine releases and that application configs don't introduce breaking changes: - `wfctl template validate`: validates project templates and config files against the engine's known module/step types, dependency resolution, trigger types, and config field warnings (strict mode available) - `wfctl contract test`: generates API contract snapshots from configs (endpoints, modules, steps, events) and compares against baselines to detect breaking changes (removed endpoints, added auth requirements) - `wfctl compat check`: verifies all module and step types in a config are available in the current engine version Also adds `cmd/wfctl/type_registry.go` with a static registry of all ~50 module types and ~40 step types extracted from plugin packages. 
Co-Authored-By: Claude Opus 4.6 --- cmd/wfctl/compat.go | 229 ++++++++++ cmd/wfctl/compat_test.go | 243 ++++++++++ cmd/wfctl/contract.go | 613 +++++++++++++++++++++++++ cmd/wfctl/contract_test.go | 562 +++++++++++++++++++++++ cmd/wfctl/main.go | 6 + cmd/wfctl/template_validate.go | 531 ++++++++++++++++++++++ cmd/wfctl/template_validate_test.go | 237 ++++++++++ cmd/wfctl/type_registry.go | 663 ++++++++++++++++++++++++++++ cmd/wfctl/type_registry_test.go | 141 ++++++ 9 files changed, 3225 insertions(+) create mode 100644 cmd/wfctl/compat.go create mode 100644 cmd/wfctl/compat_test.go create mode 100644 cmd/wfctl/contract.go create mode 100644 cmd/wfctl/contract_test.go create mode 100644 cmd/wfctl/template_validate.go create mode 100644 cmd/wfctl/template_validate_test.go create mode 100644 cmd/wfctl/type_registry.go create mode 100644 cmd/wfctl/type_registry_test.go diff --git a/cmd/wfctl/compat.go b/cmd/wfctl/compat.go new file mode 100644 index 00000000..4d8421ca --- /dev/null +++ b/cmd/wfctl/compat.go @@ -0,0 +1,229 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "os" + "strings" + + "github.com/GoCodeAlone/workflow/config" +) + +// CompatibilityInfo describes the compatibility requirements and test history of a config. +type CompatibilityInfo struct { + MinEngineVersion string `json:"minEngineVersion"` + MaxEngineVersion string `json:"maxEngineVersion,omitempty"` + RequiredSteps []string `json:"requiredSteps"` + RequiredModules []string `json:"requiredModules"` + TestedVersions []string `json:"testedVersions"` +} + +// compatCheckResult holds the result of a compatibility check. +type compatCheckResult struct { + EngineVersion string `json:"engineVersion"` + RequiredModules []compatItem `json:"requiredModules"` + RequiredSteps []compatItem `json:"requiredSteps"` + Compatible bool `json:"compatible"` + Issues []string `json:"issues,omitempty"` +} + +// compatItem represents a single required type and whether it's available. 
+type compatItem struct { + Type string `json:"type"` + Available bool `json:"available"` +} + +// runCompat dispatches compat subcommands. +func runCompat(args []string) error { + if len(args) < 1 { + return compatUsage() + } + switch args[0] { + case "check": + return runCompatCheck(args[1:]) + default: + return compatUsage() + } +} + +func compatUsage() error { + fmt.Fprintf(os.Stderr, `Usage: wfctl compat [options] + +Subcommands: + check Check config compatibility with the current engine version + +Run 'wfctl compat check -h' for details. +`) + return fmt.Errorf("compat subcommand is required") +} + +// runCompatCheck checks a config file for compatibility with the current engine version. +func runCompatCheck(args []string) error { + fs2 := flag.NewFlagSet("compat check", flag.ContinueOnError) + format := fs2.String("format", "text", "Output format: text or json") + fs2.Usage = func() { + fmt.Fprintf(fs2.Output(), `Usage: wfctl compat check [options] + +Check whether a workflow config is compatible with the current engine version. +Reports which module and step types are available in the engine. + +Options: +`) + fs2.PrintDefaults() + } + if err := fs2.Parse(args); err != nil { + return err + } + if fs2.NArg() < 1 { + fs2.Usage() + return fmt.Errorf("config.yaml path is required") + } + + configPath := fs2.Arg(0) + cfg, err := config.LoadFromFile(configPath) + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + result := checkCompatibility(cfg) + + switch strings.ToLower(*format) { + case "json": + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(result) + default: + printCompatResult(result) + } + + if !result.Compatible { + return fmt.Errorf("compatibility check failed: %d issue(s)", len(result.Issues)) + } + return nil +} + +// checkCompatibility checks a config against the current engine's known types. 
+func checkCompatibility(cfg *config.WorkflowConfig) *compatCheckResult { + knownModules := KnownModuleTypes() + knownSteps := KnownStepTypes() + + result := &compatCheckResult{ + EngineVersion: version, + Compatible: true, + } + + // Check module types + for _, mod := range cfg.Modules { + item := compatItem{ + Type: mod.Type, + } + if _, ok := knownModules[mod.Type]; ok { + item.Available = true + } else { + item.Available = false + result.Compatible = false + result.Issues = append(result.Issues, fmt.Sprintf("module type %q is not available in this engine version", mod.Type)) + } + result.RequiredModules = append(result.RequiredModules, item) + } + + // Deduplicate modules + result.RequiredModules = deduplicateCompatItems(result.RequiredModules) + + // Check step types from pipelines + stepSet := make(map[string]bool) + for _, pipelineRaw := range cfg.Pipelines { + pipelineMap, ok := pipelineRaw.(map[string]any) + if !ok { + continue + } + if stepsRaw, ok := pipelineMap["steps"].([]any); ok { + for _, stepRaw := range stepsRaw { + if stepMap, ok := stepRaw.(map[string]any); ok { + if stepType, ok := stepMap["type"].(string); ok && stepType != "" { + stepSet[stepType] = true + } + } + } + } + } + + for stepType := range stepSet { + item := compatItem{ + Type: stepType, + } + if _, ok := knownSteps[stepType]; ok { + item.Available = true + } else { + item.Available = false + result.Compatible = false + result.Issues = append(result.Issues, fmt.Sprintf("step type %q is not available in this engine version", stepType)) + } + result.RequiredSteps = append(result.RequiredSteps, item) + } + + // Sort for determinism + sortCompatItems(result.RequiredModules) + sortCompatItems(result.RequiredSteps) + + return result +} + +// deduplicateCompatItems removes duplicate items, keeping the first occurrence. 
+func deduplicateCompatItems(items []compatItem) []compatItem { + seen := make(map[string]bool) + var out []compatItem + for _, item := range items { + if !seen[item.Type] { + seen[item.Type] = true + out = append(out, item) + } + } + return out +} + +// sortCompatItems sorts compat items by type name. +func sortCompatItems(items []compatItem) { + for i := 1; i < len(items); i++ { + for j := i; j > 0 && items[j].Type < items[j-1].Type; j-- { + items[j], items[j-1] = items[j-1], items[j] + } + } +} + +// printCompatResult prints a human-readable compatibility check result. +func printCompatResult(r *compatCheckResult) { + fmt.Printf("Engine version: %s\n", r.EngineVersion) + + if len(r.RequiredModules) > 0 { + fmt.Printf("\nRequired modules:\n") + for _, item := range r.RequiredModules { + if item.Available { + fmt.Printf(" %s +\n", item.Type) + } else { + fmt.Printf(" %s (NOT AVAILABLE)\n", item.Type) + } + } + } + + if len(r.RequiredSteps) > 0 { + fmt.Printf("\nRequired steps:\n") + for _, item := range r.RequiredSteps { + if item.Available { + fmt.Printf(" %s +\n", item.Type) + } else { + fmt.Printf(" %s (NOT AVAILABLE)\n", item.Type) + } + } + } + + if r.Compatible { + fmt.Println("\nCompatibility: PASS") + } else { + fmt.Printf("\nCompatibility: FAIL (%d issue(s))\n", len(r.Issues)) + for _, issue := range r.Issues { + fmt.Printf(" - %s\n", issue) + } + } +} diff --git a/cmd/wfctl/compat_test.go b/cmd/wfctl/compat_test.go new file mode 100644 index 00000000..d064ee2b --- /dev/null +++ b/cmd/wfctl/compat_test.go @@ -0,0 +1,243 @@ +package main + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/GoCodeAlone/workflow/config" +) + +func TestRunCompatMissingSubcommand(t *testing.T) { + err := runCompat([]string{}) + if err == nil { + t.Fatal("expected error when no subcommand given") + } +} + +func TestRunCompatUnknownSubcommand(t *testing.T) { + err := runCompat([]string{"unknown"}) + if err == nil { + t.Fatal("expected error for unknown 
subcommand") + } +} + +func TestRunCompatCheckMissingConfig(t *testing.T) { + err := runCompatCheck([]string{}) + if err == nil { + t.Fatal("expected error when no config given") + } +} + +func TestRunCompatCheckValidConfig(t *testing.T) { + dir := t.TempDir() + configContent := ` +modules: + - name: server + type: http.server + config: + address: ":8080" + - name: router + type: http.router + - name: auth + type: auth.jwt + config: + secret: test-secret +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + if err := runCompatCheck([]string{configPath}); err != nil { + t.Fatalf("expected compatible config to pass, got: %v", err) + } +} + +func TestRunCompatCheckUnknownModule(t *testing.T) { + dir := t.TempDir() + configContent := ` +modules: + - name: unknown-thing + type: not.a.real.module.type +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + err := runCompatCheck([]string{configPath}) + if err == nil { + t.Fatal("expected error for unknown module type") + } + if !strings.Contains(err.Error(), "compatibility check failed") { + t.Errorf("expected compatibility check failed, got: %v", err) + } +} + +func TestRunCompatCheckJSON(t *testing.T) { + dir := t.TempDir() + configContent := ` +modules: + - name: server + type: http.server +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + if err := runCompatCheck([]string{"-format", "json", configPath}); err != nil { + t.Fatalf("expected JSON output to work, got: %v", err) + } +} + +func TestCheckCompatibilityAllKnown(t *testing.T) { + cfg := &config.WorkflowConfig{ + Modules: []config.ModuleConfig{ + {Name: "server", 
Type: "http.server"}, + {Name: "db", Type: "storage.sqlite"}, + {Name: "auth", Type: "auth.jwt"}, + }, + } + + result := checkCompatibility(cfg) + if !result.Compatible { + t.Errorf("expected compatible config, got issues: %v", result.Issues) + } + if len(result.RequiredModules) != 3 { + t.Errorf("expected 3 required modules, got %d", len(result.RequiredModules)) + } + for _, m := range result.RequiredModules { + if !m.Available { + t.Errorf("expected module %q to be available", m.Type) + } + } +} + +func TestCheckCompatibilityUnknownModule(t *testing.T) { + cfg := &config.WorkflowConfig{ + Modules: []config.ModuleConfig{ + {Name: "server", Type: "http.server"}, + {Name: "unknown", Type: "future.engine.feature"}, + }, + } + + result := checkCompatibility(cfg) + if result.Compatible { + t.Error("expected incompatible result for unknown module type") + } + if len(result.Issues) == 0 { + t.Error("expected issues to be populated") + } + + // Check that the unknown module appears as not available + found := false + for _, m := range result.RequiredModules { + if m.Type == "future.engine.feature" && !m.Available { + found = true + } + } + if !found { + t.Error("expected future.engine.feature to be marked as not available") + } +} + +func TestCheckCompatibilityWithSteps(t *testing.T) { + cfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "my-pipeline": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/test", + "method": "GET", + }, + }, + "steps": []any{ + map[string]any{"type": "step.validate"}, + map[string]any{"type": "step.json_response"}, + map[string]any{"type": "step.db_query"}, + }, + }, + }, + } + + result := checkCompatibility(cfg) + if !result.Compatible { + t.Errorf("expected compatible config, got issues: %v", result.Issues) + } + if len(result.RequiredSteps) != 3 { + t.Errorf("expected 3 required steps, got %d", len(result.RequiredSteps)) + } +} + +func TestCheckCompatibilityUnknownStep(t 
*testing.T) { + cfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "my-pipeline": map[string]any{ + "steps": []any{ + map[string]any{"type": "step.future_step_type"}, + }, + }, + }, + } + + result := checkCompatibility(cfg) + if result.Compatible { + t.Error("expected incompatible result for unknown step type") + } +} + +func TestCheckCompatibilityEngineVersion(t *testing.T) { + cfg := &config.WorkflowConfig{} + result := checkCompatibility(cfg) + if result.EngineVersion == "" { + t.Error("expected engine version to be set") + } +} + +func TestCheckCompatibilityDeduplicate(t *testing.T) { + // Same module type used multiple times + cfg := &config.WorkflowConfig{ + Modules: []config.ModuleConfig{ + {Name: "server1", Type: "http.server"}, + {Name: "server2", Type: "http.server"}, + {Name: "db", Type: "storage.sqlite"}, + }, + } + + result := checkCompatibility(cfg) + // After deduplication, should have 2 unique types + if len(result.RequiredModules) != 2 { + t.Errorf("expected 2 deduplicated module types, got %d", len(result.RequiredModules)) + } +} + +func TestSortCompatItems(t *testing.T) { + items := []compatItem{ + {Type: "z.module", Available: true}, + {Type: "a.module", Available: true}, + {Type: "m.module", Available: false}, + } + sortCompatItems(items) + if items[0].Type != "a.module" { + t.Errorf("expected first item to be a.module, got %q", items[0].Type) + } + if items[2].Type != "z.module" { + t.Errorf("expected last item to be z.module, got %q", items[2].Type) + } +} + +func TestDeduplicateCompatItems(t *testing.T) { + items := []compatItem{ + {Type: "http.server", Available: true}, + {Type: "http.router", Available: true}, + {Type: "http.server", Available: true}, + } + result := deduplicateCompatItems(items) + if len(result) != 2 { + t.Errorf("expected 2 unique items, got %d", len(result)) + } +} diff --git a/cmd/wfctl/contract.go b/cmd/wfctl/contract.go new file mode 100644 index 00000000..18366a88 --- /dev/null +++ b/cmd/wfctl/contract.go 
@@ -0,0 +1,613 @@ +package main + +import ( + "crypto/sha256" + "encoding/json" + "flag" + "fmt" + "os" + "sort" + "strings" + "time" + + "github.com/GoCodeAlone/workflow/config" +) + +// Contract is a snapshot of what a workflow application config exposes. +type Contract struct { + Version string `json:"version"` + ConfigHash string `json:"configHash"` + GeneratedAt string `json:"generatedAt"` + Endpoints []EndpointContract `json:"endpoints"` + Modules []ModuleContract `json:"modules"` + Steps []string `json:"steps"` + Events []EventContract `json:"events"` +} + +// EndpointContract describes an HTTP endpoint in the contract. +type EndpointContract struct { + Method string `json:"method"` + Path string `json:"path"` + AuthRequired bool `json:"authRequired"` + Pipeline string `json:"pipeline"` +} + +// ModuleContract describes a module in the contract. +type ModuleContract struct { + Name string `json:"name"` + Type string `json:"type"` + Stateful bool `json:"stateful"` +} + +// EventContract describes an event topic in the contract. +type EventContract struct { + Topic string `json:"topic"` + Direction string `json:"direction"` // publish or subscribe + Pipeline string `json:"pipeline"` +} + +// contractComparison holds the result of comparing two contracts. 
+type contractComparison struct { + BaseVersion string + CurrentVersion string + Endpoints []endpointChange + Modules []moduleChange + Events []eventChange + BreakingCount int +} + +type changeType string + +const ( + changeAdded changeType = "ADDED" + changeRemoved changeType = "REMOVED" + changeChanged changeType = "CHANGED" + changeUnchanged changeType = "UNCHANGED" +) + +type endpointChange struct { + Method string + Path string + Pipeline string + Change changeType + Detail string + IsBreaking bool +} + +type moduleChange struct { + Name string + Type string + Change changeType +} + +type eventChange struct { + Topic string + Direction string + Pipeline string + Change changeType +} + +// runContract dispatches contract subcommands. +func runContract(args []string) error { + if len(args) < 1 { + return contractUsage() + } + switch args[0] { + case "test": + return runContractTest(args[1:]) + default: + return contractUsage() + } +} + +func contractUsage() error { + fmt.Fprintf(os.Stderr, `Usage: wfctl contract [options] + +Subcommands: + test Generate a contract from a config and optionally compare to a baseline + +Run 'wfctl contract test -h' for details. +`) + return fmt.Errorf("contract subcommand is required") +} + +// runContractTest generates a contract and optionally compares it to a baseline. +func runContractTest(args []string) error { + fs2 := flag.NewFlagSet("contract test", flag.ContinueOnError) + baseline := fs2.String("baseline", "", "Previous version's contract file for comparison") + output := fs2.String("output", "", "Write contract file to this path") + format := fs2.String("format", "text", "Output format: text or json") + fs2.Usage = func() { + fmt.Fprintf(fs2.Output(), `Usage: wfctl contract test [options] + +Generate a contract snapshot from a workflow config file. +Optionally compare against a baseline contract to detect breaking changes. 
+ +Options: +`) + fs2.PrintDefaults() + } + if err := fs2.Parse(args); err != nil { + return err + } + if fs2.NArg() < 1 { + fs2.Usage() + return fmt.Errorf("config.yaml path is required") + } + + configPath := fs2.Arg(0) + cfg, err := config.LoadFromFile(configPath) + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + contract := generateContract(cfg) + + // Write contract to output file if requested + if *output != "" { + f, err := os.Create(*output) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer f.Close() + enc := json.NewEncoder(f) + enc.SetIndent("", " ") + if err := enc.Encode(contract); err != nil { + return fmt.Errorf("failed to write contract: %w", err) + } + fmt.Fprintf(os.Stderr, "Contract written to %s\n", *output) + } + + // Compare to baseline if provided + if *baseline != "" { + baseData, err := os.ReadFile(*baseline) + if err != nil { + return fmt.Errorf("failed to read baseline: %w", err) + } + var baseContract Contract + if err := json.Unmarshal(baseData, &baseContract); err != nil { + return fmt.Errorf("failed to parse baseline contract: %w", err) + } + + comparison := compareContracts(&baseContract, contract) + + switch strings.ToLower(*format) { + case "json": + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(comparison) + default: + printContractComparison(comparison) + } + + if comparison.BreakingCount > 0 { + return fmt.Errorf("%d breaking change(s) detected", comparison.BreakingCount) + } + return nil + } + + // Print contract summary + switch strings.ToLower(*format) { + case "json": + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(contract) + default: + printContract(contract) + } + + return nil +} + +// generateContract builds a Contract from a WorkflowConfig. 
+func generateContract(cfg *config.WorkflowConfig) *Contract { + knownModules := KnownModuleTypes() + + // Hash the config for version tracking + cfgData, _ := json.Marshal(cfg) + hash := fmt.Sprintf("%x", sha256.Sum256(cfgData))[:16] + + contract := &Contract{ + Version: "1.0", + ConfigHash: hash, + GeneratedAt: time.Now().UTC().Format(time.RFC3339), + } + + // Extract modules + moduleSet := make(map[string]bool) + for _, mod := range cfg.Modules { + moduleSet[mod.Name] = true + info, isKnown := knownModules[mod.Type] + mc := ModuleContract{ + Name: mod.Name, + Type: mod.Type, + } + if isKnown { + mc.Stateful = info.Stateful + } + contract.Modules = append(contract.Modules, mc) + } + + // Sort modules for determinism + sort.Slice(contract.Modules, func(i, j int) bool { + return contract.Modules[i].Name < contract.Modules[j].Name + }) + + // Extract pipeline endpoints, steps, and events + stepSet := make(map[string]bool) + for pipelineName, pipelineRaw := range cfg.Pipelines { + pipelineMap, ok := pipelineRaw.(map[string]any) + if !ok { + continue + } + + // Extract trigger info for endpoints + if triggerRaw, ok := pipelineMap["trigger"]; ok { + if triggerMap, ok := triggerRaw.(map[string]any); ok { + triggerType, _ := triggerMap["type"].(string) + if triggerType == "http" { + triggerCfg, _ := triggerMap["config"].(map[string]any) + if triggerCfg != nil { + path, _ := triggerCfg["path"].(string) + method, _ := triggerCfg["method"].(string) + if path != "" && method != "" { + ep := EndpointContract{ + Method: strings.ToUpper(method), + Path: path, + Pipeline: pipelineName, + } + // Check for auth in steps + if stepsRaw, ok := pipelineMap["steps"].([]any); ok { + for _, stepRaw := range stepsRaw { + if stepMap, ok := stepRaw.(map[string]any); ok { + stepType, _ := stepMap["type"].(string) + if stepType == "step.auth_required" { + ep.AuthRequired = true + } + } + } + } + contract.Endpoints = append(contract.Endpoints, ep) + } + } + } + } + } + + // Extract steps + if 
stepsRaw, ok := pipelineMap["steps"].([]any); ok { + for _, stepRaw := range stepsRaw { + if stepMap, ok := stepRaw.(map[string]any); ok { + stepType, _ := stepMap["type"].(string) + if stepType != "" { + stepSet[stepType] = true + } + // Extract event publishes + if stepType == "step.publish" { + if stepCfg, ok := stepMap["config"].(map[string]any); ok { + if topic, ok := stepCfg["topic"].(string); ok && topic != "" { + contract.Events = append(contract.Events, EventContract{ + Topic: topic, + Direction: "publish", + Pipeline: pipelineName, + }) + } + } + } + } + } + } + + // Extract event subscriptions from trigger + if triggerRaw, ok := pipelineMap["trigger"]; ok { + if triggerMap, ok := triggerRaw.(map[string]any); ok { + triggerType, _ := triggerMap["type"].(string) + if triggerType == "event" { + if triggerCfg, ok := triggerMap["config"].(map[string]any); ok { + if topic, ok := triggerCfg["topic"].(string); ok && topic != "" { + contract.Events = append(contract.Events, EventContract{ + Topic: topic, + Direction: "subscribe", + Pipeline: pipelineName, + }) + } + } + } + } + } + } + + // Sort endpoints and events for determinism + sort.Slice(contract.Endpoints, func(i, j int) bool { + if contract.Endpoints[i].Path != contract.Endpoints[j].Path { + return contract.Endpoints[i].Path < contract.Endpoints[j].Path + } + return contract.Endpoints[i].Method < contract.Endpoints[j].Method + }) + + // Collect steps as sorted slice + for st := range stepSet { + contract.Steps = append(contract.Steps, st) + } + sort.Strings(contract.Steps) + + // Sort events + sort.Slice(contract.Events, func(i, j int) bool { + if contract.Events[i].Topic != contract.Events[j].Topic { + return contract.Events[i].Topic < contract.Events[j].Topic + } + return contract.Events[i].Direction < contract.Events[j].Direction + }) + + return contract +} + +// compareContracts compares a baseline contract to the current one. 
+func compareContracts(base, current *Contract) *contractComparison { + comp := &contractComparison{ + BaseVersion: base.Version, + CurrentVersion: current.Version, + } + + // Compare endpoints + baseEPs := make(map[string]EndpointContract) + for _, ep := range base.Endpoints { + key := ep.Method + " " + ep.Path + baseEPs[key] = ep + } + currentEPs := make(map[string]EndpointContract) + for _, ep := range current.Endpoints { + key := ep.Method + " " + ep.Path + currentEPs[key] = ep + } + + // Check base endpoints + for key, baseEP := range baseEPs { + if currentEP, exists := currentEPs[key]; exists { + // Check for breaking changes + if baseEP.AuthRequired != currentEP.AuthRequired && !baseEP.AuthRequired { + // Auth was added to a public endpoint + comp.Endpoints = append(comp.Endpoints, endpointChange{ + Method: baseEP.Method, + Path: baseEP.Path, + Pipeline: currentEP.Pipeline, + Change: changeChanged, + Detail: "auth requirement added (clients without tokens will get 401)", + IsBreaking: true, + }) + comp.BreakingCount++ + } else { + comp.Endpoints = append(comp.Endpoints, endpointChange{ + Method: baseEP.Method, + Path: baseEP.Path, + Pipeline: currentEP.Pipeline, + Change: changeUnchanged, + }) + } + } else { + // Endpoint was removed - BREAKING + comp.Endpoints = append(comp.Endpoints, endpointChange{ + Method: baseEP.Method, + Path: baseEP.Path, + Pipeline: baseEP.Pipeline, + Change: changeRemoved, + Detail: "endpoint removed (clients calling this will get 404)", + IsBreaking: true, + }) + comp.BreakingCount++ + } + delete(currentEPs, key) + } + + // Check added endpoints (non-breaking) + for _, currentEP := range currentEPs { + comp.Endpoints = append(comp.Endpoints, endpointChange{ + Method: currentEP.Method, + Path: currentEP.Path, + Pipeline: currentEP.Pipeline, + Change: changeAdded, + IsBreaking: false, + }) + } + + // Sort endpoint changes for stable output + sort.Slice(comp.Endpoints, func(i, j int) bool { + if comp.Endpoints[i].Path != 
comp.Endpoints[j].Path { + return comp.Endpoints[i].Path < comp.Endpoints[j].Path + } + return comp.Endpoints[i].Method < comp.Endpoints[j].Method + }) + + // Compare modules + baseModules := make(map[string]ModuleContract) + for _, m := range base.Modules { + baseModules[m.Name] = m + } + currentModules := make(map[string]ModuleContract) + for _, m := range current.Modules { + currentModules[m.Name] = m + } + + for name, baseMod := range baseModules { + if _, exists := currentModules[name]; exists { + comp.Modules = append(comp.Modules, moduleChange{ + Name: name, + Type: baseMod.Type, + Change: changeUnchanged, + }) + } else { + comp.Modules = append(comp.Modules, moduleChange{ + Name: name, + Type: baseMod.Type, + Change: changeRemoved, + }) + } + delete(currentModules, name) + } + for name, currentMod := range currentModules { + comp.Modules = append(comp.Modules, moduleChange{ + Name: name, + Type: currentMod.Type, + Change: changeAdded, + }) + } + sort.Slice(comp.Modules, func(i, j int) bool { + return comp.Modules[i].Name < comp.Modules[j].Name + }) + + // Compare events + baseEvents := make(map[string]EventContract) + for _, e := range base.Events { + key := e.Direction + ":" + e.Topic + baseEvents[key] = e + } + currentEvents := make(map[string]EventContract) + for _, e := range current.Events { + key := e.Direction + ":" + e.Topic + currentEvents[key] = e + } + + for key, baseEv := range baseEvents { + if _, exists := currentEvents[key]; exists { + comp.Events = append(comp.Events, eventChange{ + Topic: baseEv.Topic, + Direction: baseEv.Direction, + Pipeline: baseEv.Pipeline, + Change: changeUnchanged, + }) + } else { + comp.Events = append(comp.Events, eventChange{ + Topic: baseEv.Topic, + Direction: baseEv.Direction, + Pipeline: baseEv.Pipeline, + Change: changeRemoved, + }) + } + delete(currentEvents, key) + } + for _, currentEv := range currentEvents { + comp.Events = append(comp.Events, eventChange{ + Topic: currentEv.Topic, + Direction: 
currentEv.Direction, + Pipeline: currentEv.Pipeline, + Change: changeAdded, + }) + } + sort.Slice(comp.Events, func(i, j int) bool { + return comp.Events[i].Topic < comp.Events[j].Topic + }) + + return comp +} + +// printContract prints a human-readable contract summary. +func printContract(c *Contract) { + fmt.Printf("Contract (hash: %s, generated: %s)\n", c.ConfigHash, c.GeneratedAt) + fmt.Printf("\nEndpoints (%d):\n", len(c.Endpoints)) + for _, ep := range c.Endpoints { + auth := "" + if ep.AuthRequired { + auth = " [auth]" + } + fmt.Printf(" %-7s %s%s (pipeline: %s)\n", ep.Method, ep.Path, auth, ep.Pipeline) + } + + fmt.Printf("\nModules (%d):\n", len(c.Modules)) + for _, m := range c.Modules { + stateful := "" + if m.Stateful { + stateful = " [stateful]" + } + fmt.Printf(" %s (%s)%s\n", m.Name, m.Type, stateful) + } + + if len(c.Steps) > 0 { + fmt.Printf("\nStep types used (%d):\n", len(c.Steps)) + for _, s := range c.Steps { + fmt.Printf(" %s\n", s) + } + } + + if len(c.Events) > 0 { + fmt.Printf("\nEvents (%d):\n", len(c.Events)) + for _, e := range c.Events { + fmt.Printf(" %s %s (pipeline: %s)\n", e.Direction, e.Topic, e.Pipeline) + } + } +} + +// printContractComparison prints a human-readable contract comparison. 
+func printContractComparison(comp *contractComparison) { + fmt.Printf("Contract Comparison\n\n") + + if len(comp.Endpoints) > 0 { + fmt.Println("Endpoints:") + for _, ec := range comp.Endpoints { + var sym string + switch ec.Change { + case changeAdded: + sym = "+" + case changeRemoved: + sym = "-" + case changeChanged: + sym = "~" + default: + sym = "=" + } + breaking := "" + if ec.IsBreaking { + breaking = " BREAKING" + } + if ec.Detail != "" { + fmt.Printf(" %s %-7s %s%s\n -> %s\n", sym, ec.Method, ec.Path, breaking, ec.Detail) + } else { + fmt.Printf(" %s %-7s %s [%s]%s\n", sym, ec.Method, ec.Path, ec.Change, breaking) + } + } + } + + if len(comp.Modules) > 0 { + fmt.Println("\nModules:") + for _, mc := range comp.Modules { + var sym string + switch mc.Change { + case changeAdded: + sym = "+" + case changeRemoved: + sym = "-" + default: + sym = "=" + } + fmt.Printf(" %s %s (%s) [%s]\n", sym, mc.Name, mc.Type, mc.Change) + } + } + + if len(comp.Events) > 0 { + fmt.Println("\nEvents:") + for _, ec := range comp.Events { + var sym string + switch ec.Change { + case changeAdded: + sym = "+" + case changeRemoved: + sym = "-" + default: + sym = "=" + } + fmt.Printf(" %s %s %s [%s]\n", sym, ec.Direction, ec.Topic, ec.Change) + } + } + + if comp.BreakingCount > 0 { + fmt.Printf("\nBreaking Changes: %d\n", comp.BreakingCount) + n := 1 + for _, ec := range comp.Endpoints { + if ec.IsBreaking { + fmt.Printf(" %d. 
%s %s: %s\n", n, ec.Method, ec.Path, ec.Detail) + n++ + } + } + } else { + fmt.Println("\nNo breaking changes detected.") + } +} diff --git a/cmd/wfctl/contract_test.go b/cmd/wfctl/contract_test.go new file mode 100644 index 00000000..61e81706 --- /dev/null +++ b/cmd/wfctl/contract_test.go @@ -0,0 +1,562 @@ +package main + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/GoCodeAlone/workflow/config" +) + +func TestRunContractMissingSubcommand(t *testing.T) { + err := runContract([]string{}) + if err == nil { + t.Fatal("expected error when no subcommand given") + } +} + +func TestRunContractUnknownSubcommand(t *testing.T) { + err := runContract([]string{"unknown"}) + if err == nil { + t.Fatal("expected error for unknown subcommand") + } +} + +func TestRunContractTestMissingConfig(t *testing.T) { + err := runContractTest([]string{}) + if err == nil { + t.Fatal("expected error when no config given") + } +} + +const contractTestConfig = ` +pipelines: + list-items: + trigger: + type: http + config: + path: /api/items + method: GET + steps: + - name: parse + type: step.request_parse + - name: respond + type: step.json_response + config: + status: 200 + + create-item: + trigger: + type: http + config: + path: /api/items + method: POST + steps: + - name: auth + type: step.auth_required + - name: validate + type: step.validate + - name: respond + type: step.json_response + config: + status: 201 + + process-event: + trigger: + type: event + config: + topic: item.created + steps: + - name: log + type: step.log + - name: publish + type: step.publish + config: + topic: item.processed + +modules: + - name: db + type: storage.sqlite + config: + dbPath: data/test.db +` + +func TestRunContractTestGenerate(t *testing.T) { + dir := t.TempDir() + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(contractTestConfig), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + if err := 
runContractTest([]string{configPath}); err != nil { + t.Fatalf("expected contract generation to succeed, got: %v", err) + } +} + +func TestRunContractTestGenerateJSON(t *testing.T) { + dir := t.TempDir() + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(contractTestConfig), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + if err := runContractTest([]string{"-format", "json", configPath}); err != nil { + t.Fatalf("expected json output to work, got: %v", err) + } +} + +func TestRunContractTestWriteOutput(t *testing.T) { + dir := t.TempDir() + configPath := filepath.Join(dir, "workflow.yaml") + outputPath := filepath.Join(dir, "contract.json") + + if err := os.WriteFile(configPath, []byte(contractTestConfig), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + if err := runContractTest([]string{"-output", outputPath, configPath}); err != nil { + t.Fatalf("expected contract generation to succeed, got: %v", err) + } + + data, err := os.ReadFile(outputPath) + if err != nil { + t.Fatalf("failed to read output file: %v", err) + } + + var contract Contract + if err := json.Unmarshal(data, &contract); err != nil { + t.Fatalf("output is not valid JSON: %v", err) + } + if contract.ConfigHash == "" { + t.Error("expected ConfigHash to be set") + } + if contract.GeneratedAt == "" { + t.Error("expected GeneratedAt to be set") + } +} + +func TestGenerateContractEndpoints(t *testing.T) { + cfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + "create-item": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "POST", + }, + }, + "steps": []any{ + map[string]any{ + "name": "auth", + "type": "step.auth_required", + }, + }, + }, + }, 
+ } + + contract := generateContract(cfg) + + if len(contract.Endpoints) != 2 { + t.Fatalf("expected 2 endpoints, got %d", len(contract.Endpoints)) + } + + // Find GET endpoint + var getEP, postEP *EndpointContract + for i := range contract.Endpoints { + if contract.Endpoints[i].Method == "GET" { + getEP = &contract.Endpoints[i] + } + if contract.Endpoints[i].Method == "POST" { + postEP = &contract.Endpoints[i] + } + } + + if getEP == nil { + t.Error("expected GET endpoint") + } else if getEP.AuthRequired { + t.Error("expected GET endpoint to not require auth") + } + + if postEP == nil { + t.Error("expected POST endpoint") + } else if !postEP.AuthRequired { + t.Error("expected POST endpoint to require auth") + } +} + +func TestGenerateContractModules(t *testing.T) { + cfg := &config.WorkflowConfig{ + Modules: []config.ModuleConfig{ + {Name: "db", Type: "storage.sqlite"}, + {Name: "cache", Type: "cache.redis"}, + }, + } + + contract := generateContract(cfg) + + if len(contract.Modules) != 2 { + t.Fatalf("expected 2 modules, got %d", len(contract.Modules)) + } + + // Find sqlite module and check stateful + var sqliteMod *ModuleContract + for i := range contract.Modules { + if contract.Modules[i].Type == "storage.sqlite" { + sqliteMod = &contract.Modules[i] + } + } + if sqliteMod == nil { + t.Fatal("expected storage.sqlite module") + } + if !sqliteMod.Stateful { + t.Error("expected storage.sqlite to be stateful") + } +} + +func TestGenerateContractEvents(t *testing.T) { + cfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "publisher": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/publish", + "method": "POST", + }, + }, + "steps": []any{ + map[string]any{ + "type": "step.publish", + "config": map[string]any{ + "topic": "order.created", + }, + }, + }, + }, + "subscriber": map[string]any{ + "trigger": map[string]any{ + "type": "event", + "config": map[string]any{ + "topic": "order.created", + }, + }, + 
"steps": []any{}, + }, + }, + } + + contract := generateContract(cfg) + + if len(contract.Events) != 2 { + t.Fatalf("expected 2 events, got %d; events: %v", len(contract.Events), contract.Events) + } + + foundPublish := false + foundSubscribe := false + for _, e := range contract.Events { + if e.Topic == "order.created" && e.Direction == "publish" { + foundPublish = true + } + if e.Topic == "order.created" && e.Direction == "subscribe" { + foundSubscribe = true + } + } + if !foundPublish { + t.Error("expected publish event for order.created") + } + if !foundSubscribe { + t.Error("expected subscribe event for order.created") + } +} + +func TestCompareContractsNoChanges(t *testing.T) { + cfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + }, + Modules: []config.ModuleConfig{ + {Name: "db", Type: "storage.sqlite"}, + }, + } + + base := generateContract(cfg) + current := generateContract(cfg) + + comp := compareContracts(base, current) + if comp.BreakingCount != 0 { + t.Errorf("expected no breaking changes, got %d", comp.BreakingCount) + } +} + +func TestCompareContractsEndpointAdded(t *testing.T) { + baseCfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + }, + } + + currentCfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + "create-item": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "POST", + }, + }, + "steps": 
[]any{}, + }, + }, + } + + base := generateContract(baseCfg) + current := generateContract(currentCfg) + comp := compareContracts(base, current) + + // Adding an endpoint is not breaking + if comp.BreakingCount != 0 { + t.Errorf("expected 0 breaking changes for added endpoint, got %d", comp.BreakingCount) + } + + // Find the added endpoint + found := false + for _, ec := range comp.Endpoints { + if ec.Method == "POST" && ec.Path == "/api/items" && ec.Change == changeAdded { + found = true + } + } + if !found { + t.Error("expected POST /api/items to appear as ADDED") + } +} + +func TestCompareContractsEndpointRemoved(t *testing.T) { + baseCfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + "legacy": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/legacy", + "method": "GET", + }, + }, + "steps": []any{}, + }, + }, + } + + currentCfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + }, + } + + base := generateContract(baseCfg) + current := generateContract(currentCfg) + comp := compareContracts(base, current) + + // Removing an endpoint is breaking + if comp.BreakingCount == 0 { + t.Error("expected breaking change for removed endpoint") + } + + found := false + for _, ec := range comp.Endpoints { + if ec.Path == "/api/legacy" && ec.Change == changeRemoved && ec.IsBreaking { + found = true + } + } + if !found { + t.Error("expected /api/legacy to appear as REMOVED and breaking") + } +} + +func TestCompareContractsAuthAdded(t *testing.T) { + baseCfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ 
+ "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{}, + }, + }, + } + + currentCfg := &config.WorkflowConfig{ + Pipelines: map[string]any{ + "get-items": map[string]any{ + "trigger": map[string]any{ + "type": "http", + "config": map[string]any{ + "path": "/api/items", + "method": "GET", + }, + }, + "steps": []any{ + map[string]any{"type": "step.auth_required"}, + }, + }, + }, + } + + base := generateContract(baseCfg) + current := generateContract(currentCfg) + comp := compareContracts(base, current) + + // Adding auth to a public endpoint is breaking + if comp.BreakingCount == 0 { + t.Error("expected breaking change for auth added to public endpoint") + } + + found := false + for _, ec := range comp.Endpoints { + if ec.Path == "/api/items" && ec.Change == changeChanged && ec.IsBreaking { + if strings.Contains(ec.Detail, "auth") { + found = true + } + } + } + if !found { + t.Error("expected GET /api/items to appear as CHANGED with auth breaking change") + } +} + +func TestRunContractTestWithBaseline(t *testing.T) { + dir := t.TempDir() + + // Write config + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(contractTestConfig), 0644); err != nil { + t.Fatalf("failed to write config: %v", err) + } + + // Generate baseline + baselinePath := filepath.Join(dir, "baseline.json") + if err := runContractTest([]string{"-output", baselinePath, configPath}); err != nil { + t.Fatalf("failed to generate baseline: %v", err) + } + + // Compare against itself (no changes) + if err := runContractTest([]string{"-baseline", baselinePath, configPath}); err != nil { + t.Fatalf("expected no breaking changes comparing to same config: %v", err) + } +} + +func TestRunContractTestBreakingChange(t *testing.T) { + dir := t.TempDir() + + // Original config with public endpoint + originalConfig := ` +pipelines: + list-items: + trigger: + type: http + config: + 
path: /api/items + method: GET + steps: [] +` + // Updated config removing the endpoint + updatedConfig := ` +pipelines: + different-endpoint: + trigger: + type: http + config: + path: /api/other + method: GET + steps: [] +` + originalPath := filepath.Join(dir, "original.yaml") + updatedPath := filepath.Join(dir, "updated.yaml") + baselinePath := filepath.Join(dir, "baseline.json") + + if err := os.WriteFile(originalPath, []byte(originalConfig), 0644); err != nil { + t.Fatalf("failed to write original config: %v", err) + } + if err := os.WriteFile(updatedPath, []byte(updatedConfig), 0644); err != nil { + t.Fatalf("failed to write updated config: %v", err) + } + + // Generate baseline from original + if err := runContractTest([]string{"-output", baselinePath, originalPath}); err != nil { + t.Fatalf("failed to generate baseline: %v", err) + } + + // Compare updated against baseline - should detect breaking change + err := runContractTest([]string{"-baseline", baselinePath, updatedPath}) + if err == nil { + t.Fatal("expected breaking change error") + } + if !strings.Contains(err.Error(), "breaking") { + t.Errorf("expected 'breaking' in error message, got: %v", err) + } +} diff --git a/cmd/wfctl/main.go b/cmd/wfctl/main.go index 563e497f..911a3adc 100644 --- a/cmd/wfctl/main.go +++ b/cmd/wfctl/main.go @@ -23,6 +23,9 @@ var commands = map[string]func([]string) error{ "deploy": runDeploy, "api": runAPI, "diff": runDiff, + "template": runTemplate, + "contract": runContract, + "compat": runCompat, } func usage() { @@ -47,6 +50,9 @@ Commands: deploy Deploy the workflow application (docker, kubernetes, cloud) api API tooling (extract: generate OpenAPI 3.0 spec from config) diff Compare two workflow config files and show what changed + template Template management (validate: check templates against known types) + contract Contract testing (test: generate/compare API contracts) + compat Compatibility checking (check: verify config works with current engine) Run 'wfctl -h' for 
command-specific help. `, version) diff --git a/cmd/wfctl/template_validate.go b/cmd/wfctl/template_validate.go new file mode 100644 index 00000000..ef7175e3 --- /dev/null +++ b/cmd/wfctl/template_validate.go @@ -0,0 +1,531 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "io/fs" + "os" + "regexp" + "strings" + "text/template" + + "github.com/GoCodeAlone/workflow/config" + "gopkg.in/yaml.v3" +) + +// templateValidationResult holds the outcome of validating a single template. +type templateValidationResult struct { + Name string + ModuleCount int + ModuleValid int + StepCount int + StepValid int + DepCount int + DepValid int + TriggerCount int + TriggerValid int + Warnings []string + Errors []string +} + +// pass returns true if there are no errors. +func (r *templateValidationResult) pass() bool { + return len(r.Errors) == 0 +} + +// templateValidationSummary holds overall validation output. +type templateValidationSummary struct { + Results []templateValidationResult `json:"results"` + Total int `json:"total"` + Passed int `json:"passed"` + Failed int `json:"failed"` + Warnings int `json:"warnings"` +} + +// templateVarRegex matches Go template variables like {{.VarName}}. +var templateVarRegex = regexp.MustCompile(`\{\{\.([A-Za-z][A-Za-z0-9_]*)\}\}`) + +// runTemplate dispatches template subcommands. +func runTemplate(args []string) error { + if len(args) < 1 { + return templateUsage() + } + switch args[0] { + case "validate": + return runTemplateValidate(args[1:]) + default: + return templateUsage() + } +} + +func templateUsage() error { + fmt.Fprintf(os.Stderr, `Usage: wfctl template [options] + +Subcommands: + validate Validate project templates or a specific config file + +Run 'wfctl template validate -h' for details. +`) + return fmt.Errorf("template subcommand is required") +} + +// runTemplateValidate validates project templates or an explicit config file. 
+func runTemplateValidate(args []string) error { + fs2 := flag.NewFlagSet("template validate", flag.ContinueOnError) + templateName := fs2.String("template", "", "Validate a specific template (default: all)") + configFile := fs2.String("config", "", "Validate a specific config file instead of templates") + strict := fs2.Bool("strict", false, "Fail on warnings (not just errors)") + format := fs2.String("format", "text", "Output format: text or json") + fs2.Usage = func() { + fmt.Fprintf(fs2.Output(), `Usage: wfctl template validate [options] + +Validate project templates against the engine's known module and step types. + +Options: +`) + fs2.PrintDefaults() + } + if err := fs2.Parse(args); err != nil { + return err + } + + knownModules := KnownModuleTypes() + knownSteps := KnownStepTypes() + knownTriggers := KnownTriggerTypes() + + var results []templateValidationResult + + if *configFile != "" { + // Validate a specific config file + cfg, err := config.LoadFromFile(*configFile) + if err != nil { + return fmt.Errorf("failed to load config %s: %w", *configFile, err) + } + result := validateWorkflowConfig(*configFile, cfg, knownModules, knownSteps, knownTriggers) + results = append(results, result) + } else { + // Validate project templates + var templatesToCheck []string + if *templateName != "" { + templatesToCheck = []string{*templateName} + } else { + templatesToCheck = []string{"api-service", "event-processor", "full-stack", "plugin", "ui-plugin"} + } + + for _, tmplName := range templatesToCheck { + result := validateProjectTemplate(tmplName, knownModules, knownSteps, knownTriggers) + results = append(results, result) + } + } + + // Build summary + summary := templateValidationSummary{ + Results: results, + Total: len(results), + } + totalWarnings := 0 + for _, r := range results { + if r.pass() { + summary.Passed++ + } else { + summary.Failed++ + } + totalWarnings += len(r.Warnings) + } + summary.Warnings = totalWarnings + + // Output + switch 
strings.ToLower(*format) { + case "json": + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(summary) + default: + printTemplateValidationResults(results, summary) + } + + if summary.Failed > 0 { + return fmt.Errorf("%d/%d templates failed validation", summary.Failed, summary.Total) + } + if *strict && totalWarnings > 0 { + return fmt.Errorf("%d warning(s) in strict mode", totalWarnings) + } + return nil +} + +// validateProjectTemplate validates a named project template by rendering its workflow.yaml.tmpl +// with placeholder values and checking the resulting config. +func validateProjectTemplate(name string, knownModules map[string]ModuleTypeInfo, knownSteps map[string]StepTypeInfo, knownTriggers map[string]bool) templateValidationResult { + tmplPath := fmt.Sprintf("templates/%s/workflow.yaml.tmpl", name) + + data, err := templateFS.ReadFile(tmplPath) + if err != nil { + // Template may not have a workflow.yaml (e.g., plugin template) + return templateValidationResult{ + Name: name, + Warnings: []string{"no workflow.yaml.tmpl found (skipping)"}, + } + } + + // Check template variable completeness + warnings := checkTemplateVars(string(data), name) + + // Render with placeholder values + rendered, err := renderTemplateWithPlaceholders(string(data), name) + if err != nil { + return templateValidationResult{ + Name: name, + Errors: []string{fmt.Sprintf("failed to render template: %v", err)}, + } + } + + // Parse the rendered YAML into a raw map to handle flexible trigger formats + // (templates may use sequence format for triggers which WorkflowConfig doesn't support) + rawCfg, err := parseRawYAML(rendered) + if err != nil { + return templateValidationResult{ + Name: name, + Errors: []string{fmt.Sprintf("failed to parse rendered template: %v", err)}, + } + } + + cfg := rawConfigToWorkflowConfig(rawCfg) + result := validateWorkflowConfig(name, cfg, knownModules, knownSteps, knownTriggers) + result.Warnings = append(warnings, 
result.Warnings...) + return result +} + +// parseRawYAML parses a YAML string into a generic map. +func parseRawYAML(content string) (map[string]any, error) { + var raw map[string]any + if err := yaml.Unmarshal([]byte(content), &raw); err != nil { + return nil, err + } + return raw, nil +} + +// rawConfigToWorkflowConfig converts a raw YAML map into a WorkflowConfig, +// handling both map and sequence formats for triggers. +func rawConfigToWorkflowConfig(raw map[string]any) *config.WorkflowConfig { + cfg := &config.WorkflowConfig{ + Modules: []config.ModuleConfig{}, + Workflows: make(map[string]any), + Triggers: make(map[string]any), + Pipelines: make(map[string]any), + } + + // Extract modules + if modulesRaw, ok := raw["modules"]; ok { + if modulesList, ok := modulesRaw.([]any); ok { + for _, modRaw := range modulesList { + if modMap, ok := modRaw.(map[string]any); ok { + mod := config.ModuleConfig{} + if n, ok := modMap["name"].(string); ok { + mod.Name = n + } + if t, ok := modMap["type"].(string); ok { + mod.Type = t + } + if cfg2, ok := modMap["config"].(map[string]any); ok { + mod.Config = cfg2 + } + if deps, ok := modMap["dependsOn"].([]any); ok { + for _, d := range deps { + if s, ok := d.(string); ok { + mod.DependsOn = append(mod.DependsOn, s) + } + } + } + cfg.Modules = append(cfg.Modules, mod) + } + } + } + } + + // Extract workflows + if workflowsRaw, ok := raw["workflows"]; ok { + if workflowsMap, ok := workflowsRaw.(map[string]any); ok { + cfg.Workflows = workflowsMap + } + } + + // Extract triggers — support both map and sequence formats + if triggersRaw, ok := raw["triggers"]; ok { + switch t := triggersRaw.(type) { + case map[string]any: + cfg.Triggers = t + case []any: + // Convert sequence format to map using name as key + for _, trigRaw := range t { + if trigMap, ok := trigRaw.(map[string]any); ok { + trigName, _ := trigMap["name"].(string) + if trigName == "" { + trigType, _ := trigMap["type"].(string) + trigName = trigType + } + if trigName 
!= "" { + cfg.Triggers[trigName] = trigMap + } + } + } + } + } + + // Extract pipelines + if pipelinesRaw, ok := raw["pipelines"]; ok { + if pipelinesMap, ok := pipelinesRaw.(map[string]any); ok { + cfg.Pipelines = pipelinesMap + } + } + + return cfg +} + +// renderTemplateWithPlaceholders renders a Go template with sample data. +func renderTemplateWithPlaceholders(tmplContent, name string) (string, error) { + data := map[string]string{ + "Name": "sample", + "NameCamel": "Sample", + "Author": "sample-author", + "Description": "sample description", + } + + tmpl, err := template.New(name).Parse(tmplContent) + if err != nil { + return "", err + } + + var sb strings.Builder + if err := tmpl.Execute(&sb, data); err != nil { + return "", err + } + return sb.String(), nil +} + +// checkTemplateVars checks that all {{.Variable}} placeholders have corresponding data fields. +func checkTemplateVars(content, templateName string) []string { + knownVars := map[string]bool{ + "Name": true, + "NameCamel": true, + "Author": true, + "Description": true, + } + + var warnings []string + matches := templateVarRegex.FindAllStringSubmatch(content, -1) + seen := make(map[string]bool) + for _, m := range matches { + if len(m) < 2 { + continue + } + varName := m[1] + if seen[varName] { + continue + } + seen[varName] = true + if !knownVars[varName] { + warnings = append(warnings, fmt.Sprintf("template variable {{.%s}} has no corresponding data field", varName)) + } + } + return warnings +} + +// validateWorkflowConfig checks a workflow config against known types. +func validateWorkflowConfig(name string, cfg *config.WorkflowConfig, knownModules map[string]ModuleTypeInfo, knownSteps map[string]StepTypeInfo, knownTriggers map[string]bool) templateValidationResult { + result := templateValidationResult{Name: name} + + // Build module name set for dependency checking + moduleNames := make(map[string]bool) + for _, mod := range cfg.Modules { + moduleNames[mod.Name] = true + } + + // 1. 
Validate module types + result.ModuleCount = len(cfg.Modules) + for _, mod := range cfg.Modules { + if mod.Type == "" { + result.Errors = append(result.Errors, fmt.Sprintf("module %q has no type", mod.Name)) + continue + } + info, ok := knownModules[mod.Type] + if !ok { + result.Errors = append(result.Errors, fmt.Sprintf("module %q uses unknown type %q", mod.Name, mod.Type)) + } else { + result.ModuleValid++ + // 5. Warn on unknown config fields + if mod.Config != nil && len(info.ConfigKeys) > 0 { + knownKeys := make(map[string]bool) + for _, k := range info.ConfigKeys { + knownKeys[k] = true + } + for key := range mod.Config { + if !knownKeys[key] { + result.Warnings = append(result.Warnings, fmt.Sprintf("module %q (%s) config field %q not in known fields", mod.Name, mod.Type, key)) + } + } + } + } + + // 3. Validate dependencies + for _, dep := range mod.DependsOn { + result.DepCount++ + if !moduleNames[dep] { + result.Errors = append(result.Errors, fmt.Sprintf("module %q depends on unknown module %q", mod.Name, dep)) + } else { + result.DepValid++ + } + } + } + + // 2. 
Validate step types in pipelines + for pipelineName, pipelineRaw := range cfg.Pipelines { + pipelineMap, ok := pipelineRaw.(map[string]any) + if !ok { + continue + } + stepsRaw, _ := pipelineMap["steps"].([]any) + for _, stepRaw := range stepsRaw { + stepMap, ok := stepRaw.(map[string]any) + if !ok { + continue + } + result.StepCount++ + stepType, _ := stepMap["type"].(string) + if stepType == "" { + result.Errors = append(result.Errors, fmt.Sprintf("pipeline %q has a step with no type", pipelineName)) + continue + } + stepInfo, ok := knownSteps[stepType] + if !ok { + result.Errors = append(result.Errors, fmt.Sprintf("pipeline %q step uses unknown type %q", pipelineName, stepType)) + } else { + result.StepValid++ + // Config key warnings + if stepCfg, ok := stepMap["config"].(map[string]any); ok && len(stepInfo.ConfigKeys) > 0 { + knownKeys := make(map[string]bool) + for _, k := range stepInfo.ConfigKeys { + knownKeys[k] = true + } + for key := range stepCfg { + if !knownKeys[key] { + result.Warnings = append(result.Warnings, fmt.Sprintf("pipeline %q step %q (%s) config field %q not in known fields", pipelineName, stepMap["name"], stepType, key)) + } + } + } + } + } + + // 4. 
Validate trigger types + if triggerRaw, ok := pipelineMap["trigger"]; ok { + if triggerMap, ok := triggerRaw.(map[string]any); ok { + triggerType, _ := triggerMap["type"].(string) + if triggerType != "" { + result.TriggerCount++ + if !knownTriggers[triggerType] { + result.Errors = append(result.Errors, fmt.Sprintf("pipeline %q uses unknown trigger type %q", pipelineName, triggerType)) + } else { + result.TriggerValid++ + } + } + } + } + } + + // Also validate triggers in the top-level triggers map + for triggerName, triggerRaw := range cfg.Triggers { + triggerMap, ok := triggerRaw.(map[string]any) + if !ok { + continue + } + triggerType, _ := triggerMap["type"].(string) + if triggerType == "" { + // Some triggers embed their type as the key name + triggerType = triggerName + } + if triggerType != "" && !strings.HasPrefix(triggerType, "#") { + result.TriggerCount++ + if knownTriggers[triggerType] || knownTriggers[triggerName] { + result.TriggerValid++ + } else { + // Not an error since triggers can be dynamic; warn instead + result.Warnings = append(result.Warnings, fmt.Sprintf("trigger %q may use unknown type %q", triggerName, triggerType)) + result.TriggerValid++ // still counted valid to avoid false errors + } + } + } + + return result +} + +// printTemplateValidationResults prints results in human-readable text format. 
+func printTemplateValidationResults(results []templateValidationResult, summary templateValidationSummary) { + for _, r := range results { + fmt.Printf("Validating template: %s\n", r.Name) + + if r.ModuleCount > 0 { + if r.ModuleValid == r.ModuleCount { + fmt.Printf(" + Module types: %d/%d valid\n", r.ModuleValid, r.ModuleCount) + } else { + fmt.Printf(" x Module types: %d/%d valid\n", r.ModuleValid, r.ModuleCount) + } + } + + if r.StepCount > 0 { + if r.StepValid == r.StepCount { + fmt.Printf(" + Step types: %d/%d valid\n", r.StepValid, r.StepCount) + } else { + fmt.Printf(" x Step types: %d/%d valid\n", r.StepValid, r.StepCount) + } + } + + if r.DepCount > 0 { + if r.DepValid == r.DepCount { + fmt.Printf(" + Dependencies: all resolved\n") + } else { + fmt.Printf(" x Dependencies: %d/%d resolved\n", r.DepValid, r.DepCount) + } + } + + if r.TriggerCount > 0 { + if r.TriggerValid == r.TriggerCount { + fmt.Printf(" + Triggers: %d/%d valid\n", r.TriggerValid, r.TriggerCount) + } else { + fmt.Printf(" x Triggers: %d/%d valid\n", r.TriggerValid, r.TriggerCount) + } + } + + for _, w := range r.Warnings { + fmt.Printf(" ! Config warning: %s\n", w) + } + for _, e := range r.Errors { + fmt.Printf(" ERROR: %s\n", e) + } + + if r.pass() { + if len(r.Warnings) > 0 { + fmt.Printf(" Result: PASS (%d warning(s))\n", len(r.Warnings)) + } else { + fmt.Printf(" Result: PASS\n") + } + } else { + fmt.Printf(" Result: FAIL (%d error(s))\n", len(r.Errors)) + } + fmt.Println() + } + + totalWarn := 0 + for _, r := range results { + totalWarn += len(r.Warnings) + } + + if totalWarn > 0 { + fmt.Printf("Summary: %d/%d templates valid (%d warning(s))\n", summary.Passed, summary.Total, totalWarn) + } else { + fmt.Printf("Summary: %d/%d templates valid\n", summary.Passed, summary.Total) + } +} + +// templateFSReader allows reading from the embedded templateFS for validation. +// It wraps around the existing templateFS embed.FS. 
+var _ fs.FS = templateFS diff --git a/cmd/wfctl/template_validate_test.go b/cmd/wfctl/template_validate_test.go new file mode 100644 index 00000000..59bfd0f5 --- /dev/null +++ b/cmd/wfctl/template_validate_test.go @@ -0,0 +1,237 @@ +package main + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/GoCodeAlone/workflow/config" +) + +func TestRunTemplateValidateAllTemplates(t *testing.T) { + err := runTemplateValidate([]string{}) + if err != nil { + t.Fatalf("expected all templates to pass, got error: %v", err) + } +} + +func TestRunTemplateValidateSpecificTemplate(t *testing.T) { + err := runTemplateValidate([]string{"-template", "api-service"}) + if err != nil { + t.Fatalf("expected api-service template to pass, got error: %v", err) + } +} + +func TestRunTemplateValidateEventProcessor(t *testing.T) { + err := runTemplateValidate([]string{"-template", "event-processor"}) + if err != nil { + t.Fatalf("expected event-processor template to pass, got error: %v", err) + } +} + +func TestRunTemplateValidateFullStack(t *testing.T) { + err := runTemplateValidate([]string{"-template", "full-stack"}) + if err != nil { + t.Fatalf("expected full-stack template to pass, got error: %v", err) + } +} + +func TestRunTemplateValidateJsonOutput(t *testing.T) { + err := runTemplateValidate([]string{"-format", "json", "-template", "api-service"}) + if err != nil { + t.Fatalf("expected json output to work, got: %v", err) + } +} + +func TestRunTemplateValidateConfigFile(t *testing.T) { + dir := t.TempDir() + configContent := ` +modules: + - name: server + type: http.server + config: + address: ":8080" + - name: router + type: http.router + dependsOn: + - server +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write test config: %v", err) + } + + err := runTemplateValidate([]string{"-config", configPath}) + if err != nil { + t.Fatalf("expected valid config to 
pass, got: %v", err) + } +} + +func TestRunTemplateValidateUnknownModuleType(t *testing.T) { + dir := t.TempDir() + configContent := ` +modules: + - name: my-thing + type: unknown.module.type +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write test config: %v", err) + } + + err := runTemplateValidate([]string{"-config", configPath}) + if err == nil { + t.Fatal("expected error for unknown module type") + } + if !strings.Contains(err.Error(), "failed") { + t.Errorf("unexpected error: %v", err) + } +} + +func TestRunTemplateValidateMissingDependency(t *testing.T) { + dir := t.TempDir() + configContent := ` +modules: + - name: router + type: http.router + dependsOn: + - nonexistent-server +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write test config: %v", err) + } + + err := runTemplateValidate([]string{"-config", configPath}) + if err == nil { + t.Fatal("expected error for missing dependency") + } +} + +func TestRunTemplateValidateUnknownStepType(t *testing.T) { + dir := t.TempDir() + configContent := ` +pipelines: + my-pipeline: + trigger: + type: http + config: + path: /test + method: GET + steps: + - name: bad-step + type: step.nonexistent_step_type +` + configPath := filepath.Join(dir, "workflow.yaml") + if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil { + t.Fatalf("failed to write test config: %v", err) + } + + err := runTemplateValidate([]string{"-config", configPath}) + if err == nil { + t.Fatal("expected error for unknown step type") + } +} + +func TestRunTemplateValidateStrictMode(t *testing.T) { + dir := t.TempDir() + // Valid module config but with an unknown config field (triggers warning) + configContent := ` +modules: + - name: db + type: storage.sqlite + config: + dbPath: data/test.db + journalMode: WAL +` 
	// Write the config under a temp dir so runTemplateValidate reads a real file.
	configPath := filepath.Join(dir, "workflow.yaml")
	if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil {
		t.Fatalf("failed to write test config: %v", err)
	}

	// Without strict: should pass (warning only)
	if err := runTemplateValidate([]string{"-config", configPath}); err != nil {
		t.Fatalf("expected pass without strict, got: %v", err)
	}

	// With strict: should fail on warning
	if err := runTemplateValidate([]string{"-strict", "-config", configPath}); err == nil {
		t.Fatal("expected failure in strict mode due to unknown config field")
	}
}

// TestTemplateVarCheck verifies that checkTemplateVars flags a template
// variable ({{.Unknown}}) that is not in the known-variable set.
// NOTE(review): the backtick raw string keeps `\n` as two literal characters,
// not a newline — presumably checkTemplateVars scans {{.Var}} tokens
// regardless of line structure; confirm that is intended.
func TestTemplateVarCheck(t *testing.T) {
	content := `name: {{.Name}}-service\ntype: {{.Unknown}}`
	warnings := checkTemplateVars(content, "test")
	found := false
	for _, w := range warnings {
		if strings.Contains(w, "Unknown") {
			found = true
			break
		}
	}
	if !found {
		t.Error("expected warning for unknown template variable {{.Unknown}}")
	}
}

// TestTemplateVarCheckKnownVars verifies that the documented template
// variables (Name, Author, Description, NameCamel) produce no warnings.
func TestTemplateVarCheckKnownVars(t *testing.T) {
	content := `name: {{.Name}}-service\nauthor: {{.Author}}\ndesc: {{.Description}}\ncamel: {{.NameCamel}}`
	warnings := checkTemplateVars(content, "test")
	if len(warnings) > 0 {
		t.Errorf("expected no warnings for known vars, got: %v", warnings)
	}
}

// TestValidateWorkflowConfigEmpty: an empty WorkflowConfig must validate
// cleanly and report zero modules.
func TestValidateWorkflowConfigEmpty(t *testing.T) {
	cfg := &config.WorkflowConfig{}
	knownModules := KnownModuleTypes()
	knownSteps := KnownStepTypes()
	knownTriggers := KnownTriggerTypes()

	result := validateWorkflowConfig("empty", cfg, knownModules, knownSteps, knownTriggers)
	if !result.pass() {
		t.Errorf("expected empty config to pass, got errors: %v", result.Errors)
	}
	if result.ModuleCount != 0 {
		t.Errorf("expected 0 modules, got %d", result.ModuleCount)
	}
}

// TestValidateWorkflowConfigValidModules: two registry-known module types,
// with a dependsOn edge that resolves within the config, must all count as valid.
func TestValidateWorkflowConfigValidModules(t *testing.T) {
	cfg := &config.WorkflowConfig{
		Modules: []config.ModuleConfig{
			{Name: "server", Type: "http.server"},
			{Name: "router", Type: "http.router", DependsOn: []string{"server"}},
		},
	}
	knownModules := KnownModuleTypes()
	knownSteps := KnownStepTypes()
	knownTriggers := KnownTriggerTypes()

	result := validateWorkflowConfig("test", cfg, knownModules, knownSteps, knownTriggers)
	if !result.pass() {
		t.Errorf("expected valid modules to pass, got errors: %v", result.Errors)
	}
	if result.ModuleCount != 2 {
		t.Errorf("expected 2 modules, got %d", result.ModuleCount)
	}
	if result.ModuleValid != 2 {
		t.Errorf("expected 2 valid modules, got %d", result.ModuleValid)
	}
}

// TestRunTemplateUsageMissingSubcommand: `wfctl template` with no
// subcommand must return a usage error.
func TestRunTemplateUsageMissingSubcommand(t *testing.T) {
	err := runTemplate([]string{})
	if err == nil {
		t.Fatal("expected error when no subcommand given")
	}
}

// TestRunTemplateUnknownSubcommand: an unrecognized subcommand must be rejected.
func TestRunTemplateUnknownSubcommand(t *testing.T) {
	err := runTemplate([]string{"unknown"})
	if err == nil {
		t.Fatal("expected error for unknown subcommand")
	}
}
diff --git a/cmd/wfctl/type_registry.go b/cmd/wfctl/type_registry.go
new file mode 100644
index 00000000..0dc7e441
--- /dev/null
+++ b/cmd/wfctl/type_registry.go
@@ -0,0 +1,663 @@
package main

// This file is a static snapshot of the engine's plugin-provided module and
// step types. It is consumed by `wfctl template validate`, `wfctl contract
// test`, and `wfctl compat check`; it must be kept in sync with the plugin
// packages it was extracted from.

// ModuleTypeInfo holds metadata about a known module type.
type ModuleTypeInfo struct {
	Type       string   // e.g., "storage.sqlite"
	Plugin     string   // e.g., "storage"
	Stateful   bool     // whether this module manages persistent state
	ConfigKeys []string // known config fields
}

// StepTypeInfo holds metadata about a known step type.
type StepTypeInfo struct {
	Type       string   // e.g., "step.json_response"
	Plugin     string   // e.g., "pipelinesteps"
	ConfigKeys []string // known config fields
}

// KnownModuleTypes returns all module types registered in the engine's plugins.
+func KnownModuleTypes() map[string]ModuleTypeInfo { + return map[string]ModuleTypeInfo{ + // storage plugin + "storage.s3": { + Type: "storage.s3", + Plugin: "storage", + Stateful: false, + ConfigKeys: []string{"bucket", "region", "endpoint"}, + }, + "storage.local": { + Type: "storage.local", + Plugin: "storage", + Stateful: false, + ConfigKeys: []string{"rootDir"}, + }, + "storage.gcs": { + Type: "storage.gcs", + Plugin: "storage", + Stateful: false, + ConfigKeys: []string{"bucket", "project", "credentialsFile"}, + }, + "storage.sqlite": { + Type: "storage.sqlite", + Plugin: "storage", + Stateful: true, + ConfigKeys: []string{"dbPath", "maxConnections", "walMode"}, + }, + "database.workflow": { + Type: "database.workflow", + Plugin: "storage", + Stateful: true, + ConfigKeys: []string{"driver", "dsn", "maxOpenConns", "maxIdleConns"}, + }, + "persistence.store": { + Type: "persistence.store", + Plugin: "storage", + Stateful: true, + ConfigKeys: []string{"database"}, + }, + "cache.redis": { + Type: "cache.redis", + Plugin: "storage", + Stateful: false, + ConfigKeys: []string{"address", "password", "db", "prefix", "defaultTTL"}, + }, + + // http plugin + "http.server": { + Type: "http.server", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"address", "readTimeout", "writeTimeout", "idleTimeout"}, + }, + "http.router": { + Type: "http.router", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"prefix", "middleware"}, + }, + "http.handler": { + Type: "http.handler", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"contentType", "routes"}, + }, + "http.proxy": { + Type: "http.proxy", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"target", "stripPrefix"}, + }, + "reverseproxy": { + Type: "reverseproxy", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"target", "stripPrefix"}, + }, + "http.simple_proxy": { + Type: "http.simple_proxy", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"target"}, + }, + 
"static.fileserver": { + Type: "static.fileserver", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"root", "index", "spa"}, + }, + "http.middleware.auth": { + Type: "http.middleware.auth", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"type", "header"}, + }, + "http.middleware.logging": { + Type: "http.middleware.logging", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"format", "level"}, + }, + "http.middleware.ratelimit": { + Type: "http.middleware.ratelimit", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"requestsPerMinute", "burstSize"}, + }, + "http.middleware.cors": { + Type: "http.middleware.cors", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{"allowOrigins", "allowMethods", "allowHeaders", "maxAge"}, + }, + "http.middleware.requestid": { + Type: "http.middleware.requestid", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{}, + }, + "http.middleware.securityheaders": { + Type: "http.middleware.securityheaders", + Plugin: "http", + Stateful: false, + ConfigKeys: []string{}, + }, + + // auth plugin + "auth.jwt": { + Type: "auth.jwt", + Plugin: "auth", + Stateful: false, + ConfigKeys: []string{"secret", "tokenExpiry", "issuer", "seedFile", "responseFormat"}, + }, + "auth.user-store": { + Type: "auth.user-store", + Plugin: "auth", + Stateful: true, + ConfigKeys: []string{}, + }, + "auth.oauth2": { + Type: "auth.oauth2", + Plugin: "auth", + Stateful: false, + ConfigKeys: []string{"providers"}, + }, + + // messaging plugin + "messaging.broker": { + Type: "messaging.broker", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{"maxQueueSize", "deliveryTimeout"}, + }, + "messaging.broker.eventbus": { + Type: "messaging.broker.eventbus", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{}, + }, + "messaging.handler": { + Type: "messaging.handler", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{"topic"}, + }, + "messaging.nats": { + Type: 
"messaging.nats", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{"url"}, + }, + "messaging.kafka": { + Type: "messaging.kafka", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{"brokers", "groupId"}, + }, + "notification.slack": { + Type: "notification.slack", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{"webhookURL", "channel", "username"}, + }, + "webhook.sender": { + Type: "webhook.sender", + Plugin: "messaging", + Stateful: false, + ConfigKeys: []string{"maxRetries"}, + }, + + // statemachine plugin + "statemachine.engine": { + Type: "statemachine.engine", + Plugin: "statemachine", + Stateful: true, + ConfigKeys: []string{"maxInstances", "instanceTTL"}, + }, + "state.tracker": { + Type: "state.tracker", + Plugin: "statemachine", + Stateful: true, + ConfigKeys: []string{"retentionDays"}, + }, + "state.connector": { + Type: "state.connector", + Plugin: "statemachine", + Stateful: false, + ConfigKeys: []string{}, + }, + + // observability plugin + "metrics.collector": { + Type: "metrics.collector", + Plugin: "observability", + Stateful: false, + ConfigKeys: []string{"namespace", "subsystem", "metricsPath", "enabledMetrics"}, + }, + "health.checker": { + Type: "health.checker", + Plugin: "observability", + Stateful: false, + ConfigKeys: []string{"healthPath", "readyPath", "livePath", "checkTimeout", "autoDiscover"}, + }, + "log.collector": { + Type: "log.collector", + Plugin: "observability", + Stateful: false, + ConfigKeys: []string{"logLevel", "outputFormat", "retentionDays"}, + }, + "observability.otel": { + Type: "observability.otel", + Plugin: "observability", + Stateful: false, + ConfigKeys: []string{"endpoint", "serviceName"}, + }, + "openapi.generator": { + Type: "openapi.generator", + Plugin: "observability", + Stateful: false, + ConfigKeys: []string{"title", "version", "description", "servers"}, + }, + "http.middleware.otel": { + Type: "http.middleware.otel", + Plugin: "observability", + Stateful: 
false, + ConfigKeys: []string{"serverName"}, + }, + + // api plugin + "api.query": { + Type: "api.query", + Plugin: "api", + Stateful: false, + ConfigKeys: []string{"delegate", "routes"}, + }, + "api.command": { + Type: "api.command", + Plugin: "api", + Stateful: false, + ConfigKeys: []string{"delegate", "routes"}, + }, + "api.handler": { + Type: "api.handler", + Plugin: "api", + Stateful: false, + ConfigKeys: []string{"resourceName", "workflowType", "workflowEngine", "initialTransition", "seedFile", "sourceResourceName", "stateFilter", "fieldMapping", "transitionMap", "summaryFields"}, + }, + "api.gateway": { + Type: "api.gateway", + Plugin: "api", + Stateful: false, + ConfigKeys: []string{"routes", "globalRateLimit", "cors", "auth"}, + }, + "workflow.registry": { + Type: "workflow.registry", + Plugin: "api", + Stateful: true, + ConfigKeys: []string{"storageBackend"}, + }, + "data.transformer": { + Type: "data.transformer", + Plugin: "api", + Stateful: false, + ConfigKeys: []string{}, + }, + "processing.step": { + Type: "processing.step", + Plugin: "api", + Stateful: false, + ConfigKeys: []string{"componentId", "successTransition", "compensateTransition", "maxRetries", "retryBackoffMs", "timeoutSeconds"}, + }, + + // secrets plugin + "secrets.vault": { + Type: "secrets.vault", + Plugin: "secrets", + Stateful: false, + ConfigKeys: []string{"mode", "address", "token", "mountPath", "namespace"}, + }, + "secrets.aws": { + Type: "secrets.aws", + Plugin: "secrets", + Stateful: false, + ConfigKeys: []string{"region", "accessKeyId", "secretAccessKey"}, + }, + + // ai plugin + "dynamic.component": { + Type: "dynamic.component", + Plugin: "ai", + Stateful: false, + ConfigKeys: []string{"componentId", "source", "provides", "requires"}, + }, + + // featureflags plugin + "featureflag.service": { + Type: "featureflag.service", + Plugin: "featureflags", + Stateful: true, + ConfigKeys: []string{"provider", "cache_ttl", "sse_enabled", "db_path"}, + }, + + // eventstore plugin + 
"eventstore.service": { + Type: "eventstore.service", + Plugin: "eventstore", + Stateful: true, + ConfigKeys: []string{"db_path", "retention_days"}, + }, + + // dlq plugin + "dlq.service": { + Type: "dlq.service", + Plugin: "dlq", + Stateful: true, + ConfigKeys: []string{"max_retries", "retention_days"}, + }, + + // timeline plugin + "timeline.service": { + Type: "timeline.service", + Plugin: "timeline", + Stateful: false, + ConfigKeys: []string{"event_store"}, + }, + + // modularcompat plugin + "scheduler.modular": { + Type: "scheduler.modular", + Plugin: "modularcompat", + Stateful: false, + ConfigKeys: []string{}, + }, + "cache.modular": { + Type: "cache.modular", + Plugin: "modularcompat", + Stateful: false, + ConfigKeys: []string{}, + }, + } +} + +// KnownStepTypes returns all step types registered in the engine's plugins. +func KnownStepTypes() map[string]StepTypeInfo { + return map[string]StepTypeInfo{ + // pipelinesteps plugin + "step.validate": { + Type: "step.validate", + Plugin: "pipelinesteps", + ConfigKeys: []string{"rules", "required", "schema"}, + }, + "step.transform": { + Type: "step.transform", + Plugin: "pipelinesteps", + ConfigKeys: []string{"mapping", "template"}, + }, + "step.conditional": { + Type: "step.conditional", + Plugin: "pipelinesteps", + ConfigKeys: []string{"condition", "then", "else"}, + }, + "step.set": { + Type: "step.set", + Plugin: "pipelinesteps", + ConfigKeys: []string{"key", "value"}, + }, + "step.log": { + Type: "step.log", + Plugin: "pipelinesteps", + ConfigKeys: []string{"message", "level"}, + }, + "step.delegate": { + Type: "step.delegate", + Plugin: "pipelinesteps", + ConfigKeys: []string{"service", "action"}, + }, + "step.jq": { + Type: "step.jq", + Plugin: "pipelinesteps", + ConfigKeys: []string{"expression", "input", "output"}, + }, + "step.publish": { + Type: "step.publish", + Plugin: "pipelinesteps", + ConfigKeys: []string{"topic", "broker", "payload"}, + }, + "step.http_call": { + Type: "step.http_call", + Plugin: 
"pipelinesteps", + ConfigKeys: []string{"url", "method", "headers", "body", "timeout"}, + }, + "step.request_parse": { + Type: "step.request_parse", + Plugin: "pipelinesteps", + ConfigKeys: []string{"body", "query", "headers"}, + }, + "step.db_query": { + Type: "step.db_query", + Plugin: "pipelinesteps", + ConfigKeys: []string{"database", "query", "params"}, + }, + "step.db_exec": { + Type: "step.db_exec", + Plugin: "pipelinesteps", + ConfigKeys: []string{"database", "query", "params"}, + }, + "step.json_response": { + Type: "step.json_response", + Plugin: "pipelinesteps", + ConfigKeys: []string{"status", "body", "headers"}, + }, + "step.workflow_call": { + Type: "step.workflow_call", + Plugin: "pipelinesteps", + ConfigKeys: []string{"workflow", "input"}, + }, + "step.validate_path_param": { + Type: "step.validate_path_param", + Plugin: "pipelinesteps", + ConfigKeys: []string{"param", "type", "required"}, + }, + "step.validate_pagination": { + Type: "step.validate_pagination", + Plugin: "pipelinesteps", + ConfigKeys: []string{"maxLimit", "defaultLimit"}, + }, + "step.validate_request_body": { + Type: "step.validate_request_body", + Plugin: "pipelinesteps", + ConfigKeys: []string{"schema", "required"}, + }, + "step.foreach": { + Type: "step.foreach", + Plugin: "pipelinesteps", + ConfigKeys: []string{"collection", "steps"}, + }, + "step.webhook_verify": { + Type: "step.webhook_verify", + Plugin: "pipelinesteps", + ConfigKeys: []string{"secret", "header", "algorithm"}, + }, + "step.cache_get": { + Type: "step.cache_get", + Plugin: "pipelinesteps", + ConfigKeys: []string{"cache", "key", "output"}, + }, + "step.cache_set": { + Type: "step.cache_set", + Plugin: "pipelinesteps", + ConfigKeys: []string{"cache", "key", "value", "ttl"}, + }, + "step.cache_delete": { + Type: "step.cache_delete", + Plugin: "pipelinesteps", + ConfigKeys: []string{"cache", "key"}, + }, + + // http plugin steps + "step.rate_limit": { + Type: "step.rate_limit", + Plugin: "http", + ConfigKeys: 
[]string{"requestsPerMinute", "burstSize", "key"}, + }, + "step.circuit_breaker": { + Type: "step.circuit_breaker", + Plugin: "http", + ConfigKeys: []string{"threshold", "timeout", "halfOpenRequests"}, + }, + + // statemachine plugin steps + "step.statemachine_transition": { + Type: "step.statemachine_transition", + Plugin: "statemachine", + ConfigKeys: []string{"engine", "instanceId", "transition"}, + }, + "step.statemachine_get": { + Type: "step.statemachine_get", + Plugin: "statemachine", + ConfigKeys: []string{"engine", "instanceId"}, + }, + + // ai plugin steps + "step.ai_complete": { + Type: "step.ai_complete", + Plugin: "ai", + ConfigKeys: []string{"model", "prompt", "maxTokens", "temperature"}, + }, + "step.ai_classify": { + Type: "step.ai_classify", + Plugin: "ai", + ConfigKeys: []string{"model", "input", "categories"}, + }, + "step.ai_extract": { + Type: "step.ai_extract", + Plugin: "ai", + ConfigKeys: []string{"model", "input", "schema"}, + }, + "step.sub_workflow": { + Type: "step.sub_workflow", + Plugin: "ai", + ConfigKeys: []string{"workflow", "input"}, + }, + + // featureflags plugin steps + "step.feature_flag": { + Type: "step.feature_flag", + Plugin: "featureflags", + ConfigKeys: []string{"flag", "default", "output"}, + }, + "step.ff_gate": { + Type: "step.ff_gate", + Plugin: "featureflags", + ConfigKeys: []string{"flag", "condition"}, + }, + + // cicd plugin steps + "step.shell_exec": { + Type: "step.shell_exec", + Plugin: "cicd", + ConfigKeys: []string{"command", "args", "env", "workdir", "timeout"}, + }, + "step.artifact_pull": { + Type: "step.artifact_pull", + Plugin: "cicd", + ConfigKeys: []string{"registry", "artifact", "tag", "output"}, + }, + "step.artifact_push": { + Type: "step.artifact_push", + Plugin: "cicd", + ConfigKeys: []string{"registry", "artifact", "tag"}, + }, + "step.docker_build": { + Type: "step.docker_build", + Plugin: "cicd", + ConfigKeys: []string{"context", "dockerfile", "tags", "buildArgs"}, + }, + "step.docker_push": { 
+ Type: "step.docker_push", + Plugin: "cicd", + ConfigKeys: []string{"image", "registry", "credentials"}, + }, + "step.docker_run": { + Type: "step.docker_run", + Plugin: "cicd", + ConfigKeys: []string{"image", "command", "env", "volumes"}, + }, + "step.scan_sast": { + Type: "step.scan_sast", + Plugin: "cicd", + ConfigKeys: []string{"tool", "path", "severity"}, + }, + "step.scan_container": { + Type: "step.scan_container", + Plugin: "cicd", + ConfigKeys: []string{"image", "severity"}, + }, + "step.scan_deps": { + Type: "step.scan_deps", + Plugin: "cicd", + ConfigKeys: []string{"path", "severity"}, + }, + "step.deploy": { + Type: "step.deploy", + Plugin: "cicd", + ConfigKeys: []string{"target", "config", "namespace"}, + }, + "step.gate": { + Type: "step.gate", + Plugin: "cicd", + ConfigKeys: []string{"condition", "approvers"}, + }, + "step.build_ui": { + Type: "step.build_ui", + Plugin: "cicd", + ConfigKeys: []string{"path", "command"}, + }, + "step.build_from_config": { + Type: "step.build_from_config", + Plugin: "cicd", + ConfigKeys: []string{"config", "output"}, + }, + + // auth-related steps (from pipelinesteps but auth-aware) + "step.auth_required": { + Type: "step.auth_required", + Plugin: "pipelinesteps", + ConfigKeys: []string{"roles", "scopes"}, + }, + "step.user_register": { + Type: "step.user_register", + Plugin: "pipelinesteps", + ConfigKeys: []string{"store", "fields"}, + }, + "step.user_login": { + Type: "step.user_login", + Plugin: "pipelinesteps", + ConfigKeys: []string{"store", "auth"}, + }, + "step.user_profile": { + Type: "step.user_profile", + Plugin: "pipelinesteps", + ConfigKeys: []string{"store"}, + }, + "step.org_create": { + Type: "step.org_create", + Plugin: "pipelinesteps", + ConfigKeys: []string{"store"}, + }, + "step.org_list": { + Type: "step.org_list", + Plugin: "pipelinesteps", + ConfigKeys: []string{"store"}, + }, + } +} + +// KnownTriggerTypes returns all known trigger types. 
// KnownTriggerTypes returns all known trigger types.
// Returned as a set (map to bool) so callers can test membership directly.
func KnownTriggerTypes() map[string]bool {
	return map[string]bool{
		"http":     true,
		"event":    true,
		"eventbus": true,
		"schedule": true,
	}
}
diff --git a/cmd/wfctl/type_registry_test.go b/cmd/wfctl/type_registry_test.go
new file mode 100644
index 00000000..4e2d551a
--- /dev/null
+++ b/cmd/wfctl/type_registry_test.go
@@ -0,0 +1,141 @@
package main

import (
	"strings"
	"testing"
)

// TestKnownModuleTypesPopulated spot-checks that a sample of well-known
// module types is present in the registry.
func TestKnownModuleTypesPopulated(t *testing.T) {
	types := KnownModuleTypes()
	if len(types) == 0 {
		t.Fatal("expected known module types to be non-empty")
	}
	// Check some well-known types
	expected := []string{
		"storage.sqlite",
		"http.server",
		"http.router",
		"auth.jwt",
		"messaging.broker",
		"statemachine.engine",
		"metrics.collector",
		"health.checker",
		"cache.redis",
	}
	for _, e := range expected {
		if _, ok := types[e]; !ok {
			t.Errorf("expected module type %q to be in registry", e)
		}
	}
}

// TestKnownModuleTypesPluginField enforces two invariants on every entry:
// Plugin is non-empty, and the Type field matches the map key.
func TestKnownModuleTypesPluginField(t *testing.T) {
	types := KnownModuleTypes()
	for typeName, info := range types {
		if info.Plugin == "" {
			t.Errorf("module type %q has empty Plugin field", typeName)
		}
		if info.Type != typeName {
			t.Errorf("module type %q has mismatched Type field: %q", typeName, info.Type)
		}
	}
}

// TestKnownModuleTypesStateful pins the Stateful flag for a sample of
// entries on both sides (stateful and stateless).
func TestKnownModuleTypesStateful(t *testing.T) {
	types := KnownModuleTypes()

	// These should be stateful
	statefulTypes := []string{"storage.sqlite", "database.workflow", "statemachine.engine", "auth.user-store"}
	for _, typeName := range statefulTypes {
		info, ok := types[typeName]
		if !ok {
			t.Errorf("module type %q not found", typeName)
			continue
		}
		if !info.Stateful {
			t.Errorf("expected module type %q to be stateful", typeName)
		}
	}

	// These should NOT be stateful
	nonStatefulTypes := []string{"http.server", "health.checker", "messaging.broker"}
	for _, typeName := range nonStatefulTypes {
		info, ok := types[typeName]
		if !ok {
			t.Errorf("module type %q not found", typeName)
			continue
		}
		if info.Stateful {
			t.Errorf("expected module type %q to NOT be stateful", typeName)
		}
	}
}

// TestKnownStepTypesPopulated spot-checks that a sample of well-known
// step types is present in the registry.
func TestKnownStepTypesPopulated(t *testing.T) {
	types := KnownStepTypes()
	if len(types) == 0 {
		t.Fatal("expected known step types to be non-empty")
	}
	expected := []string{
		"step.validate",
		"step.transform",
		"step.json_response",
		"step.db_query",
		"step.publish",
		"step.http_call",
		"step.cache_get",
		"step.rate_limit",
	}
	for _, e := range expected {
		if _, ok := types[e]; !ok {
			t.Errorf("expected step type %q to be in registry", e)
		}
	}
}

// TestKnownStepTypesAllHaveStepPrefix: every step type name must use the
// "step." naming convention.
func TestKnownStepTypesAllHaveStepPrefix(t *testing.T) {
	types := KnownStepTypes()
	for typeName := range types {
		if !strings.HasPrefix(typeName, "step.") {
			t.Errorf("step type %q does not start with 'step.'", typeName)
		}
	}
}

// TestKnownStepTypesPluginField mirrors the module-type invariant check:
// non-empty Plugin and Type matching the map key.
func TestKnownStepTypesPluginField(t *testing.T) {
	types := KnownStepTypes()
	for typeName, info := range types {
		if info.Plugin == "" {
			t.Errorf("step type %q has empty Plugin field", typeName)
		}
		if info.Type != typeName {
			t.Errorf("step type %q has mismatched Type field: %q", typeName, info.Type)
		}
	}
}

// TestKnownTriggerTypes checks the core trigger names are registered.
// ("eventbus" is intentionally not asserted here.)
func TestKnownTriggerTypes(t *testing.T) {
	triggers := KnownTriggerTypes()
	expected := []string{"http", "event", "schedule"}
	for _, e := range expected {
		if !triggers[e] {
			t.Errorf("expected trigger type %q to be known", e)
		}
	}
}

// TestModuleTypeCount guards against the registry being accidentally truncated.
func TestModuleTypeCount(t *testing.T) {
	types := KnownModuleTypes()
	// We should have a substantial number of module types
	if len(types) < 30 {
		t.Errorf("expected at least 30 module types, got %d", len(types))
	}
}

// TestStepTypeCount guards against the registry being accidentally truncated.
func TestStepTypeCount(t *testing.T) {
	types := KnownStepTypes()
	// We should have a substantial number of step types
	if len(types) < 20 {
		t.Errorf("expected at least 20 step types, got %d", len(types))
	}
}