diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0f03da35..81df0f13 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -184,6 +184,12 @@ jobs: GOOS=darwin GOARCH=arm64 go build -ldflags="${LDFLAGS}" -o dist/wfctl-darwin-arm64 ./cmd/wfctl GOOS=windows GOARCH=amd64 go build -ldflags="${LDFLAGS}" -o dist/wfctl-windows-amd64.exe ./cmd/wfctl + GOOS=linux GOARCH=amd64 go build -ldflags="${LDFLAGS}" -o dist/workflow-lsp-server-linux-amd64 ./cmd/workflow-lsp-server + GOOS=linux GOARCH=arm64 go build -ldflags="${LDFLAGS}" -o dist/workflow-lsp-server-linux-arm64 ./cmd/workflow-lsp-server + GOOS=darwin GOARCH=amd64 go build -ldflags="${LDFLAGS}" -o dist/workflow-lsp-server-darwin-amd64 ./cmd/workflow-lsp-server + GOOS=darwin GOARCH=arm64 go build -ldflags="${LDFLAGS}" -o dist/workflow-lsp-server-darwin-arm64 ./cmd/workflow-lsp-server + GOOS=windows GOARCH=amd64 go build -ldflags="${LDFLAGS}" -o dist/workflow-lsp-server-windows-amd64.exe ./cmd/workflow-lsp-server + - name: Download admin UI artifact uses: actions/download-artifact@v4 with: diff --git a/cmd/wfctl/main.go b/cmd/wfctl/main.go index 5ecaeed5..14b068c1 100644 --- a/cmd/wfctl/main.go +++ b/cmd/wfctl/main.go @@ -15,6 +15,7 @@ var commands = map[string]func([]string) error{ "plugin": runPlugin, "pipeline": runPipeline, "schema": runSchema, + "snippets": runSnippets, "manifest": runManifest, "migrate": runMigrate, "build-ui": runBuildUI, @@ -45,6 +46,7 @@ Commands: plugin Plugin management (init, docs, search, install, list, update, remove) pipeline Pipeline management (list, run) schema Generate JSON Schema for workflow configs + snippets Export IDE snippets (--format vscode|jetbrains|json) manifest Analyze config and report infrastructure requirements migrate Manage database schema migrations build-ui Build the application UI (npm install + npm run build + validate) diff --git a/cmd/wfctl/snippets.go b/cmd/wfctl/snippets.go new file mode 100644 index 
00000000..bbac9cff --- /dev/null +++ b/cmd/wfctl/snippets.go @@ -0,0 +1,80 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "os" + + "github.com/GoCodeAlone/workflow/schema" +) + +func runSnippets(args []string) error { + fs := flag.NewFlagSet("snippets", flag.ExitOnError) + format := fs.String("format", "json", "Output format: json, vscode, jetbrains") + output := fs.String("output", "", "Write output to file instead of stdout") + fs.Usage = func() { + fmt.Fprintf(fs.Output(), `Usage: wfctl snippets [options] + +Export workflow configuration snippets for IDE support. + +Options: +`) + fs.PrintDefaults() + fmt.Fprintf(fs.Output(), ` +Formats: + json Raw snippet list as JSON (default) + vscode VSCode .code-snippets JSON format + jetbrains JetBrains live templates XML format + +Examples: + wfctl snippets --format vscode --output workflow.code-snippets + wfctl snippets --format jetbrains --output workflow.xml +`) + } + if err := fs.Parse(args); err != nil { + return err + } + + var data []byte + var err error + + switch *format { + case "vscode": + data, err = schema.ExportSnippetsVSCode() + if err != nil { + return fmt.Errorf("vscode export failed: %w", err) + } + case "jetbrains": + data, err = schema.ExportSnippetsJetBrains() + if err != nil { + return fmt.Errorf("jetbrains export failed: %w", err) + } + case "json", "": + snips := schema.GetSnippets() + data, err = json.MarshalIndent(snips, "", " ") + if err != nil { + return fmt.Errorf("json export failed: %w", err) + } + default: + return fmt.Errorf("unknown format %q; choose json, vscode, or jetbrains", *format) + } + + w := os.Stdout + if *output != "" { + f, ferr := os.Create(*output) + if ferr != nil { + return fmt.Errorf("failed to create output file: %w", ferr) + } + defer f.Close() + w = f + } + + if _, err = w.Write(data); err != nil { + return fmt.Errorf("failed to write output: %w", err) + } + if *output != "" { + fmt.Fprintf(os.Stderr, "Snippets written to %s\n", *output) + } + return 
nil +} diff --git a/cmd/workflow-lsp-server/main.go b/cmd/workflow-lsp-server/main.go new file mode 100644 index 00000000..95231103 --- /dev/null +++ b/cmd/workflow-lsp-server/main.go @@ -0,0 +1,30 @@ +// Package main is the entrypoint for the workflow LSP server binary. +// It communicates with editors via the Language Server Protocol over stdio. +package main + +import ( + "flag" + "fmt" + "os" + + "github.com/GoCodeAlone/workflow/lsp" +) + +var version = "dev" + +func main() { + showVersion := flag.Bool("version", false, "Print version and exit") + flag.Parse() + + if *showVersion { + fmt.Println(version) + os.Exit(0) + } + + lsp.Version = version + s := lsp.NewServer() + if err := s.RunStdio(); err != nil { + fmt.Fprintf(os.Stderr, "workflow-lsp-server error: %v\n", err) + os.Exit(1) + } +} diff --git a/go.mod b/go.mod index 4882ea71..68ef4e93 100644 --- a/go.mod +++ b/go.mod @@ -49,6 +49,7 @@ require ( github.com/prometheus/client_golang v1.19.1 github.com/redis/go-redis/v9 v9.18.0 github.com/stripe/stripe-go/v82 v82.5.1 + github.com/tliron/glsp v0.2.2 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 go.opentelemetry.io/otel v1.40.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.40.0 @@ -96,10 +97,13 @@ require ( github.com/aws/aws-sdk-go-v2/service/sso v1.30.11 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.15 // indirect github.com/aws/smithy-go v1.24.1 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cenkalti/backoff/v5 v5.0.3 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/clipperhouse/stringish v0.1.1 // indirect + github.com/clipperhouse/uax29/v2 v2.3.0 // indirect github.com/cloudevents/sdk-go/v2 v2.16.2 // indirect github.com/cncf/xds/go v0.0.0-20251210132809-ee656c7534f5 // indirect github.com/containerd/errdefs v1.0.0 // indirect @@ -129,6 +133,7 
@@ require ( github.com/google/s2a-go v0.1.9 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.11 // indirect github.com/googleapis/gax-go/v2 v2.17.0 // indirect + github.com/gorilla/websocket v1.5.1 // indirect github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.7 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect @@ -143,6 +148,7 @@ require ( github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/hcl v1.0.1-vault-7 // indirect github.com/hashicorp/yamux v0.1.2 // indirect + github.com/iancoleman/strcase v0.3.0 // indirect github.com/itchyny/timefmt-go v0.1.7 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect @@ -161,9 +167,11 @@ require ( github.com/launchdarkly/go-sdk-events/v3 v3.5.0 // indirect github.com/launchdarkly/go-semver v1.0.3 // indirect github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect @@ -173,6 +181,7 @@ require ( github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/morikuni/aec v1.1.0 // indirect + github.com/muesli/termenv v0.15.2 // indirect github.com/nats-io/nkeys v0.4.12 // indirect github.com/nats-io/nuid v1.0.1 // indirect github.com/ncruces/go-strftime v1.0.0 // indirect @@ -180,6 +189,7 @@ require ( github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.1 // indirect 
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect + github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 // indirect github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect @@ -191,8 +201,12 @@ require ( github.com/robfig/cron/v3 v3.0.1 // indirect github.com/ryanuber/go-glob v1.0.0 // indirect github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/sourcegraph/jsonrpc2 v0.2.0 // indirect github.com/spf13/cast v1.7.1 // indirect github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect + github.com/tliron/commonlog v0.2.8 // indirect + github.com/tliron/kutil v0.3.11 // indirect github.com/yosida95/uritemplate/v3 v3.0.2 // indirect github.com/yuin/gopher-lua v1.1.1 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect @@ -208,6 +222,7 @@ require ( golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect golang.org/x/net v0.50.0 // indirect golang.org/x/sys v0.41.0 // indirect + golang.org/x/term v0.40.0 // indirect google.golang.org/genproto v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20260203192932-546029d2fa20 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d // indirect diff --git a/go.sum b/go.sum index d8931945..add50e32 100644 --- a/go.sum +++ b/go.sum @@ -131,6 +131,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.41.7 h1:NITQpgo9A5NrDZ57uOWj+abvXSb8 github.com/aws/aws-sdk-go-v2/service/sts v1.41.7/go.mod h1:sks5UWBhEuWYDPdwlnRFn1w7xWdH29Jcpe+/PJQefEs= github.com/aws/smithy-go v1.24.1 h1:VbyeNfmYkWoxMVpGUAbQumkODcYmfMRfZ8yQiH30SK0= github.com/aws/smithy-go v1.24.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod 
h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= @@ -145,6 +147,10 @@ github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1x github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs= +github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA= +github.com/clipperhouse/uax29/v2 v2.3.0 h1:SNdx9DVUqMoBuBoW3iLOj4FQv3dN5mDtuqwuhIGpJy4= +github.com/clipperhouse/uax29/v2 v2.3.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= github.com/cloudevents/sdk-go/v2 v2.16.2 h1:ZYDFrYke4FD+jM8TZTJJO6JhKHzOQl2oqpFK1D+NnQM= github.com/cloudevents/sdk-go/v2 v2.16.2/go.mod h1:laOcGImm4nVJEU+PHnUrKL56CKmRL65RlQF0kRmW/kg= github.com/cncf/xds/go v0.0.0-20251210132809-ee656c7534f5 h1:6xNmx7iTtyBRev0+D/Tv1FZd4SCg8axKApyNyRsAt/w= @@ -257,6 +263,9 @@ github.com/googleapis/gax-go/v2 v2.17.0 h1:RksgfBpxqff0EZkDWYuz9q/uWsTVz+kf43LsZ github.com/googleapis/gax-go/v2 v2.17.0/go.mod h1:mzaqghpQp4JDh3HvADwrat+6M3MOIDp5YKHhb9PAgDY= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= +github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= 
github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f h1:kOkUP6rcVVqC+KlKKENKtgfFfJyDySYhqL9srXooghY= github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.7 h1:X+2YciYSxvMQK0UZ7sg45ZVabVZBeBuvMkmuI2V3Fak= @@ -297,6 +306,8 @@ github.com/hashicorp/vault/api v1.22.0 h1:+HYFquE35/B74fHoIeXlZIP2YADVboaPjaSicH github.com/hashicorp/vault/api v1.22.0/go.mod h1:IUZA2cDvr4Ok3+NtK2Oq/r+lJeXkeCrHRmqdyWfpmGM= github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/itchyny/gojq v0.12.18 h1:gFGHyt/MLbG9n6dqnvlliiya2TaMMh6FFaR2b1H6Drc= github.com/itchyny/gojq v0.12.18/go.mod h1:4hPoZ/3lN9fDL1D+aK7DY1f39XZpY9+1Xpjz8atrEkg= github.com/itchyny/timefmt-go v0.1.7 h1:xyftit9Tbw+Dc/huSSPJaEmX1TVL8lw5vxjJLK4GMMA= @@ -358,6 +369,8 @@ github.com/launchdarkly/go-server-sdk/v7 v7.14.5 h1:QtdAS2R4cnGe3j+UGx8mkL9I78L+ github.com/launchdarkly/go-server-sdk/v7 v7.14.5/go.mod h1:0CUdE5PI0SVG1Tb6CwKz8wZ9zEHUzfMutl6wY2MzUF0= github.com/launchdarkly/go-test-helpers/v3 v3.1.0 h1:E3bxJMzMoA+cJSF3xxtk2/chr1zshl1ZWa0/oR+8bvg= github.com/launchdarkly/go-test-helpers/v3 v3.1.0/go.mod h1:Ake5+hZFS/DmIGKx/cizhn5W9pGA7pplcR7xCxWiLIo= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mark3labs/mcp-go v0.27.0 h1:iok9kU4DUIU2/XVLgFS2Q9biIDqstC0jY4EQTK2Erzc= @@ -370,6 
+383,8 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= github.com/minio/highwayhash v1.0.4-0.20251030100505-070ab1a87a76 h1:KGuD/pM2JpL9FAYvBrnBBeENKZNh6eNtjqytV6TYjnk= github.com/minio/highwayhash v1.0.4-0.20251030100505-070ab1a87a76/go.mod h1:GGYsuwP/fPD6Y9hMiXuapVvlIUEhFhMTh0rxU3ik1LQ= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -393,6 +408,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/morikuni/aec v1.1.0 h1:vBBl0pUnvi/Je71dsRrhMBtreIqNMYErSAbEeb8jrXQ= github.com/morikuni/aec v1.1.0/go.mod h1:xDRgiq/iw5l+zkao76YTKzKttOp2cwPEne25HDkJnBw= +github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo= +github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= github.com/nats-io/jwt/v2 v2.8.0 h1:K7uzyz50+yGZDO5o772eRE7atlcSEENpL7P+b74JV1g= github.com/nats-io/jwt/v2 v2.8.0/go.mod h1:me11pOkwObtcBNR8AiMrUbtVOUGkqYjMQZ6jnSdVUIA= github.com/nats-io/nats-server/v2 v2.12.4 h1:ZnT10v2LU2Xcoiy8ek9X6Se4YG8EuMfIfvAEuFVx1Ts= @@ -413,6 +430,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= github.com/patrickmn/go-cache 
v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ= +github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= @@ -446,9 +465,13 @@ github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkB github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= +github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0= +github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sourcegraph/jsonrpc2 v0.2.0 h1:KjN/dC4fP6aN9030MZCJs9WQbTOjWHhrtKVpzzSrr/U= +github.com/sourcegraph/jsonrpc2 v0.2.0/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo= github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M= @@ -473,6 +496,12 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu github.com/stretchr/testify v1.11.1/go.mod 
h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/stripe/stripe-go/v82 v82.5.1 h1:05q6ZDKoe8PLMpQV072obF74HCgP4XJeJYoNuRSX2+8= github.com/stripe/stripe-go/v82 v82.5.1/go.mod h1:majCQX6AfObAvJiHraPi/5udwHi4ojRvJnnxckvHrX8= +github.com/tliron/commonlog v0.2.8 h1:vpKrEsZX4nlneC9673pXpeKqv3cFLxwpzNEZF1qiaQQ= +github.com/tliron/commonlog v0.2.8/go.mod h1:HgQZrJEuiKLLRvUixtPWGcmTmWWtKkCtywF6x9X5Spw= +github.com/tliron/glsp v0.2.2 h1:IKPfwpE8Lu8yB6Dayta+IyRMAbTVunudeauEgjXBt+c= +github.com/tliron/glsp v0.2.2/go.mod h1:GMVWDNeODxHzmDPvYbYTCs7yHVaEATfYtXiYJ9w1nBg= +github.com/tliron/kutil v0.3.11 h1:kongR0dhrrn9FR/3QRFoUfQe27t78/xQvrU9aXIy5bk= +github.com/tliron/kutil v0.3.11/go.mod h1:4IqOAAdpJuDxYbJxMv4nL8LSH0mPofSrdwIv8u99PDc= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ= @@ -572,6 +601,8 @@ golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= +golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= diff --git a/lsp/completion.go b/lsp/completion.go new file mode 100644 index 00000000..bdf6fb58 --- /dev/null +++ b/lsp/completion.go @@ -0,0 +1,281 @@ +package lsp + 
+import ( + "sort" + + protocol "github.com/tliron/glsp/protocol_3_16" +) + +type topLevelKeyEntry struct { + key string + doc string + kind protocol.CompletionItemKind +} + +// getTopLevelKeys returns completion items for top-level YAML keys. +func getTopLevelKeys() []protocol.CompletionItem { + entries := []topLevelKeyEntry{ + {"modules", "List of module definitions to instantiate", protocol.CompletionItemKindKeyword}, + {"workflows", "Workflow handler configurations", protocol.CompletionItemKindKeyword}, + {"triggers", "Trigger configurations", protocol.CompletionItemKindKeyword}, + {"pipelines", "Named pipeline definitions", protocol.CompletionItemKindKeyword}, + {"imports", "List of external config files to import", protocol.CompletionItemKindKeyword}, + {"requires", "Plugin and version dependency declarations", protocol.CompletionItemKindKeyword}, + {"platform", "Platform-level configuration", protocol.CompletionItemKindKeyword}, + } + + items := make([]protocol.CompletionItem, 0, len(entries)) + for _, e := range entries { + kind := e.kind + doc := e.doc + label := e.key + items = append(items, protocol.CompletionItem{ + Label: label, + Kind: &kind, + Documentation: doc, + }) + } + return items +} + +// getModuleTypeCompletions returns completion items for module type values. +func getModuleTypeCompletions(reg *Registry) []protocol.CompletionItem { + types := make([]string, 0, len(reg.ModuleTypes)) + for t := range reg.ModuleTypes { + types = append(types, t) + } + sort.Strings(types) + + kind := protocol.CompletionItemKindClass + items := make([]protocol.CompletionItem, 0, len(types)) + for _, t := range types { + info := reg.ModuleTypes[t] + label := t + doc := info.Description + cat := info.Category + items = append(items, protocol.CompletionItem{ + Label: label, + Kind: &kind, + Documentation: doc, + Detail: &cat, + }) + } + return items +} + +// getStepTypeCompletions returns completion items for step type values. 
+func getStepTypeCompletions(reg *Registry) []protocol.CompletionItem { + types := make([]string, 0, len(reg.StepTypes)) + for t := range reg.StepTypes { + types = append(types, t) + } + sort.Strings(types) + + kind := protocol.CompletionItemKindFunction + items := make([]protocol.CompletionItem, 0, len(types)) + for _, t := range types { + info := reg.StepTypes[t] + label := t + doc := info.Description + items = append(items, protocol.CompletionItem{ + Label: label, + Kind: &kind, + Documentation: doc, + }) + } + return items +} + +// getTriggerTypeCompletions returns completion items for trigger type values. +func getTriggerTypeCompletions(reg *Registry) []protocol.CompletionItem { + types := make([]string, 0, len(reg.TriggerTypes)) + for t := range reg.TriggerTypes { + types = append(types, t) + } + sort.Strings(types) + + kind := protocol.CompletionItemKindEvent + items := make([]protocol.CompletionItem, 0, len(types)) + for _, t := range types { + info := reg.TriggerTypes[t] + label := t + doc := info.Description + items = append(items, protocol.CompletionItem{ + Label: label, + Kind: &kind, + Documentation: doc, + }) + } + return items +} + +// getModuleConfigKeyCompletions returns completions for config keys of a given module type. +func getModuleConfigKeyCompletions(reg *Registry, moduleType string) []protocol.CompletionItem { + info, ok := reg.ModuleTypes[moduleType] + if !ok { + return nil + } + + kind := protocol.CompletionItemKindProperty + items := make([]protocol.CompletionItem, 0, len(info.ConfigKeys)) + for _, k := range info.ConfigKeys { + key := k + items = append(items, protocol.CompletionItem{ + Label: key, + Kind: &kind, + }) + } + return items +} + +// getTemplateFunctionCompletions returns completion items for template functions. 
+func getTemplateFunctionCompletions() []protocol.CompletionItem { + fns := templateFunctions() + kind := protocol.CompletionItemKindFunction + items := make([]protocol.CompletionItem, 0, len(fns)) + for _, fn := range fns { + name := fn + items = append(items, protocol.CompletionItem{ + Label: name, + Kind: &kind, + Documentation: "Template function: " + name, + }) + } + return items +} + +// getModuleNamesFromContent returns module names declared in the document content +// for dependsOn completion. +func getModuleNamesFromContent(content string) []protocol.CompletionItem { + kind := protocol.CompletionItemKindValue + items := []protocol.CompletionItem{} + for _, line := range splitLines(content) { + trimmed := trimSpace(line) + if hasPrefix(trimmed, "name:") { + val := trimSpace(trimPrefix(trimmed, "name:")) + if val != "" { + name := val + items = append(items, protocol.CompletionItem{ + Label: name, + Kind: &kind, + }) + } + } + } + return items +} + +// Completions returns completion items for the given document and position context. +func Completions(reg *Registry, doc *Document, ctx PositionContext) []protocol.CompletionItem { + if ctx.InTemplate { + return getTemplateFunctionCompletions() + } + + switch ctx.Section { + case SectionTopLevel: + return getTopLevelKeys() + case SectionModules: + if ctx.DependsOn { + return getModuleNamesFromContent(doc.Content) + } + switch ctx.FieldName { + case "type": + return getModuleTypeCompletions(reg) + case "dependsOn": + return getModuleNamesFromContent(doc.Content) + } + if ctx.ModuleType != "" { + return getModuleConfigKeyCompletions(reg, ctx.ModuleType) + } + // Module-level field keys. + return moduleItemKeys() + case SectionPipeline: + if ctx.FieldName == "type" { + // Could be trigger or step type depending on nesting. + items := getStepTypeCompletions(reg) + items = append(items, getTriggerTypeCompletions(reg)...) 
+ return items + } + case SectionTriggers: + return getTriggerTypeCompletions(reg) + case SectionWorkflow: + return getWorkflowTypeCompletions(reg) + } + + return nil +} + +// moduleItemKeys returns field key completions for a modules[] item. +func moduleItemKeys() []protocol.CompletionItem { + kind := protocol.CompletionItemKindProperty + keys := []string{"name", "type", "config", "dependsOn", "branches"} + items := make([]protocol.CompletionItem, 0, len(keys)) + for _, k := range keys { + key := k + items = append(items, protocol.CompletionItem{ + Label: key, + Kind: &kind, + }) + } + return items +} + +// getWorkflowTypeCompletions returns completions for workflow types. +func getWorkflowTypeCompletions(reg *Registry) []protocol.CompletionItem { + kind := protocol.CompletionItemKindModule + items := make([]protocol.CompletionItem, 0, len(reg.WorkflowTypes)) + for _, t := range reg.WorkflowTypes { + wt := t + items = append(items, protocol.CompletionItem{ + Label: wt, + Kind: &kind, + }) + } + return items +} + +func splitLines(s string) []string { + var lines []string + start := 0 + for i := 0; i < len(s); i++ { + if s[i] == '\n' { + lines = append(lines, s[start:i]) + start = i + 1 + } + } + lines = append(lines, s[start:]) + return lines +} + +func trimSpace(s string) string { + return trimLeft(trimRight(s)) +} + +func trimLeft(s string) string { + for i, c := range s { + if c != ' ' && c != '\t' { + return s[i:] + } + } + return "" +} + +func trimRight(s string) string { + for i := len(s) - 1; i >= 0; i-- { + if s[i] != ' ' && s[i] != '\t' && s[i] != '\r' && s[i] != '\n' { + return s[:i+1] + } + } + return "" +} + +func hasPrefix(s, prefix string) bool { + return len(s) >= len(prefix) && s[:len(prefix)] == prefix +} + +func trimPrefix(s, prefix string) string { + if hasPrefix(s, prefix) { + return s[len(prefix):] + } + return s +} diff --git a/lsp/diagnostics.go b/lsp/diagnostics.go new file mode 100644 index 00000000..bf875340 --- /dev/null +++ 
b/lsp/diagnostics.go @@ -0,0 +1,277 @@ +package lsp + +import ( + "fmt" + "strings" + + protocol "github.com/tliron/glsp/protocol_3_16" + "gopkg.in/yaml.v3" +) + +// Diagnostics analyses a document and returns LSP diagnostics. +func Diagnostics(reg *Registry, doc *Document) []protocol.Diagnostic { + var diags []protocol.Diagnostic + + if doc.Node == nil { + return diags + } + + // Walk the root document node. + if doc.Node.Kind != yaml.DocumentNode || len(doc.Node.Content) == 0 { + return diags + } + + root := doc.Node.Content[0] + if root.Kind != yaml.MappingNode { + return diags + } + + // Collect module names for dependsOn validation. + moduleNames := collectModuleNames(root) + + // Check for unclosed template expressions. + diags = append(diags, checkUnclosedTemplates(doc.Content)...) + + // Validate each top-level section. + for i := 0; i+1 < len(root.Content); i += 2 { + keyNode := root.Content[i] + valNode := root.Content[i+1] + + switch keyNode.Value { + case "modules": + diags = append(diags, validateModules(reg, valNode, moduleNames)...) + case "triggers": + diags = append(diags, validateTriggers(reg, valNode)...) + case "workflows": + diags = append(diags, validateWorkflows(reg, valNode)...) + } + } + + return diags +} + +// collectModuleNames returns a set of module names defined in the modules section. +func collectModuleNames(root *yaml.Node) map[string]bool { + names := make(map[string]bool) + for i := 0; i+1 < len(root.Content); i += 2 { + if root.Content[i].Value != "modules" { + continue + } + seq := root.Content[i+1] + if seq.Kind != yaml.SequenceNode { + continue + } + for _, item := range seq.Content { + if item.Kind != yaml.MappingNode { + continue + } + for j := 0; j+1 < len(item.Content); j += 2 { + if item.Content[j].Value == "name" { + names[item.Content[j+1].Value] = true + } + } + } + } + return names +} + +// validateModules validates the modules sequence. 
+func validateModules(reg *Registry, node *yaml.Node, moduleNames map[string]bool) []protocol.Diagnostic { + var diags []protocol.Diagnostic + if node.Kind != yaml.SequenceNode { + return diags + } + + for _, item := range node.Content { + if item.Kind != yaml.MappingNode { + continue + } + + var modType, modName string + var typeNode *yaml.Node + var dependsOnNode *yaml.Node + + for j := 0; j+1 < len(item.Content); j += 2 { + k := item.Content[j] + v := item.Content[j+1] + switch k.Value { + case "type": + modType = v.Value + typeNode = v + case "name": + modName = v.Value + case "dependsOn": + dependsOnNode = v + } + } + + // Validate module type. + if modType != "" && typeNode != nil { + if _, ok := reg.ModuleTypes[modType]; !ok { + sev := protocol.DiagnosticSeverityError + diags = append(diags, protocol.Diagnostic{ + Range: nodeRange(typeNode), + Severity: &sev, + Message: fmt.Sprintf("unknown module type %q", modType), + Source: strPtr("workflow-lsp"), + }) + } + } + + // Validate config keys if we know the module type. + if modType != "" { + for j := 0; j+1 < len(item.Content); j += 2 { + k := item.Content[j] + if k.Value == "config" { + configNode := item.Content[j+1] + diags = append(diags, validateModuleConfig(reg, modType, configNode)...) + } + } + } + + // Validate dependsOn references. + if dependsOnNode != nil && dependsOnNode.Kind == yaml.SequenceNode { + for _, dep := range dependsOnNode.Content { + if dep.Value != "" && !moduleNames[dep.Value] && dep.Value != modName { + sev := protocol.DiagnosticSeverityWarning + diags = append(diags, protocol.Diagnostic{ + Range: nodeRange(dep), + Severity: &sev, + Message: fmt.Sprintf("dependsOn references unknown module %q", dep.Value), + Source: strPtr("workflow-lsp"), + }) + } + } + } + } + return diags +} + +// validateModuleConfig checks that config keys are recognized for the module type. 
+func validateModuleConfig(reg *Registry, moduleType string, configNode *yaml.Node) []protocol.Diagnostic { + var diags []protocol.Diagnostic + info, ok := reg.ModuleTypes[moduleType] + if !ok || len(info.ConfigKeys) == 0 || configNode.Kind != yaml.MappingNode { + return diags + } + + knownKeys := make(map[string]bool, len(info.ConfigKeys)) + for _, k := range info.ConfigKeys { + knownKeys[k] = true + } + + for i := 0; i+1 < len(configNode.Content); i += 2 { + k := configNode.Content[i] + if k.Value != "" && !knownKeys[k.Value] { + sev := protocol.DiagnosticSeverityWarning + diags = append(diags, protocol.Diagnostic{ + Range: nodeRange(k), + Severity: &sev, + Message: fmt.Sprintf("unknown config key %q for module type %q", k.Value, moduleType), + Source: strPtr("workflow-lsp"), + }) + } + } + return diags +} + +// validateTriggers checks trigger types. +func validateTriggers(reg *Registry, node *yaml.Node) []protocol.Diagnostic { + var diags []protocol.Diagnostic + if node.Kind != yaml.MappingNode { + return diags + } + for i := 0; i+1 < len(node.Content); i += 2 { + k := node.Content[i] + if _, ok := reg.TriggerTypes[k.Value]; !ok { + sev := protocol.DiagnosticSeverityError + diags = append(diags, protocol.Diagnostic{ + Range: nodeRange(k), + Severity: &sev, + Message: fmt.Sprintf("unknown trigger type %q", k.Value), + Source: strPtr("workflow-lsp"), + }) + } + } + return diags +} + +// validateWorkflows checks workflow types. 
+func validateWorkflows(reg *Registry, node *yaml.Node) []protocol.Diagnostic { + var diags []protocol.Diagnostic + if node.Kind != yaml.MappingNode { + return diags + } + knownTypes := make(map[string]bool, len(reg.WorkflowTypes)) + for _, t := range reg.WorkflowTypes { + knownTypes[t] = true + } + for i := 0; i+1 < len(node.Content); i += 2 { + k := node.Content[i] + if !knownTypes[k.Value] { + sev := protocol.DiagnosticSeverityWarning + diags = append(diags, protocol.Diagnostic{ + Range: nodeRange(k), + Severity: &sev, + Message: fmt.Sprintf("unknown workflow type %q", k.Value), + Source: strPtr("workflow-lsp"), + }) + } + } + return diags +} + +// checkUnclosedTemplates scans for unclosed {{ template expressions. +func checkUnclosedTemplates(content string) []protocol.Diagnostic { + var diags []protocol.Diagnostic + lines := strings.Split(content, "\n") + for lineIdx, line := range lines { + rest := line + col := 0 + for { + openIdx := strings.Index(rest, "{{") + if openIdx < 0 { + break + } + closeIdx := strings.Index(rest[openIdx:], "}}") + if closeIdx < 0 { + // Unclosed template. + startCol := col + openIdx + sev := protocol.DiagnosticSeverityWarning + diags = append(diags, protocol.Diagnostic{ + Range: protocol.Range{ + Start: protocol.Position{Line: uint32(lineIdx), Character: uint32(startCol)}, //nolint:gosec // G115: line/col indexes are non-negative + End: protocol.Position{Line: uint32(lineIdx), Character: uint32(len(line))}, //nolint:gosec // G115: line/col indexes are non-negative + }, + Severity: &sev, + Message: "unclosed template expression {{", + Source: strPtr("workflow-lsp"), + }) + break + } + col += openIdx + closeIdx + 2 + rest = rest[openIdx+closeIdx+2:] + } + } + return diags +} + +// nodeRange converts a yaml.Node position to an LSP Range. +// yaml.Node lines are 1-based; LSP positions are 0-based. 
+func nodeRange(n *yaml.Node) protocol.Range { + line := uint32(0) + col := uint32(0) + if n.Line > 0 { + line = uint32(n.Line - 1) //nolint:gosec // G115: yaml line numbers are positive + } + if n.Column > 0 { + col = uint32(n.Column - 1) //nolint:gosec // G115: yaml column numbers are positive + } + endCol := col + uint32(len(n.Value)) //nolint:gosec // G115: string length is non-negative + return protocol.Range{ + Start: protocol.Position{Line: line, Character: col}, + End: protocol.Position{Line: line, Character: endCol}, + } +} + +func strPtr(s string) *string { return &s } diff --git a/lsp/document.go b/lsp/document.go new file mode 100644 index 00000000..a25eb084 --- /dev/null +++ b/lsp/document.go @@ -0,0 +1,248 @@ +package lsp + +import ( + "strings" + "sync" + + "gopkg.in/yaml.v3" +) + +// Document holds the content and parsed state of an open YAML file. +type Document struct { + URI string + Content string + Node *yaml.Node // root node (Kind == DocumentNode) +} + +// DocumentStore is a thread-safe store of open LSP documents. +type DocumentStore struct { + mu sync.RWMutex + docs map[string]*Document +} + +// NewDocumentStore creates an empty DocumentStore. +func NewDocumentStore() *DocumentStore { + return &DocumentStore{docs: make(map[string]*Document)} +} + +// Set stores or replaces a document. +func (ds *DocumentStore) Set(uri, content string) *Document { + ds.mu.Lock() + defer ds.mu.Unlock() + doc := &Document{URI: uri, Content: content} + doc.Node = parseYAML(content) + ds.docs[uri] = doc + return doc +} + +// Get returns a document by URI, or nil if not found. +func (ds *DocumentStore) Get(uri string) *Document { + ds.mu.RLock() + defer ds.mu.RUnlock() + return ds.docs[uri] +} + +// Delete removes a document from the store. +func (ds *DocumentStore) Delete(uri string) { + ds.mu.Lock() + defer ds.mu.Unlock() + delete(ds.docs, uri) +} + +// parseYAML parses YAML content and returns the root node, or nil on error. 
+func parseYAML(content string) *yaml.Node { + var root yaml.Node + if err := yaml.Unmarshal([]byte(content), &root); err != nil { + return nil + } + return &root +} + +// SectionKind identifies the YAML section the cursor is in. +type SectionKind string + +const ( + SectionUnknown SectionKind = "unknown" + SectionModules SectionKind = "modules" + SectionWorkflow SectionKind = "workflows" + SectionTriggers SectionKind = "triggers" + SectionPipeline SectionKind = "pipelines" + SectionRequires SectionKind = "requires" + SectionImports SectionKind = "imports" + SectionTopLevel SectionKind = "top_level" +) + +// PositionContext describes what context the cursor is in within the document. +type PositionContext struct { + Section SectionKind + ModuleType string // if inside a modules[] item config, the type value + FieldName string // the field name at the cursor + InTemplate bool // cursor is inside {{ }} + DependsOn bool // cursor is in a dependsOn array value + Line int + Character int +} + +// ContextAt analyses the document content at the given (zero-based) line and +// character position and returns a PositionContext describing what the cursor +// is positioned on. +func ContextAt(content string, line, char int) PositionContext { + ctx := PositionContext{ + Section: SectionUnknown, + Line: line, + Character: char, + } + + lines := strings.Split(content, "\n") + if line >= len(lines) { + return ctx + } + currentLine := lines[line] + + // Check for template expression. + if isInTemplate(lines, line, char) { + ctx.InTemplate = true + } + + // Determine indentation level and section. + indent := leadingSpaces(currentLine) + + if indent == 0 { + ctx.Section = SectionTopLevel + return ctx + } + + // Walk up the lines to find parent keys. + section, moduleType, field := inferContext(lines, line, indent) + ctx.Section = section + ctx.ModuleType = moduleType + ctx.FieldName = field + + // Check if in dependsOn. 
+ for i := line; i >= 0; i-- { + l := strings.TrimSpace(lines[i]) + if strings.HasPrefix(l, "dependsOn:") { + ctx.DependsOn = true + break + } + if leadingSpaces(lines[i]) < indent && leadingSpaces(lines[i]) > 0 { + break + } + } + + return ctx +} + +// isInTemplate returns true if position (line, char) is inside a {{ }} expression. +func isInTemplate(lines []string, line, char int) bool { + if line >= len(lines) { + return false + } + l := lines[line] + if char > len(l) { + char = len(l) + } + prefix := l[:char] + openIdx := strings.LastIndex(prefix, "{{") + closeIdx := strings.LastIndex(prefix, "}}") + return openIdx >= 0 && openIdx > closeIdx +} + +// leadingSpaces returns the number of leading spaces in a string. +func leadingSpaces(s string) int { + for i, c := range s { + if c != ' ' { + return i + } + } + return len(s) +} + +// inferContext walks upward through lines to determine the YAML section, +// current module type (if any), and field name at the given line. +func inferContext(lines []string, line, curIndent int) (SectionKind, string, string) { + section := SectionUnknown + moduleType := "" + field := "" + + // Get the field on the current line. + cur := strings.TrimSpace(lines[line]) + if colonIdx := strings.Index(cur, ":"); colonIdx >= 0 { + field = strings.TrimSpace(cur[:colonIdx]) + } else { + // Could be a list item value. + field = strings.TrimPrefix(cur, "- ") + } + + // Walk upward to detect structure. 
+ prevIndent := curIndent + for i := line - 1; i >= 0; i-- { + l := lines[i] + ind := leadingSpaces(l) + trimmed := strings.TrimSpace(l) + if trimmed == "" { + continue + } + if ind < prevIndent { + prevIndent = ind + key := "" + if colonIdx := strings.Index(trimmed, ":"); colonIdx >= 0 { + key = strings.TrimSpace(trimmed[:colonIdx]) + } else { + key = strings.TrimPrefix(trimmed, "- ") + } + + switch key { + case "modules": + section = SectionModules + return section, moduleType, field + case "workflows": + section = SectionWorkflow + return section, moduleType, field + case "triggers": + section = SectionTriggers + return section, moduleType, field + case "pipelines": + section = SectionPipeline + return section, moduleType, field + case "requires": + section = SectionRequires + return section, moduleType, field + case "imports": + section = SectionImports + return section, moduleType, field + case "config": + // The parent is config — find the type field in the same module block. + moduleType = findTypeInBlock(lines, i) + return section, moduleType, field + case "type": + // Inside a type field value — look for surrounding module block. + } + } + } + + return section, moduleType, field +} + +// findTypeInBlock searches upward from lineIdx to find a "type:" key +// at the same module-item indentation level. +func findTypeInBlock(lines []string, lineIdx int) string { + refIndent := leadingSpaces(lines[lineIdx]) + for i := lineIdx - 1; i >= 0; i-- { + l := lines[i] + ind := leadingSpaces(l) + trimmed := strings.TrimSpace(l) + if trimmed == "" { + continue + } + if ind < refIndent { + // Moved up out of the block. 
+ break + } + if strings.HasPrefix(trimmed, "type:") { + val := strings.TrimSpace(strings.TrimPrefix(trimmed, "type:")) + return val + } + } + return "" +} diff --git a/lsp/hover.go b/lsp/hover.go new file mode 100644 index 00000000..5be557d4 --- /dev/null +++ b/lsp/hover.go @@ -0,0 +1,121 @@ +package lsp + +import ( + "fmt" + "strings" + + protocol "github.com/tliron/glsp/protocol_3_16" +) + +// Hover returns markdown hover content for the given position context, or nil +// if there is nothing to show. +func Hover(reg *Registry, _ *Document, ctx PositionContext) *protocol.Hover { + if ctx.InTemplate { + return hoverTemplateFunction(ctx.FieldName) + } + + switch ctx.Section { + case SectionModules: + if ctx.FieldName == "type" && ctx.ModuleType != "" { + return hoverModuleType(reg, ctx.ModuleType) + } + if ctx.ModuleType != "" && ctx.FieldName != "" { + return hoverConfigField(reg, ctx.ModuleType, ctx.FieldName) + } + if ctx.ModuleType != "" { + return hoverModuleType(reg, ctx.ModuleType) + } + case SectionTriggers: + if ctx.FieldName != "" { + return hoverTriggerType(reg, ctx.FieldName) + } + } + return nil +} + +// hoverModuleType generates hover markdown for a module type. 
+func hoverModuleType(reg *Registry, moduleType string) *protocol.Hover { + info, ok := reg.ModuleTypes[moduleType] + if !ok { + return nil + } + + var sb strings.Builder + sb.WriteString("**") + sb.WriteString(moduleType) + sb.WriteString("**") + if info.Label != "" && info.Label != moduleType { + sb.WriteString(" — ") + sb.WriteString(info.Label) + } + sb.WriteString("\n\n") + if info.Description != "" { + sb.WriteString(info.Description) + sb.WriteString("\n\n") + } + if info.Category != "" { + fmt.Fprintf(&sb, "**Category:** %s\n\n", info.Category) + } + if len(info.ConfigKeys) > 0 { + sb.WriteString("**Config keys:** `") + sb.WriteString(strings.Join(info.ConfigKeys, "`, `")) + sb.WriteString("`\n") + } + + return markdownHover(sb.String()) +} + +// hoverConfigField generates hover markdown for a module config field. +func hoverConfigField(reg *Registry, moduleType, field string) *protocol.Hover { + info, ok := reg.ModuleTypes[moduleType] + if !ok { + return nil + } + + for _, k := range info.ConfigKeys { + if k == field { + return markdownHover(fmt.Sprintf("**%s** — config key for `%s`", field, moduleType)) + } + } + return nil +} + +// hoverTriggerType generates hover markdown for a trigger type. +func hoverTriggerType(reg *Registry, triggerType string) *protocol.Hover { + info, ok := reg.TriggerTypes[triggerType] + if !ok { + return nil + } + return markdownHover(fmt.Sprintf("**%s** trigger\n\n%s", info.Type, info.Description)) +} + +// hoverTemplateFunction generates hover for template function names. +func hoverTemplateFunction(name string) *protocol.Hover { + docs := map[string]string{ + "uuidv4": "Generates a new UUID v4 string.", + "uuid": "Generates a new UUID v4 string (alias for uuidv4).", + "now": "Returns the current UTC time. 
Accepts an optional Go time layout string.", + "lower": "Converts a string to lower case.", + "default": "Returns the fallback value if the primary value is empty or nil.", + "trimPrefix": "Removes the given prefix from a string if present.", + "trimSuffix": "Removes the given suffix from a string if present.", + "json": "Marshals a value to a JSON string.", + "step": "Accesses step output by step name and optional nested keys.", + "trigger": "Accesses trigger data by nested keys.", + } + doc, ok := docs[name] + if !ok { + return nil + } + return markdownHover(fmt.Sprintf("**%s** — %s", name, doc)) +} + +// markdownHover wraps a markdown string in a Hover response. +func markdownHover(md string) *protocol.Hover { + return &protocol.Hover{ + Contents: protocol.MarkupContent{ + Kind: protocol.MarkupKindMarkdown, + Value: md, + }, + } +} diff --git a/lsp/lsp_test.go b/lsp/lsp_test.go new file mode 100644 index 00000000..2359657a --- /dev/null +++ b/lsp/lsp_test.go @@ -0,0 +1,281 @@ +package lsp + +import ( + "fmt" + "testing" + + protocol "github.com/tliron/glsp/protocol_3_16" +) + +const testYAML = `modules: + - name: server + type: http.server + config: + address: :8080 + - name: router + type: http.router + dependsOn: + - server + - name: mymod + type: nonexistent.module + +triggers: + http: + port: 8080 + badtrigger: + foo: bar +` + +// TestRegistry_ModuleTypes checks that the registry loads module types. +func TestRegistry_ModuleTypes(t *testing.T) { + reg := NewRegistry() + if len(reg.ModuleTypes) == 0 { + t.Fatal("registry has no module types") + } + // http.server must be registered. + info, ok := reg.ModuleTypes["http.server"] + if !ok { + t.Fatal("http.server not in registry") + } + if info.Type != "http.server" { + t.Errorf("unexpected type: %q", info.Type) + } + // Should have config keys. + if len(info.ConfigKeys) == 0 { + t.Error("http.server should have config keys") + } +} + +// TestRegistry_StepTypes checks step type registry. 
+func TestRegistry_StepTypes(t *testing.T) { + reg := NewRegistry() + if len(reg.StepTypes) == 0 { + t.Fatal("registry has no step types") + } + if _, ok := reg.StepTypes["step.set"]; !ok { + t.Error("step.set not in step type registry") + } +} + +// TestRegistry_TriggerTypes checks trigger type registry. +func TestRegistry_TriggerTypes(t *testing.T) { + reg := NewRegistry() + if _, ok := reg.TriggerTypes["http"]; !ok { + t.Error("http trigger not in registry") + } + if _, ok := reg.TriggerTypes["schedule"]; !ok { + t.Error("schedule trigger not in registry") + } +} + +// TestDocumentStore_SetGet checks basic document store operations. +func TestDocumentStore_SetGet(t *testing.T) { + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", testYAML) + if doc == nil { + t.Fatal("Set returned nil") + } + got := store.Get("file:///test.yaml") + if got == nil { + t.Fatal("Get returned nil") + } + if got.Content != testYAML { + t.Error("content mismatch") + } +} + +// TestDocumentStore_ParseYAML checks that YAML is parsed on Set. +func TestDocumentStore_ParseYAML(t *testing.T) { + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", testYAML) + if doc.Node == nil { + t.Fatal("document should have parsed YAML node") + } +} + +// TestDiagnostics_UnknownModuleType checks that unknown module types produce errors. +func TestDiagnostics_UnknownModuleType(t *testing.T) { + reg := NewRegistry() + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", testYAML) + + diags := Diagnostics(reg, doc) + + found := false + for _, d := range diags { + if containsStr(d.Message, "nonexistent.module") { + found = true + break + } + } + if !found { + t.Errorf("expected diagnostic for nonexistent.module, got %d diags: %v", len(diags), diagMessages(diags)) + } +} + +// TestDiagnostics_UnknownTriggerType checks that unknown trigger types produce errors. 
+func TestDiagnostics_UnknownTriggerType(t *testing.T) { + reg := NewRegistry() + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", testYAML) + + diags := Diagnostics(reg, doc) + + found := false + for _, d := range diags { + if containsStr(d.Message, "badtrigger") { + found = true + break + } + } + if !found { + t.Errorf("expected diagnostic for badtrigger, got: %v", diagMessages(diags)) + } +} + +// TestDiagnostics_ValidConfig checks no spurious errors on valid config. +func TestDiagnostics_ValidConfig(t *testing.T) { + validYAML := `modules: + - name: server + type: http.server + config: + address: :8080 + +triggers: + http: + port: 8080 +` + reg := NewRegistry() + store := NewDocumentStore() + doc := store.Set("file:///valid.yaml", validYAML) + diags := Diagnostics(reg, doc) + + // Should have no errors (warnings for unknown config keys are ok but + // there should be no unknown type errors). + for _, d := range diags { + if d.Severity != nil && *d.Severity == 1 { // DiagnosticSeverityError + t.Errorf("unexpected error: %s", d.Message) + } + } +} + +// TestCompletions_ModuleType checks that module type completions are returned. +func TestCompletions_ModuleType(t *testing.T) { + reg := NewRegistry() + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", testYAML) + + ctx := PositionContext{ + Section: SectionModules, + FieldName: "type", + } + items := Completions(reg, doc, ctx) + if len(items) == 0 { + t.Fatal("no completions for module type") + } + found := false + for _, item := range items { + if item.Label == "http.server" { + found = true + break + } + } + if !found { + t.Error("http.server not in module type completions") + } +} + +// TestCompletions_TopLevel checks top-level key completions. 
+func TestCompletions_TopLevel(t *testing.T) { + reg := NewRegistry() + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", "") + + ctx := PositionContext{Section: SectionTopLevel} + items := Completions(reg, doc, ctx) + if len(items) == 0 { + t.Fatal("no top-level completions") + } + + labels := make(map[string]bool, len(items)) + for _, item := range items { + labels[item.Label] = true + } + for _, expected := range []string{"modules", "workflows", "triggers"} { + if !labels[expected] { + t.Errorf("missing top-level key completion: %q", expected) + } + } +} + +// TestHover_ModuleType checks hover for module types. +func TestHover_ModuleType(t *testing.T) { + reg := NewRegistry() + store := NewDocumentStore() + doc := store.Set("file:///test.yaml", testYAML) + + ctx := PositionContext{ + Section: SectionModules, + ModuleType: "http.server", + FieldName: "type", + } + hover := Hover(reg, doc, ctx) + if hover == nil { + t.Fatal("expected hover for http.server") + } +} + +// TestContextAt checks basic context detection. +func TestContextAt(t *testing.T) { + yaml := `modules: + - name: server + type: http.server +` + ctx := ContextAt(yaml, 2, 10) + // Line 2 is " type: http.server" — should detect modules section. + if ctx.Section != SectionModules { + t.Errorf("expected SectionModules, got %q", ctx.Section) + } +} + +// TestTemplateFunctions checks the template functions list. 
+func TestTemplateFunctions(t *testing.T) { + fns := templateFunctions() + if len(fns) == 0 { + t.Fatal("no template functions") + } + foundUUID := false + for _, f := range fns { + if f == "uuidv4" { + foundUUID = true + break + } + } + if !foundUUID { + t.Error("uuidv4 not in template functions") + } +} + +// helpers + +func containsStr(s, sub string) bool { + return len(s) >= len(sub) && (s == sub || findSubstr(s, sub)) +} + +func findSubstr(s, sub string) bool { + for i := 0; i+len(sub) <= len(s); i++ { + if s[i:i+len(sub)] == sub { + return true + } + } + return false +} + +func diagMessages(diags []protocol.Diagnostic) []string { + msgs := make([]string, len(diags)) + for i, d := range diags { + msgs[i] = fmt.Sprintf("[%d] %s", i, d.Message) + } + return msgs +} + diff --git a/lsp/registry.go b/lsp/registry.go new file mode 100644 index 00000000..c23dc0be --- /dev/null +++ b/lsp/registry.go @@ -0,0 +1,117 @@ +// Package lsp implements a Language Server Protocol server for workflow +// configuration files, providing completions, diagnostics, and hover info. +package lsp + +import ( + "github.com/GoCodeAlone/workflow/schema" +) + +// ModuleTypeInfo holds metadata about a known module type for the LSP. +type ModuleTypeInfo struct { + Type string + Label string + Category string + Description string + ConfigKeys []string +} + +// StepTypeInfo holds metadata about a known step type for the LSP. +type StepTypeInfo struct { + Type string + Description string + ConfigKeys []string +} + +// TriggerTypeInfo holds metadata about a known trigger type. +type TriggerTypeInfo struct { + Type string + Description string +} + +// Registry aggregates all known workflow types for LSP use. +type Registry struct { + ModuleTypes map[string]ModuleTypeInfo + StepTypes map[string]StepTypeInfo + TriggerTypes map[string]TriggerTypeInfo + WorkflowTypes []string +} + +// NewRegistry builds a Registry from the schema package's known types and registry. 
+func NewRegistry() *Registry { + r := &Registry{ + ModuleTypes: make(map[string]ModuleTypeInfo), + StepTypes: make(map[string]StepTypeInfo), + TriggerTypes: make(map[string]TriggerTypeInfo), + WorkflowTypes: schema.KnownWorkflowTypes(), + } + + // Build module type info from ModuleSchemaRegistry. + reg := schema.NewModuleSchemaRegistry() + for _, ms := range reg.All() { + keys := make([]string, 0, len(ms.ConfigFields)) + for i := range ms.ConfigFields { + keys = append(keys, ms.ConfigFields[i].Key) + } + r.ModuleTypes[ms.Type] = ModuleTypeInfo{ + Type: ms.Type, + Label: ms.Label, + Category: ms.Category, + Description: ms.Description, + ConfigKeys: keys, + } + } + + // Supplement with types from KnownModuleTypes that aren't in the schema registry. + for _, t := range schema.KnownModuleTypes() { + if _, exists := r.ModuleTypes[t]; !exists { + r.ModuleTypes[t] = ModuleTypeInfo{ + Type: t, + Description: t + " module", + } + } + } + + // Build step type info. + for t := range schema.KnownStepTypes() { + r.StepTypes[t] = StepTypeInfo{ + Type: t, + Description: "Pipeline step: " + t, + } + } + + // Build trigger type info. + for _, t := range schema.KnownTriggerTypes() { + desc := map[string]string{ + "http": "HTTP trigger: fires on incoming HTTP requests", + "schedule": "Schedule trigger: fires on a cron schedule", + "event": "Event trigger: fires when a message is received on a topic", + "eventbus": "Event bus trigger: fires for events on the internal event bus", + } + d := desc[t] + if d == "" { + d = t + " trigger" + } + r.TriggerTypes[t] = TriggerTypeInfo{ + Type: t, + Description: d, + } + } + + return r +} + +// templateFunctions returns the list of template functions available in pipeline templates. 
+func templateFunctions() []string { + return []string{ + "uuidv4", + "uuid", + "now", + "lower", + "default", + "trimPrefix", + "trimSuffix", + "json", + "step", + "trigger", + } +} diff --git a/lsp/server.go b/lsp/server.go new file mode 100644 index 00000000..a007e75a --- /dev/null +++ b/lsp/server.go @@ -0,0 +1,151 @@ +package lsp + +import ( + "github.com/tliron/glsp" + protocol "github.com/tliron/glsp/protocol_3_16" + glspserver "github.com/tliron/glsp/server" +) + +// Version is set at build time. +var Version = "dev" + +// Server is the workflow LSP server. +type Server struct { + registry *Registry + store *DocumentStore + handler protocol.Handler + server *glspserver.Server +} + +// NewServer creates a new LSP server with all handlers registered. +func NewServer() *Server { + s := &Server{ + registry: NewRegistry(), + store: NewDocumentStore(), + } + s.handler = protocol.Handler{ + Initialize: s.initialize, + Initialized: s.initialized, + Shutdown: s.shutdown, + TextDocumentDidOpen: s.didOpen, + TextDocumentDidChange: s.didChange, + TextDocumentDidSave: s.didSave, + TextDocumentCompletion: s.completion, + TextDocumentHover: s.hover, + } + s.server = glspserver.NewServer(&s.handler, "workflow-lsp", false) + return s +} + +// RunStdio starts the LSP server over stdio (blocking). +func (s *Server) RunStdio() error { + return s.server.RunStdio() +} + +// initialize handles the LSP initialize request. 
+func (s *Server) initialize(_ *glsp.Context, params *protocol.InitializeParams) (any, error) { + _ = params + capabilities := s.handler.CreateServerCapabilities() + + syncKind := protocol.TextDocumentSyncKindFull + capabilities.TextDocumentSync = &protocol.TextDocumentSyncOptions{ + OpenClose: boolPtr(true), + Change: &syncKind, + Save: boolPtr(true), + } + + return protocol.InitializeResult{ + Capabilities: capabilities, + ServerInfo: &protocol.InitializeResultServerInfo{ + Name: "workflow-lsp-server", + Version: &Version, + }, + }, nil +} + +// initialized handles the initialized notification. +func (s *Server) initialized(_ *glsp.Context, _ *protocol.InitializedParams) error { + return nil +} + +// shutdown handles the shutdown request. +func (s *Server) shutdown(_ *glsp.Context) error { + return nil +} + +// didOpen handles textDocument/didOpen. +func (s *Server) didOpen(ctx *glsp.Context, params *protocol.DidOpenTextDocumentParams) error { + doc := s.store.Set(string(params.TextDocument.URI), params.TextDocument.Text) + s.publishDiagnostics(ctx, string(params.TextDocument.URI), doc) + return nil +} + +// didChange handles textDocument/didChange. +func (s *Server) didChange(ctx *glsp.Context, params *protocol.DidChangeTextDocumentParams) error { + if len(params.ContentChanges) == 0 { + return nil + } + // We use full sync — take the last change. + var content string + for _, change := range params.ContentChanges { + if c, ok := change.(protocol.TextDocumentContentChangeEventWhole); ok { + content = c.Text + } + } + doc := s.store.Set(string(params.TextDocument.URI), content) + s.publishDiagnostics(ctx, string(params.TextDocument.URI), doc) + return nil +} + +// didSave handles textDocument/didSave. 
+func (s *Server) didSave(ctx *glsp.Context, params *protocol.DidSaveTextDocumentParams) error { + doc := s.store.Get(string(params.TextDocument.URI)) + if doc != nil { + s.publishDiagnostics(ctx, string(params.TextDocument.URI), doc) + } + return nil +} + +// completion handles textDocument/completion. +func (s *Server) completion(_ *glsp.Context, params *protocol.CompletionParams) (any, error) { + uri := string(params.TextDocument.URI) + doc := s.store.Get(uri) + if doc == nil { + return nil, nil + } + line := int(params.Position.Line) + char := int(params.Position.Character) + ctx := ContextAt(doc.Content, line, char) + items := Completions(s.registry, doc, ctx) + return items, nil +} + +// hover handles textDocument/hover. +func (s *Server) hover(_ *glsp.Context, params *protocol.HoverParams) (*protocol.Hover, error) { + uri := string(params.TextDocument.URI) + doc := s.store.Get(uri) + if doc == nil { + return nil, nil + } + line := int(params.Position.Line) + char := int(params.Position.Character) + ctx := ContextAt(doc.Content, line, char) + return Hover(s.registry, doc, ctx), nil +} + +// publishDiagnostics sends textDocument/publishDiagnostics notification to the client. +func (s *Server) publishDiagnostics(ctx *glsp.Context, uri string, doc *Document) { + if doc == nil { + return + } + diags := Diagnostics(s.registry, doc) + params := protocol.PublishDiagnosticsParams{ + URI: protocol.DocumentUri(uri), + Diagnostics: diags, + } + if ctx.Notify != nil { + ctx.Notify(string(protocol.ServerTextDocumentPublishDiagnostics), params) + } +} + +func boolPtr(v bool) *bool { return &v } diff --git a/schema/schema.go b/schema/schema.go index e4423d03..064a8478 100644 --- a/schema/schema.go +++ b/schema/schema.go @@ -4,6 +4,7 @@ package schema import ( + "encoding/json" "sort" "sync" ) @@ -70,22 +71,70 @@ func UnregisterWorkflowType(workflowType string) { // Schema represents a JSON Schema document. 
type Schema struct {
	Schema      string             `json:"$schema,omitempty"`
	Title       string             `json:"title,omitempty"`
	Description string             `json:"description,omitempty"`
	Type        string             `json:"type,omitempty"`
	Required    []string           `json:"required,omitempty"`
	Properties  map[string]*Schema `json:"properties,omitempty"`
	Items       *Schema            `json:"items,omitempty"`
	Enum        []string           `json:"enum,omitempty"`
	// RawMessage so additionalProperties can be a JSON boolean (or omitted).
	AdditionalProperties json.RawMessage    `json:"additionalProperties,omitempty"`
	AnyOf                []*Schema          `json:"anyOf,omitempty"`
	OneOf                []*Schema          `json:"oneOf,omitempty"`
	AllOf                []*Schema          `json:"allOf,omitempty"`
	If                   *Schema            `json:"if,omitempty"`
	Then                 *Schema            `json:"then,omitempty"`
	Default              any                `json:"default,omitempty"`
	MinItems             *int               `json:"minItems,omitempty"`
	Minimum              *float64           `json:"minimum,omitempty"`
	Pattern              string             `json:"pattern,omitempty"`
	Definitions          map[string]*Schema `json:"$defs,omitempty"`
	Ref                  string             `json:"$ref,omitempty"`
}

// setAdditionalPropertiesBool sets additionalProperties to a boolean value.
func (s *Schema) setAdditionalPropertiesBool(v bool) {
	raw := `false`
	if v {
		raw = `true`
	}
	s.AdditionalProperties = json.RawMessage(raw)
}

// configFieldDefToSchema converts a ConfigFieldDef to a JSON Schema property.
+func configFieldDefToSchema(f ConfigFieldDef) *Schema { + s := &Schema{ + Description: f.Description, + } + if f.DefaultValue != nil { + s.Default = f.DefaultValue + } + switch f.Type { + case FieldTypeString, FieldTypeDuration, FieldTypeFilePath, FieldTypeSQL: + s.Type = "string" + case FieldTypeNumber: + s.Type = "number" + case FieldTypeBool: + s.Type = "boolean" + case FieldTypeSelect: + s.Type = "string" + if len(f.Options) > 0 { + s.Enum = f.Options + } + case FieldTypeArray: + s.Type = "array" + if f.ArrayItemType != "" { + s.Items = &Schema{Type: f.ArrayItemType} + } else { + s.Items = &Schema{Type: "string"} + } + case FieldTypeMap, FieldTypeJSON: + s.Type = "object" + default: + s.Type = "string" + } + return s } // coreModuleTypes is the hardcoded list of built-in module type identifiers @@ -319,13 +368,49 @@ func KnownWorkflowTypes() []string { return result } +// moduleIfThen builds an if/then conditional schema for a specific module type +// that adds per-type config property validation. +func moduleIfThen(moduleType string, ms *ModuleSchema) *Schema { + props := make(map[string]*Schema, len(ms.ConfigFields)) + required := make([]string, 0) + for i := range ms.ConfigFields { + f := &ms.ConfigFields[i] + props[f.Key] = configFieldDefToSchema(*f) + if f.Required { + required = append(required, f.Key) + } + } + configSchema := &Schema{ + Type: "object", + Properties: props, + } + configSchema.setAdditionalPropertiesBool(false) + if len(required) > 0 { + configSchema.Required = required + } + then := &Schema{ + Properties: map[string]*Schema{ + "config": configSchema, + }, + } + return &Schema{ + If: &Schema{ + Required: []string{"type"}, + Properties: map[string]*Schema{ + "type": {Enum: []string{moduleType}}, + }, + }, + Then: then, + } +} + // GenerateWorkflowSchema produces the full JSON Schema describing a valid // WorkflowConfig YAML file. 
func GenerateWorkflowSchema() *Schema { - f := false one := 1 + reg := NewModuleSchemaRegistry() - moduleConfigSchema := &Schema{ + moduleBase := &Schema{ Type: "object", Required: []string{"name", "type"}, Properties: map[string]*Schema{ @@ -337,7 +422,7 @@ func GenerateWorkflowSchema() *Schema { "type": { Type: "string", Description: "Module type identifier (built-in or plugin-provided)", - Enum: NewModuleSchemaRegistry().Types(), + Enum: reg.Types(), }, "config": { Type: "object", @@ -353,10 +438,109 @@ func GenerateWorkflowSchema() *Schema { Description: "Branch configuration for conditional routing", }, }, - AdditionalP: &f, } + moduleBase.setAdditionalPropertiesBool(false) - return &Schema{ + // Build if/then conditionals per registered module type. + allOf := make([]*Schema, 0, len(reg.schemas)) + types := reg.Types() + for _, t := range types { + ms := reg.Get(t) + if ms == nil || len(ms.ConfigFields) == 0 { + continue + } + allOf = append(allOf, moduleIfThen(t, ms)) + } + if len(allOf) > 0 { + moduleBase.AllOf = allOf + } + + // Step schema — type enum built from KnownStepTypes. + stepTypes := KnownStepTypes() + stepTypeEnum := make([]string, 0, len(stepTypes)) + for t := range stepTypes { + stepTypeEnum = append(stepTypeEnum, t) + } + sort.Strings(stepTypeEnum) + + stepSchema := &Schema{ + Type: "object", + Required: []string{"type"}, + Properties: map[string]*Schema{ + "type": { + Type: "string", + Description: "Step type identifier", + Enum: stepTypeEnum, + }, + "name": {Type: "string", Description: "Step name (used to reference output in later steps)"}, + "config": { + Type: "object", + Description: "Step-specific configuration", + }, + "dependsOn": { + Type: "array", + Items: &Schema{Type: "string"}, + }, + }, + } + + // Build per-step if/then config conditionals from the registry. + // TODO: register step config field schemas in ModuleSchemaRegistry so these + // conditionals can enforce per-step config shapes (similar to module types). 
+ stepAllOf := make([]*Schema, 0) + for _, t := range stepTypeEnum { + ms := reg.Get(t) + if ms == nil || len(ms.ConfigFields) == 0 { + continue + } + stepAllOf = append(stepAllOf, moduleIfThen(t, ms)) + } + if len(stepAllOf) > 0 { + stepSchema.AllOf = stepAllOf + } + + // Trigger schema — KnownTriggerTypes() returns a sorted []string. + triggerEnum := KnownTriggerTypes() + + triggerSchema := &Schema{ + Type: "object", + Description: "Trigger configurations keyed by trigger type", + Properties: map[string]*Schema{}, + } + for _, t := range triggerEnum { + triggerSchema.Properties[t] = &Schema{ + Type: "object", + Description: "Configuration for the " + t + " trigger", + } + } + triggerSchema.setAdditionalPropertiesBool(false) + + // Pipeline schema. + pipelineSchema := &Schema{ + Type: "object", + Description: "Named pipeline definitions", + Properties: map[string]*Schema{ + "trigger": { + Type: "object", + Description: "Inline trigger definition for this pipeline", + Properties: map[string]*Schema{ + "type": { + Type: "string", + Description: "Trigger type", + Enum: triggerEnum, + }, + "config": {Type: "object", Description: "Trigger-specific configuration"}, + }, + }, + "steps": { + Type: "array", + Description: "Ordered list of pipeline steps", + Items: stepSchema, + }, + }, + } + + root := &Schema{ Schema: "https://json-schema.org/draft/2020-12/schema", Title: "Workflow Configuration", Description: "Schema for GoCodeAlone/workflow engine YAML configuration files", @@ -366,16 +550,91 @@ func GenerateWorkflowSchema() *Schema { "modules": { Type: "array", Description: "List of module definitions to instantiate", - Items: moduleConfigSchema, + Items: moduleBase, MinItems: &one, }, "workflows": { Type: "object", Description: "Workflow handler configurations keyed by workflow type (e.g. 
http, messaging, statemachine, scheduler, integration)", }, - "triggers": { + "triggers": triggerSchema, + "pipelines": buildPipelinesSchema(pipelineSchema), + "imports": { + Type: "array", + Description: "List of external config files to import", + Items: &Schema{Type: "string"}, + }, + "requires": { + Type: "object", + Description: "Plugin dependency declarations", + Properties: map[string]*Schema{ + "plugins": { + Type: "array", + Items: &Schema{Type: "string"}, + }, + "version": {Type: "string", Description: "Minimum engine version"}, + }, + }, + "platform": { + Type: "object", + Description: "Platform-level configuration (kubernetes, cloud, etc.)", + }, + }, + } + + return root +} + +// KnownStepTypes returns all step type identifiers derived from KnownModuleTypes +// by filtering for types with the "step." prefix. This ensures the set is always +// complete and consistent with the module type registry. +func KnownStepTypes() map[string]bool { + all := KnownModuleTypes() + result := make(map[string]bool, 64) + for _, t := range all { + if len(t) > 5 && t[:5] == "step." { + result[t] = true + } + } + return result +} + +// buildPipelinesSchema constructs the pipelines object schema using +// AdditionalProperties so that any pipeline name (arbitrary string key) is +// validated against pipelineSchema rather than creating a literal "*" property. +func buildPipelinesSchema(pipelineSchema *Schema) *Schema { + raw, err := json.Marshal(pipelineSchema) + if err != nil { + // Fallback: allow any object if marshal fails (should never happen). + s := &Schema{ + Type: "object", + Description: "Named pipeline definitions", + } + s.setAdditionalPropertiesBool(true) + return s + } + return &Schema{ + Type: "object", + Description: "Named pipeline definitions", + AdditionalProperties: json.RawMessage(raw), + } +} + +// GenerateApplicationSchema produces a JSON Schema for application-level configs. 
+func GenerateApplicationSchema() *Schema { + workflowSchema := GenerateWorkflowSchema() + return &Schema{ + Schema: "https://json-schema.org/draft/2020-12/schema", + Title: "Application Configuration", + Description: "Schema for GoCodeAlone/workflow application-level YAML configuration files", + Type: "object", + Properties: map[string]*Schema{ + "name": {Type: "string", Description: "Application name"}, + "version": {Type: "string", Description: "Application version"}, + "engine": workflowSchema, + "services": { Type: "object", - Description: "Trigger configurations keyed by trigger type (e.g. http, schedule, event, eventbus)", + Description: "Named service configurations", }, }, } diff --git a/schema/schema_enhanced_test.go b/schema/schema_enhanced_test.go new file mode 100644 index 00000000..3c3d146d --- /dev/null +++ b/schema/schema_enhanced_test.go @@ -0,0 +1,175 @@ +package schema + +import ( + "encoding/json" + "strings" + "testing" +) + +// TestGenerateWorkflowSchema_ValidJSON verifies the schema produces valid JSON. +func TestGenerateWorkflowSchema_ValidJSON(t *testing.T) { + s := GenerateWorkflowSchema() + data, err := json.MarshalIndent(s, "", " ") + if err != nil { + t.Fatalf("failed to marshal schema: %v", err) + } + if len(data) == 0 { + t.Fatal("schema JSON is empty") + } + var m map[string]any + if err := json.Unmarshal(data, &m); err != nil { + t.Fatalf("invalid JSON: %v", err) + } +} + +// TestGenerateWorkflowSchema_HTTPServerIfThen checks that the http.server module +// type has an if/then conditional with config field validation. +func TestGenerateWorkflowSchema_HTTPServerIfThen(t *testing.T) { + s := GenerateWorkflowSchema() + modules := s.Properties["modules"] + if modules == nil { + t.Fatal("modules property missing") + } + items := modules.Items + if items == nil { + t.Fatal("modules items missing") + } + + // Find the http.server if/then in allOf. 
+ var httpServerThen *Schema + for _, cond := range items.AllOf { + if cond.If == nil { + continue + } + typeProp, ok := cond.If.Properties["type"] + if !ok { + continue + } + for _, e := range typeProp.Enum { + if e == "http.server" { + httpServerThen = cond.Then + break + } + } + } + + if httpServerThen == nil { + t.Fatal("http.server if/then not found in allOf") + } + + configProp := httpServerThen.Properties["config"] + if configProp == nil { + t.Fatal("http.server then.config missing") + } + + addressProp := configProp.Properties["address"] + if addressProp == nil { + t.Fatal("http.server config.address schema missing") + } + if addressProp.Type != "string" { + t.Errorf("address type should be string, got %q", addressProp.Type) + } +} + +// TestGenerateWorkflowSchema_PipelinesSection checks the pipelines section. +func TestGenerateWorkflowSchema_PipelinesSection(t *testing.T) { + s := GenerateWorkflowSchema() + pipelines := s.Properties["pipelines"] + if pipelines == nil { + t.Fatal("pipelines property missing") + } + if pipelines.Type != "object" { + t.Errorf("pipelines should be object, got %q", pipelines.Type) + } +} + +// TestGenerateWorkflowSchema_RequiresSection checks the requires section. +func TestGenerateWorkflowSchema_RequiresSection(t *testing.T) { + s := GenerateWorkflowSchema() + requires := s.Properties["requires"] + if requires == nil { + t.Fatal("requires property missing") + } + if requires.Type != "object" { + t.Errorf("requires should be object, got %q", requires.Type) + } + if requires.Properties["plugins"] == nil { + t.Error("requires.plugins missing") + } + if requires.Properties["version"] == nil { + t.Error("requires.version missing") + } +} + +// TestGenerateWorkflowSchema_ImportsSection checks the imports section. 
+func TestGenerateWorkflowSchema_ImportsSection(t *testing.T) { + s := GenerateWorkflowSchema() + imports := s.Properties["imports"] + if imports == nil { + t.Fatal("imports property missing") + } + if imports.Type != "array" { + t.Errorf("imports should be array, got %q", imports.Type) + } + if imports.Items == nil || imports.Items.Type != "string" { + t.Error("imports items should be string") + } +} + +// TestGenerateApplicationSchema checks the application schema top-level structure. +func TestGenerateApplicationSchema(t *testing.T) { + s := GenerateApplicationSchema() + if s.Schema != "https://json-schema.org/draft/2020-12/schema" { + t.Errorf("unexpected schema URI: %q", s.Schema) + } + if s.Properties["engine"] == nil { + t.Error("application schema should have engine property") + } + data, err := json.Marshal(s) + if err != nil { + t.Fatalf("failed to marshal application schema: %v", err) + } + if !strings.Contains(string(data), "modules") { + t.Error("application schema JSON should contain modules") + } +} + +// TestConfigFieldDefToSchema checks conversion of ConfigFieldDef to Schema. 
+func TestConfigFieldDefToSchema(t *testing.T) { + cases := []struct { + def ConfigFieldDef + wantType string + }{ + {ConfigFieldDef{Type: FieldTypeString}, "string"}, + {ConfigFieldDef{Type: FieldTypeNumber}, "number"}, + {ConfigFieldDef{Type: FieldTypeBool}, "boolean"}, + {ConfigFieldDef{Type: FieldTypeArray, ArrayItemType: "string"}, "array"}, + {ConfigFieldDef{Type: FieldTypeMap}, "object"}, + {ConfigFieldDef{Type: FieldTypeJSON}, "object"}, + {ConfigFieldDef{Type: FieldTypeDuration}, "string"}, + {ConfigFieldDef{Type: FieldTypeFilePath}, "string"}, + {ConfigFieldDef{Type: FieldTypeSQL}, "string"}, + {ConfigFieldDef{Type: FieldTypeSelect, Options: []string{"a", "b"}}, "string"}, + } + + for _, tc := range cases { + s := configFieldDefToSchema(tc.def) + if s.Type != tc.wantType { + t.Errorf("field type %q: got schema type %q, want %q", tc.def.Type, s.Type, tc.wantType) + } + } +} + +// TestKnownStepTypes checks the schema package's step type set. +func TestKnownStepTypes(t *testing.T) { + types := KnownStepTypes() + if len(types) == 0 { + t.Fatal("no step types returned") + } + // Spot-check some core types. + for _, expected := range []string{"step.set", "step.http_call", "step.json_response", "step.validate"} { + if !types[expected] { + t.Errorf("missing step type %q", expected) + } + } +} diff --git a/schema/snippets.go b/schema/snippets.go new file mode 100644 index 00000000..fccd259c --- /dev/null +++ b/schema/snippets.go @@ -0,0 +1,447 @@ +package schema + +// Snippet represents a code snippet for IDE support. +// Body uses ${N:placeholder} VSCode-style syntax. +type Snippet struct { + Name string // human-readable name + Prefix string // trigger prefix / keyword + Description string // short description shown in IDE + Body []string // lines of the snippet body +} + +// GetSnippets returns the canonical set of workflow configuration snippets. 
+func GetSnippets() []Snippet { + return []Snippet{ + // --------------------------------------------------------------- + // Module snippets (10) + // --------------------------------------------------------------- + { + Name: "HTTP Server Module", + Prefix: "mod-http-server", + Description: "HTTP server module listening on a configurable address", + Body: []string{ + "- name: ${1:server}", + " type: http.server", + " config:", + " address: ${2::8080}", + }, + }, + { + Name: "HTTP Router Module", + Prefix: "mod-http-router", + Description: "HTTP router module that dispatches requests to handlers", + Body: []string{ + "- name: ${1:router}", + " type: http.router", + " dependsOn:", + " - ${2:server}", + }, + }, + { + Name: "SQLite Storage Module", + Prefix: "mod-sqlite", + Description: "SQLite database storage module", + Body: []string{ + "- name: ${1:db}", + " type: storage.sqlite", + " config:", + " dbPath: ${2:data/app.db}", + " walMode: ${3:true}", + }, + }, + { + Name: "NoSQL Memory Module", + Prefix: "mod-nosql", + Description: "In-memory NoSQL document store", + Body: []string{ + "- name: ${1:store}", + " type: nosql.memory", + " config:", + " collection: ${2:documents}", + }, + }, + { + Name: "JWT Auth Module", + Prefix: "mod-jwt", + Description: "JWT authentication module", + Body: []string{ + "- name: ${1:auth}", + " type: auth.jwt", + " config:", + " secret: ${2:my-secret-key}", + " tokenExpiry: ${3:24h}", + " issuer: ${4:my-app}", + }, + }, + { + Name: "Messaging Broker Module", + Prefix: "mod-broker", + Description: "In-process messaging broker for pub/sub", + Body: []string{ + "- name: ${1:broker}", + " type: messaging.broker", + " config:", + " maxQueueSize: ${2:1000}", + }, + }, + { + Name: "State Machine Module", + Prefix: "mod-statemachine", + Description: "State machine engine for workflow orchestration", + Body: []string{ + "- name: ${1:state-engine}", + " type: statemachine.engine", + " config:", + " maxInstances: ${2:100}", + " instanceTTL: 
${3:24h}", + }, + }, + { + Name: "OTEL Observability Module", + Prefix: "mod-otel", + Description: "OpenTelemetry observability module", + Body: []string{ + "- name: ${1:otel}", + " type: observability.otel", + " config:", + " endpoint: ${2:localhost:4317}", + " serviceName: ${3:my-service}", + }, + }, + { + Name: "Cache Modular Module", + Prefix: "mod-cache", + Description: "Modular cache adapter", + Body: []string{ + "- name: ${1:cache}", + " type: cache.modular", + }, + }, + { + Name: "Secrets Vault Module", + Prefix: "mod-secrets", + Description: "HashiCorp Vault secrets module", + Body: []string{ + "- name: ${1:secrets}", + " type: secrets.vault", + " config:", + " mode: ${2:dev}", + " address: ${3:http://localhost:8200}", + " token: ${4:root}", + " mountPath: ${5:secret}", + }, + }, + + // --------------------------------------------------------------- + // Pipeline scaffold (1) + // --------------------------------------------------------------- + { + Name: "Pipeline Scaffold", + Prefix: "pipeline", + Description: "Full pipeline definition with trigger and steps", + Body: []string{ + "${1:my-pipeline}:", + " trigger:", + " type: ${2:http}", + " config:", + " path: ${3:/api/v1/resource}", + " method: ${4:POST}", + " steps:", + " - type: step.validate", + " config:", + " required:", + " - ${5:field}", + " - type: step.json_response", + " config:", + " status: ${6:200}", + " body: ${7:{ \"ok\": true }}", + }, + }, + + // --------------------------------------------------------------- + // Step snippets (12+) + // --------------------------------------------------------------- + { + Name: "Step: Set Variable", + Prefix: "step-set", + Description: "Set a named variable in the pipeline context", + Body: []string{ + "- type: step.set", + " config:", + " values:", + " ${1:key}: ${2:value}", + }, + }, + { + Name: "Step: HTTP Call", + Prefix: "step-http-call", + Description: "Make an outbound HTTP request", + Body: []string{ + "- type: step.http_call", + " config:", + 
" url: ${1:https://api.example.com/endpoint}", + " method: ${2:POST}", + " headers:", + " Content-Type: application/json", + " body: ${3:{{ json . }}}", + " timeout: ${4:30s}", + }, + }, + { + Name: "Step: JSON Response", + Prefix: "step-json-response", + Description: "Return a JSON HTTP response", + Body: []string{ + "- type: step.json_response", + " config:", + " status: ${1:200}", + " body:", + " ${2:message}: ${3:ok}", + }, + }, + { + Name: "Step: Validate", + Prefix: "step-validate", + Description: "Validate request data against rules", + Body: []string{ + "- type: step.validate", + " config:", + " required:", + " - ${1:field1}", + " rules:", + " ${2:field1}: ${3:string}", + }, + }, + { + Name: "Step: Transform", + Prefix: "step-transform", + Description: "Transform data using a mapping template", + Body: []string{ + "- type: step.transform", + " config:", + " mapping:", + " ${1:output_field}: ${2:{{ .input_field }}}", + }, + }, + { + Name: "Step: DB Query", + Prefix: "step-db-query", + Description: "Execute a read-only SQL query", + Body: []string{ + "- type: step.db_query", + " config:", + " database: ${1:db}", + " query: ${2:SELECT * FROM ${3:table} WHERE id = ?}", + " params:", + " - ${4:{{ .id }}}", + }, + }, + { + Name: "Step: DB Exec", + Prefix: "step-db-exec", + Description: "Execute a SQL write statement", + Body: []string{ + "- type: step.db_exec", + " config:", + " database: ${1:db}", + " query: ${2:INSERT INTO ${3:table} (col) VALUES (?)}", + " params:", + " - ${4:{{ .value }}}", + }, + }, + { + Name: "Step: Auth Required", + Prefix: "step-auth", + Description: "Require authentication and optional role/scope", + Body: []string{ + "- type: step.auth_required", + " config:", + " roles:", + " - ${1:admin}", + }, + }, + { + Name: "Step: Cache Get", + Prefix: "step-cache-get", + Description: "Get a value from the cache", + Body: []string{ + "- type: step.cache_get", + " config:", + " cache: ${1:cache}", + " key: ${2:{{ .id }}}", + " output: 
${3:cached_result}", + }, + }, + { + Name: "Step: Cache Set", + Prefix: "step-cache-set", + Description: "Store a value in the cache with optional TTL", + Body: []string{ + "- type: step.cache_set", + " config:", + " cache: ${1:cache}", + " key: ${2:{{ .id }}}", + " value: ${3:{{ .result }}}", + " ttl: ${4:5m}", + }, + }, + { + Name: "Step: Event Publish", + Prefix: "step-event-publish", + Description: "Publish an event to the message broker", + Body: []string{ + "- type: step.event_publish", + " config:", + " broker: ${1:broker}", + " topic: ${2:events.created}", + " event_type: ${3:resource.created}", + " payload: ${4:{{ json . }}}", + }, + }, + { + Name: "Step: Log", + Prefix: "step-log", + Description: "Log a message at the specified level", + Body: []string{ + "- type: step.log", + " config:", + " level: ${1:info}", + " message: ${2:Processing request: {{ .id }}}", + }, + }, + + // --------------------------------------------------------------- + // Trigger snippets (3) + // --------------------------------------------------------------- + { + Name: "Trigger: HTTP", + Prefix: "trigger-http", + Description: "HTTP trigger for a specific path and method", + Body: []string{ + "trigger:", + " type: http", + " config:", + " path: ${1:/api/v1/${2:resource}}", + " method: ${3:GET}", + }, + }, + { + Name: "Trigger: Schedule", + Prefix: "trigger-schedule", + Description: "Cron-based schedule trigger", + Body: []string{ + "trigger:", + " type: schedule", + " config:", + " cron: ${1:0 * * * *}", + " timezone: ${2:UTC}", + }, + }, + { + Name: "Trigger: Event", + Prefix: "trigger-event", + Description: "Event-driven trigger subscribing to a topic", + Body: []string{ + "trigger:", + " type: event", + " config:", + " topic: ${1:events.created}", + " broker: ${2:broker}", + }, + }, + + // --------------------------------------------------------------- + // Workflow snippets (3) + // --------------------------------------------------------------- + { + Name: "Workflow: HTTP", + 
Prefix: "workflow-http", + Description: "HTTP workflow handler with route definitions", + Body: []string{ + "workflows:", + " http:", + " routes:", + " - path: ${1:/api/v1/${2:resource}}", + " method: ${3:GET}", + " pipeline: ${4:get-resources}", + }, + }, + { + Name: "Workflow: Messaging", + Prefix: "workflow-messaging", + Description: "Messaging workflow handler with topic subscriptions", + Body: []string{ + "workflows:", + " messaging:", + " subscriptions:", + " - topic: ${1:events.created}", + " pipeline: ${2:handle-created}", + }, + }, + { + Name: "Workflow: State Machine", + Prefix: "workflow-statemachine", + Description: "State machine workflow handler", + Body: []string{ + "workflows:", + " statemachine:", + " engine: ${1:state-engine}", + " states:", + " - name: ${2:pending}", + " transitions:", + " - name: ${3:submit}", + " to: ${4:active}", + }, + }, + + // --------------------------------------------------------------- + // Structural snippets (3) + // --------------------------------------------------------------- + { + Name: "App Structure", + Prefix: "app", + Description: "Full application config skeleton", + Body: []string{ + "modules:", + " - name: ${1:server}", + " type: http.server", + " config:", + " address: ${2::8080}", + " - name: ${3:router}", + " type: http.router", + " dependsOn:", + " - ${1:server}", + "", + "workflows:", + " http:", + " routes: []", + "", + "triggers:", + " http:", + " port: ${4:8080}", + }, + }, + { + Name: "Requires Section", + Prefix: "requires", + Description: "Plugin and version dependency declarations", + Body: []string{ + "requires:", + " version: ${1:v0.2.0}", + " plugins:", + " - ${2:storage}", + " - ${3:auth}", + }, + }, + { + Name: "Imports Section", + Prefix: "imports", + Description: "Import external configuration files", + Body: []string{ + "imports:", + " - ${1:config/modules.yaml}", + " - ${2:config/workflows.yaml}", + }, + }, + } +} diff --git a/schema/snippets_export.go b/schema/snippets_export.go new 
file mode 100644 index 00000000..bfd499ec --- /dev/null +++ b/schema/snippets_export.go @@ -0,0 +1,189 @@ +package schema + +import ( + "encoding/json" + "encoding/xml" + "fmt" + "strings" +) + +// ExportSnippetsVSCode returns all snippets in VSCode snippet JSON format. +// The output can be saved to a .code-snippets file or a language-specific +// snippet file in the .vscode directory. +func ExportSnippetsVSCode() ([]byte, error) { + snippets := GetSnippets() + out := make(map[string]vscodeSnippet, len(snippets)) + for _, s := range snippets { + out[s.Name] = vscodeSnippet{ + Prefix: s.Prefix, + Body: s.Body, + Description: s.Description, + } + } + return json.MarshalIndent(out, "", " ") +} + +type vscodeSnippet struct { + Prefix string `json:"prefix"` + Body []string `json:"body"` + Description string `json:"description,omitempty"` +} + +// jetbrainsTemplateSet is the root XML element for JetBrains live templates. +type jetbrainsTemplateSet struct { + XMLName xml.Name `xml:"templateSet"` + Group string `xml:"group,attr"` + Templates []jetbrainsTemplate `xml:"template"` +} + +type jetbrainsTemplate struct { + Name string `xml:"name,attr"` + Value string `xml:"value,attr"` + Description string `xml:"description,attr"` + ToReformat bool `xml:"toReformat,attr"` + ToShortenFQ bool `xml:"toShortenFQNames,attr"` + Variables []jetbrainsVariable `xml:"variable,omitempty"` + Contexts []jetbrainsContext `xml:"context"` +} + +type jetbrainsVariable struct { + Name string `xml:"name,attr"` + Expression string `xml:"expression,attr"` + DefaultValue string `xml:"defaultValue,attr"` + AlwaysStop bool `xml:"alwaysStopAt,attr"` +} + +type jetbrainsContext struct { + Options []jetbrainsOption `xml:"option"` +} + +type jetbrainsOption struct { + Name string `xml:"name,attr"` + Value string `xml:"value,attr"` +} + +// ExportSnippetsJetBrains returns all snippets in JetBrains live template XML format. +// The output can be saved to a .xml file in the JetBrains templates directory. 
+func ExportSnippetsJetBrains() ([]byte, error) { + snippets := GetSnippets() + templates := make([]jetbrainsTemplate, 0, len(snippets)) + + for _, s := range snippets { + body := convertToJetBrainsBody(s.Body) + vars := extractJetBrainsVars(s.Body) + + tmpl := jetbrainsTemplate{ + Name: s.Prefix, + Value: body, + Description: s.Description, + ToReformat: true, + ToShortenFQ: false, + Variables: vars, + Contexts: []jetbrainsContext{ + {Options: []jetbrainsOption{ + {Name: "YAML", Value: "true"}, + }}, + }, + } + templates = append(templates, tmpl) + } + + ts := jetbrainsTemplateSet{ + Group: "workflow", + Templates: templates, + } + + output, err := xml.MarshalIndent(ts, "", " ") + if err != nil { + return nil, fmt.Errorf("failed to marshal JetBrains templates: %w", err) + } + return append([]byte(xml.Header), output...), nil +} + +// convertToJetBrainsBody converts VSCode ${N:placeholder} syntax to +// JetBrains $SNIPPET_N$ syntax and joins lines with "\n" (the XML encoder +// escapes the newline inside the value attribute, which JetBrains reads back +// as a line break). +func convertToJetBrainsBody(lines []string) string { + joined := strings.Join(lines, "\n") + // Track seen tab-stop indices to assign unique variable names. + // Replace ${N:placeholder} with $SNIPPET_N$ and ${N} with $SNIPPET_N$.
+ result := strings.Builder{} + rest := joined + for len(rest) > 0 { + idx := strings.Index(rest, "${") + if idx < 0 { + result.WriteString(rest) + break + } + result.WriteString(rest[:idx]) + rest = rest[idx+2:] + // Find the matching closing }, counting brace depth so placeholders + // that themselves contain braces (e.g. ${2:{{ .id }}}) are consumed whole. + end := -1 + depth := 1 + for i, c := range rest { + if c == '{' { + depth++ + } else if c == '}' { + depth-- + if depth == 0 { + end = i + break + } + } + } + if end < 0 { + result.WriteString("${") + continue + } + inner := rest[:end] + rest = rest[end+1:] + // inner is like "1:placeholder" or "1" + colon := strings.Index(inner, ":") + var num string + if colon >= 0 { + num = inner[:colon] + } else { + num = inner + } + result.WriteString("$SNIPPET_") + result.WriteString(num) + result.WriteString("$") + } + return result.String() +} + +// extractJetBrainsVars extracts unique tab-stop variables from VSCode snippet body lines. +func extractJetBrainsVars(lines []string) []jetbrainsVariable { + joined := strings.Join(lines, "\n") + seen := make(map[string]string) // num -> default value + order := []string{} + + rest := joined + for len(rest) > 0 { + idx := strings.Index(rest, "${") + if idx < 0 { + break + } + rest = rest[idx+2:] + // Depth-matched closing brace, same rules as convertToJetBrainsBody. + end := -1 + depth := 1 + for i, c := range rest { + if c == '{' { + depth++ + } else if c == '}' { + depth-- + if depth == 0 { + end = i + break + } + } + } + if end < 0 { + break + } + inner := rest[:end] + rest = rest[end+1:] + + colon := strings.Index(inner, ":") + var num, defVal string + if colon >= 0 { + num = inner[:colon] + defVal = inner[colon+1:] + } else { + num = inner + defVal = "" + } + + if _, exists := seen[num]; !exists { + seen[num] = defVal + order = append(order, num) + } + } + + vars := make([]jetbrainsVariable, 0, len(order)) + for _, num := range order { + vars = append(vars, jetbrainsVariable{ + Name: "SNIPPET_" + num, + Expression: "", + DefaultValue: `"` + seen[num] + `"`, + AlwaysStop: true, + }) + } + return vars +} diff --git a/schema/snippets_test.go b/schema/snippets_test.go new file mode 100644 index 00000000..1471dce4 --- /dev/null +++ b/schema/snippets_test.go @@ -0,0 +1,128 @@ +package schema + +import ( + "encoding/json" + "strings" + "testing" +) + 
+func TestGetSnippets_Count(t *testing.T) { + snips := GetSnippets() + if len(snips) < 20 { + t.Errorf("expected at least 20 snippets, got %d", len(snips)) + } +} + +func TestGetSnippets_NonEmptyFields(t *testing.T) { + snips := GetSnippets() + for _, s := range snips { + if s.Name == "" { + t.Errorf("snippet has empty Name: %+v", s) + } + if s.Prefix == "" { + t.Errorf("snippet %q has empty Prefix", s.Name) + } + if len(s.Body) == 0 { + t.Errorf("snippet %q has empty Body", s.Name) + } + } +} + +func TestGetSnippets_HasModuleSnippets(t *testing.T) { + snips := GetSnippets() + prefixes := make(map[string]bool, len(snips)) + for _, s := range snips { + prefixes[s.Prefix] = true + } + required := []string{ + "mod-http-server", + "mod-jwt", + "mod-broker", + "mod-statemachine", + "pipeline", + "step-set", + "step-http-call", + "step-json-response", + "trigger-http", + "workflow-http", + "app", + } + for _, p := range required { + if !prefixes[p] { + t.Errorf("missing snippet with prefix %q", p) + } + } +} + +func TestExportSnippetsVSCode_ValidJSON(t *testing.T) { + data, err := ExportSnippetsVSCode() + if err != nil { + t.Fatalf("VSCode export failed: %v", err) + } + if len(data) == 0 { + t.Fatal("VSCode export is empty") + } + var m map[string]any + if err := json.Unmarshal(data, &m); err != nil { + t.Fatalf("VSCode export is not valid JSON: %v", err) + } + if len(m) == 0 { + t.Fatal("VSCode export has no snippets") + } + // Each entry should have prefix and body. 
+ for name, v := range m { + entry, ok := v.(map[string]any) + if !ok { + t.Errorf("snippet %q is not an object", name) + continue + } + if entry["prefix"] == nil { + t.Errorf("snippet %q missing prefix", name) + } + if entry["body"] == nil { + t.Errorf("snippet %q missing body", name) + } + } +} + +func TestExportSnippetsJetBrains_ValidXML(t *testing.T) { + data, err := ExportSnippetsJetBrains() + if err != nil { + t.Fatalf("JetBrains export failed: %v", err) + } + if len(data) == 0 { + t.Fatal("JetBrains export is empty") + } + // Should be valid XML starting with XML declaration. + s := string(data) + if !strings.HasPrefix(s, "<?xml") { + t.Error("JetBrains export missing XML declaration") + } + if !strings.Contains(s, "<templateSet") { + t.Error("JetBrains export missing templateSet root element") + } + if !strings.Contains(s, "<template ") { + t.Error("JetBrains export missing template elements") + } +} + +func TestExportSnippetsVSCode_BodyIsStringArray(t *testing.T) { + data, _ := ExportSnippetsVSCode() + var m map[string]map[string]any + if err := json.Unmarshal(data, &m); err != nil { + t.Fatalf("parse failed: %v", err) + } + for name, entry := range m { + body, ok := entry["body"] + if !ok { + continue + } + arr, ok := body.([]any) + if !ok { + t.Errorf("snippet %q body should be array, got %T", name, body) + continue + } + for i, item := range arr { + if _, ok := item.(string); !ok { + t.Errorf("snippet %q body[%d] should be string, got %T", name, i, item) + } + } + } +}