Skip to content

Commit bdc4829

Browse files
committed
test(e2e): add SpiritLM backend e2e tests
- Add tests/e2e/spiritlm_e2e_test.go with chat, TTS, and transcription specs
- Register spiritlm mock backend and spirit-lm-base-7b model in e2e_suite_test.go
- Add make test-e2e-spiritlm target; fix protogen-go PATH for protoc plugins
- Update backend/python/spiritlm/E2E.md with tests/e2e coverage and run instructions

Signed-off-by: mkdev11 <MkDev11@users.noreply.github.com>
1 parent eb600a8 commit bdc4829

4 files changed

Lines changed: 139 additions & 2 deletions

File tree

Makefile

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -213,6 +213,10 @@ test-e2e: build-mock-backend prepare-e2e run-e2e-image
213213
$(MAKE) teardown-e2e
214214
docker rmi localai-tests
215215

216+
test-e2e-spiritlm: build-mock-backend
217+
@echo 'Running SpiritLM e2e tests (mock backend)'
218+
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="SpiritLM" --flake-attempts $(TEST_FLAKES) -v ./tests/e2e/...
219+
216220
teardown-e2e:
217221
rm -rf $(TEST_DIR) || true
218222
docker stop $$(docker ps -q --filter ancestor=localai-tests)
@@ -302,7 +306,7 @@ protoc:
302306
.PHONY: protogen-go
303307
protogen-go: protoc install-go-tools
304308
mkdir -p pkg/grpc/proto
305-
./protoc --experimental_allow_proto3_optional -Ibackend/ --go_out=pkg/grpc/proto/ --go_opt=paths=source_relative --go-grpc_out=pkg/grpc/proto/ --go-grpc_opt=paths=source_relative \
309+
PATH="$$(go env GOPATH)/bin:$$PATH" ./protoc --experimental_allow_proto3_optional -Ibackend/ --go_out=pkg/grpc/proto/ --go_opt=paths=source_relative --go-grpc_out=pkg/grpc/proto/ --go-grpc_opt=paths=source_relative \
306310
backend/backend.proto
307311

308312
.PHONY: protogen-go-clean

backend/python/spiritlm/E2E.md

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,28 @@
11
# SpiritLM E2E tests
22

3-
End-to-end tests for the SpiritLM backend live in `core/http/app_test.go` under the context **SpiritLM backend e2e** (label: `spiritlm`).
3+
SpiritLM is covered by two test layers:
4+
5+
1. **`tests/e2e/` (recommended for CI)** – Full e2e suite using the shared mock backend. File: `tests/e2e/spiritlm_e2e_test.go`, label: `SpiritLM`. No real SpiritLM backend or model required.
6+
2. **`core/http/app_test.go`** – Integration-style tests under context **SpiritLM backend e2e** (label: `spiritlm`). Requires the Python SpiritLM backend and fixtures.
47

58
## How to run
69

710
From the repo root:
811

12+
**E2E suite (mock backend, no Python backend needed):**
13+
14+
```bash
15+
make test-e2e-spiritlm
16+
```
17+
18+
Or run the full e2e suite (includes SpiritLM):
19+
20+
```bash
21+
make test-e2e
22+
```
23+
24+
**Integration tests (real SpiritLM backend):**
25+
926
```bash
1027
make test-spiritlm
1128
```

tests/e2e/e2e_suite_test.go

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,18 @@ var _ = BeforeSuite(func() {
9999
Expect(err).ToNot(HaveOccurred())
100100
Expect(os.WriteFile(configPath, configYAML, 0644)).To(Succeed())
101101

102+
// SpiritLM-style model config (same mock backend, for e2e coverage of SpiritLM path)
103+
spiritlmConfig := map[string]interface{}{
104+
"name": "spirit-lm-base-7b",
105+
"backend": "spiritlm",
106+
"parameters": map[string]interface{}{
107+
"model": "spirit-lm-base-7b",
108+
},
109+
}
110+
spiritlmConfigYAML, err := yaml.Marshal(spiritlmConfig)
111+
Expect(err).ToNot(HaveOccurred())
112+
Expect(os.WriteFile(filepath.Join(modelsPath, "spirit-lm-base-7b.yaml"), spiritlmConfigYAML, 0644)).To(Succeed())
113+
102114
// Set up system state
103115
systemState, err := system.GetSystemState(
104116
system.WithBackendPath(backendPath),
@@ -122,6 +134,7 @@ var _ = BeforeSuite(func() {
122134

123135
// Register backend with application's model loader
124136
application.ModelLoader().SetExternalBackend("mock-backend", mockBackendPath)
137+
application.ModelLoader().SetExternalBackend("spiritlm", mockBackendPath)
125138

126139
// Create HTTP app
127140
app, err = httpapi.API(application)

tests/e2e/spiritlm_e2e_test.go

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,103 @@
1+
package e2e_test
2+
3+
import (
4+
"bytes"
5+
"context"
6+
"encoding/binary"
7+
"io"
8+
"mime/multipart"
9+
"net/http"
10+
"strings"
11+
"time"
12+
13+
. "github.com/onsi/ginkgo/v2"
14+
. "github.com/onsi/gomega"
15+
"github.com/openai/openai-go/v3"
16+
)
17+
18+
var _ = Describe("SpiritLM backend E2E", Label("SpiritLM"), func() {
19+
Describe("Chat completions", func() {
20+
It("returns response for spirit-lm-base-7b", func() {
21+
resp, err := client.Chat.Completions.New(
22+
context.TODO(),
23+
openai.ChatCompletionNewParams{
24+
Model: "spirit-lm-base-7b",
25+
Messages: []openai.ChatCompletionMessageParamUnion{
26+
openai.UserMessage("Say hello."),
27+
},
28+
},
29+
)
30+
Expect(err).ToNot(HaveOccurred())
31+
Expect(len(resp.Choices)).To(Equal(1))
32+
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
33+
})
34+
})
35+
36+
Describe("TTS", func() {
37+
It("returns audio for spirit-lm-base-7b", func() {
38+
body := `{"model":"spirit-lm-base-7b","input":"Hello","voice":"default"}`
39+
req, err := http.NewRequest("POST", apiURL+"/audio/speech", io.NopCloser(strings.NewReader(body)))
40+
Expect(err).ToNot(HaveOccurred())
41+
req.Header.Set("Content-Type", "application/json")
42+
43+
httpClient := &http.Client{Timeout: 30 * time.Second}
44+
resp, err := httpClient.Do(req)
45+
Expect(err).ToNot(HaveOccurred())
46+
defer resp.Body.Close()
47+
Expect(resp.StatusCode).To(Equal(200))
48+
Expect(resp.Header.Get("Content-Type")).To(HavePrefix("audio/"))
49+
data, err := io.ReadAll(resp.Body)
50+
Expect(err).ToNot(HaveOccurred())
51+
Expect(len(data)).To(BeNumerically(">", 0))
52+
})
53+
})
54+
55+
Describe("Transcription", func() {
56+
It("returns transcription for spirit-lm-base-7b", func() {
57+
var buf bytes.Buffer
58+
w := multipart.NewWriter(&buf)
59+
part, err := w.CreateFormFile("file", "audio.wav")
60+
Expect(err).ToNot(HaveOccurred())
61+
_, _ = part.Write(minimalWAVBytes())
62+
_ = w.WriteField("model", "spirit-lm-base-7b")
63+
Expect(w.Close()).To(Succeed())
64+
65+
req, err := http.NewRequest("POST", apiURL+"/audio/transcriptions", &buf)
66+
Expect(err).ToNot(HaveOccurred())
67+
req.Header.Set("Content-Type", w.FormDataContentType())
68+
69+
httpClient := &http.Client{Timeout: 30 * time.Second}
70+
resp, err := httpClient.Do(req)
71+
Expect(err).ToNot(HaveOccurred())
72+
defer resp.Body.Close()
73+
Expect(resp.StatusCode).To(Equal(200))
74+
data, err := io.ReadAll(resp.Body)
75+
Expect(err).ToNot(HaveOccurred())
76+
Expect(string(data)).To(ContainSubstring("mocked"))
77+
})
78+
})
79+
})
80+
81+
func minimalWAVBytes() []byte {
82+
const sampleRate = 16000
83+
const numChannels = 1
84+
const bitsPerSample = 16
85+
const numSamples = 160
86+
dataSize := numSamples * numChannels * (bitsPerSample / 8)
87+
headerLen := 44
88+
var buf bytes.Buffer
89+
buf.Write([]byte("RIFF"))
90+
_ = binary.Write(&buf, binary.LittleEndian, uint32(headerLen-8+dataSize))
91+
buf.Write([]byte("WAVEfmt "))
92+
_ = binary.Write(&buf, binary.LittleEndian, uint32(16))
93+
_ = binary.Write(&buf, binary.LittleEndian, uint16(1))
94+
_ = binary.Write(&buf, binary.LittleEndian, uint16(numChannels))
95+
_ = binary.Write(&buf, binary.LittleEndian, uint32(sampleRate))
96+
_ = binary.Write(&buf, binary.LittleEndian, uint32(sampleRate*numChannels*(bitsPerSample/8)))
97+
_ = binary.Write(&buf, binary.LittleEndian, uint16(numChannels*(bitsPerSample/8)))
98+
_ = binary.Write(&buf, binary.LittleEndian, uint16(bitsPerSample))
99+
buf.Write([]byte("data"))
100+
_ = binary.Write(&buf, binary.LittleEndian, uint32(dataSize))
101+
buf.Write(make([]byte, dataSize))
102+
return buf.Bytes()
103+
}

0 commit comments

Comments
 (0)