Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 33 additions & 2 deletions e2e/helpers/scenario-harness.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ interface ScenarioResult {

const tsxCliPath = createRequire(import.meta.url).resolve("tsx/cli");
const DENO_COMMAND = process.platform === "win32" ? "deno.exe" : "deno";
const MISE_COMMAND = process.platform === "win32" ? "mise.exe" : "mise";
const DEFAULT_SCENARIO_TIMEOUT_MS = 15_000;
const HELPERS_DIR = path.dirname(fileURLToPath(import.meta.url));
const REPO_ROOT = path.resolve(HELPERS_DIR, "../..");
Expand Down Expand Up @@ -285,6 +286,37 @@ async function runProcess(
});
}

/**
 * Returns true when `error` is the spawn failure Node raises for a missing
 * executable (an Error carrying `code === "ENOENT"`); false for anything else.
 */
function isSpawnEnoent(error: unknown): boolean {
  if (!(error instanceof Error)) {
    return false;
  }
  // `code` is not part of the base Error type; read it via a structural view.
  const { code } = error as { code?: unknown };
  return code === "ENOENT";
}

/**
 * Runs `deno` with the given args, falling back to the mise-managed toolchain
 * (`mise exec -- deno …`) when the deno binary is not on PATH.
 *
 * Any failure other than a spawn ENOENT is rethrown unchanged.
 */
async function runDenoProcess(
  args: string[],
  cwd: string,
  env: Record<string, string>,
  timeoutMs: number,
): Promise<ScenarioResult> {
  try {
    return await runProcess(DENO_COMMAND, args, cwd, env, timeoutMs);
  } catch (error) {
    if (isSpawnEnoent(error)) {
      // deno itself is missing; re-run the same invocation through mise.
      const fallbackArgs = ["exec", "--", "deno", ...args];
      return await runProcess(MISE_COMMAND, fallbackArgs, cwd, env, timeoutMs);
    }
    throw error;
  }
}

/** Builds the absolute-ish entry file path by joining the scenario directory with the entry name. */
function resolveEntryPath(scenarioDir: string, entry: string): string {
  const entryPath = path.join(scenarioDir, entry);
  return entryPath;
}
Expand Down Expand Up @@ -365,8 +397,7 @@ export async function runDenoScenarioDir(options: {
timeoutMs?: number;
}): Promise<ScenarioResult> {
const entry = options.entry ?? "runner.case.ts";
const result = await runProcess(
DENO_COMMAND,
const result = await runDenoProcess(
[
"test",
"--no-check",
Expand Down
1 change: 1 addition & 0 deletions e2e/helpers/scenario-installer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,7 @@ export async function installScenarioDependencies({
scenarioDir,
"--ignore-workspace",
"--frozen-lockfile",
"--ignore-scripts=false",
"--strict-peer-dependencies=false",
];
if (preferOffline) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@
"metadata": {
"endpointUrl": "https://router.huggingface.co",
"max_tokens": 16,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "huggingface",
"temperature": 0
},
Expand Down Expand Up @@ -137,7 +137,7 @@
"metadata": {
"created": 0,
"id": "<span:8>",
"model": "llama3.1-8b",
"model": "<model>",
"object": "chat.completion"
},
"output": [
Expand Down Expand Up @@ -225,7 +225,7 @@
"metadata": {
"endpointUrl": "https://router.huggingface.co",
"max_tokens": 16,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "huggingface",
"temperature": 0
},
Expand Down Expand Up @@ -271,7 +271,7 @@
"metadata": {
"created": 0,
"id": "<span:13>",
"model": "llama3.1-8b",
"model": "<model>",
"object": "chat.completion.chunk"
},
"output": {
Expand Down Expand Up @@ -361,7 +361,7 @@
"metadata": {
"endpointUrl": "https://router.huggingface.co",
"max_tokens": 64,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0,
"tool_choice": "required",
Expand Down Expand Up @@ -429,7 +429,7 @@
"metadata": {
"created": 0,
"id": "<span:18>",
"model": "llama3.1-8b",
"model": "<model>",
"object": "chat.completion.chunk"
},
"output": {
Expand Down Expand Up @@ -514,7 +514,7 @@
"metadata": {
"endpointUrl": "https://router.huggingface.co/featherless-ai/v1/completions",
"max_tokens": 4,
"model": "arcee-ai/Trinity-Large-Thinking",
"model": "<model>",
"provider": "huggingface"
},
"metrics": {
Expand Down Expand Up @@ -631,7 +631,7 @@
"log_id": "g",
"metadata": {
"endpointUrl": "https://router.huggingface.co/hf-inference/models/thenlper/gte-large/pipeline/feature-extraction",
"model": "thenlper/gte-large",
"model": "<model>",
"provider": "huggingface"
},
"metrics": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
"has_output": true,
"metadata": {
"endpointUrl": "https://router.huggingface.co",
"model": "llama3.1-8b",
"model": "<model>",
"provider": "huggingface"
},
"metric_keys": [
Expand Down Expand Up @@ -80,7 +80,7 @@
"has_output": true,
"metadata": {
"endpointUrl": "https://router.huggingface.co",
"model": "llama3.1-8b",
"model": "<model>",
"provider": "huggingface"
},
"metric_keys": [
Expand Down Expand Up @@ -122,7 +122,7 @@
"has_output": true,
"metadata": {
"endpointUrl": "https://router.huggingface.co",
"model": "llama3.1-8b",
"model": "<model>",
"provider": "featherless-ai"
},
"metric_keys": [
Expand Down Expand Up @@ -167,7 +167,7 @@
"metadata": {
"endpointUrl": "https://router.huggingface.co/featherless-ai/v1/completions",
"finish_reason": "length",
"model": "arcee-ai/Trinity-Large-Thinking",
"model": "<model>",
"provider": "huggingface"
},
"metric_keys": [
Expand Down Expand Up @@ -208,7 +208,7 @@
"has_output": true,
"metadata": {
"endpointUrl": "https://router.huggingface.co/hf-inference/models/thenlper/gte-large/pipeline/feature-extraction",
"model": "thenlper/gte-large",
"model": "<model>",
"provider": "huggingface"
},
"metric_keys": [],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@
"log_id": "g",
"metadata": {
"max_tokens": 16,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0
},
Expand Down Expand Up @@ -133,7 +133,7 @@
"metadata": {
"created": 0,
"id": "<span:8>",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"object": "chat.completion"
},
"output": [
Expand Down Expand Up @@ -220,7 +220,7 @@
"log_id": "g",
"metadata": {
"max_tokens": 16,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0
},
Expand Down Expand Up @@ -263,7 +263,7 @@
"metadata": {
"created": 0,
"id": "<span:13>",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"object": "chat.completion.chunk"
},
"output": {
Expand Down Expand Up @@ -352,7 +352,7 @@
"log_id": "g",
"metadata": {
"max_tokens": 64,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0,
"tool_choice": "required",
Expand Down Expand Up @@ -417,7 +417,7 @@
"metadata": {
"created": 0,
"id": "<span:18>",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"object": "chat.completion.chunk"
},
"output": {
Expand Down Expand Up @@ -500,7 +500,7 @@
"input": "The capital of France is",
"log_id": "g",
"metadata": {
"model": "meta-llama/Llama-3.1-8B",
"model": "<model>",
"parameters": {
"do_sample": false,
"max_new_tokens": 4,
Expand Down Expand Up @@ -601,7 +601,7 @@
"input": "The capital of France is",
"log_id": "g",
"metadata": {
"model": "meta-llama/Llama-3.1-8B",
"model": "<model>",
"parameters": {
"do_sample": false,
"max_new_tokens": 4,
Expand Down Expand Up @@ -722,7 +722,7 @@
"input": "Paris France",
"log_id": "g",
"metadata": {
"model": "thenlper/gte-large",
"model": "<model>",
"provider": "hf-inference"
},
"metrics": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
"has_input": true,
"has_output": true,
"metadata": {
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai"
},
"metric_keys": [
Expand Down Expand Up @@ -74,7 +74,7 @@
"has_input": true,
"has_output": true,
"metadata": {
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai"
},
"metric_keys": [
Expand Down Expand Up @@ -111,7 +111,7 @@
"has_input": true,
"has_output": true,
"metadata": {
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai"
},
"metric_keys": [
Expand Down Expand Up @@ -148,7 +148,7 @@
"has_input": true,
"has_output": true,
"metadata": {
"model": "meta-llama/Llama-3.1-8B",
"model": "<model>",
"provider": "featherless-ai"
},
"metric_keys": [],
Expand Down Expand Up @@ -183,7 +183,7 @@
"has_output": true,
"metadata": {
"finish_reason": "length",
"model": "meta-llama/Llama-3.1-8B",
"model": "<model>",
"provider": "featherless-ai"
},
"metric_keys": [
Expand Down Expand Up @@ -223,7 +223,7 @@
"has_input": true,
"has_output": true,
"metadata": {
"model": "thenlper/gte-large",
"model": "<model>",
"provider": "hf-inference"
},
"metric_keys": [],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@
"log_id": "g",
"metadata": {
"max_tokens": 16,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0
},
Expand Down Expand Up @@ -133,7 +133,7 @@
"metadata": {
"created": 0,
"id": "<span:8>",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"object": "chat.completion"
},
"output": [
Expand Down Expand Up @@ -220,7 +220,7 @@
"log_id": "g",
"metadata": {
"max_tokens": 16,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0
},
Expand Down Expand Up @@ -263,7 +263,7 @@
"metadata": {
"created": 0,
"id": "<span:13>",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"object": "chat.completion.chunk"
},
"output": {
Expand Down Expand Up @@ -352,7 +352,7 @@
"log_id": "g",
"metadata": {
"max_tokens": 64,
"model": "meta-llama/Llama-3.1-8B-Instruct",
"model": "<model>",
"provider": "featherless-ai",
"temperature": 0,
"tool_choice": "required",
Expand Down Expand Up @@ -417,7 +417,7 @@
"metadata": {
"created": 0,
"id": "<span:18>",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"model": "<model>",
"object": "chat.completion.chunk"
},
"output": {
Expand Down Expand Up @@ -500,7 +500,7 @@
"input": "The capital of France is",
"log_id": "g",
"metadata": {
"model": "meta-llama/Llama-3.1-8B",
"model": "<model>",
"parameters": {
"do_sample": false,
"max_new_tokens": 4,
Expand Down Expand Up @@ -601,7 +601,7 @@
"input": "The capital of France is",
"log_id": "g",
"metadata": {
"model": "meta-llama/Llama-3.1-8B",
"model": "<model>",
"parameters": {
"do_sample": false,
"max_new_tokens": 4,
Expand Down Expand Up @@ -722,7 +722,7 @@
"input": "Paris France",
"log_id": "g",
"metadata": {
"model": "thenlper/gte-large",
"model": "<model>",
"provider": "hf-inference"
},
"metrics": {
Expand Down
Loading
Loading