diff --git a/src/api/file.ts b/src/api/file.ts index 6f2c2e7..f16f17f 100644 --- a/src/api/file.ts +++ b/src/api/file.ts @@ -1,15 +1,20 @@ import { jsonResponse } from './utils' export const createFileApi = (fetcher: typeof fetch) => ({ - uploadFile: async (file: Blob, filename: string) => { + uploadFiles: async (files: Array<{ blob: Blob; filename: string }>) => { const form = new FormData() - form.append('file', file, filename) + for (const { blob, filename } of files) { + form.append('files', blob, filename) + } - const res = await fetcher('/api/public/v0/file', { + const res = await fetcher('/api/public/v0/file/batch', { method: 'POST', body: form, }) - return jsonResponse<{ id: string; url: string }>(res) + const { files: uploaded } = await jsonResponse<{ + files: Array<{ id: string; url: string }> + }>(res) + return uploaded }, }) diff --git a/src/api/index.ts b/src/api/index.ts index f9cd166..7cd7261 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -3,7 +3,7 @@ import { createFolderApi } from './folders' import { createProjectApi } from './projects' import { createRunApi } from './run' import { createTCaseApi } from './tcases' -import { withApiKey, withBaseUrl } from './utils' +import { withApiKey, withBaseUrl, withHttpRetry } from './utils' const getApi = (fetcher: typeof fetch) => { return { @@ -18,4 +18,4 @@ const getApi = (fetcher: typeof fetch) => { export type Api = ReturnType<typeof getApi> export const createApi = (baseUrl: string, apiKey: string) => - getApi(withApiKey(withBaseUrl(fetch, baseUrl), apiKey)) + getApi(withHttpRetry(withApiKey(withBaseUrl(fetch, baseUrl), apiKey))) diff --git a/src/api/utils.ts b/src/api/utils.ts index 9a58649..1948f3f 100644 --- a/src/api/utils.ts +++ b/src/api/utils.ts @@ -75,6 +75,64 @@ const updateSearchParams = (searchParams: URLSearchParams, obj }) } +interface HttpRetryOptions { + maxRetries: number + baseDelayMs: number + backoffFactor: number + jitterFraction: number + retryableStatuses: Set<number> +} + +const
DEFAULT_HTTP_RETRY_OPTIONS: HttpRetryOptions = { + maxRetries: 5, + baseDelayMs: 1000, + backoffFactor: 2, + jitterFraction: 0.25, + retryableStatuses: new Set([429, 502, 503]), +} + +export const withHttpRetry = ( + fetcher: typeof fetch, + options?: Partial<HttpRetryOptions> +): typeof fetch => { + const opts = { ...DEFAULT_HTTP_RETRY_OPTIONS, ...options } + + return async (input: URL | RequestInfo, init?: RequestInit | undefined) => { + let lastResponse: Response | undefined + for (let attempt = 0; attempt <= opts.maxRetries; attempt++) { + lastResponse = await fetcher(input, init) + + if (!opts.retryableStatuses.has(lastResponse.status)) { + return lastResponse + } + + if (attempt === opts.maxRetries) { + break + } + + const retryAfter = lastResponse.headers.get('Retry-After') + let delayMs: number + + if (retryAfter !== null) { + const parsed = Number(retryAfter) + if (!Number.isNaN(parsed)) { + delayMs = parsed * 1000 + } else { + const date = Date.parse(retryAfter) + delayMs = Number.isNaN(date) ? opts.baseDelayMs : Math.max(0, date - Date.now()) + } + } else { + delayMs = opts.baseDelayMs * Math.pow(opts.backoffFactor, attempt) + } + + const jitter = delayMs * opts.jitterFraction * Math.random() + await new Promise((resolve) => setTimeout(resolve, delayMs + jitter)) + } + + return lastResponse!
+ } +} + export const appendSearchParams = <T>(pathname: string, obj: T): string => { const searchParams = new URLSearchParams() updateSearchParams(searchParams, obj) diff --git a/src/tests/http-retry.spec.ts b/src/tests/http-retry.spec.ts new file mode 100644 index 0000000..e34b629 --- /dev/null +++ b/src/tests/http-retry.spec.ts @@ -0,0 +1,189 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest' +import { withHttpRetry } from '../api/utils' + +beforeEach(() => { + vi.useFakeTimers() +}) + +afterEach(() => { + vi.useRealTimers() +}) + +const mockFetch = (responses: Array<{ status: number; headers?: Record<string, string> }>) => { + let callCount = 0 + const fn = vi.fn(async () => { + const response = responses[callCount] || responses[responses.length - 1] + callCount++ + return new Response(JSON.stringify({}), { + status: response.status, + headers: response.headers, + }) + }) as unknown as typeof fetch + return fn +} + +const advanceRetryTimers = async () => { + // Flush microtasks then advance timers repeatedly to handle retry delays + await vi.advanceTimersByTimeAsync(60_000) +} + +describe('withHttpRetry', () => { + test('passes through successful responses without retry', async () => { + const fetcher = mockFetch([{ status: 200 }]) + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const response = await retryFetcher('https://example.com/api') + + expect(response.status).toBe(200) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(1) + }) + + test('passes through non-retryable error responses without retry', async () => { + const fetcher = mockFetch([{ status: 400 }]) + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const response = await retryFetcher('https://example.com/api') + + expect(response.status).toBe(400) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(1) + }) + + test('retries on 429 and eventually succeeds', async () => { + const fetcher = mockFetch([{ status: 429 }, { status: 429 }, {
status: 200 }]) + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const promise = retryFetcher('https://example.com/api') + await advanceRetryTimers() + const response = await promise + + expect(response.status).toBe(200) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(3) + }) + + test('retries on 502 and 503', async () => { + const fetcher = mockFetch([{ status: 502 }, { status: 503 }, { status: 200 }]) + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const promise = retryFetcher('https://example.com/api') + await advanceRetryTimers() + const response = await promise + + expect(response.status).toBe(200) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(3) + }) + + test('returns last response after exhausting max retries', async () => { + const fetcher = mockFetch([ + { status: 429 }, + { status: 429 }, + { status: 429 }, + { status: 429 }, + { status: 429 }, + { status: 429 }, + ]) + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const promise = retryFetcher('https://example.com/api') + await advanceRetryTimers() + const response = await promise + + expect(response.status).toBe(429) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(6) // 1 initial + 5 retries + }) + + test('respects Retry-After header', async () => { + const fetcher = mockFetch([{ status: 429, headers: { 'Retry-After': '3' } }, { status: 200 }]) + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const promise = retryFetcher('https://example.com/api') + + // Should not have retried yet at 2.9s + await vi.advanceTimersByTimeAsync(2900) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(1) + + // Should retry after 3s + await vi.advanceTimersByTimeAsync(200) + const response = await promise + + expect(response.status).toBe(200) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(2) + }) + + test('respects Retry-After header with HTTP-date', async () => { + // Use a dynamic mock so the date is computed 
relative to the faked Date.now() + // at the time the response is created, not at test setup time + let callCount = 0 + const fetcher = vi.fn(async () => { + callCount++ + if (callCount === 1) { + const futureDate = new Date(Date.now() + 5000).toUTCString() + return new Response(JSON.stringify({}), { + status: 429, + headers: { 'Retry-After': futureDate }, + }) + } + return new Response(JSON.stringify({}), { status: 200 }) + }) as unknown as typeof fetch + const retryFetcher = withHttpRetry(fetcher, { jitterFraction: 0 }) + + const promise = retryFetcher('https://example.com/api') + + // toUTCString() has second-level precision, so up to 999ms can be lost. + // With a 5s target, actual delay is between ~4001ms and ~5000ms. + // Check at 3.9s (safely before minimum) and advance past 5s. + await vi.advanceTimersByTimeAsync(3900) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(1) + + await vi.advanceTimersByTimeAsync(1200) + const response = await promise + + expect(response.status).toBe(200) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(2) + }) + + test('uses exponential backoff with correct delays', async () => { + const fetcher = mockFetch([{ status: 429 }, { status: 429 }, { status: 429 }, { status: 200 }]) + const retryFetcher = withHttpRetry(fetcher, { + jitterFraction: 0, + baseDelayMs: 1000, + backoffFactor: 2, + }) + + const promise = retryFetcher('https://example.com/api') + + // After 999ms — still only 1 call + await vi.advanceTimersByTimeAsync(999) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(1) + + // After 1000ms — 2nd call + await vi.advanceTimersByTimeAsync(1) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(2) + + // After 1999ms more — still only 2 calls (need 2000ms for 2nd retry) + await vi.advanceTimersByTimeAsync(1999) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(2) + + // After 1ms more (total 2000ms) — 3rd call + await vi.advanceTimersByTimeAsync(1) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(3) + + // After 4000ms 
— 4th call (success) + await vi.advanceTimersByTimeAsync(4000) + const response = await promise + + expect(response.status).toBe(200) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(4) + }) + + test('respects custom maxRetries option', async () => { + const fetcher = mockFetch([{ status: 429 }, { status: 429 }, { status: 429 }]) + const retryFetcher = withHttpRetry(fetcher, { maxRetries: 2, jitterFraction: 0 }) + + const promise = retryFetcher('https://example.com/api') + await advanceRetryTimers() + const response = await promise + + expect(response.status).toBe(429) + expect(vi.mocked(fetcher)).toHaveBeenCalledTimes(3) // 1 initial + 2 retries + }) +}) diff --git a/src/tests/result-upload.spec.ts b/src/tests/result-upload.spec.ts index c9e4478..c843f97 100644 --- a/src/tests/result-upload.spec.ts +++ b/src/tests/result-upload.spec.ts @@ -116,12 +116,17 @@ const server = setupServer( }) } ), - http.post(`${baseURL}/api/public/v0/file`, async ({ request }) => { + http.post(`${baseURL}/api/public/v0/file/batch`, async ({ request }) => { expect(request.headers.get('Authorization')).toEqual('ApiKey QAS_TOKEN') expect(request.headers.get('Content-Type')).includes('multipart/form-data') + + const formData = await request.formData() + const files = formData.getAll('files') return HttpResponse.json({ - id: 'TEST', - url: 'http://example.com', + files: files.map((_, i) => ({ + id: `TEST-${i}`, + url: 'http://example.com', + })), }) }) ) @@ -135,11 +140,11 @@ afterAll(() => { afterEach(() => { server.resetHandlers() server.events.removeAllListeners() - setMaxResultsInRequest(50) + setMaxResultsInRequest(500) }) const countFileUploadApiCalls = () => - countMockedApiCalls(server, (req) => req.url.endsWith('/file')) + countMockedApiCalls(server, (req) => new URL(req.url).pathname.endsWith('/file/batch')) const countResultUploadApiCalls = () => countMockedApiCalls(server, (req) => new URL(req.url).pathname.endsWith('/result/batch')) const countCreateTCasesApiCalls = () => @@ 
-362,7 +367,7 @@ fileTypes.forEach((fileType) => { await run( `${fileType.command} -r ${runURL} --attachments ${fileType.dataBasePath}/matching-tcases.${fileType.fileExtension}` ) - expect(numFileUploadCalls()).toBe(5) + expect(numFileUploadCalls()).toBe(1) // all 5 files in one batch expect(numResultUploadCalls()).toBe(2) // 5 results total }) test('Missing attachments should throw an error', async () => { @@ -383,7 +388,7 @@ fileTypes.forEach((fileType) => { await run( `${fileType.command} -r ${runURL} --attachments --force ${fileType.dataBasePath}/missing-attachments.${fileType.fileExtension}` ) - expect(numFileUploadCalls()).toBe(4) + expect(numFileUploadCalls()).toBe(1) // all 4 files in one batch expect(numResultUploadCalls()).toBe(5) // 5 results total }) }) diff --git a/src/utils/result-upload/ResultUploader.ts b/src/utils/result-upload/ResultUploader.ts index eb6ecb8..6b96e65 100644 --- a/src/utils/result-upload/ResultUploader.ts +++ b/src/utils/result-upload/ResultUploader.ts @@ -7,8 +7,10 @@ import { Attachment, TestCaseResult } from './types' import { ResultUploadCommandArgs, UploadCommandType } from './ResultUploadCommandHandler' import type { MarkerParser } from './MarkerParser' -const MAX_CONCURRENT_FILE_UPLOADS = 10 -let MAX_RESULTS_IN_REQUEST = 50 // Only updated from tests, otherwise it's a constant +const MAX_CONCURRENT_BATCH_UPLOADS = 3 +const MAX_BATCH_SIZE_BYTES = 100 * 1024 * 1024 // 100 MiB +const MAX_BATCH_FILE_COUNT = 100 +let MAX_RESULTS_IN_REQUEST = 500 // Only updated from tests, otherwise it's a constant export class ResultUploader { private api: Api @@ -171,7 +173,6 @@ ${chalk.yellow('To fix this issue, choose one of the following options:')} attachment: Attachment tcaseIndex: number }> = [] - let uploadedCount = 0 results.forEach((item, index) => { item.result.attachments.forEach((attachment) => { @@ -185,37 +186,68 @@ ${chalk.yellow('To fix this issue, choose one of the following options:')} return results } - // Upload all 
attachments concurrently with progress tracking + // Group attachments into batches where total size <= MAX_BATCH_SIZE_BYTES + const batches: Array<typeof allAttachments> = [] + let currentBatch: typeof allAttachments = [] + let currentBatchSize = 0 + + for (const item of allAttachments) { + const size = item.attachment.buffer!.byteLength + if ( + currentBatch.length > 0 && + (currentBatchSize + size > MAX_BATCH_SIZE_BYTES || + currentBatch.length >= MAX_BATCH_FILE_COUNT) + ) { + batches.push(currentBatch) + currentBatch = [] + currentBatchSize = 0 + } + currentBatch.push(item) + currentBatchSize += size + } + if (currentBatch.length > 0) { + batches.push(currentBatch) + } + + // Upload batches concurrently with progress tracking + let uploadedCount = 0 loader.start(`Uploading attachments: 0/${allAttachments.length} files uploaded`) - const uploadedAttachments = await this.processConcurrently( - allAttachments, - async ({ attachment, tcaseIndex }) => { - const { url } = await this.api.files.uploadFile( - new Blob([attachment.buffer! as BlobPart]), - attachment.filename - ) - uploadedCount++ + + const batchResults = await this.processConcurrently( + batches, + async (batch) => { + const files = batch.map(({ attachment }) => ({ + blob: new Blob([attachment.buffer!
as BlobPart]), + filename: attachment.filename, + })) + + const uploaded = await this.api.files.uploadFiles(files) + + uploadedCount += batch.length loader.setText( `Uploading attachments: ${uploadedCount}/${allAttachments.length} files uploaded` ) - return { - tcaseIndex, - url, - name: attachment.filename, - } + + return batch.map((item, i) => ({ + tcaseIndex: item.tcaseIndex, + url: uploaded[i].url, + name: item.attachment.filename, + })) }, - MAX_CONCURRENT_FILE_UPLOADS + MAX_CONCURRENT_BATCH_UPLOADS ) loader.stop() - // Group uploaded attachments by test case index + // Flatten batch results and group by test case index const attachmentsByTCase = new Map<number, Array<{ url: string; name: string }>>() - uploadedAttachments.forEach(({ tcaseIndex, url, name }) => { - if (!attachmentsByTCase.has(tcaseIndex)) { - attachmentsByTCase.set(tcaseIndex, []) + for (const batchResult of batchResults) { + for (const { tcaseIndex, url, name } of batchResult) { + if (!attachmentsByTCase.has(tcaseIndex)) { + attachmentsByTCase.set(tcaseIndex, []) + } + attachmentsByTCase.get(tcaseIndex)!.push({ url, name }) } - attachmentsByTCase.get(tcaseIndex)!.push({ url, name }) - }) + } // Map results with their uploaded attachment URLs return results.map(({ tcase, result }, index) => { diff --git a/src/utils/result-upload/junitXmlParser.ts b/src/utils/result-upload/junitXmlParser.ts index 6af54ee..af1173d 100644 --- a/src/utils/result-upload/junitXmlParser.ts +++ b/src/utils/result-upload/junitXmlParser.ts @@ -117,7 +117,7 @@ export const parseJUnitXml: Parser = async ( name: tcase.$.name ?? '', timeTaken: Number.isFinite(timeTakenSeconds) && timeTakenSeconds >= 0 - ? timeTakenSeconds * 1000 + ? Math.round(timeTakenSeconds * 1000) : null, attachments: [], }) - 1