diff --git a/.changeset/fix-js-sdk-upload-content-length.md b/.changeset/fix-js-sdk-upload-content-length.md new file mode 100644 index 0000000000..7d4b690059 --- /dev/null +++ b/.changeset/fix-js-sdk-upload-content-length.md @@ -0,0 +1,5 @@ +--- +"e2b": patch +--- + +fix(js-sdk): buffer template tar archive before upload so `fetch` sets `Content-Length` instead of falling back to `Transfer-Encoding: chunked`. S3 presigned PUT URLs reject chunked requests with `501 NotImplemented`, breaking template uploads in self-hosted deployments backed by S3-compatible storage. Aligns the JS SDK with the Python SDK, which already buffers via `io.BytesIO`. diff --git a/packages/js-sdk/src/template/buildApi.ts b/packages/js-sdk/src/template/buildApi.ts index d699efe03c..b070d2bbb1 100644 --- a/packages/js-sdk/src/template/buildApi.ts +++ b/packages/js-sdk/src/template/buildApi.ts @@ -1,5 +1,5 @@ import { ApiClient, handleApiError, paths, components } from '../api' -import { stripAnsi } from '../utils' +import { dynamicImport, stripAnsi } from '../utils' import { BuildError, FileUploadError, TemplateError } from '../errors' import { LogEntry } from './logger' import { getBuildStepIndex, tarFileStreamUpload } from './utils' @@ -119,12 +119,25 @@ export async function uploadFile( resolveSymlinks ) - // The compiler assumes this is Web fetch API, but it's actually Node.js fetch API + // Buffer the archive before uploading so fetch sets Content-Length. + // S3 presigned PUT URLs reject Transfer-Encoding: chunked with 501 + // NotImplemented, which is what Node's fetch falls back to when the + // body is a Readable without a known length. See e2b-dev/e2b#1243. + // The Python SDK takes the same approach (build_api.py:upload_file). + // Dynamically import so the browser bundle doesn't pull in node:stream. 
+ // tar's Pack extends Minipass and is iterable as AsyncIterable at + runtime, but the cli's tsconfig (preserveSymlinks) doesn't surface that + through the type chain — cast via unknown. + const { buffer } = await dynamicImport< + typeof import('node:stream/consumers') + >('node:stream/consumers') + const uploadBody = await buffer( + uploadStream as unknown as AsyncIterable<Uint8Array> + ) + const res = await fetch(url, { method: 'PUT', - // @ts-expect-error - body: uploadStream, - duplex: 'half', + body: uploadBody, }) if (!res.ok) { diff --git a/packages/js-sdk/tests/template/uploadFile.test.ts b/packages/js-sdk/tests/template/uploadFile.test.ts new file mode 100644 index 0000000000..3d0cd25b05 --- /dev/null +++ b/packages/js-sdk/tests/template/uploadFile.test.ts @@ -0,0 +1,68 @@ +import { describe, test, expect, beforeAll, afterAll } from 'vitest' +import { writeFile, mkdtemp, rm } from 'fs/promises' +import { join } from 'path' +import { tmpdir } from 'os' +import { createServer, type IncomingMessage, type Server } from 'http' +import { AddressInfo } from 'net' +import { uploadFile } from '../../src/template/buildApi' + +// Regression test for e2b-dev/e2b#1243 — uploadFile used to pass a Node +// Readable directly to fetch, which made undici fall back to +// Transfer-Encoding: chunked. S3 presigned PUT URLs reject that with 501 +// NotImplemented. The fix buffers the archive first so Content-Length is set. 
+describe('uploadFile transfer encoding', () => { + let testDir: string + let server: Server + let baseUrl: string + let capturedHeaders: IncomingMessage['headers'] = {} + let capturedBodyLength = 0 + + beforeAll(async () => { + testDir = await mkdtemp(join(tmpdir(), 'uploadFile-test-')) + await writeFile(join(testDir, 'hello.txt'), 'hello world') + + server = createServer((req, res) => { + capturedHeaders = req.headers + let bytes = 0 + req.on('data', (chunk: Buffer) => { + bytes += chunk.length + }) + req.on('end', () => { + capturedBodyLength = bytes + res.writeHead(200) + res.end() + }) + }) + await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', () => resolve())) + const { port } = server.address() as AddressInfo + baseUrl = `http://127.0.0.1:${port}/upload` + }) + + afterAll(async () => { + await new Promise<void>((resolve) => server.close(() => resolve())) + await rm(testDir, { recursive: true, force: true }) + }) + + test('sets Content-Length and does not use chunked transfer encoding', async () => { + await uploadFile( + { + fileName: '*.txt', + fileContextPath: testDir, + url: baseUrl, + ignorePatterns: [], + resolveSymlinks: false, + }, + undefined + ) + + expect(capturedHeaders['content-length']).toBeDefined() + const contentLength = Number(capturedHeaders['content-length']) + expect(contentLength).toBeGreaterThan(0) + expect(contentLength).toBe(capturedBodyLength) + + const transferEncoding = capturedHeaders['transfer-encoding'] + if (transferEncoding !== undefined) { + expect(transferEncoding.toLowerCase()).not.toContain('chunked') + } + }) +})