Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/fix-js-sdk-upload-content-length.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"e2b": patch
---

fix(js-sdk): buffer template tar archive before upload so `fetch` sets `Content-Length` instead of falling back to `Transfer-Encoding: chunked`. S3 presigned PUT URLs reject chunked requests with `501 NotImplemented`, breaking template uploads in self-hosted deployments backed by S3-compatible storage. Aligns the JS SDK with the Python SDK, which already buffers via `io.BytesIO`.
13 changes: 9 additions & 4 deletions packages/js-sdk/src/template/buildApi.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { buffer } from 'node:stream/consumers'
import { ApiClient, handleApiError, paths, components } from '../api'
import { stripAnsi } from '../utils'
import { BuildError, FileUploadError, TemplateError } from '../errors'
Expand Down Expand Up @@ -119,12 +120,16 @@ export async function uploadFile(
resolveSymlinks
)

// The compiler assumes this is Web fetch API, but it's actually Node.js fetch API
// Buffer the archive before uploading so fetch sets Content-Length.
// S3 presigned PUT URLs reject Transfer-Encoding: chunked with 501
// NotImplemented, which is what Node's fetch falls back to when the
// body is a Readable without a known length. See e2b-dev/e2b#1243.
// The Python SDK takes the same approach (build_api.py:upload_file).
const uploadBody = await buffer(uploadStream)

const res = await fetch(url, {
method: 'PUT',
// @ts-expect-error
body: uploadStream,
duplex: 'half',
body: uploadBody,
})

if (!res.ok) {
Expand Down
72 changes: 72 additions & 0 deletions packages/js-sdk/tests/template/uploadFile.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import { describe, test, expect, beforeEach, afterEach } from 'vitest'
import { writeFile, mkdir, rm } from 'fs/promises'
import { join } from 'path'
import { tmpdir } from 'os'
import { createServer, type IncomingMessage, type Server } from 'http'
import { AddressInfo } from 'net'
import { uploadFile } from '../../src/template/buildApi'

// Regression test for e2b-dev/e2b#1243 — uploadFile used to pass a Node
// Readable directly to fetch, which made undici fall back to
// Transfer-Encoding: chunked. S3 presigned PUT URLs reject that with 501
// NotImplemented. The fix buffers the archive first so Content-Length is set.
describe('uploadFile transfer encoding', () => {
  let testDir: string
  let server: Server
  let baseUrl: string
  let capturedHeaders: IncomingMessage['headers']
  let capturedBodyLength: number

  beforeEach(async () => {
    // Fresh temp directory with one small file to be tarred and uploaded.
    testDir = join(tmpdir(), `uploadFile-test-${Date.now()}`)
    await mkdir(testDir, { recursive: true })
    await writeFile(join(testDir, 'hello.txt'), 'hello world')

    capturedHeaders = {}
    capturedBodyLength = 0

    // Minimal PUT target that records the request headers and counts the
    // raw body bytes so we can compare them against Content-Length.
    server = createServer((req, res) => {
      capturedHeaders = req.headers
      let bytes = 0
      req.on('data', (chunk: Buffer) => {
        bytes += chunk.length
      })
      req.on('end', () => {
        capturedBodyLength = bytes
        // Responding only after 'end' guarantees the counters are final
        // by the time fetch() resolves in the test body.
        res.writeHead(200)
        res.end()
      })
    })
    // Port 0 lets the OS pick a free port; read it back from the socket.
    await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', resolve))
    const { port } = server.address() as AddressInfo
    baseUrl = `http://127.0.0.1:${port}/upload`
  })

  afterEach(async () => {
    // Node's fetch (undici) pools keep-alive connections, and
    // server.close() waits for every open socket to end — without this
    // the teardown hangs (or stalls until the keep-alive timeout).
    // closeAllConnections() is available since Node 18.2.
    server.closeAllConnections()
    await new Promise<void>((resolve) => server.close(() => resolve()))
    await rm(testDir, { recursive: true, force: true })
  })

  test('sets Content-Length and does not use chunked transfer encoding', async () => {
    await uploadFile(
      {
        fileName: '*.txt',
        fileContextPath: testDir,
        url: baseUrl,
        ignorePatterns: [],
        resolveSymlinks: false,
      },
      undefined
    )

    // A buffered body must produce an exact Content-Length that matches
    // the bytes actually received by the server.
    expect(capturedHeaders['content-length']).toBeDefined()
    const contentLength = Number(capturedHeaders['content-length'])
    expect(contentLength).toBeGreaterThan(0)
    expect(contentLength).toBe(capturedBodyLength)

    // S3 presigned PUTs reject chunked encoding with 501 NotImplemented,
    // so the request must not fall back to Transfer-Encoding: chunked.
    const transferEncoding = capturedHeaders['transfer-encoding']
    if (transferEncoding !== undefined) {
      expect(transferEncoding.toLowerCase()).not.toContain('chunked')
    }
  })
})