Skip to content

Commit 0b91e8f

Browse files
authored
Merge 1323a27 into b97fd4d
2 parents b97fd4d + 1323a27 commit 0b91e8f

5 files changed

Lines changed: 229 additions & 5 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
---
"e2b": patch
---

fix(js-sdk): buffer template tar archive before upload so `fetch` sets `Content-Length` instead of falling back to `Transfer-Encoding: chunked`. S3 presigned PUT URLs reject chunked requests with `501 NotImplemented`, breaking template uploads in self-hosted deployments backed by S3-compatible storage. Aligns the JS SDK with the Python SDK, which already buffers via `io.BytesIO`.

packages/js-sdk/src/template/buildApi.ts

Lines changed: 18 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { ApiClient, handleApiError, paths, components } from '../api'
2-
import { stripAnsi } from '../utils'
2+
import { dynamicImport, stripAnsi } from '../utils'
33
import { BuildError, FileUploadError, TemplateError } from '../errors'
44
import { LogEntry } from './logger'
55
import { getBuildStepIndex, tarFileStreamUpload } from './utils'
@@ -119,12 +119,25 @@ export async function uploadFile(
119119
resolveSymlinks
120120
)
121121

122-
// The compiler assumes this is Web fetch API, but it's actually Node.js fetch API
122+
// Buffer the archive before uploading so fetch sets Content-Length.
123+
// S3 presigned PUT URLs reject Transfer-Encoding: chunked with 501
124+
// NotImplemented, which is what Node's fetch falls back to when the
125+
// body is a Readable without a known length. See e2b-dev/e2b#1243.
126+
// The Python SDK takes the same approach (build_api.py:upload_file).
127+
// Dynamically import so the browser bundle doesn't pull in node:stream.
128+
// tar's Pack extends Minipass and is iterable as AsyncIterable<Buffer> at
129+
// runtime, but the cli's tsconfig (preserveSymlinks) doesn't surface that
130+
// through the type chain — cast via unknown.
131+
const { buffer } = await dynamicImport<
132+
typeof import('node:stream/consumers')
133+
>('node:stream/consumers')
134+
const uploadBody = await buffer(
135+
uploadStream as unknown as AsyncIterable<Buffer>
136+
)
137+
123138
const res = await fetch(url, {
124139
method: 'PUT',
125-
// @ts-expect-error
126-
body: uploadStream,
127-
duplex: 'half',
140+
body: uploadBody,
128141
})
129142

130143
if (!res.ok) {
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
import { describe, test, expect, beforeAll, afterAll } from 'vitest'
2+
import { writeFile, mkdtemp, rm } from 'fs/promises'
3+
import { join } from 'path'
4+
import { tmpdir } from 'os'
5+
import { createServer, type IncomingMessage, type Server } from 'http'
6+
import { AddressInfo } from 'net'
7+
import { uploadFile } from '../../src/template/buildApi'
8+
9+
// Regression test for e2b-dev/e2b#1243 — uploadFile used to pass a Node
10+
// Readable directly to fetch, which made undici fall back to
11+
// Transfer-Encoding: chunked. S3 presigned PUT URLs reject that with 501
12+
// NotImplemented. The fix buffers the archive first so Content-Length is set.
13+
describe('uploadFile transfer encoding', () => {
14+
let testDir: string
15+
let server: Server
16+
let baseUrl: string
17+
let capturedHeaders: IncomingMessage['headers'] = {}
18+
let capturedBodyLength = 0
19+
20+
beforeAll(async () => {
21+
testDir = await mkdtemp(join(tmpdir(), 'uploadFile-test-'))
22+
await writeFile(join(testDir, 'hello.txt'), 'hello world')
23+
24+
server = createServer((req, res) => {
25+
capturedHeaders = req.headers
26+
let bytes = 0
27+
req.on('data', (chunk: Buffer) => {
28+
bytes += chunk.length
29+
})
30+
req.on('end', () => {
31+
capturedBodyLength = bytes
32+
res.writeHead(200)
33+
res.end()
34+
})
35+
})
36+
await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', resolve))
37+
const { port } = server.address() as AddressInfo
38+
baseUrl = `http://127.0.0.1:${port}/upload`
39+
})
40+
41+
afterAll(async () => {
42+
await new Promise<void>((resolve) => server.close(() => resolve()))
43+
await rm(testDir, { recursive: true, force: true })
44+
})
45+
46+
test('sets Content-Length and does not use chunked transfer encoding', async () => {
47+
await uploadFile(
48+
{
49+
fileName: '*.txt',
50+
fileContextPath: testDir,
51+
url: baseUrl,
52+
ignorePatterns: [],
53+
resolveSymlinks: false,
54+
},
55+
undefined
56+
)
57+
58+
expect(capturedHeaders['content-length']).toBeDefined()
59+
const contentLength = Number(capturedHeaders['content-length'])
60+
expect(contentLength).toBeGreaterThan(0)
61+
expect(contentLength).toBe(capturedBodyLength)
62+
63+
const transferEncoding = capturedHeaders['transfer-encoding']
64+
if (transferEncoding !== undefined) {
65+
expect(transferEncoding.toLowerCase()).not.toContain('chunked')
66+
}
67+
})
68+
})
Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
import threading
2+
from http.server import BaseHTTPRequestHandler, HTTPServer
3+
4+
from e2b.api.client.client import AuthenticatedClient
5+
from e2b.template_async.build_api import upload_file
6+
7+
8+
# Regression test for e2b-dev/e2b#1243 — upload_file must set Content-Length
9+
# and must not fall back to Transfer-Encoding: chunked. S3 presigned PUT URLs
10+
# reject chunked encoding with 501 NotImplemented. httpx sets Content-Length
11+
# automatically when we pass bytes (tar_buffer.getvalue()); this test guards
12+
# against someone swapping the bytes for a generator/stream later.
13+
#
14+
# The mock server runs in a daemon thread and doesn't need to be async — the
15+
# httpx.AsyncClient connects to it via asyncio sockets without blocking the
16+
# event loop.
17+
18+
19+
def _make_server():
20+
state = {"headers": None, "body_length": 0}
21+
22+
class Handler(BaseHTTPRequestHandler):
23+
def do_PUT(self):
24+
state["headers"] = dict(self.headers)
25+
length = int(self.headers.get("Content-Length", 0))
26+
body = self.rfile.read(length) if length else b""
27+
state["body_length"] = len(body)
28+
self.send_response(200)
29+
self.end_headers()
30+
31+
def log_message(self, *args, **kwargs):
32+
return
33+
34+
server = HTTPServer(("127.0.0.1", 0), Handler)
35+
thread = threading.Thread(target=server.serve_forever, daemon=True)
36+
thread.start()
37+
return server, thread, state
38+
39+
40+
async def test_upload_file_sets_content_length_and_no_chunked_encoding(tmp_path):
    # Regression coverage for e2b-dev/e2b#1243: the upload must declare a
    # Content-Length and must not fall back to Transfer-Encoding: chunked,
    # which S3 presigned PUT URLs reject with 501 NotImplemented.
    (tmp_path / "hello.txt").write_text("hello world")

    server, thread, state = _make_server()
    host, port = server.server_address
    target_url = f"http://{host}:{port}/upload"

    try:
        client = AuthenticatedClient(base_url="http://test", token="test")
        await upload_file(
            api_client=client,
            file_name="*.txt",
            context_path=str(tmp_path),
            url=target_url,
            ignore_patterns=[],
            resolve_symlinks=False,
            stack_trace=None,
        )
    finally:
        # Always tear the mock server down, even if the upload raised.
        server.shutdown()
        server.server_close()
        thread.join(timeout=5)

    assert state["headers"] is not None

    # Declared length must exist, be non-trivial, and match the bytes the
    # server actually read off the wire.
    declared = state["headers"].get("Content-Length")
    assert declared is not None
    assert int(declared) > 0
    assert int(declared) == state["body_length"]

    # Chunked encoding is exactly the failure mode this test guards against.
    encoding = state["headers"].get("Transfer-Encoding")
    assert encoding is None or "chunked" not in encoding.lower()
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
import threading
2+
from http.server import BaseHTTPRequestHandler, HTTPServer
3+
4+
from e2b.api.client.client import AuthenticatedClient
5+
from e2b.template_sync.build_api import upload_file
6+
7+
8+
# Regression test for e2b-dev/e2b#1243 — upload_file must set Content-Length
9+
# and must not fall back to Transfer-Encoding: chunked. S3 presigned PUT URLs
10+
# reject chunked encoding with 501 NotImplemented. httpx sets Content-Length
11+
# automatically when we pass bytes (tar_buffer.getvalue()); this test guards
12+
# against someone swapping the bytes for a generator/stream later.
13+
14+
15+
def _make_server():
16+
state = {"headers": None, "body_length": 0}
17+
18+
class Handler(BaseHTTPRequestHandler):
19+
def do_PUT(self):
20+
state["headers"] = dict(self.headers)
21+
length = int(self.headers.get("Content-Length", 0))
22+
body = self.rfile.read(length) if length else b""
23+
state["body_length"] = len(body)
24+
self.send_response(200)
25+
self.end_headers()
26+
27+
def log_message(self, *args, **kwargs):
28+
return
29+
30+
server = HTTPServer(("127.0.0.1", 0), Handler)
31+
thread = threading.Thread(target=server.serve_forever, daemon=True)
32+
thread.start()
33+
return server, thread, state
34+
35+
36+
def test_upload_file_sets_content_length_and_no_chunked_encoding(tmp_path):
    # Regression coverage for e2b-dev/e2b#1243: the upload must declare a
    # Content-Length and must not fall back to Transfer-Encoding: chunked,
    # which S3 presigned PUT URLs reject with 501 NotImplemented.
    (tmp_path / "hello.txt").write_text("hello world")

    server, thread, state = _make_server()
    host, port = server.server_address
    target_url = f"http://{host}:{port}/upload"

    try:
        client = AuthenticatedClient(base_url="http://test", token="test")
        upload_file(
            api_client=client,
            file_name="*.txt",
            context_path=str(tmp_path),
            url=target_url,
            ignore_patterns=[],
            resolve_symlinks=False,
            stack_trace=None,
        )
    finally:
        # Always tear the mock server down, even if the upload raised.
        server.shutdown()
        server.server_close()
        thread.join(timeout=5)

    assert state["headers"] is not None

    # Declared length must exist, be non-trivial, and match the bytes the
    # server actually read off the wire.
    declared = state["headers"].get("Content-Length")
    assert declared is not None
    assert int(declared) > 0
    assert int(declared) == state["body_length"]

    # Chunked encoding is exactly the failure mode this test guards against.
    encoding = state["headers"].get("Transfer-Encoding")
    assert encoding is None or "chunked" not in encoding.lower()

0 commit comments

Comments
 (0)