-
Notifications
You must be signed in to change notification settings - Fork 887
fix(sdk): buffer template upload to set Content-Length, add regression tests #1294
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits
Commits
Show all changes
6 commits
Select commit
Hold shift + click to select a range
fbfa824
fix(js-sdk): buffer template upload to avoid S3 chunked PUT 501
truffle-dev f6ff5af
test(js-sdk): simplify uploadFile test setup
mishushakov 30c215c
fix(js-sdk): cast tar Pack to AsyncIterable for buffer()
mishushakov 653df46
fix(js-sdk): dynamically import node:stream/consumers in uploadFile
mishushakov 3948f4e
style(js-sdk): apply prettier formatting
mishushakov 1323a27
test(python-sdk): add regression tests for template upload Content-Le…
mishushakov File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,5 @@ | ||
| --- | ||
| "e2b": patch | ||
| --- | ||
|
|
||
| fix(js-sdk): buffer template tar archive before upload so `fetch` sets `Content-Length` instead of falling back to `Transfer-Encoding: chunked`. S3 presigned PUT URLs reject chunked requests with `501 NotImplemented`, breaking template uploads in self-hosted deployments backed by S3-compatible storage. Aligns the JS SDK with the Python SDK, which already buffers via `io.BytesIO`. |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,68 @@ | ||
| import { describe, test, expect, beforeAll, afterAll } from 'vitest' | ||
| import { writeFile, mkdtemp, rm } from 'fs/promises' | ||
| import { join } from 'path' | ||
| import { tmpdir } from 'os' | ||
| import { createServer, type IncomingMessage, type Server } from 'http' | ||
| import { AddressInfo } from 'net' | ||
| import { uploadFile } from '../../src/template/buildApi' | ||
|
|
||
| // Regression test for e2b-dev/e2b#1243 — uploadFile used to pass a Node | ||
| // Readable directly to fetch, which made undici fall back to | ||
| // Transfer-Encoding: chunked. S3 presigned PUT URLs reject that with 501 | ||
| // NotImplemented. The fix buffers the archive first so Content-Length is set. | ||
| describe('uploadFile transfer encoding', () => { | ||
| let testDir: string | ||
| let server: Server | ||
| let baseUrl: string | ||
| let capturedHeaders: IncomingMessage['headers'] = {} | ||
| let capturedBodyLength = 0 | ||
|
|
||
| beforeAll(async () => { | ||
| testDir = await mkdtemp(join(tmpdir(), 'uploadFile-test-')) | ||
| await writeFile(join(testDir, 'hello.txt'), 'hello world') | ||
|
|
||
| server = createServer((req, res) => { | ||
| capturedHeaders = req.headers | ||
| let bytes = 0 | ||
| req.on('data', (chunk: Buffer) => { | ||
| bytes += chunk.length | ||
| }) | ||
| req.on('end', () => { | ||
| capturedBodyLength = bytes | ||
| res.writeHead(200) | ||
| res.end() | ||
| }) | ||
| }) | ||
| await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', resolve)) | ||
| const { port } = server.address() as AddressInfo | ||
| baseUrl = `http://127.0.0.1:${port}/upload` | ||
| }) | ||
|
|
||
| afterAll(async () => { | ||
| await new Promise<void>((resolve) => server.close(() => resolve())) | ||
| await rm(testDir, { recursive: true, force: true }) | ||
| }) | ||
|
|
||
| test('sets Content-Length and does not use chunked transfer encoding', async () => { | ||
| await uploadFile( | ||
| { | ||
| fileName: '*.txt', | ||
| fileContextPath: testDir, | ||
| url: baseUrl, | ||
| ignorePatterns: [], | ||
| resolveSymlinks: false, | ||
| }, | ||
| undefined | ||
| ) | ||
|
|
||
| expect(capturedHeaders['content-length']).toBeDefined() | ||
| const contentLength = Number(capturedHeaders['content-length']) | ||
| expect(contentLength).toBeGreaterThan(0) | ||
| expect(contentLength).toBe(capturedBodyLength) | ||
|
|
||
| const transferEncoding = capturedHeaders['transfer-encoding'] | ||
| if (transferEncoding !== undefined) { | ||
| expect(transferEncoding.toLowerCase()).not.toContain('chunked') | ||
| } | ||
| }) | ||
| }) |
71 changes: 71 additions & 0 deletions
71
packages/python-sdk/tests/async/template_async/test_upload_file.py
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,71 @@ | ||
| import threading | ||
| from http.server import BaseHTTPRequestHandler, HTTPServer | ||
|
|
||
| from e2b.api.client.client import AuthenticatedClient | ||
| from e2b.template_async.build_api import upload_file | ||
|
|
||
|
|
||
| # Regression test for e2b-dev/e2b#1243 — upload_file must set Content-Length | ||
| # and must not fall back to Transfer-Encoding: chunked. S3 presigned PUT URLs | ||
| # reject chunked encoding with 501 NotImplemented. httpx sets Content-Length | ||
| # automatically when we pass bytes (tar_buffer.getvalue()); this test guards | ||
| # against someone swapping the bytes for a generator/stream later. | ||
| # | ||
| # The mock server runs in a daemon thread and doesn't need to be async — the | ||
| # httpx.AsyncClient connects to it via asyncio sockets without blocking the | ||
| # event loop. | ||
|
|
||
|
|
||
| def _make_server(): | ||
| state = {"headers": None, "body_length": 0} | ||
|
|
||
| class Handler(BaseHTTPRequestHandler): | ||
| def do_PUT(self): | ||
| state["headers"] = dict(self.headers) | ||
| length = int(self.headers.get("Content-Length", 0)) | ||
| body = self.rfile.read(length) if length else b"" | ||
| state["body_length"] = len(body) | ||
| self.send_response(200) | ||
| self.end_headers() | ||
|
|
||
| def log_message(self, *args, **kwargs): | ||
| return | ||
|
|
||
| server = HTTPServer(("127.0.0.1", 0), Handler) | ||
| thread = threading.Thread(target=server.serve_forever, daemon=True) | ||
| thread.start() | ||
| return server, thread, state | ||
|
|
||
|
|
||
async def test_upload_file_sets_content_length_and_no_chunked_encoding(tmp_path):
    """upload_file must send Content-Length and not fall back to chunked.

    Regression test for e2b-dev/e2b#1243: S3 presigned PUT URLs reject
    Transfer-Encoding: chunked with 501 NotImplemented. httpx sets
    Content-Length automatically when given bytes; this guards against the
    body being swapped for a generator/stream later.
    """
    (tmp_path / "hello.txt").write_text("hello world")

    server, worker, recorded = _make_server()
    host, port = server.server_address
    target = f"http://{host}:{port}/upload"

    try:
        api_client = AuthenticatedClient(base_url="http://test", token="test")
        await upload_file(
            api_client=api_client,
            file_name="*.txt",
            context_path=str(tmp_path),
            url=target,
            ignore_patterns=[],
            resolve_symlinks=False,
            stack_trace=None,
        )
    finally:
        # Always tear the mock server down, even if upload_file raises.
        server.shutdown()
        server.server_close()
        worker.join(timeout=5)

    headers = recorded["headers"]
    assert headers is not None

    declared = headers.get("Content-Length")
    assert declared is not None
    assert int(declared) > 0
    assert int(declared) == recorded["body_length"]

    encoding = headers.get("Transfer-Encoding")
    if encoding is not None:
        assert "chunked" not in encoding.lower()
67 changes: 67 additions & 0 deletions
67
packages/python-sdk/tests/sync/template_sync/test_upload_file.py
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,67 @@ | ||
| import threading | ||
| from http.server import BaseHTTPRequestHandler, HTTPServer | ||
|
|
||
| from e2b.api.client.client import AuthenticatedClient | ||
| from e2b.template_sync.build_api import upload_file | ||
|
|
||
|
|
||
| # Regression test for e2b-dev/e2b#1243 — upload_file must set Content-Length | ||
| # and must not fall back to Transfer-Encoding: chunked. S3 presigned PUT URLs | ||
| # reject chunked encoding with 501 NotImplemented. httpx sets Content-Length | ||
| # automatically when we pass bytes (tar_buffer.getvalue()); this test guards | ||
| # against someone swapping the bytes for a generator/stream later. | ||
|
|
||
|
|
||
| def _make_server(): | ||
| state = {"headers": None, "body_length": 0} | ||
|
|
||
| class Handler(BaseHTTPRequestHandler): | ||
| def do_PUT(self): | ||
| state["headers"] = dict(self.headers) | ||
| length = int(self.headers.get("Content-Length", 0)) | ||
| body = self.rfile.read(length) if length else b"" | ||
| state["body_length"] = len(body) | ||
| self.send_response(200) | ||
| self.end_headers() | ||
|
|
||
| def log_message(self, *args, **kwargs): | ||
| return | ||
|
|
||
| server = HTTPServer(("127.0.0.1", 0), Handler) | ||
| thread = threading.Thread(target=server.serve_forever, daemon=True) | ||
| thread.start() | ||
| return server, thread, state | ||
|
|
||
|
|
||
def test_upload_file_sets_content_length_and_no_chunked_encoding(tmp_path):
    """upload_file must declare Content-Length instead of chunked encoding.

    Regression test for e2b-dev/e2b#1243: S3 presigned PUT URLs answer
    Transfer-Encoding: chunked with 501 NotImplemented. httpx emits
    Content-Length when handed bytes; this test guards against the body
    being replaced by a generator/stream later.
    """
    (tmp_path / "hello.txt").write_text("hello world")

    server, runner, captured = _make_server()
    host, port = server.server_address
    endpoint = f"http://{host}:{port}/upload"

    try:
        client = AuthenticatedClient(base_url="http://test", token="test")
        upload_file(
            api_client=client,
            file_name="*.txt",
            context_path=str(tmp_path),
            url=endpoint,
            ignore_patterns=[],
            resolve_symlinks=False,
            stack_trace=None,
        )
    finally:
        # Shut the mock server down regardless of how the upload went.
        server.shutdown()
        server.server_close()
        runner.join(timeout=5)

    request_headers = captured["headers"]
    assert request_headers is not None

    length_header = request_headers.get("Content-Length")
    assert length_header is not None
    assert int(length_header) > 0
    assert int(length_header) == captured["body_length"]

    te_header = request_headers.get("Transfer-Encoding")
    if te_header is not None:
        assert "chunked" not in te_header.lower()
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.