diff --git a/.changeset/add-write-file-stream.md b/.changeset/add-write-file-stream.md new file mode 100644 index 000000000..8c04dd0de --- /dev/null +++ b/.changeset/add-write-file-stream.md @@ -0,0 +1,11 @@ +--- +'@cloudflare/sandbox': patch +--- + +`writeFile` now accepts `string | ReadableStream` as content, +removing the 32 MiB size limit for file uploads. When a ReadableStream is +provided, it is consumed and streamed directly to disk with no buffering. +The original stream cannot be reused after the call. + +The `encoding` option on `writeFile` is deprecated; prefer passing +a ReadableStream for binary data rather than base64-encoded strings. diff --git a/examples/stream-upload/.gitignore b/examples/stream-upload/.gitignore new file mode 100644 index 000000000..8e60be544 --- /dev/null +++ b/examples/stream-upload/.gitignore @@ -0,0 +1,199 @@ +# Created by https://www.toptal.com/developers/gitignore/api/macos,node,git +# Edit at https://www.toptal.com/developers/gitignore?templates=macos,node,git + +### Git ### +# Created by git for backups. 
To disable backups in Git: +# $ git config --global mergetool.keepBackup false +*.orig + +# Created by git when using merge tools for conflicts +*.BACKUP.* +*.BASE.* +*.LOCAL.* +*.REMOTE.* +*_BACKUP_*.txt +*_BASE_*.txt +*_LOCAL_*.txt +*_REMOTE_*.txt + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### macOS Patch ### +# iCloud generated files +*.icloud + +### Node ### +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file 
+.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +### Node Patch ### +# Serverless Webpack directories +.webpack/ + +# Optional stylelint cache + +# SvelteKit build / generate output +.svelte-kit + +# End of https://www.toptal.com/developers/gitignore/api/macos,node,git + +### Wrangler ### +.wrangler/ +.env* +!.env.example +.dev.vars* +!.dev.vars.example diff --git a/examples/stream-upload/Dockerfile b/examples/stream-upload/Dockerfile new file mode 100644 index 000000000..4391c0541 --- /dev/null +++ b/examples/stream-upload/Dockerfile @@ -0,0 +1,4 @@ +FROM cloudflare/sandbox-test:0.7.18 + +# Required during local development to access exposed ports +EXPOSE 8080 diff --git a/examples/stream-upload/README.md b/examples/stream-upload/README.md new file mode 100644 index 000000000..fa9af9986 --- /dev/null +++ b/examples/stream-upload/README.md @@ -0,0 +1,59 @@ +# Stream Upload + +**Upload files of any size to a sandbox via streaming, then download and verify integrity.** + +A demo of the Sandbox SDK's `writeFile` and `readFileStream` APIs. 
When `writeFile` receives a `ReadableStream`, bytes are streamed directly to disk without base64 encoding or buffering, bypassing the 32 MiB size limit. + +## Features + +- Browser UI with file picker, upload, and SHA-256 verification +- Streaming upload via `writeFile` with a `ReadableStream` (no size limit) +- Streaming download via `readFileStream` + `streamFile` +- CLI test script for automated integrity checks + +## Quick Start + +```bash +npm install +npm run dev +``` + +Open http://localhost:8787 in your browser, pick a file, and click "Upload & Verify". + +## How It Works + +1. **Upload** - The browser sends the file as a raw binary stream via `POST /upload`. The Worker passes `request.body` directly to `sandbox.writeFile()`, which streams bytes to disk with zero buffering. + +2. **Download** - The browser requests `GET /download?path=...`. The Worker calls `sandbox.readFileStream()` and pipes the decoded chunks into the response body. + +3. **Verify** - The browser computes SHA-256 of both the original and downloaded bytes and compares them. + +## API + +| Endpoint | Method | Description | +| ----------- | ------ | -------------------------------------------------------------- | +| `/` | GET | Browser UI | +| `/upload` | POST | Stream a file to the sandbox. Query: `?filename=` | +| `/download` | GET | Stream a file back from the sandbox. Query: `?path=` | + +## CLI Test Script + +An automated test script is included for verifying the round-trip outside the browser: + +```bash +# Default: 35 MB random file against localhost:8787 +./test-upload.sh + +# Custom server and size +./test-upload.sh http://localhost:8788 50 +``` + +The script generates a random file with `dd`, uploads it, downloads it back, and compares SHA-256 hashes. + +## Deploy + +```bash +npm run deploy +``` + +After first deployment, wait 2-3 minutes for container provisioning before making requests. 
diff --git a/examples/stream-upload/package.json b/examples/stream-upload/package.json new file mode 100644 index 000000000..9096a6409 --- /dev/null +++ b/examples/stream-upload/package.json @@ -0,0 +1,22 @@ +{ + "name": "@cloudflare/sandbox-stream-upload-example", + "version": "1.0.0", + "type": "module", + "private": true, + "description": "Upload files to a sandbox via writeFileStream and verify integrity", + "scripts": { + "deploy": "wrangler deploy", + "dev": "wrangler dev", + "start": "wrangler dev", + "cf-typegen": "wrangler types", + "typecheck": "tsc --noEmit" + }, + "devDependencies": { + "@cloudflare/sandbox": "*", + "@types/node": "^24.10.1", + "typescript": "^5.9.3", + "wrangler": "^4.70.0" + }, + "author": "", + "license": "MIT" +} diff --git a/examples/stream-upload/src/index.ts b/examples/stream-upload/src/index.ts new file mode 100644 index 000000000..0bea7a8cc --- /dev/null +++ b/examples/stream-upload/src/index.ts @@ -0,0 +1,246 @@ +import { getSandbox, streamFile } from '@cloudflare/sandbox'; + +export { Sandbox } from '@cloudflare/sandbox'; + +export default { + async fetch(request: Request, env: Env): Promise { + const url = new URL(request.url); + + const sandbox = getSandbox(env.Sandbox, 'stream-upload'); + await sandbox.start(); + + if (url.pathname === '/') { + return new Response(getHTML(), { + headers: { 'Content-Type': 'text/html' } + }); + } + + if (url.pathname === '/upload' && request.method === 'POST') { + return handleUpload(request, sandbox); + } + + if (url.pathname === '/download' && request.method === 'GET') { + return handleDownload(url, sandbox); + } + + return new Response('Not Found', { status: 404 }); + } +}; + +type SandboxInstance = ReturnType; + +async function handleUpload( + request: Request, + sandbox: SandboxInstance +): Promise { + const url = new URL(request.url); + const filename = url.searchParams.get('filename'); + + if (!filename) { + return Response.json( + { error: 'Missing filename query parameter' }, + { 
status: 400 }
+    );
+  }
+
+  if (!request.body) {
+    return Response.json({ error: 'Missing request body' }, { status: 400 });
+  }
+
+  const path = `/workspace/${filename}`;
+
+  try {
+    const result = await sandbox.writeFile(path, request.body);
+    return Response.json({ success: result.success, path: result.path });
+  } catch (err) {
+    const message = err instanceof Error ? err.message : 'Unknown error';
+    return Response.json({ error: message }, { status: 500 });
+  }
+}
+
+async function handleDownload(
+  url: URL,
+  sandbox: SandboxInstance
+): Promise<Response> {
+  const path = url.searchParams.get('path');
+
+  if (!path) {
+    return Response.json(
+      { error: 'Missing path query parameter' },
+      { status: 400 }
+    );
+  }
+
+  try {
+    const sseStream = await sandbox.readFileStream(path);
+    const encoder = new TextEncoder();
+    const body = new ReadableStream({
+      async start(controller) {
+        try {
+          for await (const chunk of streamFile(sseStream)) {
+            if (chunk instanceof Uint8Array) {
+              controller.enqueue(chunk);
+            } else {
+              controller.enqueue(encoder.encode(chunk));
+            }
+          }
+          controller.close();
+        } catch (err) {
+          controller.error(err);
+        }
+      }
+    });
+    return new Response(body, {
+      headers: { 'Content-Type': 'application/octet-stream' }
+    });
+  } catch (err) {
+    const message = err instanceof Error ? err.message : 'Unknown error';
+    return Response.json({ error: message }, { status: 500 });
+  }
+}
+
+function getHTML(): string {
+  return `
+
+
+
+
+Stream Upload Demo
+
+
+
+

writeFile Streaming Demo

+

Upload a file to a sandbox, download it back, and verify integrity.

+ + + + + + +
+ + + +`; +} diff --git a/examples/stream-upload/test-upload.sh b/examples/stream-upload/test-upload.sh new file mode 100755 index 000000000..a4fb5ab69 --- /dev/null +++ b/examples/stream-upload/test-upload.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash +set -euo pipefail + +BASE_URL="${1:-http://localhost:8787}" +SIZE_MB="${2:-35}" +TMPFILE=$(mktemp /tmp/upload-test-XXXXXX.bin) +DLFILE=$(mktemp /tmp/download-test-XXXXXX.bin) + +cleanup() { rm -f "$TMPFILE" "$DLFILE"; } +trap cleanup EXIT + +echo "=== Stream Upload/Download Integrity Test ===" +echo "Server: $BASE_URL" +echo "Size: ${SIZE_MB} MB" +echo "" + +# 1. Generate random test file +echo "Generating ${SIZE_MB} MB random file..." +dd if=/dev/urandom of="$TMPFILE" bs=1048576 count="$SIZE_MB" 2>/dev/null +ORIG_HASH=$(shasum -a 256 "$TMPFILE" | awk '{print $1}') +echo "Original SHA-256: $ORIG_HASH" +echo "" + +# 2. Upload +echo "Uploading..." +UPLOAD_RESP=$(curl -s -w "\n%{http_code}" \ + -X POST \ + -H "Content-Type: application/octet-stream" \ + --data-binary "@$TMPFILE" \ + "${BASE_URL}/upload?filename=test-${SIZE_MB}mb.bin") + +UPLOAD_HTTP=$(echo "$UPLOAD_RESP" | tail -1) +UPLOAD_BODY=$(echo "$UPLOAD_RESP" | sed '$d') + +if [ "$UPLOAD_HTTP" != "200" ]; then + echo "Upload FAILED (HTTP $UPLOAD_HTTP): $UPLOAD_BODY" + exit 1 +fi + +UPLOAD_PATH=$(echo "$UPLOAD_BODY" | python3 -c "import sys,json; print(json.load(sys.stdin)['path'])") +echo "Upload OK -> $UPLOAD_PATH" +echo "" + +# 3. Download +echo "Downloading..." +DL_HTTP=$(curl -s -o "$DLFILE" -w "%{http_code}" \ + "${BASE_URL}/download?path=${UPLOAD_PATH}") + +if [ "$DL_HTTP" != "200" ]; then + echo "Download FAILED (HTTP $DL_HTTP)" + cat "$DLFILE" + exit 1 +fi + +DL_HASH=$(shasum -a 256 "$DLFILE" | awk '{print $1}') +echo "Downloaded SHA-256: $DL_HASH" +echo "" + +# 4. 
Compare +ORIG_SIZE=$(wc -c < "$TMPFILE" | tr -d ' ') +DL_SIZE=$(wc -c < "$DLFILE" | tr -d ' ') + +echo "Original size: $ORIG_SIZE bytes" +echo "Downloaded size: $DL_SIZE bytes" +echo "" + +if [ "$ORIG_HASH" = "$DL_HASH" ]; then + echo "PASS - Files are identical" + exit 0 +else + echo "FAIL - Hash mismatch!" + exit 1 +fi diff --git a/examples/stream-upload/tsconfig.json b/examples/stream-upload/tsconfig.json new file mode 100644 index 000000000..5337caebc --- /dev/null +++ b/examples/stream-upload/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "esnext", + "lib": ["esnext"], + "module": "esnext", + "moduleResolution": "bundler", + "types": ["@types/node", "./worker-configuration.d.ts"], + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true, + "noEmit": true + }, + "include": ["worker-configuration.d.ts", "src/**/*.ts"] +} diff --git a/examples/stream-upload/worker-configuration.d.ts b/examples/stream-upload/worker-configuration.d.ts new file mode 100644 index 000000000..2413f9b35 --- /dev/null +++ b/examples/stream-upload/worker-configuration.d.ts @@ -0,0 +1,12 @@ +/* eslint-disable */ +// Generated by Wrangler by running `wrangler types` +declare namespace Cloudflare { + interface GlobalProps { + mainModule: typeof import("./src/index"); + durableNamespaces: "Sandbox"; + } + interface Env { + Sandbox: DurableObjectNamespace; + } +} +interface Env extends Cloudflare.Env {} diff --git a/examples/stream-upload/wrangler.jsonc b/examples/stream-upload/wrangler.jsonc new file mode 100644 index 000000000..a973c9990 --- /dev/null +++ b/examples/stream-upload/wrangler.jsonc @@ -0,0 +1,32 @@ +{ + "$schema": "node_modules/wrangler/config-schema.json", + "name": "sandbox-stream-upload-example", + "main": "src/index.ts", + "compatibility_date": "2025-05-06", + "compatibility_flags": ["nodejs_compat"], + "observability": { + "enabled": true + }, + "containers": [ + { + "class_name": "Sandbox", + 
"image": "./Dockerfile", + "instance_type": "lite", + "max_instances": 1 + } + ], + "durable_objects": { + "bindings": [ + { + "class_name": "Sandbox", + "name": "Sandbox" + } + ] + }, + "migrations": [ + { + "new_sqlite_classes": ["Sandbox"], + "tag": "v1" + } + ] +} diff --git a/package-lock.json b/package-lock.json index 1ff610ad6..6bb3cde38 100644 --- a/package-lock.json +++ b/package-lock.json @@ -269,6 +269,17 @@ "wrangler": "^4.63.0" } }, + "examples/stream-upload": { + "name": "@cloudflare/sandbox-stream-upload-example", + "version": "1.0.0", + "license": "MIT", + "devDependencies": { + "@cloudflare/sandbox": "*", + "@types/node": "^24.10.1", + "typescript": "^5.9.3", + "wrangler": "^4.70.0" + } + }, "examples/time-machine": { "name": "@cloudflare/sandbox-time-machine-example", "version": "1.0.0", @@ -312,6 +323,203 @@ "wrangler": "^4.63.0" } }, + "examples/vite-sandbox": { + "name": "@cloudflare/sandbox-vite-example", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@cloudflare/vite-plugin": "^1.29.0" + }, + "devDependencies": { + "@cloudflare/sandbox": "*", + "@vitejs/plugin-react": "^4.0.0", + "react": "^18.0.0", + "react-dom": "^18.0.0", + "vite": "^6.0.0", + "wrangler": "^4.63.0" + } + }, + "examples/vite-sandbox/node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "examples/vite-sandbox/node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": 
"^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "examples/vite-sandbox/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "examples/vite-sandbox/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "examples/vite-sandbox/node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "examples/vite-sandbox/node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } 
+ }, + "examples/vite-sandbox/node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "examples/vite-sandbox/node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "examples/vite-sandbox/node_modules/vite": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + 
"sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, "node_modules/@actions/core": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", @@ -1808,6 +2016,10 @@ "resolved": "sites/sandbox", "link": true }, + "node_modules/@cloudflare/sandbox-stream-upload-example": { + "resolved": "examples/stream-upload", + "link": true + }, "node_modules/@cloudflare/sandbox-time-machine-example": { "resolved": "examples/time-machine", "link": true @@ -1816,14 +2028,18 @@ "resolved": "examples/typescript-validator", "link": true }, + "node_modules/@cloudflare/sandbox-vite-example": { + "resolved": "examples/vite-sandbox", + "link": true + }, "node_modules/@cloudflare/unenv-preset": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.14.0.tgz", - "integrity": "sha512-XKAkWhi1nBdNsSEoNG9nkcbyvfUrSjSf+VYVPfOto3gLTZVc3F4g6RASCMh6IixBKCG2yDgZKQIHGKtjcnLnKg==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.15.0.tgz", + "integrity": "sha512-EGYmJaGZKWl+X8tXxcnx4v2bOZSjQeNI5dWFeXivgX9+YCT69AkzHHwlNbVpqtEUTbew8eQurpyOpeN8fg00nw==", "license": "MIT OR Apache-2.0", "peerDependencies": { "unenv": "2.0.0-rc.24", - "workerd": "^1.20260218.0" + "workerd": "1.20260301.1 || ~1.20260302.1 || ~1.20260303.1 || ~1.20260304.1 || >1.20260305.0 <2.0.0-0" }, "peerDependenciesMeta": { "workerd": { @@ -1832,20 +2048,20 @@ } }, "node_modules/@cloudflare/vite-plugin": { - "version": "1.26.0", - "resolved": "https://registry.npmjs.org/@cloudflare/vite-plugin/-/vite-plugin-1.26.0.tgz", - "integrity": "sha512-F5jSOj9JeWMp9iQa2x+Ocjz++SCfK6Phcca/YLkaddPw5ie7W1VvEWudQ/gxYtRd47mQ/PfCLkE9QGyy6OGEng==", + "version": "1.29.1", + "resolved": "https://registry.npmjs.org/@cloudflare/vite-plugin/-/vite-plugin-1.29.1.tgz", + "integrity": 
"sha512-HTbVG43olzk3KTZD8b0nlmJTI3G6sQdFTmuD9HMJy7bkQbxKppkt16jayFo/NWAAs1GJXpFjLkC8udRdyn3aEg==", "license": "MIT", "dependencies": { - "@cloudflare/unenv-preset": "2.14.0", - "miniflare": "4.20260301.1", + "@cloudflare/unenv-preset": "2.15.0", + "miniflare": "4.20260317.0", "unenv": "2.0.0-rc.24", - "wrangler": "4.70.0", + "wrangler": "4.75.0", "ws": "8.18.0" }, "peerDependencies": { - "vite": "^6.1.0 || ^7.0.0", - "wrangler": "^4.70.0" + "vite": "^6.1.0 || ^7.0.0 || ^8.0.0", + "wrangler": "^4.75.0" } }, "node_modules/@cloudflare/vite-plugin/node_modules/ws": { @@ -2584,9 +2800,9 @@ } }, "node_modules/@cloudflare/workerd-darwin-64": { - "version": "1.20260301.1", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20260301.1.tgz", - "integrity": "sha512-+kJvwociLrvy1JV9BAvoSVsMEIYD982CpFmo/yMEvBwxDIjltYsLTE8DLi0mCkGsQ8Ygidv2fD9wavzXeiY7OQ==", + "version": "1.20260317.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20260317.1.tgz", + "integrity": "sha512-8hjh3sPMwY8M/zedq3/sXoA2Q4BedlGufn3KOOleIG+5a4ReQKLlUah140D7J6zlKmYZAFMJ4tWC7hCuI/s79g==", "cpu": [ "x64" ], @@ -2600,9 +2816,9 @@ } }, "node_modules/@cloudflare/workerd-darwin-arm64": { - "version": "1.20260301.1", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20260301.1.tgz", - "integrity": "sha512-PPIetY3e67YBr9O4UhILK8nbm5TqUDl14qx4rwFNrRSBOvlzuczzbd4BqgpAtbGVFxKp1PWpjAnBvGU/OI/tLQ==", + "version": "1.20260317.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20260317.1.tgz", + "integrity": "sha512-M/MnNyvO5HMgoIdr3QHjdCj2T1ki9gt0vIUnxYxBu9ISXS/jgtMl6chUVPJ7zHYBn9MyYr8ByeN6frjYxj0MGg==", "cpu": [ "arm64" ], @@ -2616,9 +2832,9 @@ } }, "node_modules/@cloudflare/workerd-linux-64": { - "version": "1.20260301.1", - "resolved": 
"https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20260301.1.tgz", - "integrity": "sha512-Gu5vaVTZuYl3cHa+u5CDzSVDBvSkfNyuAHi6Mdfut7TTUdcb3V5CIcR/mXRSyMXzEy9YxEWIfdKMxOMBjupvYQ==", + "version": "1.20260317.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20260317.1.tgz", + "integrity": "sha512-1ltuEjkRcS3fsVF7CxsKlWiRmzq2ZqMfqDN0qUOgbUwkpXsLVJsXmoblaLf5OP00ELlcgF0QsN0p2xPEua4Uug==", "cpu": [ "x64" ], @@ -2632,9 +2848,9 @@ } }, "node_modules/@cloudflare/workerd-linux-arm64": { - "version": "1.20260301.1", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20260301.1.tgz", - "integrity": "sha512-igL1pkyCXW6GiGpjdOAvqMi87UW0LMc/+yIQe/CSzuZJm5GzXoAMrwVTkCFnikk6JVGELrM5x0tGYlxa0sk5Iw==", + "version": "1.20260317.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20260317.1.tgz", + "integrity": "sha512-3QrNnPF1xlaNwkHpasvRvAMidOvQs2NhXQmALJrEfpIJ/IDL2la8g499yXp3eqhG3hVMCB07XVY149GTs42Xtw==", "cpu": [ "arm64" ], @@ -2648,9 +2864,9 @@ } }, "node_modules/@cloudflare/workerd-windows-64": { - "version": "1.20260301.1", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20260301.1.tgz", - "integrity": "sha512-Q0wMJ4kcujXILwQKQFc1jaYamVsNvjuECzvRrTI8OxGFMx2yq9aOsswViE4X1gaS2YQQ5u0JGwuGi5WdT1Lt7A==", + "version": "1.20260317.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20260317.1.tgz", + "integrity": "sha512-MfZTz+7LfuIpMGTa3RLXHX8Z/pnycZLItn94WRdHr8LPVet+C5/1Nzei399w/jr3+kzT4pDKk26JF/tlI5elpQ==", "cpu": [ "x64" ], @@ -2664,9 +2880,9 @@ } }, "node_modules/@cloudflare/workers-types": { - "version": "4.20260301.1", - "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20260301.1.tgz", - "integrity": 
"sha512-klKnECMb5A4GtVF0P5NH6rCjtyjqIEKJaz6kEtx9YPHhfFO2HUEarO+MI4F8WPchgeZqpGlEpDhRapzrOTw51Q==", + "version": "4.20260317.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20260317.1.tgz", + "integrity": "sha512-+G4eVwyCpm8Au1ex8vQBCuA9wnwqetz4tPNRoB/53qvktERWBRMQnrtvC1k584yRE3emMThtuY0gWshvSJ++PQ==", "devOptional": true, "license": "MIT OR Apache-2.0" }, @@ -10170,6 +10386,19 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, "node_modules/loupe": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", @@ -11095,15 +11324,15 @@ } }, "node_modules/miniflare": { - "version": "4.20260301.1", - "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20260301.1.tgz", - "integrity": "sha512-fqkHx0QMKswRH9uqQQQOU/RoaS3Wjckxy3CUX3YGJr0ZIMu7ObvI+NovdYi6RIsSPthNtq+3TPmRNxjeRiasog==", + "version": "4.20260317.0", + "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20260317.0.tgz", + "integrity": "sha512-xuwk5Kjv+shi5iUBAdCrRl9IaWSGnTU8WuTQzsUS2GlSDIMCJuu8DiF/d9ExjMXYiQG5ml+k9SVKnMj8cRkq0w==", "license": "MIT", "dependencies": { "@cspotcode/source-map-support": "0.8.1", "sharp": "^0.34.5", - "undici": "7.18.2", - "workerd": "1.20260301.1", + "undici": "7.24.4", + "workerd": "1.20260317.1", "ws": "8.18.0", "youch": "4.1.0-beta.10" }, @@ -11115,9 +11344,9 @@ } }, "node_modules/miniflare/node_modules/undici": { - "version": "7.18.2", - "resolved": "https://registry.npmjs.org/undici/-/undici-7.18.2.tgz", - "integrity": 
"sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==", + "version": "7.24.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.24.4.tgz", + "integrity": "sha512-BM/JzwwaRXxrLdElV2Uo6cTLEjhSb3WXboncJamZ15NgUURmvlXvxa6xkwIOILIjPNo9i8ku136ZvWV0Uly8+w==", "license": "MIT", "engines": { "node": ">=20.18.1" @@ -14943,9 +15172,9 @@ } }, "node_modules/workerd": { - "version": "1.20260301.1", - "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20260301.1.tgz", - "integrity": "sha512-oterQ1IFd3h7PjCfT4znSFOkJCvNQ6YMOyZ40YsnO3nrSpgB4TbJVYWFOnyJAw71/RQuupfVqZZWKvsy8GO3fw==", + "version": "1.20260317.1", + "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20260317.1.tgz", + "integrity": "sha512-ZuEq1OdrJBS+NV+L5HMYPCzVn49a2O60slQiiLpG44jqtlOo+S167fWC76kEXteXLLLydeuRrluRel7WdOUa4g==", "hasInstallScript": true, "license": "Apache-2.0", "bin": { @@ -14955,11 +15184,11 @@ "node": ">=16" }, "optionalDependencies": { - "@cloudflare/workerd-darwin-64": "1.20260301.1", - "@cloudflare/workerd-darwin-arm64": "1.20260301.1", - "@cloudflare/workerd-linux-64": "1.20260301.1", - "@cloudflare/workerd-linux-arm64": "1.20260301.1", - "@cloudflare/workerd-windows-64": "1.20260301.1" + "@cloudflare/workerd-darwin-64": "1.20260317.1", + "@cloudflare/workerd-darwin-arm64": "1.20260317.1", + "@cloudflare/workerd-linux-64": "1.20260317.1", + "@cloudflare/workerd-linux-arm64": "1.20260317.1", + "@cloudflare/workerd-windows-64": "1.20260317.1" } }, "node_modules/workers-ai-provider": { @@ -14973,19 +15202,19 @@ } }, "node_modules/wrangler": { - "version": "4.70.0", - "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.70.0.tgz", - "integrity": "sha512-PNDZ9o4e+B5x+1bUbz62Hmwz6G9lw+I9pnYe/AguLddJFjfIyt2cmFOUOb3eOZSoXsrhcEPUg2YidYIbVwUkfw==", + "version": "4.75.0", + "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.75.0.tgz", + "integrity": 
"sha512-Efk1tcnm4eduBYpH1sSjMYydXMnIFPns/qABI3+fsbDrUk5GksNYX8nYGVP4sFygvGPO7kJc36YJKB5ooA7JAg==", "license": "MIT OR Apache-2.0", "dependencies": { "@cloudflare/kv-asset-handler": "0.4.2", - "@cloudflare/unenv-preset": "2.14.0", + "@cloudflare/unenv-preset": "2.15.0", "blake3-wasm": "2.1.5", "esbuild": "0.27.3", - "miniflare": "4.20260301.1", + "miniflare": "4.20260317.0", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.24", - "workerd": "1.20260301.1" + "workerd": "1.20260317.1" }, "bin": { "wrangler": "bin/wrangler.js", @@ -14998,7 +15227,7 @@ "fsevents": "~2.3.2" }, "peerDependencies": { - "@cloudflare/workers-types": "^4.20260226.1" + "@cloudflare/workers-types": "^4.20260317.1" }, "peerDependenciesMeta": { "@cloudflare/workers-types": { diff --git a/packages/sandbox-container/src/core/types.ts b/packages/sandbox-container/src/core/types.ts index 048cd1c4c..6a977c81e 100644 --- a/packages/sandbox-container/src/core/types.ts +++ b/packages/sandbox-container/src/core/types.ts @@ -307,11 +307,6 @@ export interface ReadOptions { encoding?: string; } -export interface WriteOptions { - encoding?: string; - mode?: string; -} - export interface MkdirOptions { recursive?: boolean; mode?: string; diff --git a/packages/sandbox-container/src/handlers/file-handler.ts b/packages/sandbox-container/src/handlers/file-handler.ts index 340d79163..5d351d642 100644 --- a/packages/sandbox-container/src/handlers/file-handler.ts +++ b/packages/sandbox-container/src/handlers/file-handler.ts @@ -15,7 +15,6 @@ import type { ReadFileResult, RenameFileRequest, RenameFileResult, - WriteFileRequest, WriteFileResult } from '@repo/shared'; import { ErrorCode } from '@repo/shared/errors'; @@ -159,25 +158,38 @@ export class FileHandler extends BaseHandler { request: Request, context: RequestContext ): Promise { - const body = await this.parseRequestBody(request); + const url = new URL(request.url); + const path = url.searchParams.get('path'); + const sessionId = url.searchParams.get('sessionId') 
?? 'default'; - const options = - body.encoding !== undefined ? { encoding: body.encoding } : {}; + if (!path) { + return this.createErrorResponse( + { + message: 'Missing required query parameter: path', + code: ErrorCode.VALIDATION_FAILED + }, + context + ); + } - const result = await this.fileService.writeFile( - body.path, - body.content, - options, - body.sessionId - ); + if (!request.body) { + return this.createErrorResponse( + { + message: 'Request body is required for write', + code: ErrorCode.VALIDATION_FAILED + }, + context + ); + } + + const result = await this.fileService.write(path, request.body, sessionId); if (result.success) { const response: WriteFileResult = { success: true, - path: body.path, + path, timestamp: new Date().toISOString() }; - return this.createTypedResponse(response, context); } else { return this.createErrorResponse(result.error, context); diff --git a/packages/sandbox-container/src/services/file-service.ts b/packages/sandbox-container/src/services/file-service.ts index cbbd91d5d..bcbf17da9 100644 --- a/packages/sandbox-container/src/services/file-service.ts +++ b/packages/sandbox-container/src/services/file-service.ts @@ -1,4 +1,5 @@ -import { resolve } from 'node:path'; +import { mkdir } from 'node:fs/promises'; +import { dirname, resolve } from 'node:path'; import type { FileInfo, ListFilesOptions, Logger } from '@repo/shared'; import { shellEscape } from '@repo/shared'; import type { @@ -13,8 +14,7 @@ import type { FileStats, MkdirOptions, ReadOptions, - ServiceResult, - WriteOptions + ServiceResult } from '../core/types'; import { FileManager } from '../managers/file-manager'; import type { SessionManager } from './session-manager'; @@ -35,8 +35,7 @@ export interface FileSystemOperations { ): Promise>; write( path: string, - content: string, - options?: WriteOptions, + stream: ReadableStream, sessionId?: string ): Promise>; delete(path: string, sessionId?: string): Promise>; @@ -220,8 +219,7 @@ export class FileService 
implements FileSystemOperations { async write( path: string, - content: string, - options: WriteOptions = {}, + stream: ReadableStream, sessionId = 'default' ): Promise> { try { @@ -246,32 +244,6 @@ export class FileService implements FileSystemOperations { }; } - // 2. Write file using Bun native file operations - const normalizedEncoding = - options.encoding === 'utf8' ? 'utf-8' : options.encoding || 'utf-8'; - - if (normalizedEncoding === 'base64') { - // Validate that content only contains valid base64 characters - if (!/^[A-Za-z0-9+/=]*$/.test(content)) { - return { - success: false, - error: { - message: `Invalid base64 content for '${path}': must contain only A-Z, a-z, 0-9, +, /, =`, - code: ErrorCode.VALIDATION_FAILED, - details: { - validationErrors: [ - { - field: 'content', - message: 'Invalid base64 characters', - code: 'INVALID_BASE64' - } - ] - } satisfies ValidationFailedContext - } - }; - } - } - const writeResult = await this.sessionManager.withSession( sessionId, async (exec) => { @@ -297,11 +269,8 @@ export class FileService implements FileSystemOperations { } try { - const data = - normalizedEncoding === 'base64' - ? Buffer.from(content, 'base64') - : content; - await Bun.write(targetPath, data); + await mkdir(dirname(targetPath), { recursive: true }); + await Bun.write(targetPath, new Response(stream)); } catch (error) { const errorMessage = error instanceof Error ? 
error.message : 'Unknown error'; @@ -1043,15 +1012,6 @@ export class FileService implements FileSystemOperations { return await this.read(path, options, sessionId); } - async writeFile( - path: string, - content: string, - options?: WriteOptions, - sessionId?: string - ): Promise> { - return await this.write(path, content, options, sessionId); - } - async deleteFile( path: string, sessionId?: string diff --git a/packages/sandbox-container/tests/handlers/file-handler.test.ts b/packages/sandbox-container/tests/handlers/file-handler.test.ts index da5b4c4c2..a8f7d77d4 100644 --- a/packages/sandbox-container/tests/handlers/file-handler.test.ts +++ b/packages/sandbox-container/tests/handlers/file-handler.test.ts @@ -17,7 +17,7 @@ import type { FileService } from '@sandbox-container/services/file-service'; // Mock the dependencies - use partial mock to avoid missing properties const mockFileService = { readFile: vi.fn(), - writeFile: vi.fn(), + deleteFile: vi.fn(), renameFile: vi.fn(), moveFile: vi.fn(), @@ -170,76 +170,59 @@ describe('FileHandler', () => { describe('handleWrite - POST /api/write', () => { it('should write file successfully', async () => { - const writeFileData = { - path: '/tmp/output.txt', - content: 'Hello, File!', - encoding: 'utf-8', - sessionId: 'session-123' - }; - - (mockFileService.writeFile as any).mockResolvedValue({ + (mockFileService.write as any).mockResolvedValue({ success: true }); - const request = new Request('http://localhost:3000/api/write', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(writeFileData) - }); + const request = new Request( + 'http://localhost:3000/api/write?path=/tmp/output.txt&sessionId=session-123', + { + method: 'POST', + body: new TextEncoder().encode('Hello, File!') + } + ); const response = await fileHandler.handle(request, mockContext); expect(response.status).toBe(200); const responseData = (await response.json()) as WriteFileResult; 
expect(responseData.success).toBe(true); - expect(responseData.path).toBe('/tmp/output.txt'); // ✅ Check path field + expect(responseData.path).toBe('/tmp/output.txt'); expect(responseData.timestamp).toBeDefined(); // Verify service was called correctly - expect(mockFileService.writeFile).toHaveBeenCalledWith( + expect(mockFileService.write).toHaveBeenCalledWith( '/tmp/output.txt', - 'Hello, File!', - { - encoding: 'utf-8' - }, + expect.anything(), 'session-123' ); }); - it('should pass undefined sessionId when not provided', async () => { - const writeFileData = { - path: '/tmp/output.txt', - content: 'Hello, File!' - }; - - (mockFileService.writeFile as any).mockResolvedValue({ + it('should use default sessionId when not provided', async () => { + (mockFileService.write as any).mockResolvedValue({ success: true }); - const request = new Request('http://localhost:3000/api/write', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(writeFileData) - }); + const request = new Request( + 'http://localhost:3000/api/write?path=/tmp/output.txt', + { + method: 'POST', + body: new TextEncoder().encode('Hello, File!') + } + ); const response = await fileHandler.handle(request, mockContext); expect(response.status).toBe(200); - expect(mockFileService.writeFile).toHaveBeenCalledWith( + expect(mockFileService.write).toHaveBeenCalledWith( '/tmp/output.txt', - 'Hello, File!', - {}, - undefined + expect.anything(), + 'default' ); }); it('should handle file write errors', async () => { - const writeFileData = { - path: '/readonly/file.txt', - content: 'content' - }; - - (mockFileService.writeFile as any).mockResolvedValue({ + (mockFileService.write as any).mockResolvedValue({ success: false, error: { message: 'Permission denied', @@ -248,11 +231,13 @@ describe('FileHandler', () => { } }); - const request = new Request('http://localhost:3000/api/write', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: 
JSON.stringify(writeFileData) - }); + const request = new Request( + 'http://localhost:3000/api/write?path=/readonly/file.txt', + { + method: 'POST', + body: new TextEncoder().encode('content') + } + ); const response = await fileHandler.handle(request, mockContext); @@ -263,6 +248,39 @@ describe('FileHandler', () => { expect(responseData.httpStatus).toBe(403); expect(responseData.timestamp).toBeDefined(); }); + + it('should return error when path is missing', async () => { + const request = new Request('http://localhost:3000/api/write', { + method: 'POST', + body: new Uint8Array([1, 2, 3]) + }); + + const response = await fileHandler.handle(request, mockContext); + + expect(response.status).toBe(400); + const responseData = (await response.json()) as ErrorResponse; + expect(responseData.code).toBe('VALIDATION_FAILED'); + expect(responseData.message).toContain('path'); + expect(responseData.timestamp).toBeDefined(); + }); + + it('should return error when request body is missing', async () => { + const request = new Request( + 'http://localhost:3000/api/write?path=/app/test.bin', + { + method: 'POST', + body: null + } + ); + + const response = await fileHandler.handle(request, mockContext); + + expect(response.status).toBe(400); + const responseData = (await response.json()) as ErrorResponse; + expect(responseData.code).toBe('VALIDATION_FAILED'); + expect(responseData.message).toContain('body'); + expect(responseData.timestamp).toBeDefined(); + }); }); describe('handleDelete - POST /api/delete', () => { @@ -718,7 +736,8 @@ describe('FileHandler', () => { }, { endpoint: '/api/write', - data: { path: '/tmp/test.txt', content: 'data' }, + data: null, + queryParams: '?path=/tmp/test.txt', mockResponse: { success: true }, expectedFields: ['success', 'path', 'timestamp'] }, @@ -740,7 +759,7 @@ describe('FileHandler', () => { operation.mockResponse ); } else if (operation.endpoint === '/api/write') { - (mockFileService.writeFile as any).mockResolvedValue( + 
(mockFileService.write as any).mockResolvedValue( operation.mockResponse ); } else if (operation.endpoint === '/api/delete') { @@ -749,12 +768,18 @@ describe('FileHandler', () => { ); } + const queryParams = (operation as any).queryParams || ''; + const isWrite = operation.endpoint === '/api/write'; const request = new Request( - `http://localhost:3000${operation.endpoint}`, + `http://localhost:3000${operation.endpoint}${queryParams}`, { method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(operation.data) + headers: isWrite + ? undefined + : { 'Content-Type': 'application/json' }, + body: isWrite + ? new TextEncoder().encode('data') + : JSON.stringify(operation.data) } ); diff --git a/packages/sandbox-container/tests/services/file-service.test.ts b/packages/sandbox-container/tests/services/file-service.test.ts index 3ea108693..ee4e97051 100644 --- a/packages/sandbox-container/tests/services/file-service.test.ts +++ b/packages/sandbox-container/tests/services/file-service.test.ts @@ -7,6 +7,7 @@ import { spyOn, vi } from 'bun:test'; +import * as fsPromises from 'node:fs/promises'; import type { Logger } from '@repo/shared'; import type { ServiceResult } from '@sandbox-container/core/types'; import { @@ -496,41 +497,46 @@ describe('FileService', () => { }); describe('write', () => { - it('should write file successfully with utf-8 encoding', async () => { - const testPath = '/tmp/test.txt'; - const testContent = 'Test content'; - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); + // Mock fs.mkdir so tests don't touch the real filesystem + beforeEach(() => { + spyOn(fsPromises, 'mkdir').mockResolvedValue(undefined); + }); - const result = await fileService.write( - testPath, - testContent, - {}, - 'session-123' - ); + function stringToStream(content: string): ReadableStream { + return new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode(content)); + controller.close(); + } + }); + } - 
expect(result.success).toBe(true); - expect(writeSpy).toHaveBeenCalledWith(testPath, testContent); - expect(mockSessionManager.executeInSession).not.toHaveBeenCalled(); - }); + function bytesToStream(data: Uint8Array): ReadableStream { + return new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + } + }); + } - it('should support utf8 as alias for utf-8 encoding in write', async () => { + it('should write file successfully', async () => { const testPath = '/tmp/test.txt'; - const testContent = 'Test content'; - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); + const bunWriteSpy = spyOn(Bun, 'write').mockResolvedValue(12); const result = await fileService.write( testPath, - testContent, - { encoding: 'utf8' }, + stringToStream('Test content'), 'session-123' ); expect(result.success).toBe(true); - expect(writeSpy).toHaveBeenCalledWith(testPath, testContent); + expect(bunWriteSpy).toHaveBeenCalledWith(testPath, expect.any(Response)); }); it('should resolve relative paths using session working directory', async () => { - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); + const bunWriteSpy = spyOn(Bun, 'write').mockResolvedValue(7); + mocked(mockSessionManager.executeInSession).mockResolvedValueOnce({ success: true, data: { exitCode: 0, stdout: '/workspace/project\n', stderr: '' } @@ -538,24 +544,20 @@ describe('FileService', () => { const result = await fileService.write( 'notes/todo.txt', - 'content', - {}, + stringToStream('content'), 'session-123' ); expect(result.success).toBe(true); - expect(mockSessionManager.executeInSession).toHaveBeenCalledWith( - 'session-123', - 'pwd' - ); - expect(writeSpy).toHaveBeenCalledWith( + expect(bunWriteSpy).toHaveBeenCalledWith( '/workspace/project/notes/todo.txt', - 'content' + expect.any(Response) ); }); it('should normalize relative paths before writing', async () => { - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); + const bunWriteSpy = 
spyOn(Bun, 'write').mockResolvedValue(7); + mocked(mockSessionManager.executeInSession).mockResolvedValueOnce({ success: true, data: { exitCode: 0, stdout: '/workspace/project\n', stderr: '' } @@ -563,94 +565,113 @@ describe('FileService', () => { const result = await fileService.write( './notes/../todo.txt', - 'content', - {}, + stringToStream('content'), 'session-123' ); expect(result.success).toBe(true); - expect(writeSpy).toHaveBeenCalledWith( + expect(bunWriteSpy).toHaveBeenCalledWith( '/workspace/project/todo.txt', - 'content' + expect.any(Response) ); }); - it('should write binary file with base64 encoding option', async () => { + it('should write binary data from stream', async () => { const testPath = '/tmp/image.png'; - const binaryData = Buffer.from([0x89, 0x50, 0x4e, 0x47]); // PNG header - const base64Content = binaryData.toString('base64'); - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); + const binaryData = new Uint8Array([0x89, 0x50, 0x4e, 0x47]); // PNG header + const bunWriteSpy = spyOn(Bun, 'write').mockResolvedValue(4); const result = await fileService.write( testPath, - base64Content, - { encoding: 'base64' }, + bytesToStream(binaryData), 'session-123' ); expect(result.success).toBe(true); - expect(writeSpy).toHaveBeenCalledWith(testPath, binaryData); + expect(bunWriteSpy).toHaveBeenCalledWith(testPath, expect.any(Response)); }); - it('should reject base64 content with invalid characters', async () => { - const testPath = '/tmp/test.txt'; - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); - - const maliciousInputs = [ - "abc'; rm -rf / #", - 'valid$(whoami)base64', - 'test\nmalicious', - 'test`whoami`test', - 'test|whoami', - 'test&whoami&' - ]; + it('should handle write errors', async () => { + spyOn(Bun, 'write').mockRejectedValue(new Error('Disk full')); - for (const maliciousContent of maliciousInputs) { - vi.clearAllMocks(); + const result = await fileService.write( + '/tmp/test.txt', + 
stringToStream('content') + ); - const result = await fileService.write( - testPath, - maliciousContent, - { encoding: 'base64' }, - 'session-123' - ); + expect(result.success).toBe(false); + if (!result.success) { + expect(result.error.code).toBe('FILESYSTEM_ERROR'); + } + }); + + it('should reject invalid paths', async () => { + mocked(mockSecurityService.validatePath).mockReturnValue({ + isValid: false, + errors: ['Path contains invalid characters'] + }); + + const result = await fileService.write( + '../escape', + stringToStream('content') + ); - expect(result.success).toBe(false); - if (result.success) throw new Error('Expected failure'); + expect(result.success).toBe(false); + if (!result.success) { expect(result.error.code).toBe('VALIDATION_FAILED'); - expect(writeSpy).not.toHaveBeenCalled(); - expect(mockSessionManager.executeInSession).not.toHaveBeenCalled(); } }); - it('should accept valid base64 content with padding', async () => { - const testPath = '/tmp/test.txt'; - const validBase64 = 'SGVsbG8gV29ybGQ='; - const writeSpy = vi.spyOn(Bun, 'write').mockResolvedValue(0); + it('should create an empty file from an empty stream', async () => { + const emptyStream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + const bunWriteSpy = spyOn(Bun, 'write').mockResolvedValue(0); const result = await fileService.write( - testPath, - validBase64, - { encoding: 'base64' }, - 'session-123' + '/workspace/empty.bin', + emptyStream ); expect(result.success).toBe(true); - expect(writeSpy).toHaveBeenCalledWith( - testPath, - Buffer.from(validBase64, 'base64') + expect(bunWriteSpy).toHaveBeenCalledWith( + '/workspace/empty.bin', + expect.any(Response) ); }); - it('should handle write errors', async () => { - vi.spyOn(Bun, 'write').mockRejectedValue(new Error('Disk full')); + it('should handle multi-chunk streams', async () => { + const chunks = [ + new Uint8Array([1, 2, 3]), + new Uint8Array([4, 5, 6]), + new Uint8Array([7, 8, 9]) + ]; + let 
chunkIndex = 0; + const multiChunkStream = new ReadableStream({ + pull(controller) { + if (chunkIndex < chunks.length) { + controller.enqueue(chunks[chunkIndex++]); + } else { + controller.close(); + } + } + }); - const result = await fileService.write('/tmp/test.txt', 'content'); + const bunWriteSpy = spyOn(Bun, 'write').mockResolvedValue(9); - expect(result.success).toBe(false); - if (!result.success) { - expect(result.error.code).toBe('FILESYSTEM_ERROR'); - } + const result = await fileService.write( + '/workspace/multi.bin', + multiChunkStream + ); + + expect(result.success).toBe(true); + expect(bunWriteSpy).toHaveBeenCalledWith( + '/workspace/multi.bin', + expect.any(Response) + ); }); }); diff --git a/packages/sandbox/src/clients/file-client.ts b/packages/sandbox/src/clients/file-client.ts index af81c797f..cb10f782b 100644 --- a/packages/sandbox/src/clients/file-client.ts +++ b/packages/sandbox/src/clients/file-client.ts @@ -12,6 +12,20 @@ import type { import { BaseHttpClient } from './base-client'; import type { HttpClientOptions, SessionRequest } from './types'; +/** + * Decode a base64 string to bytes with a helpful error message on failure. + */ +export function decodeBase64(content: string): Uint8Array { + try { + return Uint8Array.from(atob(content), (c) => c.charCodeAt(0)); + } catch { + throw new Error( + 'writeFile: content is not valid base64. ' + + 'Decode the base64 string before calling writeFile, or pass a ReadableStream instead.' 
+ ); + } +} + /** * Request interface for creating directories */ @@ -25,7 +39,7 @@ export interface MkdirRequest extends SessionRequest { */ export interface WriteFileRequest extends SessionRequest { path: string; - content: string; + content: string | ReadableStream; encoding?: string; } @@ -83,28 +97,81 @@ export class FileClient extends BaseHttpClient { /** * Write content to a file * @param path - File path to write to - * @param content - Content to write + * @param content - Content to write (string or binary stream) * @param sessionId - The session ID for this operation * @param options - Optional settings (encoding) */ async writeFile( path: string, - content: string, + content: string | ReadableStream, sessionId: string, options?: { encoding?: string } ): Promise { try { - const data = { - path, - content, - sessionId, - encoding: options?.encoding - }; + let stream: ReadableStream; - const response = await this.post('/api/write', data); + if (content instanceof ReadableStream) { + stream = content; + } else { + let bytes: Uint8Array; + if (options?.encoding === 'base64') { + bytes = decodeBase64(content); + } else { + bytes = new TextEncoder().encode(content); + } + stream = new ReadableStream({ + start(controller) { + controller.enqueue(bytes); + controller.close(); + } + }); + } - this.logSuccess('File written', `${path} (${content.length} chars)`); - return response; + const url = new URL('/api/write', 'http://placeholder'); + url.searchParams.set('path', path); + url.searchParams.set('sessionId', sessionId); + + await this.transport.waitForContainer(); + + // TODO: Refactor the transport layer to support a concept of operations + // that cannot be streamed over WebSocket (e.g. via a supportsStreamBody() + // method), so FileClient doesn't need to know about transport internals. + // + // File writes always bypass WebSocket transport and go directly over HTTP. 
+ // Sending a ReadableStream body over WebSocket requires buffering the + // entire file in memory before encoding it, which defeats the purpose of + // streaming and breaks large file uploads. + const writePath = `/api/write?${url.searchParams.toString()}`; + const fetchOptions: RequestInit = { + method: 'POST', + body: stream, + duplex: 'half' + } as RequestInit; + + let response: Response; + if (this.options.stub) { + const writeUrl = `http://localhost:${this.options.port ?? 3000}${writePath}`; + response = await this.options.stub.containerFetch( + writeUrl, + fetchOptions, + this.options.port + ); + } else { + const baseUrl = this.options.baseUrl ?? 'http://localhost:3000'; + response = await globalThis.fetch( + `${baseUrl}${writePath}`, + fetchOptions + ); + } + + if (!response.ok) { + await this.handleErrorResponse(response); + } + + const result = (await response.json()) as WriteFileResult; + + this.logSuccess('File written', path); + return result; } catch (error) { this.logError('writeFile', error); throw error; @@ -112,8 +179,8 @@ export class FileClient extends BaseHttpClient { } /** - * Read content from a file - * @param path - File path to read from + * Read a file from the filesystem + * @param path - File path to read * @param sessionId - The session ID for this operation * @param options - Optional settings (encoding) */ diff --git a/packages/sandbox/src/clients/transport/base-transport.ts b/packages/sandbox/src/clients/transport/base-transport.ts index fe3a9996a..14e10f5e8 100644 --- a/packages/sandbox/src/clients/transport/base-transport.ts +++ b/packages/sandbox/src/clients/transport/base-transport.ts @@ -45,7 +45,28 @@ export abstract class BaseTransport implements ITransport { let attempt = 0; while (true) { - const response = await this.doFetch(path, options); + let response: Response; + try { + response = await this.doFetch(path, options); + } catch (error) { + // A TypeError here means the request body stream was already consumed by a + // 
prior attempt and cannot be replayed. Return a synthetic 503 so the caller + // receives a clean error rather than a raw TypeError. + if (error instanceof TypeError) { + this.logger.warn( + 'Request body stream already consumed, cannot retry', + { + path, + mode: this.getMode() + } + ); + return new Response(null, { + status: 503, + statusText: 'Stream body already consumed' + }); + } + throw error; + } // Check for retryable 503 (container starting) if (response.status === 503) { @@ -80,6 +101,51 @@ export abstract class BaseTransport implements ITransport { } } + /** + * Poll /api/ping until the container responds with a non-503 status or the + * retry budget is exhausted. + * + * Call this before sending a non-replayable request body (e.g. a + * ReadableStream) so the body is only consumed once the container is + * confirmed ready. Uses doFetch() directly to avoid the recursive retry + * loop in fetch(). + */ + async waitForContainer(): Promise { + const startTime = Date.now(); + let attempt = 0; + + while (true) { + const response = await this.doFetch('/api/ping', { method: 'GET' }); + + if (response.status !== 503) { + return; + } + + const elapsed = Date.now() - startTime; + const remaining = this.retryTimeoutMs - elapsed; + + if (remaining > MIN_TIME_FOR_RETRY_MS) { + const delay = Math.min(3000 * 2 ** attempt, 30000); + + this.logger.info('Container not ready, retrying', { + status: response.status, + attempt: attempt + 1, + delayMs: delay, + remainingSec: Math.floor(remaining / 1000), + mode: this.getMode() + }); + + await this.sleep(delay); + attempt++; + continue; + } + + throw new Error( + `Container failed to become ready after ${attempt + 1} attempts (${Math.floor(elapsed / 1000)}s)` + ); + } + } + /** * Transport-specific fetch implementation (no retry) * Subclasses implement the actual HTTP or WebSocket fetch. 
diff --git a/packages/sandbox/src/clients/transport/types.ts b/packages/sandbox/src/clients/transport/types.ts index 8a851497c..b6d1d7fec 100644 --- a/packages/sandbox/src/clients/transport/types.ts +++ b/packages/sandbox/src/clients/transport/types.ts @@ -93,4 +93,12 @@ export interface ITransport { * Update the 503 retry budget without recreating the transport */ setRetryTimeoutMs(ms: number): void; + + /** + * Poll /api/ping until the container is ready (non-503 response) or the + * retry budget is exhausted. Used before sending non-replayable request + * bodies (e.g. ReadableStream) so the body is only consumed once the + * container is confirmed ready. + */ + waitForContainer(): Promise; } diff --git a/packages/sandbox/src/clients/transport/ws-transport.ts b/packages/sandbox/src/clients/transport/ws-transport.ts index 6bec4f404..0c25a3cf2 100644 --- a/packages/sandbox/src/clients/transport/ws-transport.ts +++ b/packages/sandbox/src/clients/transport/ws-transport.ts @@ -124,7 +124,7 @@ export class WebSocketTransport extends BaseTransport { await this.connect(); const method = (options?.method || 'GET') as WSMethod; - const body = this.parseBody(options?.body); + const body = await this.parseBody(options?.body); const result = await this.request(method, path, body); @@ -148,7 +148,7 @@ export class WebSocketTransport extends BaseTransport { /** * Parse request body from RequestInit */ - private parseBody(body: RequestInit['body']): unknown { + private async parseBody(body: RequestInit['body']): Promise { if (!body) { return undefined; } @@ -163,6 +163,33 @@ export class WebSocketTransport extends BaseTransport { } } + // TODO: Investigate removing the WebSocket transport entirely. + // Sending stream bodies over WebSocket requires buffering the full payload + // in memory (defeating the point of streaming) and adds protocol complexity. + // HTTP transport handles ReadableStream bodies natively without buffering. 
+ if (body instanceof ReadableStream) { + const reader = body.getReader(); + const chunks: Uint8Array[] = []; + while (true) { + const { done, value } = await reader.read(); + if (done) break; + chunks.push(value); + } + const totalLength = chunks.reduce((sum, c) => sum + c.length, 0); + const merged = new Uint8Array(totalLength); + let offset = 0; + for (const chunk of chunks) { + merged.set(chunk, offset); + offset += chunk.length; + } + let binary = ''; + for (let i = 0; i < merged.length; i++) { + binary += String.fromCharCode(merged[i]); + } + const base64 = btoa(binary); + return { __streamBase64: base64 }; + } + throw new Error( `WebSocket transport only supports string bodies. Got: ${typeof body}` ); diff --git a/packages/sandbox/src/sandbox.ts b/packages/sandbox/src/sandbox.ts index 5ecad8261..174f32cba 100644 --- a/packages/sandbox/src/sandbox.ts +++ b/packages/sandbox/src/sandbox.ts @@ -43,6 +43,7 @@ import { } from '@repo/shared'; import { AwsClient } from 'aws4fetch'; import { type Desktop, type ExecuteResponse, SandboxClient } from './clients'; +import { decodeBase64 } from './clients/file-client'; import type { ErrorResponse } from './errors'; import { BackupCreateError, @@ -335,6 +336,14 @@ export function getSandbox>( terminal: (request: Request, opts?: PtyOptions) => proxyTerminal(stub, defaultSessionId, request, opts), wsConnect: connect(stub), + writeFile: ( + path: string, + content: string | ReadableStream, + options?: { encoding?: string; sessionId?: string } + ) => + stub.writeFile(path, contentToByteStream(content, options?.encoding), { + sessionId: options?.sessionId ?? defaultSessionId + }), // Client-side proxy for desktop operations. Each method call is dispatched // to the DO's callDesktop() method, avoiding RPC pipelining through getters. 
desktop: new Proxy({} as Desktop, { @@ -366,7 +375,17 @@ function enhanceSession( return { ...rpcSession, terminal: (request: Request, opts?: PtyOptions) => - proxyTerminal(stub, rpcSession.id, request, opts) + proxyTerminal(stub, rpcSession.id, request, opts), + writeFile: ( + path: string, + content: string | ReadableStream, + options?: { encoding?: string } + ) => + rpcSession.writeFile( + path, + contentToByteStream(content, options?.encoding), + options + ) }; } @@ -384,6 +403,61 @@ export function connect(stub: { }; } +/** + * Ensure a ReadableStream is byte-oriented (type: "bytes") for RPC transfer. + * Cloudflare's RPC streams byte-oriented ReadableStreams without buffering, + * bypassing the 32 MiB serialization limit. Regular streams are fully buffered + * before transfer which causes large uploads to fail. + */ +function toByteStream( + stream: ReadableStream +): ReadableStream { + const reader = stream.getReader(); + return new ReadableStream({ + type: 'bytes', + async pull(controller) { + const { done, value } = await reader.read(); + if (done) { + controller.close(); + return; + } + controller.enqueue(value); + }, + cancel(reason) { + reader.cancel(reason); + } + }); +} + +/** + * Convert writeFile content to a byte-oriented ReadableStream. + * Strings are encoded to UTF-8 (or decoded from base64 first), and + * ReadableStreams are wrapped to ensure they are byte-oriented for RPC. + */ +function contentToByteStream( + content: string | ReadableStream, + encoding?: string +): ReadableStream { + if (content instanceof ReadableStream) { + return toByteStream(content); + } + + let bytes: Uint8Array; + if (encoding === 'base64') { + bytes = decodeBase64(content); + } else { + bytes = new TextEncoder().encode(content); + } + + return new ReadableStream({ + type: 'bytes', + start(controller) { + controller.enqueue(bytes); + controller.close(); + } + }); +} + /** * Type guard for R2Bucket binding. * Checks for the minimal R2Bucket interface methods we use. 
@@ -2688,9 +2762,24 @@ export class Sandbox extends Container implements ISandbox { }); } + /** + * Write a file to the sandbox. + * + * Accepts string content (UTF-8 or base64) or a ReadableStream for binary + * data of any size. ReadableStream inputs are converted to a byte-oriented + * stream for efficient transfer across the Worker-to-Durable Object boundary + * without buffering. The original stream is consumed and cannot be reused. + * + * @param path - Destination file path in the sandbox + * @param content - File content as a UTF-8 string or a ReadableStream of bytes + * @param options.encoding - (Deprecated) String encoding, only applies when content + * is a string. Use 'base64' to decode a base64-encoded string before writing. + * Prefer passing a ReadableStream for binary data instead. + * @param options.sessionId - Session ID for the operation + */ async writeFile( path: string, - content: string, + content: string | ReadableStream, options: { encoding?: string; sessionId?: string } = {} ) { const session = options.sessionId ?? 
(await this.ensureDefaultSession()); diff --git a/packages/sandbox/tests/file-client.test.ts b/packages/sandbox/tests/file-client.test.ts index ff266cab4..d5bc232ff 100644 --- a/packages/sandbox/tests/file-client.test.ts +++ b/packages/sandbox/tests/file-client.test.ts @@ -417,6 +417,126 @@ database: }); }); + describe('writeFile with ReadableStream', () => { + it('should write ReadableStream successfully', async () => { + const mockResponse: WriteFileResult = { + success: true, + exitCode: 0, + path: '/app/data.bin', + timestamp: '2023-01-01T00:00:00Z' + }; + + mockFetch.mockResolvedValue( + new Response(JSON.stringify(mockResponse), { status: 200 }) + ); + + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([1, 2, 3])); + controller.close(); + } + }); + + const result = await client.writeFile( + '/app/data.bin', + stream, + 'session-upload' + ); + + expect(result.success).toBe(true); + expect(result.path).toBe('/app/data.bin'); + expect(result.timestamp).toBe('2023-01-01T00:00:00Z'); + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining('/api/write'), + expect.objectContaining({ + method: 'POST' + }) + ); + const calledUrl = mockFetch.mock.calls[1][0] as string; + expect(calledUrl).toContain('path=%2Fapp%2Fdata.bin'); + expect(calledUrl).toContain('sessionId=session-upload'); + }); + + it('should handle error responses for stream content', async () => { + const errorResponse = { + error: 'Failed to write file', + code: 'FILESYSTEM_ERROR', + path: '/app/data.bin' + }; + + mockFetch.mockResolvedValue( + new Response(JSON.stringify(errorResponse), { status: 500 }) + ); + + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([1, 2, 3])); + controller.close(); + } + }); + + await expect( + client.writeFile('/app/data.bin', stream, 'session-upload') + ).rejects.toThrow(FileSystemError); + }); + + it('should handle network errors for stream content', async () => { + 
mockFetch.mockRejectedValue(new Error('Network timeout')); + + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([1, 2, 3])); + controller.close(); + } + }); + + await expect( + client.writeFile('/app/data.bin', stream, 'session-upload') + ).rejects.toThrow('Network timeout'); + }); + + it('should wait for container before streaming', async () => { + const mockResponse: WriteFileResult = { + success: true, + exitCode: 0, + path: '/app/data.bin', + timestamp: '2023-01-01T00:00:00Z' + }; + + // First call is the ping (503 - container starting), second is a ping + // retry (200 - container ready), third is the write (200 - success). + // Use the client's retry budget so the 503 is retried rather than thrown. + mockFetch + .mockResolvedValueOnce(new Response('starting', { status: 503 })) + .mockResolvedValueOnce(new Response('ok', { status: 200 })) + .mockResolvedValueOnce( + new Response(JSON.stringify(mockResponse), { status: 200 }) + ); + + // Make sleep a no-op so the test doesn't actually wait + vi.spyOn((client as any).transport, 'sleep').mockResolvedValue(undefined); + + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([1, 2, 3])); + controller.close(); + } + }); + + const result = await client.writeFile( + '/app/data.bin', + stream, + 'session-upload' + ); + + expect(result.success).toBe(true); + expect(mockFetch).toHaveBeenCalledTimes(3); + expect(mockFetch.mock.calls[0][0]).toContain('/api/ping'); + expect(mockFetch.mock.calls[1][0]).toContain('/api/ping'); + expect(mockFetch.mock.calls[2][0]).toContain('/api/write'); + }); + }); + describe('deleteFile', () => { it('should delete files successfully', async () => { const mockResponse: DeleteFileResult = { @@ -780,7 +900,7 @@ database: await expect( client.writeFile('/app/file.txt', 'content', 'session-err') - ).rejects.toThrow(SandboxError); + ).rejects.toThrow(); }); it('should handle server errors with proper mapping', 
async () => { diff --git a/packages/sandbox/tests/transport.test.ts b/packages/sandbox/tests/transport.test.ts index b21c6e045..251040fa6 100644 --- a/packages/sandbox/tests/transport.test.ts +++ b/packages/sandbox/tests/transport.test.ts @@ -138,6 +138,103 @@ describe('Transport', () => { ); expect(mockFetch).not.toHaveBeenCalled(); }); + + it('should return a synthetic 503 when body stream is already consumed on retry', async () => { + const transport = createTransport({ + mode: 'http', + baseUrl: 'http://localhost:3000' + }); + + // First call returns 503 (container starting), second call throws TypeError + // simulating the Workers runtime rejecting a re-read of a consumed stream + mockFetch + .mockResolvedValueOnce(new Response(null, { status: 503 })) + .mockRejectedValueOnce( + new TypeError('This ReadableStream is currently locked to a reader') + ); + + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array([1, 2, 3])); + controller.close(); + } + }); + + // Use fake timers so the retry sleep doesn't actually wait + vi.useFakeTimers(); + try { + const fetchPromise = transport.fetch('/api/write?path=test', { + method: 'POST', + body: stream + }); + // Advance timers past the retry delay (first backoff is 3000ms) + await vi.runAllTimersAsync(); + const response = await fetchPromise; + + expect(response.status).toBe(503); + expect(response.statusText).toBe('Stream body already consumed'); + expect(mockFetch).toHaveBeenCalledTimes(2); + } finally { + vi.useRealTimers(); + } + }); + + describe('waitForContainer', () => { + it('should return immediately when /api/ping responds 200', async () => { + const transport = createTransport({ + mode: 'http', + baseUrl: 'http://localhost:3000' + }); + + mockFetch.mockResolvedValue(new Response('ok', { status: 200 })); + + await expect(transport.waitForContainer()).resolves.toBeUndefined(); + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(mockFetch).toHaveBeenCalledWith( + 
+        'http://localhost:3000/api/ping',
+        expect.objectContaining({ method: 'GET' })
+      );
+    });
+
+    it('should retry when ping returns 503 twice then succeeds', async () => {
+      const transport = createTransport({
+        mode: 'http',
+        baseUrl: 'http://localhost:3000'
+      });
+
+      mockFetch
+        .mockResolvedValueOnce(new Response('starting', { status: 503 }))
+        .mockResolvedValueOnce(new Response('starting', { status: 503 }))
+        .mockResolvedValueOnce(new Response('ok', { status: 200 }));
+
+      // Override sleep to avoid real delays in tests
+      const sleepSpy = vi
+        .spyOn(
+          transport as unknown as { sleep: (ms: number) => Promise<void> },
+          'sleep'
+        )
+        .mockResolvedValue(undefined);
+
+      await expect(transport.waitForContainer()).resolves.toBeUndefined();
+      expect(mockFetch).toHaveBeenCalledTimes(3);
+      expect(sleepSpy).toHaveBeenCalledTimes(2);
+    });
+
+    it('should throw when all ping attempts return 503 and budget is exhausted', async () => {
+      const transport = createTransport({
+        mode: 'http',
+        baseUrl: 'http://localhost:3000'
+      });
+
+      transport.setRetryTimeoutMs(0);
+      mockFetch.mockResolvedValue(new Response('starting', { status: 503 }));
+
+      await expect(transport.waitForContainer()).rejects.toThrow(
+        'Container failed to become ready'
+      );
+      expect(mockFetch).toHaveBeenCalledTimes(1);
+    });
+  });
 });
 
 describe('WebSocket mode', () => {
diff --git a/packages/sandbox/tests/ws-transport.test.ts b/packages/sandbox/tests/ws-transport.test.ts
index 9d2202e07..d9efe81b3 100644
--- a/packages/sandbox/tests/ws-transport.test.ts
+++ b/packages/sandbox/tests/ws-transport.test.ts
@@ -11,7 +11,7 @@ import {
   isWSResponse,
   isWSStreamChunk
 } from '@repo/shared';
-import { describe, expect, it, vi } from 'vitest';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
 import { WebSocketTransport } from '../src/clients/transport';
 
 /**
@@ -187,6 +187,74 @@ describe('WebSocket Protocol Types', () => {
     });
   });
 
+  describe('parseBody', () => {
+    let transport: WebSocketTransport;
+
+    
beforeEach(() => { + transport = new WebSocketTransport({ wsUrl: 'ws://localhost:3000/ws' }); + }); + + it('should return undefined for falsy body', async () => { + const result = await (transport as any).parseBody(undefined); + expect(result).toBeUndefined(); + }); + + it('should parse valid JSON string', async () => { + const result = await (transport as any).parseBody( + JSON.stringify({ foo: 'bar' }) + ); + expect(result).toEqual({ foo: 'bar' }); + }); + + it('should throw for invalid JSON string', async () => { + await expect((transport as any).parseBody('not json')).rejects.toThrow(); + }); + + it('should drain ReadableStream and return base64 object', async () => { + const bytes = new Uint8Array([1, 2, 3, 4, 5]); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(bytes); + controller.close(); + } + }); + const result = (await (transport as any).parseBody(stream)) as { + __streamBase64: string; + }; + expect(result).toHaveProperty('__streamBase64'); + const decoded = Uint8Array.from(atob(result.__streamBase64), (c) => + c.charCodeAt(0) + ); + expect(decoded).toEqual(bytes); + }); + + it('should drain multi-chunk ReadableStream', async () => { + const chunk1 = new Uint8Array([1, 2, 3]); + const chunk2 = new Uint8Array([4, 5, 6]); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(chunk1); + controller.enqueue(chunk2); + controller.close(); + } + }); + const result = (await (transport as any).parseBody(stream)) as { + __streamBase64: string; + }; + expect(result).toHaveProperty('__streamBase64'); + const decoded = Uint8Array.from(atob(result.__streamBase64), (c) => + c.charCodeAt(0) + ); + expect(decoded).toEqual(new Uint8Array([1, 2, 3, 4, 5, 6])); + }); + + it('should throw for unsupported body types', async () => { + await expect((transport as any).parseBody(12345)).rejects.toThrow( + 'WebSocket transport only supports string bodies' + ); + }); + }); + describe('stream request first-message 
handling', () => { it('should reject before returning stream when first message is an error response', async () => { vi.useFakeTimers(); diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index cf4442889..2cc8357cd 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -83,8 +83,7 @@ export type { RestoreBackupResponse, SessionCreateRequest, SessionDeleteRequest, - StartProcessRequest, - WriteFileRequest + StartProcessRequest } from './request-types.js'; // Export shell utilities export { shellEscape } from './shell-escape.js'; diff --git a/packages/shared/src/request-types.ts b/packages/shared/src/request-types.ts index d14eba9ec..af0ae44db 100644 --- a/packages/shared/src/request-types.ts +++ b/packages/shared/src/request-types.ts @@ -39,16 +39,6 @@ export interface ReadFileRequest { sessionId?: string; } -/** - * Request to write a file - */ -export interface WriteFileRequest { - path: string; - content: string; - encoding?: string; - sessionId?: string; -} - /** * Request to delete a file */ diff --git a/packages/shared/src/types.ts b/packages/shared/src/types.ts index 622d4fea1..f6674e418 100644 --- a/packages/shared/src/types.ts +++ b/packages/shared/src/types.ts @@ -956,10 +956,24 @@ export interface ExecutionSession { ): Promise>; // File operations + /** + * Write a file to the sandbox. + * + * @param content - File content as a UTF-8 string or a ReadableStream of bytes. + * When a ReadableStream is provided, it is consumed and converted to a + * byte-oriented stream internally. The original stream cannot be reused. + * @param options.encoding - (Deprecated) String encoding for string content. + */ writeFile( path: string, - content: string, - options?: { encoding?: string } + content: string | ReadableStream, + options?: { + /** + * @deprecated Prefer passing a ReadableStream for binary data instead of + * base64-encoded strings. Decode the base64 string before calling writeFile. 
+       */
+      encoding?: string;
+    }
   ): Promise<WriteFileResult>;
   readFile(
     path: string,
@@ -1213,10 +1227,28 @@
   ): Promise<{ stdout: string; stderr: string; processId: string }>;
 
   // File operations
+  /**
+   * Write a file to the sandbox.
+   *
+   * @param path - Destination file path in the sandbox
+   * @param content - File content as a UTF-8 string or a ReadableStream of bytes.
+   *   When a ReadableStream is provided, it is consumed and converted to a
+   *   byte-oriented stream internally for transfer. The original stream will be
+   *   fully read and cannot be reused after this call.
+   * @param options.encoding - (Deprecated) String encoding, only applies when
+   *   content is a string. Use 'base64' to decode a base64-encoded string to
+   *   bytes before writing. Prefer passing a ReadableStream for binary data.
+   */
   writeFile(
     path: string,
-    content: string,
-    options?: { encoding?: string }
+    content: string | ReadableStream,
+    options?: {
+      /**
+       * @deprecated Prefer passing a ReadableStream for binary data instead of
+       * base64-encoded strings. Decode the base64 string before calling writeFile.
+       */
+      encoding?: string;
+    }
   ): Promise<WriteFileResult>;
   readFile(
     path: string,
diff --git a/tests/e2e/test-worker/Dockerfile b/tests/e2e/test-worker/Dockerfile
index 5b964d6a5..6ac06db9d 100644
--- a/tests/e2e/test-worker/Dockerfile
+++ b/tests/e2e/test-worker/Dockerfile
@@ -1,6 +1,6 @@
 # Base image Dockerfile (no Python)
 # Used for testing Python-not-available error handling
-FROM docker.io/cloudflare/sandbox-test:0.7.18
+FROM docker.io/cloudflare/sandbox-test:0.7.19
 
 # Install snapshot tools: squashfs-tools (create), squashfuse (mount), fuse-overlayfs (COW)
 RUN apt-get update && apt-get install -y squashfs-tools squashfuse fuse-overlayfs && rm -rf /var/lib/apt/lists/*
diff --git a/tests/e2e/test-worker/Dockerfile.desktop b/tests/e2e/test-worker/Dockerfile.desktop
index 6394f6921..84663b496 100644
--- a/tests/e2e/test-worker/Dockerfile.desktop
+++ b/tests/e2e/test-worker/Dockerfile.desktop
@@ -1,6 +1,6 @@
 # E2E test Dockerfile for Desktop environment
 # Uses the -desktop variant for testing desktop/computer-use features
-FROM docker.io/cloudflare/sandbox-test:0.7.18-desktop
+FROM docker.io/cloudflare/sandbox-test:0.7.19-desktop
 
 # Install snapshot tools: squashfs-tools (create), squashfuse (mount), fuse-overlayfs (COW)
 RUN apt-get update && apt-get install -y squashfs-tools squashfuse fuse-overlayfs && rm -rf /var/lib/apt/lists/*
diff --git a/tests/e2e/test-worker/Dockerfile.musl b/tests/e2e/test-worker/Dockerfile.musl
index 8daf734e0..8c8e7ea65 100644
--- a/tests/e2e/test-worker/Dockerfile.musl
+++ b/tests/e2e/test-worker/Dockerfile.musl
@@ -1,4 +1,4 @@
-FROM docker.io/cloudflare/sandbox-test:0.7.18-musl
+FROM docker.io/cloudflare/sandbox-test:0.7.19-musl
 
 # Install snapshot tools (Alpine uses apk)
 # squashfs-tools: create archives, squashfuse: mount, fuse-overlayfs: COW layer
diff --git a/tests/e2e/test-worker/Dockerfile.opencode b/tests/e2e/test-worker/Dockerfile.opencode
index ce611f519..abd5bb907 100644
--- a/tests/e2e/test-worker/Dockerfile.opencode
+++ 
b/tests/e2e/test-worker/Dockerfile.opencode @@ -1,6 +1,6 @@ # E2E test Dockerfile for OpenCode # Uses the -opencode variant for testing the OpenCode integration -FROM docker.io/cloudflare/sandbox-test:0.7.18-opencode +FROM docker.io/cloudflare/sandbox-test:0.7.19-opencode # Install snapshot tools: squashfs-tools (create), squashfuse (mount), fuse-overlayfs (COW) RUN apt-get update && apt-get install -y squashfs-tools squashfuse fuse-overlayfs && rm -rf /var/lib/apt/lists/* diff --git a/tests/e2e/test-worker/Dockerfile.python b/tests/e2e/test-worker/Dockerfile.python index a092affc9..4214d6db1 100644 --- a/tests/e2e/test-worker/Dockerfile.python +++ b/tests/e2e/test-worker/Dockerfile.python @@ -1,6 +1,6 @@ # Integration test Dockerfile # Uses the -python variant because E2E tests include Python code execution tests -FROM docker.io/cloudflare/sandbox-test:0.7.18-python +FROM docker.io/cloudflare/sandbox-test:0.7.19-python # Install snapshot tools: squashfs-tools (create), squashfuse (mount), fuse-overlayfs (COW) RUN apt-get update && apt-get install -y squashfs-tools squashfuse fuse-overlayfs && rm -rf /var/lib/apt/lists/* diff --git a/tests/e2e/test-worker/Dockerfile.standalone b/tests/e2e/test-worker/Dockerfile.standalone index 998d90758..ae18cd640 100644 --- a/tests/e2e/test-worker/Dockerfile.standalone +++ b/tests/e2e/test-worker/Dockerfile.standalone @@ -1,7 +1,7 @@ # Test the standalone binary pattern with an arbitrary base image # This validates that users can add sandbox capabilities to any Docker image -ARG BASE_IMAGE=cloudflare/sandbox-test:0.7.18 +ARG BASE_IMAGE=cloudflare/sandbox-test:0.7.19 FROM ${BASE_IMAGE} AS sandbox-source FROM node:20-slim