Upload files from your local filesystem to sandboxes. This is useful for transferring data files, scripts, or any local files you want to use in your sandbox.
Overview
File uploading is ideal for:
- Transferring local data files to sandboxes
- Uploading scripts and code files
- Moving configuration files
- Transferring datasets or models
Uploads support both text and binary files. Large files take longer to transfer, so set an appropriate timeout for large uploads.
Basic Upload
Upload a file from your local filesystem to the sandbox:
from hopx_ai import Sandbox
sandbox = Sandbox.create(template="code-interpreter")
# Upload a local file
sandbox.files.upload('./local_data.csv', '/workspace/data.csv')
print("File uploaded successfully")
# Verify upload
if sandbox.files.exists('/workspace/data.csv'):
    content = sandbox.files.read('/workspace/data.csv')
    print(f"Uploaded content: {content[:100]}...")
sandbox.kill()
import { Sandbox } from '@hopx-ai/sdk';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
// Note: upload in the JavaScript SDK may require a FormData implementation.
// For now, read the file locally and pass its contents to write().
import * as fs from 'fs';
const localContent = fs.readFileSync('./local_data.csv', 'utf-8');
await sandbox.files.write('/workspace/data.csv', localContent);
console.log('File uploaded successfully');
// Verify upload
const exists = await sandbox.files.exists('/workspace/data.csv');
if (exists) {
  const content = await sandbox.files.read('/workspace/data.csv');
  console.log(`Uploaded content: ${content.substring(0, 100)}...`);
}
await sandbox.kill();
Upload with Timeout
Set a timeout for large file uploads:
from hopx_ai import Sandbox
sandbox = Sandbox.create(template="code-interpreter")
# Upload large file with extended timeout
sandbox.files.upload('./large_dataset.zip', '/workspace/dataset.zip', timeout=300)
print("Large file uploaded")
# Verify
files = sandbox.files.list('/workspace')
for f in files:
    if f.name == 'dataset.zip':
        print(f"Uploaded: {f.name} ({f.size_mb:.2f} MB)")
sandbox.kill()
import { Sandbox } from '@hopx-ai/sdk';
import * as fs from 'fs';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
// Upload large file (use writeBytes for binary)
const largeContent = fs.readFileSync('./large_dataset.zip');
await sandbox.files.writeBytes('/workspace/dataset.zip', largeContent);
console.log('Large file uploaded');
// Verify
const files = await sandbox.files.list('/workspace');
const datasetFile = files.find(f => f.name === 'dataset.zip');
if (datasetFile) {
  console.log(`Uploaded: ${datasetFile.name} (${(datasetFile.size / 1024 / 1024).toFixed(2)} MB)`);
}
await sandbox.kill();
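If you do not know file sizes in advance, you can scale the timeout with the size of the local file. This is a minimal sketch rather than an SDK feature; the 2-seconds-per-MB factor is an assumption you should tune for your network:
from hopx_ai import Sandbox
import os
sandbox = Sandbox.create(template="code-interpreter")
local_path = './large_dataset.zip'
# Assumed heuristic: 60s base plus roughly 2s per MB of payload
size_mb = os.path.getsize(local_path) / (1024 * 1024)
timeout = 60 + int(size_mb * 2)
sandbox.files.upload(local_path, '/workspace/dataset.zip', timeout=timeout)
print(f"Uploaded {size_mb:.1f} MB with a {timeout}s timeout")
sandbox.kill()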
Upload Multiple Files
Upload multiple files in a loop:
from hopx_ai import Sandbox
import os
sandbox = Sandbox.create(template="code-interpreter")
# List of files to upload
files_to_upload = [
    ('./config.json', '/workspace/config.json'),
    ('./data.csv', '/workspace/data.csv'),
    ('./script.py', '/workspace/script.py')
]
# Upload all files
for local_path, remote_path in files_to_upload:
    if os.path.exists(local_path):
        sandbox.files.upload(local_path, remote_path)
        print(f"✅ Uploaded: {local_path} → {remote_path}")
    else:
        print(f"⚠️ File not found: {local_path}")
# Verify all uploads
files = sandbox.files.list('/workspace')
print(f"\nUploaded {len(files)} files")
sandbox.kill()
import { Sandbox } from '@hopx-ai/sdk';
import * as fs from 'fs';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
// List of files to upload
const filesToUpload = [
  ['./config.json', '/workspace/config.json'],
  ['./data.csv', '/workspace/data.csv'],
  ['./script.py', '/workspace/script.py']
];
// Upload all files
for (const [localPath, remotePath] of filesToUpload) {
  try {
    if (fs.existsSync(localPath)) {
      const content = fs.readFileSync(localPath, 'utf-8');
      await sandbox.files.write(remotePath, content);
      console.log(`✅ Uploaded: ${localPath} → ${remotePath}`);
    } else {
      console.log(`⚠️ File not found: ${localPath}`);
    }
  } catch (error) {
    console.error(`Error uploading ${localPath}: ${error.message}`);
  }
}
// Verify all uploads
const files = await sandbox.files.list('/workspace');
console.log(`\nUploaded ${files.length} files`);
await sandbox.kill();
Upload Binary Files
Upload binary files (images, PDFs, etc.):
from hopx_ai import Sandbox
sandbox = Sandbox.create(template="code-interpreter")
# Upload image file
sandbox.files.upload('./image.png', '/workspace/image.png', timeout=60)
print("Image uploaded")
# Upload PDF
sandbox.files.upload('./document.pdf', '/workspace/document.pdf', timeout=60)
print("PDF uploaded")
# Verify binary files
files = sandbox.files.list('/workspace')
for f in files:
    if f.name.endswith(('.png', '.pdf')):
        print(f"Binary file: {f.name} ({f.size_kb:.2f} KB)")
sandbox.kill()
import { Sandbox } from '@hopx-ai/sdk';
import * as fs from 'fs';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
// Upload image file
const imageData = fs.readFileSync('./image.png');
await sandbox.files.writeBytes('/workspace/image.png', imageData);
console.log('Image uploaded');
// Upload PDF
const pdfData = fs.readFileSync('./document.pdf');
await sandbox.files.writeBytes('/workspace/document.pdf', pdfData);
console.log('PDF uploaded');
// Verify binary files
const files = await sandbox.files.list('/workspace');
for (const f of files) {
  if (f.name.endsWith('.png') || f.name.endsWith('.pdf')) {
    console.log(`Binary file: ${f.name} (${(f.size / 1024).toFixed(2)} KB)`);
  }
}
await sandbox.kill();
Upload to Specific Directory
Upload files to specific directories:
from hopx_ai import Sandbox
sandbox = Sandbox.create(template="code-interpreter")
# Create directory structure
sandbox.files.mkdir('/workspace/project')
sandbox.files.mkdir('/workspace/project/data')
sandbox.files.mkdir('/workspace/project/src')
# Upload to specific directories
sandbox.files.upload('./dataset.csv', '/workspace/project/data/dataset.csv')
sandbox.files.upload('./main.py', '/workspace/project/src/main.py')
sandbox.files.upload('./config.json', '/workspace/project/config.json')
# Verify structure
print("Project structure:")
for path in ['/workspace/project', '/workspace/project/data', '/workspace/project/src']:
    files = sandbox.files.list(path)
    print(f"\n{path}:")
    for f in files:
        print(f" {f.name}")
sandbox.kill()
import { Sandbox } from '@hopx-ai/sdk';
import * as fs from 'fs';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
// Create directory structure
await sandbox.files.mkdir('/workspace/project');
await sandbox.files.mkdir('/workspace/project/data');
await sandbox.files.mkdir('/workspace/project/src');
// Upload to specific directories
const datasetContent = fs.readFileSync('./dataset.csv', 'utf-8');
await sandbox.files.write('/workspace/project/data/dataset.csv', datasetContent);
const mainContent = fs.readFileSync('./main.py', 'utf-8');
await sandbox.files.write('/workspace/project/src/main.py', mainContent);
const configContent = fs.readFileSync('./config.json', 'utf-8');
await sandbox.files.write('/workspace/project/config.json', configContent);
// Verify structure
console.log('Project structure:');
for (const dirPath of ['/workspace/project', '/workspace/project/data', '/workspace/project/src']) {
  const files = await sandbox.files.list(dirPath);
  console.log(`\n${dirPath}:`);
  for (const f of files) {
    console.log(` ${f.name}`);
  }
}
await sandbox.kill();
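To mirror an entire local folder instead of uploading files one by one, you can walk the local tree and recreate it in the sandbox. The sketch below uses only the mkdir() and upload() calls shown above; ./project is a placeholder for your local directory:
from hopx_ai import Sandbox
import os
sandbox = Sandbox.create(template="code-interpreter")
local_root = './project'  # placeholder local directory
remote_root = '/workspace/project'
for dirpath, dirnames, filenames in os.walk(local_root):
    # Map each local subdirectory onto the sandbox path
    rel = os.path.relpath(dirpath, local_root)
    remote_dir = remote_root if rel == '.' else f"{remote_root}/{rel.replace(os.sep, '/')}"
    sandbox.files.mkdir(remote_dir)
    for name in filenames:
        sandbox.files.upload(os.path.join(dirpath, name), f"{remote_dir}/{name}")
        print(f"Uploaded {name} → {remote_dir}/{name}")
sandbox.kill()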
Error Handling
Handle upload errors:
from hopx_ai import Sandbox
from hopx_ai.errors import FileOperationError
import os
sandbox = Sandbox.create(template="code-interpreter")
local_file = './data.csv'
try:
    # Check if local file exists
    if not os.path.exists(local_file):
        print(f"Local file not found: {local_file}")
    else:
        # Upload with error handling
        sandbox.files.upload(local_file, '/workspace/data.csv', timeout=120)
        print("Upload successful")
except FileOperationError as e:
    print(f"Upload failed: {e}")
except Exception as e:
    print(f"Error: {e}")
sandbox.kill()
import { Sandbox, FileOperationError } from '@hopx-ai/sdk';
import * as fs from 'fs';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
const localFile = './data.csv';
try {
  // Check if local file exists
  if (!fs.existsSync(localFile)) {
    console.error(`Local file not found: ${localFile}`);
  } else {
    // Upload with error handling
    const content = fs.readFileSync(localFile, 'utf-8');
    await sandbox.files.write('/workspace/data.csv', content);
    console.log('Upload successful');
  }
} catch (error) {
  if (error instanceof FileOperationError) {
    console.error(`Upload failed: ${error.message}`);
  } else {
    console.error(`Error: ${error.message}`);
  }
}
await sandbox.kill();
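If uploads occasionally fail for transient reasons (network hiccups, timeouts), you can retry a few times before giving up. A minimal sketch using the upload() call and FileOperationError from the example above; the three attempts and exponential backoff are arbitrary choices:
import time
from hopx_ai import Sandbox
from hopx_ai.errors import FileOperationError
sandbox = Sandbox.create(template="code-interpreter")
for attempt in range(1, 4):
    try:
        sandbox.files.upload('./data.csv', '/workspace/data.csv', timeout=120)
        print(f"Upload succeeded on attempt {attempt}")
        break
    except FileOperationError as e:
        print(f"Attempt {attempt} failed: {e}")
        time.sleep(2 ** attempt)  # simple exponential backoff before retrying
else:
    print("Giving up after 3 attempts")
sandbox.kill()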
Complete Example
Here’s a complete upload workflow:
from hopx_ai import Sandbox
import os
sandbox = Sandbox.create(template="code-interpreter")
# Create project structure
sandbox.files.mkdir('/workspace/ml-project')
sandbox.files.mkdir('/workspace/ml-project/data')
sandbox.files.mkdir('/workspace/ml-project/models')
# Upload project files
files_to_upload = {
    './train.py': '/workspace/ml-project/train.py',
    './data/train.csv': '/workspace/ml-project/data/train.csv',
    './data/test.csv': '/workspace/ml-project/data/test.csv',
    './config.yaml': '/workspace/ml-project/config.yaml'
}
print("📤 Uploading files...")
for local_path, remote_path in files_to_upload.items():
    if os.path.exists(local_path):
        try:
            sandbox.files.upload(local_path, remote_path, timeout=120)
            file_size = os.path.getsize(local_path) / 1024
            print(f" ✅ {local_path} → {remote_path} ({file_size:.2f} KB)")
        except Exception as e:
            print(f" ❌ Failed to upload {local_path}: {e}")
    else:
        print(f" ⚠️ File not found: {local_path}")
# Verify uploads
print("\n📋 Verifying uploads...")
files = sandbox.files.list('/workspace/ml-project')
total_size = 0
for f in files:
    if f.is_file:
        total_size += f.size
        print(f" 📄 {f.path} ({f.size_kb:.2f} KB)")
print(f"\n✅ Total uploaded: {total_size / 1024:.2f} KB")
sandbox.kill()
import { Sandbox } from '@hopx-ai/sdk';
import * as fs from 'fs';
const sandbox = await Sandbox.create({ template: 'code-interpreter' });
// Create project structure
await sandbox.files.mkdir('/workspace/ml-project');
await sandbox.files.mkdir('/workspace/ml-project/data');
await sandbox.files.mkdir('/workspace/ml-project/models');
// Upload project files
const filesToUpload = {
  './train.py': '/workspace/ml-project/train.py',
  './data/train.csv': '/workspace/ml-project/data/train.csv',
  './data/test.csv': '/workspace/ml-project/data/test.csv',
  './config.yaml': '/workspace/ml-project/config.yaml'
};
console.log('📤 Uploading files...');
let totalSize = 0;
for (const [localPath, remotePath] of Object.entries(filesToUpload)) {
  try {
    if (fs.existsSync(localPath)) {
      const stats = fs.statSync(localPath);
      const content = fs.readFileSync(localPath, 'utf-8');
      await sandbox.files.write(remotePath, content);
      const fileSize = stats.size / 1024;
      console.log(` ✅ ${localPath} → ${remotePath} (${fileSize.toFixed(2)} KB)`);
      totalSize += stats.size;
    } else {
      console.log(` ⚠️ File not found: ${localPath}`);
    }
  } catch (error) {
    console.error(` ❌ Failed to upload ${localPath}: ${error.message}`);
  }
}
// Verify uploads
console.log('\n📋 Verifying uploads...');
const files = await sandbox.files.list('/workspace/ml-project');
for (const f of files) {
  if (f.isFile) {
    console.log(` 📄 ${f.path} (${(f.size / 1024).toFixed(2)} KB)`);
  }
}
console.log(`\n✅ Total uploaded: ${(totalSize / 1024).toFixed(2)} KB`);
await sandbox.kill();
Best Practices
1. Check Local File Existence
Always verify local files exist before attempting upload to avoid errors.
2. Set Appropriate Timeouts
For large files, set an extended timeout (60 seconds or more) so the transfer does not fail partway through.
3. Create Directories First
Use files.mkdir() to create target directories before uploading files.
4. Handle Errors Gracefully
Wrap uploads in try/catch blocks to handle network errors, missing files, and other failures.
5. Verify Uploads
After uploading, verify that files exist and check their sizes to confirm the transfer succeeded.
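Taken together, these practices fit naturally into a small helper. The sketch below is illustrative rather than part of the SDK: the safe_upload name and the size-based timeout heuristic are our own, and directory creation (practice 3) is assumed to happen beforehand as in the examples above.
import os
from hopx_ai import Sandbox
def safe_upload(sandbox, local_path, remote_path):
    # 1. Check that the local file exists
    if not os.path.exists(local_path):
        raise FileNotFoundError(local_path)
    # 2. Scale the timeout with file size (assumed heuristic: 60s base + ~2s per MB)
    size_mb = os.path.getsize(local_path) / (1024 * 1024)
    timeout = 60 + int(size_mb * 2)
    # 4. Any upload error propagates to the caller for handling
    sandbox.files.upload(local_path, remote_path, timeout=timeout)
    # 5. Verify the file actually arrived
    if not sandbox.files.exists(remote_path):
        raise RuntimeError(f"Upload verification failed for {remote_path}")
sandbox = Sandbox.create(template="code-interpreter")
safe_upload(sandbox, './data.csv', '/workspace/data.csv')
print("Upload verified")
sandbox.kill()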
Next Steps