Database Import & Export
Export database data to JSON, CSV, or SQL formats, and import data back into your Sandbox databases.
Export to JSON
Export table data as JSON.
const result = await sandbox.database.exportTableToJSON('myapp', 'users');
console.log(result.data);

const response = await fetch(
`${sandbox.url}/database/myapp/tables/users/export/json`,
{
headers: {
'Authorization': `Bearer ${sandbox.token}`
}
}
);
const result = await response.json();

curl https://sandbox.oblien.com/database/myapp/tables/users/export/json \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN"

Response:
{
"success": true,
"table": "users",
"data": [
{
"id": 1,
"name": "John Doe",
"email": "john@example.com",
"age": 30,
"created_at": "2025-10-27T12:00:00Z"
},
{
"id": 2,
"name": "Jane Smith",
"email": "jane@example.com",
"age": 25,
"created_at": "2025-10-27T12:05:00Z"
}
],
"rowCount": 2
}

Export to CSV
Export table data as CSV.
const result = await sandbox.database.exportTableToCSV('myapp', 'users');
console.log(result.csv);

const response = await fetch(
`${sandbox.url}/database/myapp/tables/users/export/csv`,
{
headers: {
'Authorization': `Bearer ${sandbox.token}`
}
}
);
const result = await response.json();
// Or if downloading directly:
const csvText = await response.text();

curl https://sandbox.oblien.com/database/myapp/tables/users/export/csv \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN"

Response:
{
"success": true,
"table": "users",
"csv": "id,name,email,age,created_at\n1,John Doe,john@example.com,30,2025-10-27T12:00:00Z\n2,Jane Smith,jane@example.com,25,2025-10-27T12:05:00Z",
"rowCount": 2
}

CSV Content:
id,name,email,age,created_at
1,John Doe,john@example.com,30,2025-10-27T12:00:00Z
2,Jane Smith,jane@example.com,25,2025-10-27T12:05:00Z

Export Database to SQL
Export entire database schema and data as SQL dump.
const result = await sandbox.database.exportDatabaseToSQL('myapp');
console.log(result.sql);
// Save to file
await sandbox.files.create({
fullPath: '/opt/backups/myapp-backup.sql',
content: result.sql
});

const response = await fetch(
`${sandbox.url}/database/myapp/export/sql`,
{
headers: {
'Authorization': `Bearer ${sandbox.token}`
}
}
);
const result = await response.json();
// Or download directly:
const sqlText = await response.text();

curl https://sandbox.oblien.com/database/myapp/export/sql \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-o myapp-backup.sql

Response:
{
"success": true,
"database": "myapp",
"sql": "-- SQLite Database Export\n-- Database: myapp\n-- Timestamp: 2025-10-27T14:30:00Z\n\nBEGIN TRANSACTION;\n\nCREATE TABLE users (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL,\n email TEXT UNIQUE NOT NULL,\n age INTEGER,\n created_at DATETIME DEFAULT CURRENT_TIMESTAMP\n);\n\nINSERT INTO users VALUES(1,'John Doe','john@example.com',30,'2025-10-27 12:00:00');\nINSERT INTO users VALUES(2,'Jane Smith','jane@example.com',25,'2025-10-27 12:05:00');\n\nCOMMIT;",
"tables": 1,
"size": 1024
}

Import from JSON
Import data from JSON into a table.
await sandbox.database.importFromJSON({
database: 'myapp',
table: 'users',
data: [
{
name: 'Charlie Wilson',
email: 'charlie@example.com',
age: 32
},
{
name: 'Diana Prince',
email: 'diana@example.com',
age: 29
}
],
options: {
truncate: false, // Set to true to clear the table before import
skipErrors: true // Continue on errors
}
});

Parameters:
- database (string, required) - Database name
- table (string, required) - Table name
- data (array, required) - Array of objects to import
- options (object) - Import options
  - truncate (boolean) - Clear table before import
  - skipErrors (boolean) - Continue if individual inserts fail
await fetch(`${sandbox.url}/database/myapp/tables/users/import/json`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${sandbox.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
data: [
{
name: 'Charlie Wilson',
email: 'charlie@example.com',
age: 32
}
],
options: {
truncate: false,
skipErrors: true
}
})
});

curl -X POST https://sandbox.oblien.com/database/myapp/tables/users/import/json \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"data": [
{
"name": "Charlie Wilson",
"email": "charlie@example.com",
"age": 32
}
],
"options": {
"skipErrors": true
}
}'

Response:
{
"success": true,
"message": "Data imported successfully",
"imported": 2,
"skipped": 0,
"errors": []
}

Complete Backup & Restore Example
// ========== BACKUP ==========
// Export all data to JSON
const usersBackup = await sandbox.database.exportTableToJSON('myapp', 'users');
const postsBackup = await sandbox.database.exportTableToJSON('myapp', 'posts');
// Save to files
await sandbox.files.create({
fullPath: '/opt/backups/users-backup.json',
content: JSON.stringify(usersBackup.data, null, 2)
});
await sandbox.files.create({
fullPath: '/opt/backups/posts-backup.json',
content: JSON.stringify(postsBackup.data, null, 2)
});
// Export complete SQL dump
const sqlDump = await sandbox.database.exportDatabaseToSQL('myapp');
await sandbox.files.create({
fullPath: '/opt/backups/myapp-full.sql',
content: sqlDump.sql
});
console.log('Backup completed!');
// ========== RESTORE ==========
// Read backup files
const usersData = await sandbox.files.get({
filePath: '/opt/backups/users-backup.json'
});
const users = JSON.parse(usersData.content);
// Import data
await sandbox.database.importFromJSON({
database: 'myapp',
table: 'users',
data: users,
options: {
truncate: true, // Clear existing data
skipErrors: false
}
});
console.log('Restore completed!');

// Export table
const exportResponse = await fetch(
`${sandbox.url}/database/myapp/tables/users/export/json`,
{
headers: {
'Authorization': `Bearer ${sandbox.token}`
}
}
);
const backup = await exportResponse.json();
// Save backup
await fetch(`${sandbox.url}/files/create`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${sandbox.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
fullPath: '/opt/backups/users-backup.json',
content: JSON.stringify(backup.data)
})
});
// Import backup
await fetch(`${sandbox.url}/database/myapp/tables/users/import/json`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${sandbox.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
data: backup.data,
options: { truncate: true }
})
});

# Export to JSON
curl https://sandbox.oblien.com/database/myapp/tables/users/export/json \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-o users-backup.json
# Export to SQL
curl https://sandbox.oblien.com/database/myapp/export/sql \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-o myapp-backup.sql
# Import from JSON
curl -X POST https://sandbox.oblien.com/database/myapp/tables/users/import/json \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-H "Content-Type: application/json" \
-d @users-backup.json

Migration Example
// Export from source database
const oldData = await sandbox.database.exportTableToJSON('old_db', 'customers');
// Transform data if needed
const transformedData = oldData.data.map(customer => ({
name: customer.full_name, // Column rename
email: customer.email_address.toLowerCase(),
age: parseInt(customer.age),
status: 'migrated'
}));
// Import to new database
await sandbox.database.importFromJSON({
database: 'new_db',
table: 'users',
data: transformedData,
options: {
skipErrors: true // Skip invalid records
}
});
console.log(`Migrated ${transformedData.length} records`);

// Export from old database
const response = await fetch(
`${sandbox.url}/database/old_db/tables/customers/export/json`,
{
headers: {
'Authorization': `Bearer ${sandbox.token}`
}
}
);
const oldData = await response.json();
// Transform and import
const transformedData = oldData.data.map(c => ({
name: c.full_name,
email: c.email_address.toLowerCase(),
status: 'migrated'
}));
await fetch(`${sandbox.url}/database/new_db/tables/users/import/json`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${sandbox.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
data: transformedData,
options: { skipErrors: true }
})
});

# Export from old database
curl https://sandbox.oblien.com/database/old_db/tables/customers/export/json \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-o old-data.json
# Transform with jq (example)
cat old-data.json | jq '.data | map({
name: .full_name,
email: (.email_address | ascii_downcase),
status: "migrated"
})' > new-data.json
# Import to new database
curl -X POST https://sandbox.oblien.com/database/new_db/tables/users/import/json \
-H "Authorization: Bearer YOUR_SANDBOX_TOKEN" \
-H "Content-Type: application/json" \
-d @new-data.json

Scheduled Backups Example
// Create a timestamped SQL backup of every database and return the folder path.
async function createBackup() {
  // Filesystem-safe timestamp: replace ':' and '.' with '-'.
  const stamp = new Date().toISOString().replace(/[:.]/g, '-');
  const targetDir = `/opt/backups/${stamp}`;

  // Make sure the destination folder exists before writing dumps into it.
  await sandbox.files.create({
    fullPath: targetDir,
    isFolder: true
  });

  // Dump each database sequentially as a full SQL export.
  const { databases } = await sandbox.database.listDatabases();
  for (const entry of databases) {
    const dump = await sandbox.database.exportDatabaseToSQL(entry.name);
    await sandbox.files.create({
      fullPath: `${targetDir}/${entry.name}.sql`,
      content: dump.sql
    });
    console.log(`Backed up ${entry.name}`);
  }

  return targetDir;
}
// Run backup
const backupPath = await createBackup();
console.log(`Backup saved to: ${backupPath}`);async function createBackup() {
// Raw-HTTP variant: export every database as a SQL dump via the REST API and
// save each dump under a timestamped path using the /files/create endpoint.
// NOTE(review): unlike the SDK example, no backup directory is created first —
// presumably /files/create creates parent folders; confirm against the files API.
// Filesystem-safe timestamp: ':' and '.' replaced with '-'.
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
// List databases
const dbResponse = await fetch(`${sandbox.url}/database/list`, {
headers: { 'Authorization': `Bearer ${sandbox.token}` }
});
const databases = await dbResponse.json();
// Export each database
for (const db of databases.databases) {
const sqlResponse = await fetch(
`${sandbox.url}/database/${db.name}/export/sql`,
{
headers: { 'Authorization': `Bearer ${sandbox.token}` }
}
);
const result = await sqlResponse.json();
// Save backup
await fetch(`${sandbox.url}/files/create`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${sandbox.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
fullPath: `/opt/backups/${timestamp}/${db.name}.sql`,
content: result.sql
})
});
}
}#!/bin/bash
# Create backup directory
TIMESTAMP=$(date +%Y-%m-%d_%H-%M-%S)
BACKUP_DIR="/opt/backups/$TIMESTAMP"
mkdir -p "$BACKUP_DIR"
# Export databases
curl https://sandbox.oblien.com/database/myapp/export/sql \
-H "Authorization: Bearer $SANDBOX_TOKEN" \
-o "$BACKUP_DIR/myapp.sql"
echo "Backup saved to $BACKUP_DIR"

Best Practices
-
Regular backups:
- Schedule automated backups
- Store backups outside the database directory
- Keep multiple backup versions
-
Test restores:
- Periodically test backup restoration
- Verify data integrity after restore
- Document restore procedures
-
Use appropriate formats:
- JSON for structured data and transformation
- CSV for spreadsheet compatibility
- SQL for complete database dumps
-
Handle large exports:
// For large tables, export in chunks
const pages = 10;
const limit = 1000;
for (let page = 1; page <= pages; page++) {
  const data = await sandbox.database.getTableData({
    database: 'myapp',
    table: 'large_table',
    page,
    limit
  });
  // Process or save chunk
  await sandbox.files.create({
    fullPath: `/opt/exports/large_table_${page}.json`,
    content: JSON.stringify(data.data)
  });
}
-
Import validation:
// Validate before importing
const validateData = (records) => {
  return records.every(record =>
    record.email &&
    record.email.includes('@') &&
    record.age > 0
  );
};
if (validateData(importData)) {
  await sandbox.database.importFromJSON({
    database: 'myapp',
    table: 'users',
    data: importData
  });
}
-
Compression for large backups:
// Export and compress
const sql = await sandbox.database.exportDatabaseToSQL('myapp');
// Save and compress using terminal
await sandbox.files.create({
  fullPath: '/tmp/backup.sql',
  content: sql.sql
});
await sandbox.terminal.execute({
  command: 'gzip /tmp/backup.sql && mv /tmp/backup.sql.gz /opt/backups/',
  cwd: '/opt'
});
Next Steps
- Database Operations - Basic database operations
- Database Tables - Schema management
- Database Queries - Advanced queries