Merged

25 commits
f92192a
Import tar correctly.
samwho Jul 29, 2025
30caf1d
Replace memory-intensive TAR library with streaming implementation
samwho Jul 30, 2025
81ff055
Add direct streaming for backup exports without intermediate temp files
samwho Jul 30, 2025
e7ebe02
Implement complete streaming export with encryption support
samwho Jul 30, 2025
f74887a
Working towards moving all app exports/imports to streamed variants.
samwho Jul 31, 2025
c1897fd
Merge master.
samwho Jul 31, 2025
d38c26c
Simplify some types.
samwho Jul 31, 2025
ddc19ec
checkpoint
samwho Aug 4, 2025
d75fd07
checkpoint
samwho Aug 4, 2025
bd7e08f
Merge branch 'master' of github.com:budibase/budibase into tar-cpu
samwho Aug 4, 2025
d42c34b
checkpoint
samwho Aug 4, 2025
aab3842
Merge master.
samwho Aug 4, 2025
820e4b7
Fix import path, remove some dead code.
samwho Aug 4, 2025
181355a
Fix application.spec.ts
samwho Aug 5, 2025
2d016b9
Merge branch 'master' of github.com:budibase/budibase into tar-cpu
samwho Aug 5, 2025
81e35e8
Nudge.
samwho Aug 5, 2025
1dea57d
Remove nudge file.
samwho Aug 5, 2025
5b1fce9
Fix plugin.spec.ts.
samwho Aug 5, 2025
7208a43
Fix public application.spec.ts.
samwho Aug 6, 2025
8c90262
Merge branch 'master' of github.com:budibase/budibase into tar-cpu
samwho Aug 6, 2025
f12702f
Update pro.
samwho Aug 6, 2025
d645fba
Update pro.
samwho Aug 6, 2025
41617b8
Merge branch 'master' of github.com:budibase/budibase into tar-cpu
samwho Aug 7, 2025
9759ec7
Revert changes to packages/cli
samwho Aug 7, 2025
a36bff4
Update pro.
samwho Aug 7, 2025
3 changes: 2 additions & 1 deletion packages/backend-core/package.json
@@ -72,7 +72,8 @@
"rotating-file-stream": "3.1.0",
"sanitize-s3-objectkey": "0.0.1",
"semver": "^7.5.4",
"tar-fs": "2.1.2",
"tar-fs": "2.1.3",
"tar-stream": "3.1.7",
"uuid": "^8.3.2"
},
"devDependencies": {
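Note: backend-core bumps tar-fs from 2.1.2 to 2.1.3 and adds tar-stream, which packs and extracts archives entry by entry instead of materialising the whole tarball in memory. A minimal sketch of the packing pattern (illustrative only, not code from this PR; the file list and output path are made up):

```ts
import { pack } from "tar-stream"
import { createGzip } from "zlib"
import { createReadStream, createWriteStream, statSync } from "fs"
import { pipeline } from "stream/promises"

// Stream each file into a gzipped tar without buffering the archive in memory.
async function tarGzFiles(files: string[], outputPath: string) {
  const archive = pack()

  const addEntries = (async () => {
    for (const file of files) {
      // entry() needs the size up front when the body is streamed in
      const entry = archive.entry({ name: file, size: statSync(file).size })
      await pipeline(createReadStream(file), entry)
    }
    archive.finalize()
  })()

  // Drain the archive while entries are still being added, so backpressure
  // never deadlocks the pack stream.
  await Promise.all([
    addEntries,
    pipeline(archive, createGzip(), createWriteStream(outputPath)),
  ])
}
```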
5 changes: 3 additions & 2 deletions packages/backend-core/src/db/couch/DatabaseImpl.ts
@@ -20,12 +20,13 @@ import {
import { getCouchInfo } from "./connections"
import { directCouchUrlCall } from "./utils"
import { getPouchDB } from "./pouchDB"
import { ReadStream, WriteStream } from "fs"
import { WriteStream } from "fs"
import { newid } from "../../docIds/newid"
import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import { sqlLog } from "../../sql/utils"
import { Readable } from "stream"

const DATABASE_NOT_FOUND = "Database does not exist."

@@ -507,7 +508,7 @@ export class DatabaseImpl implements Database {
return pouch.dump(stream, opts)
}

async load(stream: ReadStream) {
async load(stream: Readable) {
const pouch = getPouchDB(this.name)
// @ts-ignore
return pouch.load(stream)
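Widening load() from fs.ReadStream to stream.Readable means callers are no longer forced to write a dump to disk first — any readable source will do. A hedged illustration of what the broader type allows (the db shape is reduced to just the method in question):

```ts
import { createReadStream } from "fs"
import { Readable } from "stream"

type Loadable = { load(stream: Readable): Promise<void> }

// An fs.ReadStream still works, since it is a Readable…
async function loadFromFile(db: Loadable, dumpPath: string) {
  await db.load(createReadStream(dumpPath))
}

// …but so does any other Readable, e.g. a decrypted PassThrough or an
// in-memory dump, with no intermediate temp file.
async function loadFromString(db: Loadable, dump: string) {
  await db.load(Readable.from([dump]))
}
```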
98 changes: 89 additions & 9 deletions packages/backend-core/src/security/encryption.ts
@@ -3,6 +3,7 @@ import fs from "fs"
import zlib from "zlib"
import env from "../environment"
import { join } from "path"
import { PassThrough, Readable } from "stream"

const ALGO = "aes-256-ctr"
const SEPARATOR = "-"
@@ -79,24 +80,103 @@ export async function encryptFile(
const inputFile = fs.createReadStream(filePath)
const outputFile = fs.createWriteStream(join(dir, outputFileName))

encryptStream(inputFile, secret).pipe(outputFile)

return new Promise<{ filename: string; dir: string }>((resolve, reject) => {
outputFile.on("finish", () => {
resolve({
filename: outputFileName,
dir,
})
})
const cleanupReject = (error: Error) => {
inputFile.close()
outputFile.close()
reject(error)
}
outputFile.on("error", cleanupReject)
inputFile.on("error", cleanupReject)
})
}

export function encryptStream(inputStream: Readable, secret: string): Readable {
const salt = crypto.randomBytes(SALT_LENGTH)
const iv = crypto.randomBytes(IV_LENGTH)
const stretched = stretchString(secret, salt)
const cipher = crypto.createCipheriv(ALGO, stretched, iv)
const gzip = zlib.createGzip()

outputFile.write(salt)
outputFile.write(iv)
const outputStream = new PassThrough()
outputStream.write(salt)
outputStream.write(iv)

inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
// Set up error propagation
inputStream.on("error", err => {
gzip.destroy(err)
if (!outputStream.destroyed) {
outputStream.destroy(err)
}
})
gzip.on("error", err => {
cipher.destroy(err)
if (!outputStream.destroyed) {
outputStream.destroy(err)
}
})
cipher.on("error", err => {
if (!outputStream.destroyed) {
outputStream.destroy(err)
}
})

return new Promise<{ filename: string; dir: string }>(r => {
outputFile.on("finish", () => {
r({
filename: outputFileName,
dir,
inputStream.pipe(gzip).pipe(cipher).pipe(outputStream)

return outputStream
}

export async function decryptStream(
inputStream: Readable,
secret: string
): Promise<Readable> {
const outputStream = new PassThrough()

let decipher: crypto.Decipher | null = null
let gunzip: zlib.Gunzip | null = null
let firstChunk = true

inputStream.on("data", chunk => {
if (firstChunk) {
firstChunk = false
const salt = chunk.slice(0, SALT_LENGTH)
const iv = chunk.slice(SALT_LENGTH, SALT_LENGTH + IV_LENGTH)
chunk = chunk.slice(SALT_LENGTH + IV_LENGTH)

const stretched = stretchString(secret, salt)
decipher = crypto.createDecipheriv(ALGO, stretched, iv)
gunzip = zlib.createGunzip()

inputStream.on("error", err => {
decipher!.destroy(err)
})
})
decipher.on("error", err => {
gunzip!.destroy(err)
})
gunzip.on("error", err => {
outputStream.destroy(err)
})

decipher.pipe(gunzip).pipe(outputStream)
}

decipher!.write(chunk)
})

inputStream.on("end", () => {
decipher?.end()
gunzip?.end()
})

return outputStream
}

async function getSaltAndIV(path: string) {
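For context, a round-trip usage sketch of the new helpers, based only on the signatures shown above (encryptStream gzips, encrypts and prepends salt + IV; decryptStream strips them off the head of the stream and resolves to the decrypted, gunzipped bytes). The import path and file names are assumptions:

```ts
import { createReadStream, createWriteStream } from "fs"
import { pipeline } from "stream/promises"
// adjust to wherever backend-core exports encryption.ts from
import { encryptStream, decryptStream } from "../security/encryption"

async function encryptBackup(src: string, dest: string, secret: string) {
  // salt + IV are emitted first, followed by the gzipped, encrypted payload
  await pipeline(encryptStream(createReadStream(src), secret), createWriteStream(dest))
}

async function decryptBackup(src: string, dest: string, secret: string) {
  const plain = await decryptStream(createReadStream(src), secret)
  await pipeline(plain, createWriteStream(dest))
}
```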
2 changes: 1 addition & 1 deletion packages/pro
Submodule pro updated from 014389 to b4ad9b
3 changes: 2 additions & 1 deletion packages/server/package.json
@@ -122,7 +122,7 @@
"snowflake-sdk": "^1.15.0",
"socket.io": "4.8.1",
"svelte": "4.2.19",
"tar": "6.2.1",
"tar-stream": "3.1.7",
"tmp": "0.2.3",
"to-json-schema": "0.2.5",
"uuid": "^8.3.2",
@@ -155,6 +155,7 @@
"@types/supertest": "2.0.14",
"@types/swagger-jsdoc": "^6.0.4",
"@types/tar": "6.1.5",
"@types/tar-stream": "3.1.4",
"@types/tmp": "0.2.6",
"@types/uuid": "8.3.4",
"chance": "^1.1.12",
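The server package likewise swaps the tar dependency for tar-stream. A sketch of reading a .tar.gz entry by entry with it (illustrative only; assumes a flat archive whose destination directory already exists):

```ts
import { extract } from "tar-stream"
import { createReadStream, createWriteStream } from "fs"
import { createGunzip } from "zlib"
import { pipeline } from "stream/promises"
import { join } from "path"

async function extractTarGz(archive: string, destDir: string) {
  const untar = extract()
  untar.on("entry", (header, stream, next) => {
    if (header.type !== "file") {
      // not a regular file (directory, symlink, …) — discard and move on
      stream.resume()
      return next()
    }
    stream.pipe(createWriteStream(join(destDir, header.name))).on("finish", next)
  })
  // the archive is decompressed and unpacked as it is read, never fully buffered
  await pipeline(createReadStream(archive), createGunzip(), untar)
}
```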
47 changes: 32 additions & 15 deletions packages/server/src/api/controllers/application.ts
@@ -10,6 +10,8 @@ import {
deleteAppFiles,
revertClientLibrary,
updateClientLibrary,
storeTempFileStream,
downloadTemplate,
} from "../../utilities/fileSystem"
import {
AppStatus,
@@ -27,6 +29,7 @@ import {
env as envCore,
events,
features,
HTTPError,
objectStore,
roles,
tenancy,
@@ -81,6 +84,8 @@ import * as appMigrations from "../../appMigrations"
import { createSampleDataTableScreen } from "../../constants/screens"
import { defaultAppNavigator } from "../../constants/definitions"
import { processMigrations } from "../../appMigrations/migrationsProcessor"
import { ImportOpts } from "../../sdk/app/backups/imports"
import { join } from "path"

// utility function, need to do away with this
async function getLayouts() {
@@ -158,11 +163,23 @@ async function createInstance(appId: string, template: AppTemplate) {
await createAllSearchIndex()

if (template && template.useTemplate) {
const opts = {
const opts: ImportOpts = {
importObjStoreContents: true,
updateAttachmentColumns: !template.key, // preserve attachments when using Budibase templates
password: template.file?.password,
}
await sdk.backups.importApp(appId, db, template, opts)

let path = template.file?.path
if (!path && template.key) {
const [type, name] = template.key.split("/")
const tmpPath = await downloadTemplate(type, name)
path = join(tmpPath, name, "db", "dump.txt")
}
if (!path) {
throw new HTTPError("App export must have path", 400)
}

await sdk.backups.importApp(appId, db, path, opts)
} else {
// create the users table
await db.put(USERS_TABLE_SCHEMA)
@@ -853,23 +870,25 @@ export async function importToApp(
ctx: UserCtx<ImportToUpdateAppRequest, ImportToUpdateAppResponse>
) {
const { appId } = ctx.params

const appExport = ctx.request.files?.appExport
const password = ctx.request.body.encryptionPassword
if (!appExport) {
ctx.throw(400, "Must supply app export to import")
}
if (Array.isArray(appExport)) {
ctx.throw(400, "Must only supply one app export")
}
const fileAttributes = { type: appExport.type!, path: appExport.path! }
try {
await sdk.applications.updateWithExport(appId, fileAttributes, password)
} catch (err: any) {
ctx.throw(
500,
`Unable to perform update, please retry - ${err?.message || err}`
)

if (!appExport.path) {
ctx.throw(400, "App export must have path")
}

await sdk.applications.updateWithExport(
appId,
appExport.path,
ctx.request.body.encryptionPassword
)

ctx.body = { message: "app updated" }
}

@@ -894,10 +913,8 @@ export async function duplicateApp(
const url = sdk.applications.getAppUrl({ name: appName, url: possibleUrl })
checkAppUrl(ctx, apps, url)

const tmpPath = await sdk.backups.exportApp(sourceAppId, {
excludeRows: false,
tar: false,
})
const stream = await sdk.backups.exportApp(sourceAppId)
const tmpPath = await storeTempFileStream(stream)

const createRequestBody: CreateAppRequest = {
name: appName,
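duplicateApp above now receives a Readable from exportApp and persists it with storeTempFileStream before re-importing. That helper's implementation isn't shown in this diff; a minimal sketch of what such a function could look like (the name suffix, temp-dir layout and missing extension are assumptions):

```ts
import { createWriteStream } from "fs"
import { pipeline } from "stream/promises"
import { Readable } from "stream"
import { join } from "path"
import { tmpdir } from "os"
import { randomUUID } from "crypto"

// Drain a readable export stream into a uniquely named temp file and
// return its path, so path-based consumers keep working.
async function storeTempFileStreamSketch(stream: Readable): Promise<string> {
  const path = join(tmpdir(), `app-export-${randomUUID()}`)
  await pipeline(stream, createWriteStream(path))
  return path
}
```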
5 changes: 3 additions & 2 deletions packages/server/src/api/controllers/backup.ts
@@ -26,11 +26,12 @@ export async function exportAppDump(
const extension = encryptPassword ? "enc.tar.gz" : "tar.gz"
const backupIdentifier = `${appName}-export-${new Date().getTime()}.${extension}`
ctx.attachment(backupIdentifier)
ctx.body = await sdk.backups.streamExportApp({
appId,
const stream = await sdk.backups.exportApp(appId, {
excludeRows,
encryptPassword,
})
stream.pipe(ctx.res)
ctx.status = 200

await context.doInAppContext(appId, async () => {
const appDb = context.getAppDB()
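The controller now pipes the export stream straight into the HTTP response rather than buffering a temp file. A hedged variant of the same idea using pipeline() for error propagation (exportApp here stands in for sdk.backups.exportApp; res is the raw Node response that Koa exposes as ctx.res):

```ts
import { pipeline } from "stream/promises"
import { Readable } from "stream"
import { ServerResponse } from "http"

async function sendExport(
  res: ServerResponse,
  exportApp: () => Promise<Readable>,
  filename: string
) {
  const stream = await exportApp()
  res.statusCode = 200
  res.setHeader("Content-Disposition", `attachment; filename="${filename}"`)
  // pipeline destroys both ends if either errors, instead of potentially
  // leaving the response hanging the way a bare .pipe() can
  await pipeline(stream, res)
}
```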
11 changes: 3 additions & 8 deletions packages/server/src/api/controllers/plugin/utils.ts
@@ -6,12 +6,7 @@ export async function downloadUnzipTarball(
name: string,
headers = {}
) {
try {
const path = createTempFolder(name)
await objectStore.downloadTarballDirect(url, path, headers)

return path
} catch (e: any) {
throw new Error(e.message)
}
const path = createTempFolder(name)
await objectStore.downloadTarballDirect(url, path, headers)
return path
}
@@ -73,7 +73,7 @@ describe("check export/import", () => {
expect(res.headers["content-disposition"]).toMatch(
/attachment; filename=".*-export-.*\.tar.gz"/g
)
expect(res.body instanceof Buffer).toBe(true)
expect(res.body).toBeInstanceOf(Buffer)
expect(res.status).toBe(200)
})

12 changes: 3 additions & 9 deletions packages/server/src/sdk/app/applications/import.ts
@@ -138,26 +138,20 @@ async function getImportableDocuments(db: Database) {

export async function updateWithExport(
appId: string,
file: FileAttributes,
path: string,
password?: string
) {
const devId = dbCore.getDevAppID(appId)
const tempAppName = `temp_${devId}`
const tempDb = dbCore.getDB(tempAppName)
const appDb = dbCore.getDB(devId)
try {
const template = {
file: {
type: file.type!,
path: file.path!,
password,
},
}
// get a temporary version of the import
// don't need obj store, the existing app already has everything we need
await backups.importApp(devId, tempDb, template, {
await backups.importApp(devId, tempDb, path, {
importObjStoreContents: false,
updateAttachmentColumns: true,
password,
})
const newMetadata = await getNewAppMetadata(tempDb, appDb)
// get the documents to copy