Resilient file uploads using background sync

Large file uploads can be slow and unreliable, resulting in a poor user experience and frequent failures. In this post, we explore multiple strategies to accelerate and safeguard file uploads in web applications. We combine chunked uploads, parallel processing with Web Workers, the tus protocol for resumability, and background synchronization to ensure uploads complete even under challenging network conditions.
Important: The Background Sync API is currently supported only in Chromium-based browsers (Chrome, Edge, Opera). Firefox, Safari, and many mobile browsers do not support background sync as of 2024. Additionally, Background Sync requires HTTPS and a valid SSL certificate in production (localhost is considered secure during development).
Understanding chunked uploads
Chunked uploads break large files into smaller pieces that can be uploaded in parallel. This approach offers several advantages:
- Better handling of network interruptions
- Improved upload speeds through parallelization
- More accurate progress reporting
- Reduced memory usage
Implementing chunked uploads
The following code demonstrates how to implement basic file chunking in JavaScript:
class FileChunker {
  constructor(file, chunkSize = 1024 * 1024) {
    this.file = file
    this.chunkSize = chunkSize
    this.totalChunks = Math.ceil(file.size / chunkSize)
  }

  getChunk(index) {
    const start = index * this.chunkSize
    const end = Math.min(start + this.chunkSize, this.file.size)
    return this.file.slice(start, end)
  }

  *chunks() {
    for (let i = 0; i < this.totalChunks; i++) {
      yield this.getChunk(i)
    }
  }
}

class ChunkedUploader {
  constructor(endpoint) {
    this.endpoint = endpoint
  }

  async uploadChunk(chunk, index, totalChunks) {
    const formData = new FormData()
    formData.append('chunk', chunk)
    formData.append('index', index)
    formData.append('total', totalChunks)

    const response = await fetch(this.endpoint, {
      method: 'POST',
      body: formData,
    })

    if (!response.ok) {
      throw new Error(`Upload failed for chunk ${index}`)
    }

    return response.json()
  }

  async upload(file, onProgress) {
    const chunker = new FileChunker(file)
    const uploadPromises = []
    let completedChunks = 0

    for (const [index, chunk] of Array.from(chunker.chunks()).entries()) {
      const promise = this.uploadChunk(chunk, index, chunker.totalChunks).then(() => {
        completedChunks++
        onProgress(completedChunks / chunker.totalChunks)
      })
      uploadPromises.push(promise)
    }

    return Promise.all(uploadPromises)
  }
}
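As a quick, hypothetical usage sketch (the '/upload/chunk' endpoint and element IDs below are placeholders, not part of any particular backend), wiring the uploader to a file input might look like this:

const uploader = new ChunkedUploader('/upload/chunk')
const input = document.querySelector('#file-input')
const progressBar = document.querySelector('#progress') // assumes <progress max="1">

input.addEventListener('change', async () => {
  const file = input.files[0]
  if (!file) return
  try {
    // onProgress receives a fraction between 0 and 1
    await uploader.upload(file, (fraction) => {
      progressBar.value = fraction
    })
    console.log('Upload complete')
  } catch (error) {
    console.error('Upload failed:', error)
  }
})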
Parallel uploads with web workers
Web Workers run on separate threads, so chunk uploads can proceed without blocking the main thread. The example below shows how to distribute chunk uploads across workers in batches:
// upload-worker.js
self.addEventListener('message', async (e) => {
  const { chunk, index, endpoint } = e.data

  try {
    const formData = new FormData()
    formData.append('chunk', chunk)
    formData.append('index', index)

    const response = await fetch(endpoint, {
      method: 'POST',
      body: formData,
    })

    if (!response.ok) {
      throw new Error(`Worker upload failed for chunk ${index}`)
    }

    self.postMessage({ success: true, index })
  } catch (error) {
    self.postMessage({ success: false, error: error.message, index })
  }
})

// main.js
class ParallelUploader {
  constructor(endpoint, maxWorkers = navigator.hardwareConcurrency || 4) {
    this.endpoint = endpoint
    this.maxWorkers = maxWorkers
    this.workers = []
  }

  createWorker() {
    const worker = new Worker('upload-worker.js')
    this.workers.push(worker)
    return worker
  }

  async upload(file, onProgress) {
    const chunker = new FileChunker(file)
    const chunks = Array.from(chunker.chunks())
    let completedChunks = 0

    const processChunk = async (chunk, index) => {
      const worker = this.createWorker()
      return new Promise((resolve, reject) => {
        worker.onmessage = (e) => {
          if (e.data.success) {
            completedChunks++
            onProgress(completedChunks / chunks.length)
            resolve()
          } else {
            reject(new Error(e.data.error))
          }
          worker.terminate()
        }
        worker.postMessage({ chunk, index, endpoint: this.endpoint })
      })
    }

    const results = []
    for (let i = 0; i < chunks.length; i += this.maxWorkers) {
      const batch = chunks.slice(i, i + this.maxWorkers)
      const batchPromises = batch.map((chunk, idx) => processChunk(chunk, i + idx))
      results.push(...(await Promise.all(batchPromises)))
    }

    return results
  }
}
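Usage mirrors the chunked uploader; the main knob is the worker cap passed to the constructor. A brief, hypothetical example (the endpoint and selector are placeholders, and the call is assumed to run inside an async function):

// Cap concurrency at 4 workers regardless of CPU count
const parallelUploader = new ParallelUploader('/upload/chunk', 4)
const file = document.querySelector('#file-input').files[0]

await parallelUploader.upload(file, (fraction) => {
  console.log(`Uploaded ${Math.round(fraction * 100)}%`)
})

Note that the implementation above spins up a fresh worker per chunk and terminates it afterwards; for very large files, reusing a fixed pool of workers would avoid repeated worker creation overhead.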
Using the tus protocol for resumable uploads
The tus protocol standardizes resumable uploads. The snippet below uses tus-js-client to implement resumable file uploads:
import * as tus from 'tus-js-client'

class ResumableUploader {
  constructor(endpoint) {
    this.endpoint = endpoint
  }

  upload(file, onProgress) {
    return new Promise((resolve, reject) => {
      const upload = new tus.Upload(file, {
        endpoint: this.endpoint,
        retryDelays: [0, 3000, 5000],
        metadata: {
          filename: file.name,
          filetype: file.type,
        },
        onError: reject,
        onProgress: (bytesUploaded, bytesTotal) => {
          onProgress(bytesUploaded / bytesTotal)
        },
        onSuccess: () => resolve(upload.url),
      })

      upload.findPreviousUploads().then((previousUploads) => {
        if (previousUploads.length) {
          upload.resumeFromPreviousUpload(previousUploads[0])
        }
        upload.start()
      })
    })
  }
}
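A short, hypothetical usage example follows; the endpoint URL is a placeholder and must point at a tus-capable server such as tusd:

const resumableUploader = new ResumableUploader('https://example.com/files/')
const file = document.querySelector('#file-input').files[0]

resumableUploader
  .upload(file, (fraction) => {
    console.log(`Uploaded ${Math.round(fraction * 100)}%`)
  })
  .then((uploadUrl) => console.log('Upload finished at', uploadUrl))
  .catch((error) => console.error('Resumable upload failed:', error))

Because the server reports how many bytes it has already received, findPreviousUploads() lets a new session resume from the last confirmed offset instead of starting over.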
Integrating background sync
Background Sync enables you to queue file uploads when connectivity is intermittent and process them once the network is stable. This approach enhances the resilience of your uploads.
Begin by detecting support for Background Sync, Service Workers, and IndexedDB:
function isBackgroundSyncSupported() {
  return 'serviceWorker' in navigator && 'SyncManager' in window && 'indexedDB' in window
}

if (isBackgroundSyncSupported()) {
  // Register your service worker and prepare for background sync tasks.
} else {
  console.warn('Background Sync is not supported. Falling back to direct uploads.')
  // Implement an alternative upload strategy.
}
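Registering the service worker itself is a single call. Here is a minimal sketch, assuming the service worker shown later in this post lives at '/sw.js' (adjust the path to match your project):

if (isBackgroundSyncSupported()) {
  navigator.serviceWorker
    .register('/sw.js')
    .then((registration) => {
      console.log('Service worker registered with scope:', registration.scope)
    })
    .catch((error) => {
      console.error('Service worker registration failed:', error)
    })
}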
Next, store upload requests using IndexedDB. The code below demonstrates how to open an IndexedDB database and persist the form data of a queued upload; the service worker will retrieve it later for processing:
const DB_NAME = 'UploadQueueDB'
const STORE_NAME = 'uploads'

async function openDB() {
  return new Promise((resolve, reject) => {
    const request = indexedDB.open(DB_NAME, 1)
    request.onerror = () => reject(request.error)
    request.onsuccess = () => resolve(request.result)
    request.onupgradeneeded = (event) => {
      const db = event.target.result
      db.createObjectStore(STORE_NAME, { keyPath: 'id', autoIncrement: true })
    }
  })
}

async function storeFormData(formData) {
  const db = await openDB()
  return new Promise((resolve, reject) => {
    const tx = db.transaction(STORE_NAME, 'readwrite')
    // IDBRequest objects are not Promises, so wait for the transaction to
    // complete instead of awaiting store.add() directly.
    tx.objectStore(STORE_NAME).add({
      data: Array.from(formData.entries()),
      timestamp: Date.now(),
    })
    tx.oncomplete = () => resolve()
    tx.onerror = () => reject(tx.error)
  })
}
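With storage in place, queue the upload and request a one-off sync so the browser wakes the service worker once connectivity returns. The queueUpload() helper below is a hypothetical convenience wrapper; the important part is that the tag passed to sync.register() matches the tag the service worker listens for ('file-upload-sync' throughout this post):

async function queueUpload(formData) {
  // Persist the upload first, then ask for a background sync
  await storeFormData(formData)
  const registration = await navigator.serviceWorker.ready
  await registration.sync.register('file-upload-sync')
}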
In your service worker, listen for sync events and process queued uploads with robust error handling:
self.addEventListener('sync', (event) => {
  if (event.tag === 'file-upload-sync') {
    event.waitUntil(
      processQueuedUploads().catch((error) => {
        console.error('Sync failed:', error)
        throw error
      }),
    )
  }
})
// Small helper: IndexedDB requests are event-based, so wrap them in a Promise.
function requestToPromise(request) {
  return new Promise((resolve, reject) => {
    request.onsuccess = () => resolve(request.result)
    request.onerror = () => reject(request.error)
  })
}

async function processQueuedUploads() {
  const db = await openDB()

  // Read the queue up front: IndexedDB transactions auto-close while we await
  // fetch(), so each delete below uses its own short-lived transaction.
  const readTx = db.transaction(STORE_NAME, 'readonly')
  const uploads = await requestToPromise(readTx.objectStore(STORE_NAME).getAll())

  for (const upload of uploads) {
    try {
      const formData = new FormData()
      upload.data.forEach(([key, value]) => formData.append(key, value))

      const response = await fetch('/upload', {
        method: 'POST',
        body: formData,
      })

      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`)
      }

      // Remove the entry only after the server has confirmed the upload
      const deleteTx = db.transaction(STORE_NAME, 'readwrite')
      await requestToPromise(deleteTx.objectStore(STORE_NAME).delete(upload.id))
    } catch (error) {
      console.error(`Failed to process upload ${upload.id}:`, error)
      throw error
    }
  }
}
Remember that Background Sync requires a secure context (HTTPS) in production. For local development, localhost is considered secure.
Best practices
- Choose appropriate chunk sizes (1–5 MB is often optimal).
- Implement robust retry logic for failed uploads (see the backoff sketch after this list).
- Leverage Web Workers for parallel uploads.
- Use feature detection to verify Background Sync support.
- Queue uploads with IndexedDB and clear successfully processed items.
- Provide clear progress feedback to users.
- Monitor memory usage when handling large files.
- Test thoroughly under various network conditions using developer tools.
- Use unique tags for different sync operations and implement fallback strategies when Background Sync is unsupported.
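For the retry bullet above, a minimal sketch of exponential backoff around an upload function might look like the following; the attempt count and delays are arbitrary starting values rather than recommendations:

// Hypothetical helper: retry an async upload with exponential backoff
async function uploadWithRetry(uploadFn, maxAttempts = 4, baseDelayMs = 1000) {
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await uploadFn()
    } catch (error) {
      if (attempt === maxAttempts) throw error
      // Wait 1s, 2s, 4s, ... before the next attempt
      const delay = baseDelayMs * 2 ** (attempt - 1)
      await new Promise((resolve) => setTimeout(resolve, delay))
    }
  }
}

// Example: wrap a single chunk upload from the ChunkedUploader above
// await uploadWithRetry(() => uploader.uploadChunk(chunk, index, totalChunks))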
Conclusion
Combining strategies like chunked uploads, parallel processing, resumable uploads via the tus protocol, and Background Sync helps ensure that your files are uploaded reliably, even in adverse network conditions. For a production-ready solution that incorporates these best practices out of the box, consider using Transloadit's file uploading capabilities.