Resumable file uploads in Angular with RxJS

Uploading large files can be challenging due to network interruptions, browser limitations, and user cancellations. Chunked and resumable uploads provide a robust solution by breaking files into smaller, manageable pieces and allowing uploads to resume after interruptions.
Why chunked and resumable uploads?
Chunked uploads split large files into smaller segments, reducing memory usage and improving reliability. Resumable uploads allow interrupted uploads to continue from the last successful chunk, enhancing the user experience.
Setting up your Angular project
First, ensure you have the latest Angular CLI installed:
# Install the latest Angular CLI globally
npm install -g @angular/cli
# Create a new Angular project
ng new chunked-upload-app
cd chunked-upload-app
RxJS is bundled with Angular by default, so no separate installation is needed.
For Angular applications using the traditional NgModule approach, ensure HttpClientModule is imported in your app.module.ts:
import { NgModule } from '@angular/core'
import { BrowserModule } from '@angular/platform-browser'
import { HttpClientModule } from '@angular/common/http'
import { AppComponent } from './app.component'
@NgModule({
declarations: [AppComponent],
imports: [BrowserModule, HttpClientModule],
bootstrap: [AppComponent],
})
export class AppModule {}
For standalone applications (Angular 15+), use the provideHttpClient function in your app.config.ts:
import { ApplicationConfig } from '@angular/core'
import { provideHttpClient } from '@angular/common/http'
export const appConfig: ApplicationConfig = {
providers: [provideHttpClient()],
}
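This configuration is consumed when the application boots. Below is a minimal sketch, assuming the default main.ts layout generated by the Angular CLI:
import { bootstrapApplication } from '@angular/platform-browser'
import { AppComponent } from './app/app.component'
import { appConfig } from './app/app.config'
// Bootstrap the standalone root component with provideHttpClient registered via appConfig
bootstrapApplication(AppComponent, appConfig).catch((err) => console.error(err))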
Implementing file slicing
Use the HTML5 File API to slice files into chunks, with error handling:
sliceFile(file: File, chunkSize: number): Blob[] {
const chunks: Blob[] = [];
let offset = 0;
try {
while (offset < file.size) {
const chunk = file.slice(offset, offset + chunkSize);
chunks.push(chunk);
offset += chunkSize;
}
return chunks;
} catch (error) {
console.error('Error slicing file:', error);
throw new Error('Failed to slice file. Your browser may not support the File API.');
}
}
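As a quick sanity check, slicing produces Math.ceil(file.size / chunkSize) blobs, with the final chunk possibly smaller than the rest. A hypothetical example with a 2.5 MB file and 1 MB chunks, assuming sliceFile is a method on the same class:
// Hypothetical usage: a 2.5 MB file sliced into 1 MB chunks yields three blobs
const file = new File([new Uint8Array(2.5 * 1024 * 1024)], 'video.mp4');
const chunks = this.sliceFile(file, 1024 * 1024);
console.log(chunks.length);   // 3
console.log(chunks[2].size);  // 524288 (the final, smaller chunk)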
Creating an RxJS pipeline for chunk uploads
Here's an implementation for sequentially uploading chunks using RxJS, with error handling:
import { HttpClient, HttpHeaders, HttpEventType, HttpEvent } from '@angular/common/http';
import { concatMap, catchError, tap, filter, map } from 'rxjs/operators';
import { from, Observable, throwError } from 'rxjs';
uploadChunks(chunks: Blob[], uploadUrl: string, fileId: string): Observable<HttpEvent<any>> {
// Every chunk except the last has the same size, so derive byte offsets from the first chunk
// and the total size from the actual blobs (the last chunk may be smaller).
const chunkSize = chunks[0]?.size ?? 0;
const totalSize = chunks.reduce((sum, c) => sum + c.size, 0);
return from(chunks).pipe(
concatMap((chunk, index) => {
const headers = new HttpHeaders({
'Content-Range': `bytes ${index * chunkSize}-${index * chunkSize + chunk.size - 1}/${totalSize}`,
'X-File-Id': fileId
});
return this.http.post(uploadUrl, chunk, {
headers,
reportProgress: true,
observe: 'events'
}).pipe(
tap(event => {
if (event.type === HttpEventType.Response) {
console.log(`Chunk ${index} uploaded successfully`);
}
}),
catchError(error => {
console.error(`Error uploading chunk ${index}:`, error);
return throwError(() => new Error(`Failed to upload chunk ${index}: ${error.message}`));
})
);
})
);
}
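To tie slicing and uploading together, subscribe to the stream returned above. The snippet below is only a sketch: the endpoint URL and file identifier are placeholders, and the file variable is assumed to come from a file input.
// Hypothetical usage: slice the file, then upload the chunks sequentially
const chunks = this.sliceFile(file, 1024 * 1024);
this.uploadChunks(chunks, 'https://example.com/upload', 'file-123').subscribe({
  next: (event) => {
    // Progress events report bytes for the chunk currently in flight
    if (event.type === HttpEventType.UploadProgress && event.total) {
      console.log(`Current chunk: ${Math.round((100 * event.loaded) / event.total)}%`);
    }
  },
  error: (err) => console.error('Upload aborted:', err),
  complete: () => console.log('All chunks uploaded'),
});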
Adding pause, resume, and cancellation
Implement a service for handling pause, resume, and cancellation with state tracking:
import { Injectable } from '@angular/core'
import { HttpClient, HttpEvent, HttpEventType, HttpHeaders } from '@angular/common/http'
import { Observable, Subject, from, throwError } from 'rxjs'
import { concatMap, takeUntil, catchError, tap, switchMap, startWith, filter, map } from 'rxjs/operators'
export interface FileUploadProgress {
id: string
file: File
progress: number
complete: boolean
error: boolean
errorMessage?: string
}
@Injectable({
providedIn: 'root',
})
export class FileUploadService {
private cancel$ = new Subject<void>()
private pause$ = new Subject<void>()
private resume$ = new Subject<void>()
private uploadedChunks = new Map<string, number>() // fileId -> number of chunks uploaded so far
constructor(private http: HttpClient) {}
uploadFile(
file: File,
uploadUrl: string,
chunkSize: number = 1024 * 1024,
): Observable<FileUploadProgress> {
const fileId = this.generateId()
const chunks = this.sliceFile(file, chunkSize)
this.uploadedChunks.set(fileId, 0)
return this.uploadChunksWithResume(chunks, uploadUrl, fileId).pipe(
filter(
(event) =>
event.type === HttpEventType.UploadProgress || event.type === HttpEventType.Response,
),
map((event) => {
const chunksDone = this.uploadedChunks.get(fileId) ?? 0
let progress = Math.round((100 * chunksDone) / chunks.length)
let complete = false
const error = false
if (event.type === HttpEventType.UploadProgress && event.total) {
// Overall progress: completed chunks plus the fraction of the chunk currently in flight
progress = Math.round((100 * (chunksDone + event.loaded / event.total)) / chunks.length)
} else if (event.type === HttpEventType.Response) {
// A Response event fires for every chunk; the upload is complete only once all chunks are acknowledged
complete = chunksDone >= chunks.length
progress = complete ? 100 : progress
}
return {
id: fileId,
file,
progress,
complete,
error,
} as FileUploadProgress
}),
catchError((err) => {
return throwError(
() =>
({
id: fileId,
file,
progress: 0,
complete: false,
error: true,
errorMessage: err.message,
}) as FileUploadProgress,
)
}),
)
}
private uploadChunksWithResume(
chunks: Blob[],
uploadUrl: string,
fileId: string,
): Observable<HttpEvent<any>> {
// Every chunk except the last has the same size; byte offsets and the total come from the actual blobs
const chunkSize = chunks[0]?.size ?? 0
const totalSize = chunks.reduce((sum, c) => sum + c.size, 0)
return this.resume$.pipe(
startWith(null), // Start immediately
switchMap(() => {
const startFrom = this.uploadedChunks.get(fileId) || 0
const remainingChunks = chunks.slice(startFrom)
return from(remainingChunks).pipe(
concatMap((chunk, index) => {
const actualIndex = startFrom + index
const headers = new HttpHeaders({
'Content-Range': `bytes ${actualIndex * chunkSize}-${actualIndex * chunkSize + chunk.size - 1}/${totalSize}`,
'X-File-Id': fileId,
'X-Chunk-Index': actualIndex.toString(),
})
return this.http
.post(uploadUrl, chunk, {
headers,
reportProgress: true,
observe: 'events',
})
.pipe(
tap((event) => {
if (event.type === HttpEventType.Response) {
this.uploadedChunks.set(fileId, actualIndex + 1)
}
}),
catchError((error) => {
console.error(`Error uploading chunk ${actualIndex}:`, error)
return throwError(
() => new Error(`Failed to upload chunk ${actualIndex}: ${error.message}`),
)
}),
)
}),
takeUntil(this.pause$),
takeUntil(this.cancel$),
)
}),
)
}
private sliceFile(file: File, chunkSize: number): Blob[] {
const chunks: Blob[] = []
let offset = 0
try {
while (offset < file.size) {
const chunk = file.slice(offset, offset + chunkSize)
chunks.push(chunk)
offset += chunkSize
}
return chunks
} catch (error) {
console.error('Error slicing file:', error)
throw new Error('Failed to slice file. Your browser may not support the File API.')
}
}
private generateId(): string {
return Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)
}
pauseUpload(): void {
this.pause$.next()
}
resumeUpload(): void {
this.resume$.next()
}
cancelUpload(): void {
this.cancel$.next()
this.uploadedChunks.clear()
}
}
Tracking progress
Wire the FileUploadService into a component to display progress and expose pause, resume, and cancel controls. If you are using the standalone setup, also add standalone: true and imports: [CommonModule] to the decorator so that *ngIf is available:
import { Component } from '@angular/core'
import { FileUploadService, FileUploadProgress } from './file-upload.service'
@Component({
selector: 'app-file-upload',
template: `
<div class="upload-container">
<input type="file" (change)="onFileSelected($event)" />
<div *ngIf="currentUpload">
<div class="progress-bar" [style.width.%]="currentUpload.progress"></div>
<div class="controls">
<button (click)="pauseUpload()" [disabled]="!currentUpload || currentUpload.complete">
Pause
</button>
<button (click)="resumeUpload()" [disabled]="!currentUpload || currentUpload.complete">
Resume
</button>
<button (click)="cancelUpload()" [disabled]="!currentUpload || currentUpload.complete">
Cancel
</button>
</div>
<div *ngIf="currentUpload.error" class="error">
{{ currentUpload.errorMessage }}
</div>
</div>
</div>
`,
styles: [
`
.progress-bar {
height: 20px;
background-color: #4caf50;
transition: width 0.3s;
}
.controls {
margin-top: 10px;
}
.error {
color: red;
margin-top: 10px;
}
`,
],
})
export class FileUploadComponent {
currentUpload: FileUploadProgress | null = null
constructor(private uploadService: FileUploadService) {}
onFileSelected(event: Event): void {
const input = event.target as HTMLInputElement
if (input.files && input.files.length > 0) {
const file = input.files[0]
this.uploadFile(file)
}
}
uploadFile(file: File): void {
// Determine optimal chunk size based on file size
const chunkSize = this.determineOptimalChunkSize(file.size)
this.uploadService
.uploadFile(file, 'https://your-upload-endpoint.com/upload', chunkSize)
.subscribe({
next: (progress) => {
this.currentUpload = progress
},
error: (err) => {
console.error('Upload failed:', err)
// The service rethrows a FileUploadProgress object, so the template can display errorMessage
this.currentUpload = err as FileUploadProgress
},
})
}
pauseUpload(): void {
this.uploadService.pauseUpload()
}
resumeUpload(): void {
this.uploadService.resumeUpload()
}
cancelUpload(): void {
this.uploadService.cancelUpload()
this.currentUpload = null
}
/**
* Determines a chunk size based on the file size. General trade-offs:
*
* - Small chunks (< 1MB): Better for unreliable connections, but more HTTP overhead.
* - Medium chunks (1-5MB): Good balance for most web applications.
* - Large chunks (> 5MB): Better for reliable, high-bandwidth connections and larger files.
*/
determineOptimalChunkSize(fileSize: number): number {
// Base chunk size on file size
if (fileSize < 1024 * 1024 * 10) {
// < 10MB
return 512 * 1024 // 512KB chunks
} else if (fileSize < 1024 * 1024 * 100) {
// < 100MB
return 2 * 1024 * 1024 // 2MB chunks
} else {
return 5 * 1024 * 1024 // 5MB chunks
}
}
}
This DevTip demonstrates how to implement resumable, chunked file uploads in Angular using RxJS and HttpClient. For more advanced features and robust file uploading, consider using Uppy, an open-source file uploader that integrates seamlessly with Transloadit.