// immich/server/src/services/asset-upload.service.ts
import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common';
import { Response } from 'express';
import { DateTime } from 'luxon';
import { createHash } from 'node:crypto';
import { extname, join } from 'node:path';
import { Readable } from 'node:stream';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnJob } from 'src/decorators';
import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/asset-upload';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetMetadataKey,
AssetStatus,
AssetType,
AssetVisibility,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
import { withRetry } from 'src/utils/misc';
export const MAX_RUFH_INTEROP_VERSION = 8;
2025-09-29 04:31:47 -04:00
2025-09-24 13:56:46 -04:00
@Injectable()
export class AssetUploadService extends BaseService {
async startUpload(auth: AuthDto, req: Readable, res: Response, dto: StartUploadDto): Promise<void> {
this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
const { isComplete, assetData, uploadLength, contentLength, version } = dto;
2025-09-24 13:56:46 -04:00
2025-10-06 19:47:27 -04:00
const asset = await this.onStart(auth, dto);
if (asset.isDuplicate) {
if (asset.status !== AssetStatus.Partial) {
2025-10-02 15:15:32 -04:00
return this.sendAlreadyCompletedProblem(res);
}
2025-10-06 19:47:27 -04:00
const location = `/api/upload/${asset.id}`;
2025-10-02 15:15:32 -04:00
if (version <= MAX_RUFH_INTEROP_VERSION) {
this.sendInterimResponse(res, location, version);
}
// this is a 5xx to indicate the client should do offset retrieval and resume
res.status(500).send('Incomplete asset already exists');
2025-09-24 13:56:46 -04:00
return;
}
2025-09-28 18:37:16 -04:00
if (isComplete && uploadLength !== contentLength) {
return this.sendInconsistentLengthProblem(res);
}
2025-10-06 19:47:27 -04:00
const location = `/api/upload/${asset.id}`;
if (version <= MAX_RUFH_INTEROP_VERSION) {
this.sendInterimResponse(res, location, version);
2025-09-29 04:31:47 -04:00
}
2025-09-28 18:37:16 -04:00
2025-09-24 13:56:46 -04:00
let checksumBuffer: Buffer | undefined;
2025-10-06 19:47:27 -04:00
const metadata = { id: asset.id, path: asset.path, size: contentLength, fileModifiedAt: assetData.fileModifiedAt };
const writeStream = this.pipe(req, res, metadata);
2025-09-24 13:56:46 -04:00
if (isComplete) {
const hash = createHash('sha1');
req.on('data', (data: Buffer) => hash.update(data));
2025-09-24 13:56:46 -04:00
writeStream.on('finish', () => (checksumBuffer = hash.digest()));
}
writeStream.on('finish', () => {
this.setCompleteHeader(res, dto.version, isComplete);
2025-09-24 13:56:46 -04:00
if (!isComplete) {
return res.status(201).set('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
2025-09-24 13:56:46 -04:00
}
2025-10-06 19:47:27 -04:00
this.logger.log(`Finished upload to ${asset.path}`);
if (dto.checksum.compare(checksumBuffer!) !== 0) {
2025-10-06 19:47:27 -04:00
return this.sendChecksumMismatchResponse(res, asset.id, asset.path);
2025-09-28 18:37:16 -04:00
}
this.onComplete(metadata)
.then(() => res.status(200).send({ id: asset.id }))
.catch((error) => {
2025-10-06 19:47:27 -04:00
this.logger.error(`Failed to complete upload for ${asset.id}: ${error.message}`);
res.status(500).send();
});
2025-09-24 13:56:46 -04:00
});
await new Promise((resolve) => writeStream.on('close', resolve));
2025-09-24 13:56:46 -04:00
}
resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
const { isComplete, uploadLength, uploadOffset, contentLength, version } = dto;
this.setCompleteHeader(res, version, false);
return this.databaseRepository.withUuidLock(id, async () => {
const completionData = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
if (!completionData) {
res.status(404).send('Asset not found');
2025-09-24 13:56:46 -04:00
return;
}
const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData;
2025-09-24 13:56:46 -04:00
if (status !== AssetStatus.Partial) {
return this.sendAlreadyCompletedProblem(res);
2025-09-28 18:37:16 -04:00
}
if (uploadLength && size && size !== uploadLength) {
return this.sendInconsistentLengthProblem(res);
2025-09-24 13:56:46 -04:00
}
const expectedOffset = await this.getCurrentOffset(path);
if (expectedOffset !== uploadOffset) {
return this.sendOffsetMismatchProblem(res, expectedOffset, uploadOffset);
2025-09-24 13:56:46 -04:00
}
const newLength = uploadOffset + contentLength;
if (uploadLength !== undefined && newLength > uploadLength) {
res.status(400).send('Upload would exceed declared length');
2025-09-28 18:37:16 -04:00
return;
}
if (contentLength === 0 && !isComplete) {
res.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
2025-09-24 13:56:46 -04:00
return;
}
const metadata = { id, path, size: contentLength, fileModifiedAt: fileModifiedAt };
const writeStream = this.pipe(req, res, metadata);
2025-09-24 13:56:46 -04:00
writeStream.on('finish', async () => {
this.setCompleteHeader(res, version, isComplete);
2025-09-28 18:37:16 -04:00
const currentOffset = await this.getCurrentOffset(path);
if (!isComplete) {
return res.status(204).setHeader('Upload-Offset', currentOffset.toString()).send();
2025-09-28 18:37:16 -04:00
}
this.logger.log(`Finished upload to ${path}`);
const checksum = await this.cryptoRepository.hashFile(path);
if (providedChecksum.compare(checksum) !== 0) {
return this.sendChecksumMismatchResponse(res, id, path);
2025-09-24 13:56:46 -04:00
}
2025-09-28 18:37:16 -04:00
try {
await this.onComplete(metadata);
} finally {
res.status(200).send({ id });
}
2025-09-24 13:56:46 -04:00
});
await new Promise((resolve) => writeStream.on('close', resolve));
});
}
2025-09-24 13:56:46 -04:00
cancelUpload(auth: AuthDto, assetId: string, res: Response): Promise<void> {
return this.databaseRepository.withUuidLock(assetId, async () => {
const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
if (!asset) {
res.status(404).send('Asset not found');
return;
}
if (asset.status !== AssetStatus.Partial) {
return this.sendAlreadyCompletedProblem(res);
}
await this.onCancel(assetId, asset.path);
res.status(204).send();
});
}
async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
this.logger.verboseFn(() => `Getting upload status for ${id} with version ${version}`);
return this.databaseRepository.withUuidLock(id, async () => {
const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
if (!asset) {
res.status(404).send('Asset not found');
return;
}
const offset = await this.getCurrentOffset(asset.path);
this.setCompleteHeader(res, version, asset.status !== AssetStatus.Partial);
res
.status(204)
.setHeader('Upload-Offset', offset.toString())
.setHeader('Cache-Control', 'no-store')
.setHeader('Upload-Limit', 'min-size=0')
.send();
});
}
2025-10-06 19:47:27 -04:00
@OnJob({ name: JobName.PartialAssetCleanupQueueAll, queue: QueueName.BackgroundTask })
async removeStaleUploads(): Promise<void> {
// TODO: make this configurable
const createdBefore = DateTime.now().minus({ days: 7 }).toJSDate();
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForPartialAssetCleanupJob(createdBefore);
for await (const asset of assets) {
2025-10-06 19:47:27 -04:00
jobs.push({ name: JobName.PartialAssetCleanup, data: asset });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
}
await this.jobRepository.queueAll(jobs);
}
2025-10-06 19:47:27 -04:00
@OnJob({ name: JobName.PartialAssetCleanup, queue: QueueName.BackgroundTask })
removeStaleUpload({ id }: JobOf<JobName.PartialAssetCleanup>): Promise<JobStatus> {
return this.databaseRepository.withUuidLock(id, async () => {
const asset = await this.assetJobRepository.getForPartialAssetCleanupJob(id);
if (!asset) {
return JobStatus.Skipped;
}
const { checksum, fileModifiedAt, path, size } = asset;
try {
const stat = await this.storageRepository.stat(path);
if (size === stat.size && checksum === (await this.cryptoRepository.hashFile(path))) {
await this.onComplete({ id, path, fileModifiedAt });
return JobStatus.Success;
}
} catch (error: any) {
this.logger.debugFn(() => `Failed to check upload file ${path}: ${error.message}`);
}
await this.onCancel(id, path);
return JobStatus.Success;
});
}
2025-10-06 19:47:27 -04:00
async onStart(
auth: AuthDto,
{ assetData, checksum, uploadLength }: StartUploadDto,
): Promise<{ id: string; path: string; status: AssetStatus; isDuplicate: boolean }> {
const assetId = this.cryptoRepository.randomUUID();
const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId);
const extension = extname(assetData.filename);
const path = join(folder, `${assetId}${extension}`);
const type = mimeTypes.assetType(path);
if (type === AssetType.Other) {
throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
}
this.validateQuota(auth, uploadLength);
try {
await this.assetRepository.createWithMetadata(
{
id: assetId,
ownerId: auth.user.id,
libraryId: null,
checksum,
originalPath: path,
deviceAssetId: assetData.deviceAssetId,
deviceId: assetData.deviceId,
fileCreatedAt: assetData.fileCreatedAt,
fileModifiedAt: assetData.fileModifiedAt,
localDateTime: assetData.fileCreatedAt,
type: type,
isFavorite: assetData.isFavorite,
duration: assetData.duration || null,
visibility: AssetVisibility.Hidden,
originalFileName: assetData.filename,
status: AssetStatus.Partial,
},
uploadLength,
assetData.iCloudId ? [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: assetData.iCloudId } }] : undefined,
);
} catch (error: any) {
if (!isAssetChecksumConstraint(error)) {
this.logger.error(`Error creating upload asset record: ${error.message}`);
throw new InternalServerErrorException('Error creating asset');
}
const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksum);
if (!duplicate) {
throw new InternalServerErrorException('Error locating duplicate for checksum constraint');
}
return { id: duplicate.id, path, status: duplicate.status, isDuplicate: true };
}
await this.storageRepository.mkdir(folder);
return { id: assetId, path, status: AssetStatus.Partial, isDuplicate: false };
}
async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) {
this.logger.debug('Completing upload for asset', id);
const jobData = { name: JobName.AssetExtractMetadata, data: { id: id, source: 'upload' } } as const;
await withRetry(() => this.assetRepository.setComplete(id));
try {
await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
} catch (error: any) {
this.logger.error(`Failed to update times for ${path}: ${error.message}`);
}
await withRetry(() => this.jobRepository.queue(jobData));
}
async onCancel(assetId: string, path: string): Promise<void> {
this.logger.debug('Cancelling upload for asset', assetId);
await withRetry(() => this.storageRepository.unlink(path));
await withRetry(() => this.assetRepository.removeAndDecrementQuota(assetId));
}
private pipe(req: Readable, res: Response, { id, path, size }: { id: string; path: string; size: number }) {
const writeStream = this.storageRepository.createOrAppendWriteStream(path);
writeStream.on('error', (error) => {
this.logger.error(`Failed to write chunk to ${path}: ${error.message}`);
if (!res.headersSent) {
res.status(500).send();
}
});
2025-09-24 13:56:46 -04:00
req.on('error', (error) => {
this.logger.error(`Failed to read request body: ${error.message}`);
if (!res.headersSent) {
res.status(500).send();
}
});
2025-09-24 13:56:46 -04:00
let receivedLength = 0;
req.on('data', (data: Buffer) => {
if (receivedLength + data.length > size) {
2025-09-28 18:37:16 -04:00
writeStream.destroy();
req.destroy();
return this.onCancel(id, path).finally(() =>
res.status(400).send('Received more data than specified in content-length'),
);
}
receivedLength += data.length;
if (!writeStream.write(data)) {
req.pause();
writeStream.once('drain', () => req.resume());
}
2025-09-24 13:56:46 -04:00
});
req.on('end', () => {
if (receivedLength === size) {
return writeStream.end();
}
writeStream.destroy();
this.onCancel(id, path).finally(() =>
res.status(400).send(`Received ${receivedLength} bytes when expecting ${size}`),
);
});
return writeStream;
2025-09-24 13:56:46 -04:00
}
private sendInterimResponse({ socket }: Response, location: string, interopVersion: number): void {
2025-09-28 18:37:16 -04:00
if (socket && !socket.destroyed) {
// Express doesn't understand interim responses, so write directly to socket
socket.write(
2025-09-29 04:31:47 -04:00
'HTTP/1.1 104 Upload Resumption Supported\r\n' +
2025-09-28 18:37:16 -04:00
`Location: ${location}\r\n` +
'Upload-Limit: min-size=0\r\n' +
2025-09-29 04:31:47 -04:00
`Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
2025-09-28 18:37:16 -04:00
);
}
}
private sendInconsistentLengthProblem(res: Response): void {
res.status(400).contentType('application/problem+json').send({
type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
2025-09-28 18:37:16 -04:00
title: 'inconsistent length values for upload',
});
}
private sendAlreadyCompletedProblem(res: Response): void {
res.status(400).contentType('application/problem+json').send({
type: 'https://iana.org/assignments/http-problem-types#completed-upload',
2025-09-28 18:37:16 -04:00
title: 'upload is already completed',
});
}
private sendOffsetMismatchProblem(res: Response, expected: number, actual: number): void {
res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({
2025-09-29 04:31:47 -04:00
type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
title: 'offset from request does not match offset of resource',
'expected-offset': expected,
'provided-offset': actual,
});
2025-09-28 18:37:16 -04:00
}
private sendChecksumMismatchResponse(res: Response, assetId: string, path: string): Promise<void> {
2025-09-28 18:37:16 -04:00
this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
res.status(460).send('File on server does not match provided checksum');
return this.onCancel(assetId, path);
2025-09-24 13:56:46 -04:00
}
private validateQuota(auth: AuthDto, size: number): void {
2025-09-24 13:56:46 -04:00
if (auth.user.quotaSizeInBytes === null) {
return;
}
if (auth.user.quotaSizeInBytes < auth.user.quotaUsageInBytes + size) {
throw new BadRequestException('Quota has been exceeded!');
}
}
private async getCurrentOffset(path: string): Promise<number> {
try {
const stat = await this.storageRepository.stat(path);
return stat.size;
} catch (error: any) {
if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
return 0;
}
throw error;
}
}
private setCompleteHeader(res: Response, interopVersion: number | null, isComplete: boolean): void {
2025-09-29 04:31:47 -04:00
if (!interopVersion) {
return;
}
if (interopVersion > 3) {
res.setHeader('Upload-Complete', isComplete ? '?1' : '?0');
2025-09-29 04:31:47 -04:00
} else {
res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
2025-09-29 04:31:47 -04:00
}
}
2025-09-24 13:56:46 -04:00
}