// immich/server/src/services/asset-upload.service.ts

import { BadRequestException, Injectable } from '@nestjs/common';
import { Response } from 'express';
import { createHash } from 'node:crypto';
import { extname, join } from 'node:path';
import { Readable } from 'node:stream';
import { StorageCore } from 'src/cores/storage.core';
import { AuthDto } from 'src/dtos/auth.dto';
import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/upload.dto';
import { AssetStatus, AssetType, AssetVisibility, JobName, StorageFolder } from 'src/enum';
import { AuthenticatedRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
import { withRetry } from 'src/utils/misc';
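
// Highest Upload-Draft-Interop-Version of the IETF resumable-uploads ("RUFH") draft that this
// server understands; startUpload() only emits the 104 interim response for clients that report
// a version at or below this value.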
export const MAX_RUFH_INTEROP_VERSION = 8;

@Injectable()
export class AssetUploadService extends BaseService {
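  // Creates a new upload: registers a Partial asset row, optionally emits a 104 interim response
  // for resumable-upload clients, and streams the request body into the asset's upload path. When
  // the client marks the upload complete, the body is hashed on the fly and compared to dto.checksum.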
  async startUpload(req: AuthenticatedRequest, res: Response, dto: StartUploadDto): Promise<void> {
    this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
    const { isComplete, assetData, uploadLength, contentLength, version } = dto;
    if (isComplete && uploadLength && uploadLength !== contentLength) {
      return this.sendInconsistentLengthProblem(res);
    }

    const assetId = this.cryptoRepository.randomUUID();
    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, req.auth.user.id, assetId);
    const extension = extname(assetData.filename);
    const path = join(folder, `${assetId}${extension}`);
    const type = mimeTypes.assetType(path);

    if (type === AssetType.Other) {
      throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
    }

    this.validateQuota(req.auth, uploadLength ?? contentLength);
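
    // A unique constraint on the checksum makes duplicate uploads surface as a database error;
    // in that case the client is pointed at the existing (possibly still partial) upload instead.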
    try {
      await this.assetRepository.createWithMetadata(
        {
          id: assetId,
          ownerId: req.auth.user.id,
          libraryId: null,
          checksum: dto.checksum,
          originalPath: path,
          deviceAssetId: assetData.deviceAssetId,
          deviceId: assetData.deviceId,
          fileCreatedAt: assetData.fileCreatedAt,
          fileModifiedAt: assetData.fileModifiedAt,
          localDateTime: assetData.fileCreatedAt,
          type: type,
          isFavorite: assetData.isFavorite,
          duration: assetData.duration || null,
          visibility: assetData.visibility || AssetVisibility.Timeline,
          originalFileName: assetData.filename,
          status: AssetStatus.Partial,
        },
        uploadLength,
        assetData.metadata,
      );
    } catch (error: any) {
      if (isAssetChecksumConstraint(error)) {
        const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(req.auth.user.id, dto.checksum);
        if (!duplicate) {
          res.status(500).send('Error locating duplicate for checksum constraint');
          return;
        }

        if (duplicate.status !== AssetStatus.Partial) {
          return this.sendAlreadyCompletedProblem(res);
        }

        const location = `/api/upload/${duplicate.id}`;
        res.status(201).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
        return;
      }
      this.logger.error(`Error creating upload asset record: ${error.message}`);
      res.status(500).send('Error creating upload asset record');
      return;
    }
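
    // Tell resumable-upload clients early that this upload can be resumed at `location`, then
    // stream the body to disk, hashing it in parallel when the client declares it complete.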
    const location = `/api/upload/${assetId}`;
    if (version <= MAX_RUFH_INTEROP_VERSION) {
      this.sendInterimResponse(res, location, version);
    }

    await this.storageRepository.mkdir(folder);
    let checksumBuffer: Buffer | undefined;
    const metadata = { id: assetId, path, size: contentLength, fileModifiedAt: assetData.fileModifiedAt };
    const writeStream = this.pipe(req, res, metadata);
    if (isComplete) {
      const hash = createHash('sha1');
      req.on('data', (data: Buffer) => hash.update(data));
      writeStream.on('finish', () => (checksumBuffer = hash.digest()));
    }
    writeStream.on('finish', () => {
      this.setCompleteHeader(res, dto.version, isComplete);
      if (!isComplete) {
        return res.status(201).send();
      }
      this.logger.log(`Finished upload to ${path}`);
      if (dto.checksum.compare(checksumBuffer!) !== 0) {
        return this.sendChecksumMismatchResponse(res, assetId, path);
      }
      this.onComplete(metadata)
        .then(() => res.status(200).send())
        .catch((error) => {
          this.logger.error(`Failed to complete upload for ${assetId}: ${error.message}`);
          res.status(500).send();
        });
    });
    await new Promise((resolve) => writeStream.on('close', resolve));
  }
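
  // Appends another chunk to an in-progress upload. The per-asset UUID lock serializes concurrent
  // requests; the declared offset must match the bytes already on disk, and the final chunk is
  // verified against the stored checksum before the asset leaves the Partial state.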
  resumeUpload(req: AuthenticatedRequest, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
    this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
    const { isComplete, uploadLength, uploadOffset, contentLength, version } = dto;

    return this.databaseRepository.withUuidLock(id, async () => {
      const completionData = await this.assetRepository.getCompletionMetadata(id, req.auth.user.id);
      if (!completionData) {
        res.status(404).send('Asset not found');
        return;
      }
      const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData;
      if (status !== AssetStatus.Partial) {
        this.setCompleteHeader(res, version, false);
        return this.sendAlreadyCompletedProblem(res);
      }
      if (uploadLength && size && size !== uploadLength) {
        this.setCompleteHeader(res, version, false);
        return this.sendInconsistentLengthProblem(res);
      }
      const expectedOffset = await this.getCurrentOffset(path);
      if (expectedOffset !== uploadOffset) {
        this.setCompleteHeader(res, version, false);
        return this.sendOffsetMismatchProblem(res, expectedOffset, uploadOffset);
      }
      const newLength = uploadOffset + contentLength;
      if (uploadLength !== undefined && newLength > uploadLength) {
        this.setCompleteHeader(res, version, false);
        res.status(400).send('Upload would exceed declared length');
        return;
      }
      this.validateQuota(req.auth, newLength);
      if (contentLength === 0 && !isComplete) {
        this.setCompleteHeader(res, version, false);
        res.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
        return;
      }
      const metadata = { id, path, size: contentLength, fileModifiedAt: fileModifiedAt };
      const writeStream = this.pipe(req, res, metadata);
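
      // On the final chunk, re-hash the whole file and compare it to the checksum provided at
      // start; only then is the asset promoted out of the Partial state.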
      writeStream.on('finish', async () => {
        this.setCompleteHeader(res, version, isComplete);
        const currentOffset = await this.getCurrentOffset(path);
        if (!isComplete) {
          return res.status(204).setHeader('Upload-Offset', currentOffset.toString()).send();
        }
        this.logger.log(`Finished upload to ${path}`);
        const checksum = await this.cryptoRepository.hashFile(path);
        if (providedChecksum.compare(checksum) !== 0) {
          return this.sendChecksumMismatchResponse(res, id, path);
        }

        try {
          await this.onComplete(metadata);
        } finally {
          res.status(200).send();
        }
      });
      await new Promise((resolve) => writeStream.on('close', resolve));
    });
  }
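
  // Streams the request body into (or onto the end of) the file at `path`, applying backpressure.
  // If the client sends more or fewer bytes than the declared content length, the upload is
  // cancelled and a 400 is returned.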
  private pipe(req: Readable, res: Response, { id, path, size }: { id: string; path: string; size: number }) {
    const writeStream = this.storageRepository.createOrAppendWriteStream(path);
    writeStream.on('error', (error) => {
      this.logger.error(`Failed to write chunk to ${path}: ${error.message}`);
      if (!res.headersSent) {
        res.status(500).send();
      }
    });

    req.on('error', (error) => {
      this.logger.error(`Failed to read request body: ${error.message}`);
      if (!res.headersSent) {
        res.status(500).send();
      }
    });

    let receivedLength = 0;
    req.on('data', (data: Buffer) => {
      if (receivedLength + data.length > size) {
        writeStream.destroy();
        req.destroy();
        return this.onCancel(id, path).finally(() =>
          res.status(400).send('Received more data than specified in content-length'),
        );
      }
      receivedLength += data.length;
      if (!writeStream.write(data)) {
        req.pause();
        writeStream.once('drain', () => req.resume());
      }
    });
    req.on('end', () => {
      if (receivedLength === size) {
        return writeStream.end();
      }
      writeStream.destroy();
      this.onCancel(id, path).finally(() =>
        res.status(400).send(`Received ${receivedLength} bytes when expecting ${size}`),
      );
    });
    return writeStream;
  }
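
  // Deletes a partial upload: removes the file on disk and the Partial asset row.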
  cancelUpload(auth: AuthDto, assetId: string, response: Response): Promise<void> {
    return this.databaseRepository.withUuidLock(assetId, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
      if (!asset) {
        response.status(404).send('Asset not found');
        return;
      }
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompletedProblem(response);
      }
      await this.onCancel(assetId, asset.path);
      response.status(204).send();
    });
  }
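
  // Reports how many bytes have been stored so far via Upload-Offset, plus whether the upload is
  // already complete, so a client can decide where to resume.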
  async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
    return this.databaseRepository.withUuidLock(id, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
      if (!asset) {
        res.status(404).send('Asset not found');
        return;
      }

      const offset = await this.getCurrentOffset(asset.path);
      this.setCompleteHeader(res, version, asset.status !== AssetStatus.Partial);
      res
        .status(204)
        .setHeader('Upload-Offset', offset.toString())
        .setHeader('Cache-Control', 'no-store')
        .setHeader('Upload-Limit', 'min-size=0')
        .send();
    });
  }
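
  // Advertises resumable-upload support (Upload-Limit) and the allowed methods for the upload route.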
  async getUploadOptions(response: Response): Promise<void> {
    response.status(204).setHeader('Upload-Limit', 'min-size=0').setHeader('Allow', 'POST, OPTIONS').send();
  }
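
  // Marks the asset as fully uploaded, restores the file's original modification time, and queues
  // metadata extraction; each step is retried via withRetry.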
  private async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) {
    this.logger.debug('Completing upload for asset', id);
    const jobData = { name: JobName.AssetExtractMetadata, data: { id: id, source: 'upload' } } as const;
    await withRetry(() => this.assetRepository.setCompleteWithSize(id));
    try {
      await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
    } catch (error: any) {
      this.logger.error(`Failed to update times for ${path}: ${error.message}`);
    }
    await withRetry(() => this.jobRepository.queue(jobData));
  }
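
  // Rolls back an upload: deletes the partial file and removes the asset record.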
  private async onCancel(assetId: string, path: string): Promise<void> {
    this.logger.debug('Cancelling upload for asset', assetId);
    await withRetry(() => this.storageRepository.unlink(path));
    await withRetry(() => this.assetRepository.remove({ id: assetId }));
  }
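
  // Writes a "104 Upload Resumption Supported" interim response straight to the socket so the
  // client learns the resumable upload URL before the request body has finished arriving.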
  private sendInterimResponse({ socket }: Response, location: string, interopVersion: number): void {
    if (socket && !socket.destroyed) {
      // Express doesn't understand interim responses, so write directly to socket
      socket.write(
        'HTTP/1.1 104 Upload Resumption Supported\r\n' +
          `Location: ${location}\r\n` +
          `Upload-Limit: min-size=0\r\n` +
          `Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
      );
    }
  }
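
  // The helpers below map protocol violations to responses: the first three use
  // application/problem+json problem types from the resumable-uploads draft, while a checksum
  // mismatch returns the custom 460 status and discards the upload.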
  private sendInconsistentLengthProblem(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
      title: 'inconsistent length values for upload',
    });
  }

  private sendAlreadyCompletedProblem(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#completed-upload',
      title: 'upload is already completed',
    });
  }

  private sendOffsetMismatchProblem(res: Response, expected: number, actual: number): void {
    res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({
      type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
      title: 'offset from request does not match offset of resource',
      'expected-offset': expected,
      'provided-offset': actual,
    });
  }

  private sendChecksumMismatchResponse(res: Response, assetId: string, path: string): Promise<void> {
    this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
    res.status(460).send('Checksum mismatch');
    return this.onCancel(assetId, path);
  }
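
  // Rejects an upload up front if the declared size would push the user past their storage quota.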
  private validateQuota(auth: AuthDto, size: number): void {
    if (auth.user.quotaSizeInBytes === null) {
      return;
    }
    if (auth.user.quotaSizeInBytes < auth.user.quotaUsageInBytes + size) {
      throw new BadRequestException('Quota has been exceeded!');
    }
  }
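
  // Number of bytes already persisted for this upload; a missing file simply means offset 0.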
  private async getCurrentOffset(path: string): Promise<number> {
    try {
      const stat = await this.storageRepository.stat(path);
      return stat.size;
    } catch (error: any) {
      if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
        return 0;
      }
      throw error;
    }
  }
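
  // Older interop versions of the draft (3 and below) used Upload-Incomplete; newer ones use
  // Upload-Complete with the inverted structured-field boolean (?1 / ?0).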
  private setCompleteHeader(res: Response, interopVersion: number | null, isComplete: boolean): void {
    if (!interopVersion) {
      return;
    }
    if (interopVersion > 3) {
      res.setHeader('Upload-Complete', isComplete ? '?1' : '?0');
    } else {
      res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
    }
  }
}
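
// Rough shape of the exchange this service implements. The HTTP verbs and the header-to-DTO mapping
// live in the controller and DTO layer, not in this file, so treat the lines below as an
// illustrative sketch rather than a specification:
//
//   start  -> startUpload():     201 + Location: /api/upload/{id}, preceded by a 104 interim
//                                response for clients with a supported Upload-Draft-Interop-Version
//   append -> resumeUpload():    requires the current Upload-Offset; replies 204 with the new
//                                offset, or 200 once the final chunk passes the checksum comparison
//   status -> getUploadStatus(): 204 with Upload-Offset, the completeness header, and no-store caching
//   cancel -> cancelUpload():    204 after the partial file and the asset row have been removed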