import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common';
import { plainToInstance } from 'class-transformer';
import { validateSync } from 'class-validator';
import { Request, Response } from 'express';
import { createHash } from 'node:crypto';
import { extname, join } from 'node:path';
import { setTimeout } from 'node:timers/promises';
import { StorageCore } from 'src/cores/storage.core';
import { AuthDto } from 'src/dtos/auth.dto';
import { UploadAssetDataDto } from 'src/dtos/upload.dto';
import { AssetStatus, AssetType, AssetVisibility, ImmichHeader, JobName, StorageFolder } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
import { parseDictionary } from 'structured-headers';

@Injectable()
export class AssetUploadService extends BaseService {
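  // Starts a new upload: validates the request headers, creates a partial asset record,
  // and streams the request body to the asset's path on disk. When the client signals
  // Upload-Complete: ?1, the SHA-1 checksum is verified and post-upload processing is queued.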
  async startUpload(auth: AuthDto, request: Request, response: Response): Promise<void> {
    const headers = request.headers;
    const contentLength = this.requireContentLength(headers);
    const isComplete = this.requireUploadComplete(headers);
    const metadata = this.requireAssetData(headers);
    const checksumHeader = this.requireChecksum(headers);
    const uploadLength = this.getUploadLength(headers);

    if (isComplete && uploadLength !== null && uploadLength !== contentLength) {
      return this.sendInconsistentLengthProblem(response);
    }

    const assetId = this.cryptoRepository.randomUUID();
    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId);
    const extension = extname(metadata.filename);
    const path = join(folder, `${assetId}${extension}`);
    const type = mimeTypes.assetType(path);

    if (type === AssetType.Other) {
      throw new BadRequestException(`${metadata.filename} is an unsupported file type`);
    }

    this.validateQuota(auth, uploadLength ?? contentLength);

    try {
      await this.assetRepository.create({
        id: assetId,
        ownerId: auth.user.id,
        libraryId: null,
        checksum: checksumHeader,
        originalPath: path,
        deviceAssetId: metadata.deviceAssetId,
        deviceId: metadata.deviceId,
        fileCreatedAt: metadata.fileCreatedAt,
        fileModifiedAt: metadata.fileModifiedAt,
        localDateTime: metadata.fileCreatedAt,
        type: mimeTypes.assetType(path),
        isFavorite: metadata.isFavorite,
        duration: metadata.duration || null,
        visibility: metadata.visibility || AssetVisibility.Timeline,
        originalFileName: metadata.filename,
        status: AssetStatus.Partial,
      });
    } catch (error: any) {
      if (isAssetChecksumConstraint(error)) {
        // An asset with the same checksum already exists for this user; point the
        // client at the existing upload instead of creating a new one.
        const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksumHeader);
        if (!duplicate) {
          throw new InternalServerErrorException('Error locating duplicate for checksum constraint');
        }

        if (duplicate.status !== AssetStatus.Partial) {
          return this.sendAlreadyCompletedProblem(response);
        }

        const location = `/api/upload/${duplicate.id}`;
        response.status(201).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
        return;
      }

      this.logger.error(`Error creating upload asset record: ${error.message}`);
      response.status(500).send('Error creating upload asset record');
      return;
    }

    const location = `/api/upload/${assetId}`;
    // this.sendInterimResponse(response, location);

    await this.storageRepository.mkdir(folder);
    let checksumBuffer: Buffer | undefined;
    const writeStream = this.storageRepository.createWriteStream(path);

    if (isComplete) {
      // Hash the body as it streams in so the checksum can be verified on finish.
      const hash = createHash('sha1');
      request.on('data', (chunk: Buffer) => hash.update(chunk));
      writeStream.on('finish', () => (checksumBuffer = hash.digest()));
    }

    writeStream.on('error', (error) => {
      this.logger.error(`Failed to write chunk to ${path}: ${error.message}`);
      if (!response.headersSent) {
        response.status(500).setHeader('Location', location).send();
      }
    });

    writeStream.on('finish', () => {
      if (!isComplete) {
        return response.status(201).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
      }

      this.logger.log(`Finished upload to ${path}`);
      if (checksumHeader.compare(checksumBuffer!) !== 0) {
        return this.sendChecksumMismatchResponse(response, assetId, path);
      }

      response
        .status(200)
        .setHeader('Upload-Complete', '?1')
        .setHeader('Location', location)
        .setHeader('Upload-Limit', 'min-size=0')
        .send();

      return this.onComplete({ assetId, path, size: contentLength, fileModifiedAt: metadata.fileModifiedAt });
    });

    request.on('error', (error) => {
      this.logger.error(`Failed to read request body: ${error.message}`);
      writeStream.end();
      if (!response.headersSent) {
        response.status(500).setHeader('Location', location).send();
      }
    });

    let receivedLength = 0;
    request.on('data', (chunk: Buffer) => {
      if (receivedLength + chunk.length > contentLength) {
        writeStream.destroy();
        request.destroy();
        response.status(400).send('Received more data than specified in content-length');
        return this.removeAsset(assetId, path);
      }

      receivedLength += chunk.length;
      if (!writeStream.write(chunk)) {
        // Respect backpressure: pause the request until the write stream drains.
        request.pause();
        writeStream.once('drain', () => request.resume());
      }
    });

    request.on('end', () => {
      if (receivedLength === contentLength) {
        return writeStream.end();
      }

      this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${assetId}`);
      writeStream.destroy();
      this.removeAsset(assetId, path);
    });
  }

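  // Appends data to an existing upload via PATCH. The asset is locked by id while the
  // request is processed, the provided Upload-Offset must match the bytes already on disk,
  // and the final checksum is verified once the client marks the upload as complete.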
  async resumeUpload(auth: AuthDto, assetId: string, request: Request, response: Response): Promise<void> {
    const headers = request.headers;
    const isComplete = this.requireUploadComplete(headers);
    const contentLength = this.requireContentLength(headers);
    const providedOffset = this.getUploadOffset(headers);
    const uploadLength = this.getUploadLength(headers);

    const contentType = headers['content-type'];
    if (contentType !== 'application/partial-upload') {
      throw new BadRequestException('Content-Type must be application/partial-upload for PATCH requests');
    }

    await this.databaseRepository.withUuidLock(assetId, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
      if (!asset) {
        response.status(404).send('Asset not found');
        return;
      }

      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompletedProblem(response);
      }

      if (providedOffset === null) {
        throw new BadRequestException('Missing Upload-Offset header');
      }

      const { path } = asset;
      const expectedOffset = await this.getCurrentOffset(path);
      if (expectedOffset !== providedOffset) {
        return this.sendOffsetMismatchProblem(response, expectedOffset, providedOffset);
      }

      const newLength = providedOffset + contentLength;

      // If an upload length was declared, validate this chunk will not exceed it
      if (uploadLength !== null && newLength > uploadLength) {
        response.status(400).send('Upload would exceed declared length');
        return;
      }

      this.validateQuota(auth, newLength);

      // Empty PATCH without Upload-Complete: nothing to append, just report the current offset
      if (contentLength === 0 && !isComplete) {
        response
          .status(204)
          .setHeader('Upload-Offset', expectedOffset.toString())
          .setHeader('Upload-Complete', '?0')
          .send();
        return;
      }

      const writeStream = this.storageRepository.createOrAppendWriteStream(path);
      let receivedLength = 0;

      writeStream.on('error', (error) => {
        this.logger.error(`Failed to write chunk to ${path}: ${error.message}`);
        if (!response.headersSent) {
          response.status(500).send('Failed to write chunk');
        }
      });

      writeStream.on('finish', async () => {
        const currentOffset = await this.getCurrentOffset(path);
        if (!isComplete) {
          return response
            .status(204)
            .setHeader('Upload-Offset', currentOffset.toString())
            .setHeader('Upload-Complete', '?0')
            .send();
        }

        this.logger.log(`Finished upload to ${path}`);
        const checksum = await this.cryptoRepository.hashFile(path);
        if (asset.checksum.compare(checksum) !== 0) {
          return this.sendChecksumMismatchResponse(response, assetId, path);
        }

        response
          .status(200)
          .setHeader('Upload-Complete', '?1')
          .setHeader('Upload-Offset', currentOffset.toString())
          .send();

        await this.onComplete({ assetId, path, size: currentOffset, fileModifiedAt: asset.fileModifiedAt });
      });

      request.on('data', (chunk: Buffer) => {
        if (receivedLength + chunk.length > contentLength) {
          this.logger.error(`Received more data than specified in content-length for upload to ${path}`);
          writeStream.destroy();
          request.destroy();
          response.status(400).send('Received more data than specified in content-length');
          return this.removeAsset(assetId, path);
        }

        receivedLength += chunk.length;
        if (!writeStream.write(chunk)) {
          // Respect backpressure: pause the request until the write stream drains.
          request.pause();
          writeStream.once('drain', () => request.resume());
        }
      });

      request.on('end', () => {
        if (receivedLength === contentLength) {
          return writeStream.end();
        }

        this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${assetId}`);
        writeStream.destroy();
        return this.removeAsset(assetId, path);
      });
    });
  }

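  // Reports the state of an upload: the current byte offset on disk and whether the
  // asset has already been completed.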
  async getUploadStatus(auth: AuthDto, assetId: string, request: Request, response: Response) {
    return this.databaseRepository.withUuidLock(assetId, async () => {
      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
      if (!asset) {
        response.status(404).send('Asset not found');
        return;
      }

      const offset = await this.getCurrentOffset(asset.path);
      const isComplete = asset.status !== AssetStatus.Partial;

      response
        .status(204)
        .setHeader('Upload-Offset', offset.toString())
        .setHeader('Upload-Complete', isComplete ? '?1' : '?0')
        .setHeader('Cache-Control', 'no-store')
        .setHeader('Upload-Limit', 'min-size=0')
        .send();
    });
  }

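  // Responds to OPTIONS requests with the allowed methods and the server's upload limits.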
  async getUploadOptions(response: Response): Promise<void> {
    response.status(204).setHeader('Upload-Limit', 'min-size=0').setHeader('Allow', 'POST, OPTIONS').send();
  }

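  // Cancels an in-progress upload, deleting the partial file and its asset record.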
  async cancelUpload(auth: AuthDto, assetId: string, response: Response): Promise<void> {
    const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
    if (!asset) {
      response.status(404).send('Asset not found');
      return;
    }

    if (asset.status !== AssetStatus.Partial) {
      return this.sendAlreadyCompletedProblem(response);
    }

    await this.removeAsset(assetId, asset.path);
    response.status(204).send();
  }

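  // Marks the asset as complete, queues metadata extraction, and restores the file's
  // original modification time.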
  private async onComplete(data: { assetId: string; path: string; size: number; fileModifiedAt: Date }): Promise<void> {
    const { assetId, path, size, fileModifiedAt } = data;
    const jobData = { name: JobName.AssetExtractMetadata, data: { id: assetId, source: 'upload' } } as const;
    await this.withRetry(() => this.assetRepository.setCompleteWithSize(assetId, size));
    await this.withRetry(() => this.jobRepository.queue(jobData));
    await this.withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
  }

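  // Removes the partially uploaded file and the corresponding asset record.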
  private async removeAsset(assetId: string, path: string): Promise<void> {
    await this.withRetry(() => this.storageRepository.unlink(path));
    await this.withRetry(() => this.assetRepository.remove({ id: assetId }));
  }

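  // Writes an HTTP 104 (Upload Resumption Supported) interim response directly to the
  // socket; the only call site in startUpload is currently commented out.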
  private sendInterimResponse(response: Response, location: string): void {
    const socket = response.socket;
    if (socket && !socket.destroyed) {
      // Express doesn't understand interim responses, so write directly to socket
      socket.write(
        `HTTP/1.1 104 Upload Resumption Supported\r\n` +
          `Location: ${location}\r\n` +
          `Upload-Draft-Interop-Version: 8\r\n` +
          `\r\n`,
      );
    }
  }

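  // The helpers below send application/problem+json responses describing why an upload
  // request was rejected.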
  private sendInconsistentLengthProblem(response: Response): void {
    response.status(400).contentType('application/problem+json').send({
      type: `https://iana.org/assignments/http-problem-types#inconsistent-upload-length`,
      title: 'inconsistent length values for upload',
    });
  }

  private sendAlreadyCompletedProblem(response: Response): void {
    response.status(400).contentType('application/problem+json').send({
      type: `https://iana.org/assignments/http-problem-types#completed-upload`,
      title: 'upload is already completed',
    });
  }

  private sendOffsetMismatchProblem(response: Response, expected: number, actual: number): void {
    response
      .status(409)
      .contentType('application/problem+json')
      .setHeader('Upload-Offset', expected.toString())
      .setHeader('Upload-Complete', '?0')
      .send({
        type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
        title: 'offset from request does not match offset of resource',
        'expected-offset': expected,
        'provided-offset': actual,
      });
  }

  private sendChecksumMismatchResponse(response: Response, assetId: string, path: string): Promise<void> {
    this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
    response.status(460).send('Checksum mismatch');
    return this.removeAsset(assetId, path);
  }

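  // Header parsing helpers: each validates a raw header value and converts it to a typed
  // value, throwing a BadRequestException when a required header is missing or malformed.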
  private requireUploadComplete(headers: Request['headers']): boolean {
    const value = headers['upload-complete'] as string | undefined;
    if (value === undefined) {
      throw new BadRequestException('Missing Upload-Complete header');
    }
    return value === '?1';
  }

  private getUploadOffset(headers: Request['headers']): number | null {
    const value = headers['upload-offset'] as string | undefined;
    if (value === undefined) {
      return null;
    }
    const offset = parseInt(value, 10);
    if (!isFinite(offset) || offset < 0) {
      throw new BadRequestException('Invalid Upload-Offset header');
    }
    return offset;
  }

  private getUploadLength(headers: Request['headers']): number | null {
    const value = headers['upload-length'] as string | undefined;
    if (value === undefined) {
      return null;
    }
    const length = parseInt(value, 10);
    if (!isFinite(length) || length < 0) {
      throw new BadRequestException('Invalid Upload-Length header');
    }
    return length;
  }

  private requireContentLength(headers: Request['headers']): number {
    const value = headers['content-length'] as string | undefined;
    if (value === undefined) {
      throw new BadRequestException('Missing Content-Length header');
    }
    const length = parseInt(value, 10);
    if (!isFinite(length) || length < 0) {
      throw new BadRequestException('Invalid Content-Length header');
    }
    return length;
  }

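  // Runs an operation, retrying it up to `retries` additional times with a fixed delay
  // between attempts, and rethrows the last error if every attempt fails.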
  private async withRetry<T>(operation: () => Promise<T>, retries: number = 2, delay: number = 100): Promise<T> {
    let lastError: any;
    for (let attempt = 0; attempt <= retries; attempt++) {
      try {
        return await operation();
      } catch (error: any) {
        lastError = error;
      }
      if (attempt < retries) {
        await setTimeout(delay);
      }
    }
    throw lastError;
  }

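  // Rejects the upload if it would push the user's storage usage past their quota.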
  private validateQuota(auth: AuthDto, size: number) {
    if (auth.user.quotaSizeInBytes === null) {
      return;
    }

    if (auth.user.quotaSizeInBytes < auth.user.quotaUsageInBytes + size) {
      throw new BadRequestException('Quota has been exceeded!');
    }
  }

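  // The current offset of an upload is the size of the file on disk; a missing file
  // counts as offset 0.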
  private async getCurrentOffset(path: string): Promise<number> {
    try {
      const stat = await this.storageRepository.stat(path);
      return stat.size;
    } catch (error: any) {
      if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
        return 0;
      }
      throw error;
    }
  }

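  // Parses the Repr-Digest header as a structured-field dictionary and returns the
  // binary SHA-1 digest from its `sha` entry.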
  private requireChecksum(headers: Request['headers']): Buffer {
    const value = headers['repr-digest'] as string | undefined;
    if (value === undefined) {
      throw new BadRequestException(`Missing 'repr-digest' header`);
    }

    const sha1Item = parseDictionary(value).get('sha');
    if (!sha1Item) {
      throw new BadRequestException(`Missing 'sha' in 'repr-digest' header`);
    }

    const checksum = sha1Item[0];
    if (!(checksum instanceof ArrayBuffer)) {
      throw new BadRequestException(`Invalid 'sha' in 'repr-digest' header`);
    }

    return Buffer.from(checksum);
  }

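  // Decodes the base64-encoded JSON asset metadata header and validates it against
  // UploadAssetDataDto, rejecting the request if any constraint fails.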
  private requireAssetData(headers: Request['headers']): UploadAssetDataDto {
    const value = headers[ImmichHeader.AssetData] as string | undefined;
    if (value === undefined) {
      throw new BadRequestException(`Missing ${ImmichHeader.AssetData} header`);
    }

    let assetData: any;
    try {
      assetData = JSON.parse(Buffer.from(value, 'base64').toString('utf8'));
    } catch {
      throw new BadRequestException(`${ImmichHeader.AssetData} header is not valid base64-encoded JSON`);
    }

    const dto = plainToInstance(UploadAssetDataDto, assetData);
    const errors = validateSync(dto, { whitelist: true });
    if (errors.length > 0) {
      const formatted = errors.map((e) => (e.constraints ? Object.values(e.constraints).join(', ') : ''));
      throw new BadRequestException(`Invalid ${ImmichHeader.AssetData} header: ${formatted.join('; ')}`);
    }

    return dto;
  }
}