set max-age limit

mertalev 2025-10-10 19:26:22 -04:00
parent da52b3ebf4
commit 0ad983135c
No known key found for this signature in database
GPG key ID: DF6ABC77AAD98C95
8 changed files with 71 additions and 80 deletions

View file

@@ -974,7 +974,7 @@ describe('/upload', () => {
       expect(status).toBe(204);
       expect(headers['upload-offset']).toBe('512');
       expect(headers['upload-complete']).toBe('?0');
-      expect(headers['upload-limit']).toEqual('min-size=0');
+      expect(headers['upload-limit']).toEqual('min-size=0, max-age=259200');
       expect(headers['cache-control']).toBe('no-store');
     });
@@ -993,7 +993,7 @@ describe('/upload', () => {
       const { status, headers } = await request(app).options('/upload');
       expect(status).toBe(204);
-      expect(headers['upload-limit']).toEqual('min-size=0');
+      expect(headers['upload-limit']).toEqual('min-size=0, max-age=259200');
     });
   });
 });
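The expected value follows from the new default retention window: 72 hours expressed in seconds. A quick worked check of the arithmetic, mirroring the getUploadLimits helper added in the service further down:

    // 72 hours * 3600 seconds/hour = 259200 seconds
    const maxAgeHours = 72;
    const uploadLimit = `min-size=0, max-age=${maxAgeHours * 3600}`; // 'min-size=0, max-age=259200'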

View file

@@ -22,6 +22,9 @@ export interface SystemConfig {
       cronExpression: string;
       keepLastAmount: number;
     };
+    upload: {
+      maxAgeHours: number;
+    };
   };
   ffmpeg: {
     crf: number;
@@ -133,10 +136,7 @@ export interface SystemConfig {
     clusterNewFaces: boolean;
     generateMemories: boolean;
     syncQuotaUsage: boolean;
-    removeStaleUploads: {
-      enabled: boolean;
-      hoursAgo: number;
-    };
+    removeStaleUploads: boolean;
   };
   trash: {
     enabled: boolean;
@@ -194,6 +194,9 @@ export const defaults = Object.freeze<SystemConfig>({
       cronExpression: CronExpression.EVERY_DAY_AT_2AM,
       keepLastAmount: 14,
     },
+    upload: {
+      maxAgeHours: 72,
+    },
   },
   ffmpeg: {
     crf: 23,
@@ -329,10 +332,7 @@ export const defaults = Object.freeze<SystemConfig>({
     syncQuotaUsage: true,
     missingThumbnails: true,
     clusterNewFaces: true,
-    removeStaleUploads: {
-      enabled: true,
-      hoursAgo: 72,
-    },
+    removeStaleUploads: true,
   },
   trash: {
     enabled: true,
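The nested removeStaleUploads object is split in two: the on/off toggle stays under nightlyTasks as a plain boolean, while the retention window moves to backup.upload.maxAgeHours. A minimal sketch of how a cutoff date falls out of that setting, assuming the same luxon API the service uses (the cutoffFor helper itself is hypothetical):

    import { DateTime } from 'luxon';

    // uploads created before the returned Date are considered stale
    const cutoffFor = (maxAgeHours: number): Date =>
      DateTime.now().minus({ hours: maxAgeHours }).toJSDate();

    cutoffFor(72); // with the default, anything older than three days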

View file

@@ -447,13 +447,4 @@ describe(AssetUploadController.name, () => {
       expect(status).toBe(400);
     });
   });
-
-  describe('OPTIONS /upload', () => {
-    it('should return 204 with upload limits', async () => {
-      const { status, headers } = await request(ctx.getHttpServer()).options('/upload');
-      expect(status).toBe(204);
-      expect(headers['upload-limit']).toBe('min-size=0');
-    });
-  });
 });

View file

@ -1,17 +1,4 @@
import { import { Controller, Delete, Head, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
Controller,
Delete,
Head,
Header,
HttpCode,
HttpStatus,
Options,
Param,
Patch,
Post,
Req,
Res,
} from '@nestjs/common';
import { ApiHeader, ApiOkResponse, ApiTags } from '@nestjs/swagger'; import { ApiHeader, ApiOkResponse, ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { import {
@ -120,7 +107,7 @@ export class AssetUploadController {
} }
@Options() @Options()
@HttpCode(HttpStatus.NO_CONTENT) getUploadOptions(@Res() res: Response) {
@Header('Upload-Limit', 'min-size=0') return this.service.getUploadOptions(res);
getUploadOptions() {} }
} }
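Dropping the static @Header decorator means the OPTIONS response is now built per request, so the advertised limit tracks the live config. A hypothetical client-side probe (not part of this commit, and ignoring auth):

    const res = await fetch('/api/upload', { method: 'OPTIONS' });
    res.headers.get('upload-limit'); // e.g. 'min-size=0, max-age=259200'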

View file

@@ -55,11 +55,23 @@ export class DatabaseBackupConfig {
   keepLastAmount!: number;
 }

+export class UploadBackupConfig {
+  @IsInt()
+  @IsPositive()
+  @IsNotEmpty()
+  maxAgeHours!: number;
+}
+
 export class SystemConfigBackupsDto {
   @Type(() => DatabaseBackupConfig)
   @ValidateNested()
   @IsObject()
   database!: DatabaseBackupConfig;
+
+  @Type(() => UploadBackupConfig)
+  @ValidateNested()
+  @IsObject()
+  upload!: UploadBackupConfig;
 }

 export class SystemConfigFFmpegDto {
@@ -326,17 +338,6 @@ class SystemConfigNewVersionCheckDto {
   enabled!: boolean;
 }

-class SystemConfigRemovePartialUploadsDto {
-  @ValidateBoolean()
-  enabled!: boolean;
-
-  @IsInt()
-  @Min(1)
-  @Type(() => Number)
-  @ApiProperty({ type: 'integer' })
-  hoursAgo!: number;
-}
-
 class SystemConfigNightlyTasksDto {
   @IsDateStringFormat('HH:mm', { message: 'startTime must be in HH:mm format' })
   startTime!: string;
@@ -356,10 +357,8 @@ class SystemConfigNightlyTasksDto {
   @ValidateBoolean()
   syncQuotaUsage!: boolean;

-  @Type(() => SystemConfigRemovePartialUploadsDto)
-  @ValidateNested()
-  @IsObject()
-  removeStaleUploads!: SystemConfigRemovePartialUploadsDto;
+  @ValidateBoolean()
+  removeStaleUploads!: boolean;
 }

 class SystemConfigOAuthDto {
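UploadBackupConfig rejects zero and negative values, so a misconfigured retention window cannot silently disable cleanup. A minimal sketch of the validation behavior, assuming the usual class-transformer/class-validator pipeline:

    import { plainToInstance } from 'class-transformer';
    import { validate } from 'class-validator';

    const dto = plainToInstance(UploadBackupConfig, { maxAgeHours: 0 });
    const errors = await validate(dto); // non-empty: @IsPositive() rejects 0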

View file

@@ -4,6 +4,7 @@ import { DateTime } from 'luxon';
 import { createHash } from 'node:crypto';
 import { extname, join } from 'node:path';
 import { Readable } from 'node:stream';
+import { SystemConfig } from 'src/config';
 import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { StorageCore } from 'src/cores/storage.core';
 import { OnEvent, OnJob } from 'src/decorators';
@@ -54,6 +55,7 @@ export class AssetUploadService extends BaseService {
   async startUpload(auth: AuthDto, req: Readable, res: Response, dto: StartUploadDto): Promise<void> {
     this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
     const { isComplete, assetData, uploadLength, contentLength, version } = dto;
+    const { backup } = await this.getConfig({ withCache: true });

     const asset = await this.onStart(auth, dto);
     if (asset.isDuplicate) {
@@ -63,7 +65,7 @@ export class AssetUploadService extends BaseService {
       const location = `/api/upload/${asset.id}`;
       if (version <= MAX_RUFH_INTEROP_VERSION) {
-        this.sendInterimResponse(res, location, version);
+        this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
       }

       // this is a 5xx to indicate the client should do offset retrieval and resume
       res.status(500).send('Incomplete asset already exists');
@@ -76,29 +78,31 @@ export class AssetUploadService extends BaseService {
     const location = `/api/upload/${asset.id}`;
     if (version <= MAX_RUFH_INTEROP_VERSION) {
-      this.sendInterimResponse(res, location, version);
+      this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
     }

     this.addRequest(asset.id, req);
-    let checksumBuffer: Buffer | undefined;
-    const writeStream = this.pipe(req, asset.path, contentLength);
-    if (isComplete) {
-      const hash = createHash('sha1');
-      req.on('data', (data: Buffer) => hash.update(data));
-      writeStream.on('finish', () => (checksumBuffer = hash.digest()));
-    }
-    await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
-    this.setCompleteHeader(res, dto.version, isComplete);
-    if (!isComplete) {
-      res.status(201).set('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
-      return;
-    }
-    if (dto.checksum.compare(checksumBuffer!) !== 0) {
-      return await this.sendChecksumMismatch(res, asset.id, asset.path);
-    }
-
-    await this.onComplete({ id: asset.id, path: asset.path, fileModifiedAt: assetData.fileModifiedAt });
-    res.status(200).send({ id: asset.id });
+    await this.databaseRepository.withUuidLock(asset.id, async () => {
+      let checksumBuffer: Buffer | undefined;
+      const writeStream = this.pipe(req, asset.path, contentLength);
+      if (isComplete) {
+        const hash = createHash('sha1');
+        req.on('data', (data: Buffer) => hash.update(data));
+        writeStream.on('finish', () => (checksumBuffer = hash.digest()));
+      }
+      await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
+      this.setCompleteHeader(res, dto.version, isComplete);
+      if (!isComplete) {
+        res.status(201).set('Location', location).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
+        return;
+      }
+      if (dto.checksum.compare(checksumBuffer!) !== 0) {
+        return await this.sendChecksumMismatch(res, asset.id, asset.path);
+      }
+
+      await this.onComplete({ id: asset.id, path: asset.path, fileModifiedAt: assetData.fileModifiedAt });
+      res.status(200).send({ id: asset.id });
+    });
   }

   resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
@@ -180,6 +184,7 @@ export class AssetUploadService extends BaseService {
   async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
     this.logger.verboseFn(() => `Getting upload status for ${id} with version ${version}`);
+    const { backup } = await this.getConfig({ withCache: true });
     this.abortExistingRequest(id);
     return this.databaseRepository.withUuidLock(id, async () => {
       const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
@@ -194,15 +199,20 @@ export class AssetUploadService extends BaseService {
         .status(204)
         .setHeader('Upload-Offset', offset.toString())
         .setHeader('Cache-Control', 'no-store')
-        .setHeader('Upload-Limit', 'min-size=0')
+        .setHeader('Upload-Limit', this.getUploadLimits(backup))
         .send();
     });
   }

+  async getUploadOptions(res: Response): Promise<void> {
+    const { backup } = await this.getConfig({ withCache: true });
+    res.status(204).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
+  }
+
   @OnJob({ name: JobName.PartialAssetCleanupQueueAll, queue: QueueName.BackgroundTask })
   async removeStaleUploads(): Promise<void> {
     const config = await this.getConfig({ withCache: false });
-    const createdBefore = DateTime.now().minus({ hours: config.nightlyTasks.removeStaleUploads.hoursAgo }).toJSDate();
+    const createdBefore = DateTime.now().minus({ hours: config.backup.upload.maxAgeHours }).toJSDate();
     let jobs: JobItem[] = [];
     const assets = this.assetJobRepository.streamForPartialAssetCleanupJob(createdBefore);
     for await (const asset of assets) {
@@ -353,13 +363,13 @@ export class AssetUploadService extends BaseService {
     return writeStream;
   }

-  private sendInterimResponse({ socket }: Response, location: string, interopVersion: number): void {
+  private sendInterimResponse({ socket }: Response, location: string, interopVersion: number, limits: string): void {
     if (socket && !socket.destroyed) {
       // Express doesn't understand interim responses, so write directly to socket
       socket.write(
         'HTTP/1.1 104 Upload Resumption Supported\r\n' +
           `Location: ${location}\r\n` +
-          'Upload-Limit: min-size=0\r\n' +
+          `Upload-Limit: ${limits}\r\n` +
           `Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
       );
     }
@@ -428,4 +438,8 @@ export class AssetUploadService extends BaseService {
       res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
     }
   }
+
+  private getUploadLimits({ upload }: SystemConfig['backup']) {
+    return `min-size=0, max-age=${upload.maxAgeHours * 3600}`;
+  }
 }
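Upload-Limit is a comma-separated list of key=value pairs, so max-age composes with the existing min-size entry, and the same string now also rides on the 104 interim response. A naive hypothetical parser for the client side (real RUFH structured-field parsing is stricter):

    function parseUploadLimit(header: string): Record<string, number> {
      const limits: Record<string, number> = {};
      for (const item of header.split(',')) {
        const [key, value] = item.trim().split('=');
        limits[key] = Number(value);
      }
      return limits;
    }

    parseUploadLimit('min-size=0, max-age=259200'); // { 'min-size': 0, 'max-age': 259200 }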

View file

@@ -302,7 +302,7 @@ export class JobService extends BaseService {
       jobs.push({ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } });
     }

-    if (config.nightlyTasks.removeStaleUploads.enabled) {
+    if (config.nightlyTasks.removeStaleUploads) {
       jobs.push({ name: JobName.PartialAssetCleanupQueueAll });
     }

View file

@@ -46,6 +46,9 @@ const updatedConfig = Object.freeze<SystemConfig>({
       cronExpression: '0 02 * * *',
       keepLastAmount: 14,
     },
+    upload: {
+      maxAgeHours: 72,
+    },
   },
   ffmpeg: {
     crf: 30,
@@ -115,10 +118,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
     missingThumbnails: true,
     generateMemories: true,
     syncQuotaUsage: true,
-    removeStaleUploads: {
-      enabled: true,
-      hoursAgo: 72,
-    },
+    removeStaleUploads: true,
   },
   reverseGeocoding: {
     enabled: true,