set max-age limit

This commit is contained in:
mertalev 2025-10-10 19:26:22 -04:00
parent da52b3ebf4
commit 0ad983135c
No known key found for this signature in database
GPG key ID: DF6ABC77AAD98C95
8 changed files with 71 additions and 80 deletions

View file

@ -974,7 +974,7 @@ describe('/upload', () => {
expect(status).toBe(204);
expect(headers['upload-offset']).toBe('512');
expect(headers['upload-complete']).toBe('?0');
expect(headers['upload-limit']).toEqual('min-size=0');
expect(headers['upload-limit']).toEqual('min-size=0, max-age=259200');
expect(headers['cache-control']).toBe('no-store');
});
@ -993,7 +993,7 @@ describe('/upload', () => {
const { status, headers } = await request(app).options('/upload');
expect(status).toBe(204);
expect(headers['upload-limit']).toEqual('min-size=0');
expect(headers['upload-limit']).toEqual('min-size=0, max-age=259200');
});
});
});

View file

@ -22,6 +22,9 @@ export interface SystemConfig {
cronExpression: string;
keepLastAmount: number;
};
upload: {
maxAgeHours: number;
};
};
ffmpeg: {
crf: number;
@ -133,10 +136,7 @@ export interface SystemConfig {
clusterNewFaces: boolean;
generateMemories: boolean;
syncQuotaUsage: boolean;
removeStaleUploads: {
enabled: boolean;
hoursAgo: number;
};
removeStaleUploads: boolean;
};
trash: {
enabled: boolean;
@ -194,6 +194,9 @@ export const defaults = Object.freeze<SystemConfig>({
cronExpression: CronExpression.EVERY_DAY_AT_2AM,
keepLastAmount: 14,
},
upload: {
maxAgeHours: 72,
},
},
ffmpeg: {
crf: 23,
@ -329,10 +332,7 @@ export const defaults = Object.freeze<SystemConfig>({
syncQuotaUsage: true,
missingThumbnails: true,
clusterNewFaces: true,
removeStaleUploads: {
enabled: true,
hoursAgo: 72,
},
removeStaleUploads: true,
},
trash: {
enabled: true,

View file

@ -447,13 +447,4 @@ describe(AssetUploadController.name, () => {
expect(status).toBe(400);
});
});
describe('OPTIONS /upload', () => {
it('should return 204 with upload limits', async () => {
const { status, headers } = await request(ctx.getHttpServer()).options('/upload');
expect(status).toBe(204);
expect(headers['upload-limit']).toBe('min-size=0');
});
});
});

View file

@ -1,17 +1,4 @@
import {
Controller,
Delete,
Head,
Header,
HttpCode,
HttpStatus,
Options,
Param,
Patch,
Post,
Req,
Res,
} from '@nestjs/common';
import { Controller, Delete, Head, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
import { ApiHeader, ApiOkResponse, ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import {
@ -120,7 +107,7 @@ export class AssetUploadController {
}
@Options()
@HttpCode(HttpStatus.NO_CONTENT)
@Header('Upload-Limit', 'min-size=0')
getUploadOptions() {}
// OPTIONS /upload handler: delegates to the service, which (per this commit)
// writes the Upload-Limit header from config instead of a hard-coded value.
getUploadOptions(@Res() res: Response) {
return this.service.getUploadOptions(res);
}
}

View file

@ -55,11 +55,23 @@ export class DatabaseBackupConfig {
keepLastAmount!: number;
}
/** Validation shape for the new `backup.upload` config section introduced by this commit. */
export class UploadBackupConfig {
// Age limit (in hours) for incomplete uploads; consumed by removeStaleUploads
// and surfaced to clients as `max-age` (seconds) in the Upload-Limit header.
@IsInt()
@IsPositive()
@IsNotEmpty()
maxAgeHours!: number;
}
/** DTO for SystemConfig.backup: database backup settings plus the new upload age limit. */
export class SystemConfigBackupsDto {
@Type(() => DatabaseBackupConfig)
@ValidateNested()
@IsObject()
database!: DatabaseBackupConfig;
// New in this commit: nested upload config carrying maxAgeHours.
@Type(() => UploadBackupConfig)
@ValidateNested()
@IsObject()
upload!: UploadBackupConfig;
}
export class SystemConfigFFmpegDto {
@ -326,17 +338,6 @@ class SystemConfigNewVersionCheckDto {
enabled!: boolean;
}
// NOTE(review): deleted by this commit — nightlyTasks.removeStaleUploads becomes a
// plain boolean, and the hour threshold moves to backup.upload.maxAgeHours.
class SystemConfigRemovePartialUploadsDto {
@ValidateBoolean()
enabled!: boolean;
// Former per-task threshold; superseded by backup.upload.maxAgeHours.
@IsInt()
@Min(1)
@Type(() => Number)
@ApiProperty({ type: 'integer' })
hoursAgo!: number;
}
class SystemConfigNightlyTasksDto {
@IsDateStringFormat('HH:mm', { message: 'startTime must be in HH:mm format' })
startTime!: string;
@ -356,10 +357,8 @@ class SystemConfigNightlyTasksDto {
@ValidateBoolean()
syncQuotaUsage!: boolean;
@Type(() => SystemConfigRemovePartialUploadsDto)
@ValidateNested()
@IsObject()
removeStaleUploads!: SystemConfigRemovePartialUploadsDto;
@ValidateBoolean()
removeStaleUploads!: boolean;
}
class SystemConfigOAuthDto {

View file

@ -4,6 +4,7 @@ import { DateTime } from 'luxon';
import { createHash } from 'node:crypto';
import { extname, join } from 'node:path';
import { Readable } from 'node:stream';
import { SystemConfig } from 'src/config';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
@ -54,6 +55,7 @@ export class AssetUploadService extends BaseService {
async startUpload(auth: AuthDto, req: Readable, res: Response, dto: StartUploadDto): Promise<void> {
this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
const { isComplete, assetData, uploadLength, contentLength, version } = dto;
const { backup } = await this.getConfig({ withCache: true });
const asset = await this.onStart(auth, dto);
if (asset.isDuplicate) {
@ -63,7 +65,7 @@ export class AssetUploadService extends BaseService {
const location = `/api/upload/${asset.id}`;
if (version <= MAX_RUFH_INTEROP_VERSION) {
this.sendInterimResponse(res, location, version);
this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
}
// this is a 5xx to indicate the client should do offset retrieval and resume
res.status(500).send('Incomplete asset already exists');
@ -76,10 +78,11 @@ export class AssetUploadService extends BaseService {
const location = `/api/upload/${asset.id}`;
if (version <= MAX_RUFH_INTEROP_VERSION) {
this.sendInterimResponse(res, location, version);
this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
}
this.addRequest(asset.id, req);
await this.databaseRepository.withUuidLock(asset.id, async () => {
let checksumBuffer: Buffer | undefined;
const writeStream = this.pipe(req, asset.path, contentLength);
if (isComplete) {
@ -90,7 +93,7 @@ export class AssetUploadService extends BaseService {
await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
this.setCompleteHeader(res, dto.version, isComplete);
if (!isComplete) {
res.status(201).set('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
res.status(201).set('Location', location).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
return;
}
if (dto.checksum.compare(checksumBuffer!) !== 0) {
@ -99,6 +102,7 @@ export class AssetUploadService extends BaseService {
await this.onComplete({ id: asset.id, path: asset.path, fileModifiedAt: assetData.fileModifiedAt });
res.status(200).send({ id: asset.id });
});
}
resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
@ -180,6 +184,7 @@ export class AssetUploadService extends BaseService {
async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
this.logger.verboseFn(() => `Getting upload status for ${id} with version ${version}`);
const { backup } = await this.getConfig({ withCache: true });
this.abortExistingRequest(id);
return this.databaseRepository.withUuidLock(id, async () => {
const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
@ -194,15 +199,20 @@ export class AssetUploadService extends BaseService {
.status(204)
.setHeader('Upload-Offset', offset.toString())
.setHeader('Cache-Control', 'no-store')
.setHeader('Upload-Limit', 'min-size=0')
.setHeader('Upload-Limit', this.getUploadLimits(backup))
.send();
});
}
/** Responds to OPTIONS /upload: 204 with an Upload-Limit header derived from config. */
async getUploadOptions(res: Response): Promise<void> {
// Cached config read; limits now include max-age from backup.upload.maxAgeHours.
const { backup } = await this.getConfig({ withCache: true });
res.status(204).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
}
@OnJob({ name: JobName.PartialAssetCleanupQueueAll, queue: QueueName.BackgroundTask })
async removeStaleUploads(): Promise<void> {
const config = await this.getConfig({ withCache: false });
const createdBefore = DateTime.now().minus({ hours: config.nightlyTasks.removeStaleUploads.hoursAgo }).toJSDate();
const createdBefore = DateTime.now().minus({ hours: config.backup.upload.maxAgeHours }).toJSDate();
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForPartialAssetCleanupJob(createdBefore);
for await (const asset of assets) {
@ -353,13 +363,13 @@ export class AssetUploadService extends BaseService {
return writeStream;
}
private sendInterimResponse({ socket }: Response, location: string, interopVersion: number): void {
private sendInterimResponse({ socket }: Response, location: string, interopVersion: number, limits: string): void {
if (socket && !socket.destroyed) {
// Express doesn't understand interim responses, so write directly to socket
socket.write(
'HTTP/1.1 104 Upload Resumption Supported\r\n' +
`Location: ${location}\r\n` +
'Upload-Limit: min-size=0\r\n' +
`Upload-Limit: ${limits}\r\n` +
`Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
);
}
@ -428,4 +438,8 @@ export class AssetUploadService extends BaseService {
res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
}
}
// Builds the Upload-Limit header value; max-age is expressed in seconds,
// so the configured hour count is multiplied by 3600 (72h -> 259200, matching the tests).
private getUploadLimits({ upload }: SystemConfig['backup']) {
return `min-size=0, max-age=${upload.maxAgeHours * 3600}`;
}
}

View file

@ -302,7 +302,7 @@ export class JobService extends BaseService {
jobs.push({ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } });
}
if (config.nightlyTasks.removeStaleUploads.enabled) {
if (config.nightlyTasks.removeStaleUploads) {
jobs.push({ name: JobName.PartialAssetCleanupQueueAll });
}

View file

@ -46,6 +46,9 @@ const updatedConfig = Object.freeze<SystemConfig>({
cronExpression: '0 02 * * *',
keepLastAmount: 14,
},
upload: {
maxAgeHours: 72,
},
},
ffmpeg: {
crf: 30,
@ -115,10 +118,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
missingThumbnails: true,
generateMemories: true,
syncQuotaUsage: true,
removeStaleUploads: {
enabled: true,
hoursAgo: 72,
},
removeStaleUploads: true,
},
reverseGeocoding: {
enabled: true,