add controller tests, move validation testing from e2e

revert unnecessary change

update mocks

add structured-headers to e2e deps
This commit is contained in:
mertalev 2025-10-06 15:14:26 -04:00
parent 597382a25f
commit d4f3d9d6a5
No known key found for this signature in database
GPG key ID: DF6ABC77AAD98C95
16 changed files with 525 additions and 125 deletions

View file

@ -53,5 +53,8 @@
},
"volta": {
"node": "22.20.0"
},
"dependencies": {
"structured-headers": "^2.0.2"
}
}

View file

@ -108,28 +108,6 @@ describe('/upload', () => {
});
});
it('should require upload-length', async () => {
const content = randomBytes(1024);
const { status, headers, body } = await request(app)
.post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`)
.set('Upload-Complete', '?1')
.set('Content-Type', 'image/jpeg')
.send(content);
expect(status).toBe(400);
expect(headers['location']).toBeUndefined();
expect(body).toEqual(
expect.objectContaining({
message: ['uploadLength must be an integer number', 'uploadLength must not be less than 0'],
}),
);
});
it('should create an incomplete upload with Upload-Complete: ?0', async () => {
const partialContent = randomBytes(512);
@ -168,25 +146,6 @@ describe('/upload', () => {
expect(headers['upload-incomplete']).toBe('?1');
});
it('should reject invalid checksum', async () => {
const content = randomBytes(1024);
const { status, headers, body } = await request(app)
.post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', `sha=:INVALID:`)
.set('Upload-Complete', '?1')
.set('Content-Type', 'image/jpeg')
.set('Upload-Length', '1024')
.send(content);
expect(status).toBe(400);
expect(headers['location']).toBeUndefined();
expect(body).toEqual(expect.objectContaining({ message: 'Invalid repr-digest header' }));
});
it('should reject attempt to upload completed asset', async () => {
const content = randomBytes(1024);
@ -424,36 +383,6 @@ describe('/upload', () => {
});
});
it('should require application/partial-upload content type if version is at least 6', async () => {
const { status, body } = await request(baseUrl)
.patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '6')
.set('Upload-Offset', '1250')
.set('Upload-Complete', '?0')
.set('Content-Type', 'application/octet-stream')
.send(randomBytes(100));
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({ message: ['contentType must be equal to application/partial-upload'] }),
);
});
it('should allow non-application/partial-upload content type if version is less than 6', async () => {
const { status, headers } = await request(baseUrl)
.patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '3')
.set('Upload-Offset', '1250')
.set('Upload-Incomplete', '?1')
.set('Content-Type', 'application/octet-stream')
.send();
expect(status).toBe(204);
expect(headers['upload-offset']).toBe('1250');
});
it('should complete upload with Upload-Complete: ?1', async () => {
const headResponse = await request(baseUrl)
.head(uploadResource)
@ -767,15 +696,6 @@ describe('/upload', () => {
expect(headers['cache-control']).toBe('no-store');
});
it('should return 400 for non-UUID upload resource', async () => {
const { status } = await request(app)
.head('/upload/nonexistent')
.set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8');
expect(status).toBe(400);
});
it('should return 404 for non-existent upload resource', async () => {
const { status } = await request(app)
.head('/upload/4feacf6f-830f-46c8-8140-2b3da67070c0')

4
pnpm-lock.yaml generated
View file

@ -191,6 +191,10 @@ importers:
version: 5.9.3
e2e:
dependencies:
structured-headers:
specifier: ^2.0.2
version: 2.0.2
devDependencies:
'@eslint/js':
specifier: ^9.8.0

View file

@ -0,0 +1,447 @@
import { createHash, randomUUID } from 'crypto';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { serializeDictionary } from 'structured-headers';
import request from 'supertest';
import { factory } from 'test/small.factory';
import { ControllerContext, controllerSetup, mockBaseService } from 'test/utils';
// Builds the serialized structured-field dictionary sent in the
// X-Immich-Asset-Data header. Provides sane defaults for every required key;
// individual tests can override or omit keys via `overrides`.
const makeAssetData = (overrides?: Partial<any>): string => {
  const defaults = {
    filename: 'test-image.jpg',
    'device-asset-id': 'test-asset-id',
    'device-id': 'test-device',
    'file-created-at': new Date('2025-01-02T00:00:00Z').toISOString(),
    'file-modified-at': new Date('2025-01-01T00:00:00Z').toISOString(),
    'is-favorite': false,
  };
  return serializeDictionary({ ...defaults, ...overrides });
};
describe(AssetUploadController.name, () => {
let ctx: ControllerContext;
let buffer: Buffer;
let checksum: string;
const service = mockBaseService(AssetUploadService);
beforeAll(async () => {
ctx = await controllerSetup(AssetUploadController, [{ provide: AssetUploadService, useValue: service }]);
return () => ctx.close();
});
beforeEach(() => {
service.resetAllMocks();
service.startUpload.mockImplementation(async (auth, req, res, dto) => void res.send());
service.resumeUpload.mockImplementation(async (auth, req, res, id, dto) => void res.send());
service.cancelUpload.mockImplementation(async (auth, id, res) => void res.send());
service.getUploadStatus.mockImplementation(async (auth, res, id, dto) => void res.send());
ctx.reset();
buffer = Buffer.from(randomUUID());
checksum = `sha=:${createHash('sha1').update(buffer).digest('base64')}:`;
});
describe('POST /upload', () => {
it('should be an authenticated route', async () => {
await request(ctx.getHttpServer()).post('/upload');
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should require Upload-Draft-Interop-Version header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining(['version must be an integer number', 'version must not be less than 3']),
}),
);
});
it('should require X-Immich-Asset-Data header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'x-immich-asset-data header is required' }));
});
it('should require Repr-Digest header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'Missing repr-digest header' }));
});
it('should require Upload-Complete header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('uploadComplete')]),
}),
);
});
it('should require Upload-Length header', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([
'uploadLength must be an integer number',
'uploadLength must not be less than 0',
]),
}),
);
});
it('should reject invalid Repr-Digest format', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', checksum)
.set('Repr-Digest', 'invalid-format')
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(expect.objectContaining({ message: 'Invalid repr-digest header' }));
});
it('should validate device-asset-id is required in asset data', async () => {
const assetData = serializeDictionary({
filename: 'test.jpg',
'device-id': 'test-device',
'file-created-at': new Date().toISOString(),
'file-modified-at': new Date().toISOString(),
});
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('deviceAssetId')]),
}),
);
});
it('should validate device-id is required in asset data', async () => {
const assetData = serializeDictionary({
filename: 'test.jpg',
'device-asset-id': 'test-asset',
'file-created-at': new Date().toISOString(),
'file-modified-at': new Date().toISOString(),
});
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('deviceId')]),
}),
);
});
it('should validate filename is required in asset data', async () => {
const assetData = serializeDictionary({
'device-asset-id': 'test-asset',
'device-id': 'test-device',
'file-created-at': new Date().toISOString(),
'file-modified-at': new Date().toISOString(),
});
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', assetData)
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('filename')]),
}),
);
});
it('should accept Upload-Incomplete header for version 3', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '3')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Incomplete', '?0')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).not.toBe(400);
});
it('should validate Upload-Complete is a boolean structured field', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', 'true')
.set('Upload-Length', '1024')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining([expect.stringContaining('uploadComplete')]),
}),
);
});
it('should validate Upload-Length is a non-negative integer', async () => {
const { status, body } = await request(ctx.getHttpServer())
.post('/upload')
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', makeAssetData())
.set('Repr-Digest', checksum)
.set('Upload-Complete', '?1')
.set('Upload-Length', '-100')
.send(buffer);
expect(status).toBe(400);
expect(body).toEqual(
expect.objectContaining({
message: expect.arrayContaining(['uploadLength must not be less than 0']),
}),
);
});
});
// Validation tests for PATCH /upload/:id (resume upload): exercise the header
// DTO and UUID param validation without hitting the (mocked) service.
describe('PATCH /upload/:id', () => {
  const uploadId = factory.uuid();

  it('should be an authenticated route', async () => {
    await request(ctx.getHttpServer()).patch(`/upload/${uploadId}`);
    expect(ctx.authenticate).toHaveBeenCalled();
  });

  it('should require Upload-Draft-Interop-Version header', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Offset', '0')
      .set('Upload-Complete', '?1')
      .send(Buffer.from('test'));
    expect(response.status).toBe(400);
    expect(response.body).toEqual(
      expect.objectContaining({
        message: expect.arrayContaining(['version must be an integer number', 'version must not be less than 3']),
      }),
    );
  });

  it('should require Upload-Offset header', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Draft-Interop-Version', '8')
      .set('Upload-Complete', '?1')
      .send(Buffer.from('test'));
    expect(response.status).toBe(400);
    expect(response.body).toEqual(
      expect.objectContaining({
        message: expect.arrayContaining([
          'uploadOffset must be an integer number',
          'uploadOffset must not be less than 0',
        ]),
      }),
    );
  });

  it('should require Upload-Complete header', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Draft-Interop-Version', '8')
      .set('Upload-Offset', '0')
      .send(Buffer.from('test'));
    expect(response.status).toBe(400);
    expect(response.body).toEqual(
      expect.objectContaining({
        message: expect.arrayContaining([expect.stringContaining('uploadComplete')]),
      }),
    );
  });

  it('should validate UUID parameter', async () => {
    const response = await request(ctx.getHttpServer())
      .patch('/upload/invalid-uuid')
      .set('Upload-Draft-Interop-Version', '8')
      .set('Upload-Offset', '0')
      .set('Upload-Complete', '?0')
      .send(Buffer.from('test'));
    expect(response.status).toBe(400);
    expect(response.body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
  });

  it('should validate Upload-Offset is a non-negative integer', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Draft-Interop-Version', '8')
      .set('Upload-Offset', '-50')
      .set('Upload-Complete', '?0')
      .send(Buffer.from('test'));
    expect(response.status).toBe(400);
    expect(response.body).toEqual(
      expect.objectContaining({
        message: expect.arrayContaining(['uploadOffset must not be less than 0']),
      }),
    );
  });

  it('should require Content-Type: application/partial-upload for version >= 6', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Draft-Interop-Version', '6')
      .set('Upload-Offset', '0')
      .set('Upload-Complete', '?0')
      .set('Content-Type', 'application/octet-stream')
      .send(Buffer.from('test'));
    expect(response.status).toBe(400);
    expect(response.body).toEqual(
      expect.objectContaining({
        message: ['contentType must be equal to application/partial-upload'],
      }),
    );
  });

  it('should allow other Content-Type for version < 6', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Draft-Interop-Version', '3')
      .set('Upload-Offset', '0')
      .set('Upload-Incomplete', '?1')
      .set('Content-Type', 'application/octet-stream')
      .send();
    // Will fail for other reasons, but not content-type validation
    expect(response.body).not.toEqual(
      expect.objectContaining({
        message: expect.arrayContaining([expect.stringContaining('contentType')]),
      }),
    );
  });

  it('should accept Upload-Incomplete header for version 3', async () => {
    const response = await request(ctx.getHttpServer())
      .patch(`/upload/${uploadId}`)
      .set('Upload-Draft-Interop-Version', '3')
      .set('Upload-Offset', '0')
      .set('Upload-Incomplete', '?1')
      .send();
    // Should not fail validation
    expect(response.status).not.toBe(400);
  });
});
// Validation tests for DELETE /upload/:id (cancel upload).
describe('DELETE /upload/:id', () => {
  const uploadId = factory.uuid();

  it('should be an authenticated route', async () => {
    await request(ctx.getHttpServer()).delete(`/upload/${uploadId}`);
    expect(ctx.authenticate).toHaveBeenCalled();
  });

  it('should validate UUID parameter', async () => {
    const response = await request(ctx.getHttpServer()).delete('/upload/invalid-uuid');
    expect(response.status).toBe(400);
    expect(response.body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
  });
});
// Validation tests for HEAD /upload/:id (upload status). HEAD responses carry
// no body, so assertions are limited to the status code.
describe('HEAD /upload/:id', () => {
  const uploadId = factory.uuid();

  it('should be an authenticated route', async () => {
    await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
    expect(ctx.authenticate).toHaveBeenCalled();
  });

  it('should require Upload-Draft-Interop-Version header', async () => {
    const { status } = await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
    expect(status).toBe(400);
  });

  it('should validate UUID parameter', async () => {
    // Fix: `body` was destructured but never used (HEAD has no body anyway).
    const { status } = await request(ctx.getHttpServer())
      .head('/upload/invalid-uuid')
      .set('Upload-Draft-Interop-Version', '8');
    expect(status).toBe(400);
  });
});
// OPTIONS /upload advertises upload limits per the resumable-upload draft.
describe('OPTIONS /upload', () => {
  it('should return 204 with upload limits', async () => {
    const response = await request(ctx.getHttpServer()).options('/upload');
    expect(response.status).toBe(204);
    expect(response.headers['upload-limit']).toBe('min-size=0');
  });
});
});

View file

@ -1,5 +1,4 @@
import {
BadRequestException,
Controller,
Delete,
Head,
@ -14,15 +13,13 @@ import {
Res,
} from '@nestjs/common';
import { ApiHeader, ApiTags } from '@nestjs/swagger';
import { plainToInstance } from 'class-transformer';
import { validateSync } from 'class-validator';
import { Request, Response } from 'express';
import { IncomingHttpHeaders } from 'node:http';
import { AuthDto } from 'src/dtos/auth.dto';
import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto, UploadHeader } from 'src/dtos/upload.dto';
import { ImmichHeader, Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { validateSyncOrReject } from 'src/utils/request';
import { UUIDParamDto } from 'src/validation';
const apiInteropVersion = {
@ -76,8 +73,7 @@ export class AssetUploadController {
@ApiHeader(apiUploadComplete)
@ApiHeader(apiContentLength)
startUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response): Promise<void> {
const dto = this.getDto(StartUploadDto, req.headers);
return this.service.startUpload(auth, req, res, dto);
return this.service.startUpload(auth, req, res, validateSyncOrReject(StartUploadDto, req.headers));
}
@Patch(':id')
@ -92,8 +88,7 @@ export class AssetUploadController {
@ApiHeader(apiUploadComplete)
@ApiHeader(apiContentLength)
resumeUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
const dto = this.getDto(ResumeUploadDto, req.headers);
return this.service.resumeUpload(auth, req, res, id, dto);
return this.service.resumeUpload(auth, req, res, id, validateSyncOrReject(ResumeUploadDto, req.headers));
}
@Delete(':id')
@ -106,23 +101,11 @@ export class AssetUploadController {
@Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
@ApiHeader(apiInteropVersion)
getUploadStatus(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
const dto = this.getDto(GetUploadStatusDto, req.headers);
return this.service.getUploadStatus(auth, res, id, dto);
return this.service.getUploadStatus(auth, res, id, validateSyncOrReject(GetUploadStatusDto, req.headers));
}
@Options()
@HttpCode(HttpStatus.NO_CONTENT)
@Header('Upload-Limit', 'min-size=0')
getUploadOptions() {}
private getDto<T extends object>(cls: new () => T, headers: IncomingHttpHeaders): T {
const dto = plainToInstance(cls, headers, { excludeExtraneousValues: true });
const errors = validateSync(dto);
if (errors.length > 0) {
const constraints = errors.flatMap((e) => (e.constraints ? Object.values(e.constraints) : []));
console.warn('Upload DTO validation failed:', JSON.stringify(errors, null, 2));
throw new BadRequestException(constraints);
}
return dto;
}
}

View file

@ -114,7 +114,7 @@ export class StartUploadDto extends BaseUploadHeadersDto {
@Expose({ name: UploadHeader.ReprDigest })
@Transform(({ value }) => {
if (!value) {
return null;
throw new BadRequestException(`Missing ${UploadHeader.ReprDigest} header`);
}
const checksum = parseDictionary(value).get('sha')?.[0];

View file

@ -16,7 +16,7 @@ export class AssetUploadInterceptor implements NestInterceptor {
const res = context.switchToHttp().getResponse<Response<AssetMediaResponseDto>>();
const checksum = fromMaybeArray(req.headers[ImmichHeader.Checksum]);
const response = await this.service.getUploadAssetIdByChecksum(req.auth, checksum);
const response = await this.service.getUploadAssetIdByChecksum(req.user, checksum);
if (response) {
res.status(200);
return of({ status: AssetMediaStatus.DUPLICATE, id: response.id });

View file

@ -46,7 +46,7 @@ export const Authenticated = (options: AuthenticatedOptions = {}): MethodDecorat
};
export const Auth = createParamDecorator((data, context: ExecutionContext): AuthDto => {
return context.switchToHttp().getRequest<AuthenticatedRequest>().auth;
return context.switchToHttp().getRequest<AuthenticatedRequest>().user;
});
export const FileResponse = () =>
@ -67,11 +67,11 @@ export const GetLoginDetails = createParamDecorator((data, context: ExecutionCon
});
export interface AuthRequest extends Request {
auth?: AuthDto;
user?: AuthDto;
}
export interface AuthenticatedRequest extends Request {
auth: AuthDto;
user: AuthDto;
}
@Injectable()
@ -99,7 +99,7 @@ export class AuthGuard implements CanActivate {
} = { sharedLink: false, admin: false, ...options };
const request = context.switchToHttp().getRequest<AuthRequest>();
request.auth = await this.authService.authenticate({
request.user = await this.authService.authenticate({
headers: request.headers,
queryParams: request.query as Record<string, string>,
metadata: { adminRoute, sharedLinkRoute, permission, uri: request.path },

View file

@ -2,7 +2,7 @@ import { Injectable } from '@nestjs/common';
import archiver from 'archiver';
import chokidar, { ChokidarOptions } from 'chokidar';
import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, unlinkSync } from 'node:fs';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { Readable, Writable } from 'node:stream';
@ -138,16 +138,6 @@ export class StorageRepository {
}
}
unlinkSync(file: string) {
try {
unlinkSync(file);
} catch (error) {
if ((error as NodeJS.ErrnoException)?.code !== 'ENOENT') {
throw error;
}
}
}
async unlinkDir(folder: string, options: { recursive?: boolean; force?: boolean }) {
await fs.rm(folder, options);
}

View file

@ -215,7 +215,11 @@ describe(AssetMediaService.name, () => {
});
it('should find an existing asset', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('hex'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@ -224,7 +228,11 @@ describe(AssetMediaService.name, () => {
});
it('should find an existing asset by base64', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('base64'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@ -378,7 +386,11 @@ describe(AssetMediaService.name, () => {
(error as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
mocks.asset.create.mockRejectedValue(error);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(assetEntity.id);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: assetEntity.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
await expect(sut.uploadAsset(authStub.user1, createDto, file)).resolves.toEqual({
id: 'id_1',
@ -803,7 +815,11 @@ describe(AssetMediaService.name, () => {
mocks.asset.update.mockRejectedValue(error);
mocks.asset.getById.mockResolvedValueOnce(sidecarAsset);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(sidecarAsset.id);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: sidecarAsset.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([sidecarAsset.id]));
// this is the original file size
mocks.storage.stat.mockResolvedValue({ size: 0 } as Stats);

View file

@ -43,7 +43,7 @@ export class AssetUploadService extends BaseService {
throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
}
this.validateQuota(auth, uploadLength ?? contentLength);
this.validateQuota(auth, uploadLength);
try {
await this.assetRepository.createWithMetadata(
@ -94,7 +94,7 @@ export class AssetUploadService extends BaseService {
return;
}
if (isComplete && uploadLength && uploadLength !== contentLength) {
if (isComplete && uploadLength !== contentLength) {
return this.sendInconsistentLengthProblem(res);
}

View file

@ -192,7 +192,7 @@ export function mapToUploadFile(file: ImmichFile): UploadFile {
export const asUploadRequest = (request: AuthRequest, file: Express.Multer.File): UploadRequest => {
return {
auth: request.auth || null,
auth: request.user || null,
body: request.body,
fieldName: file.fieldname as UploadFieldName,
file: mapToUploadFile(file as ImmichFile),

View file

@ -1,5 +1,35 @@
import { BadRequestException } from '@nestjs/common';
import { plainToInstance } from 'class-transformer';
import { validateSync } from 'class-validator';
// Decodes a checksum string into raw bytes. A 28-character string is treated
// as base64 (the length of a base64-encoded SHA-1 digest); anything else is hex.
export const fromChecksum = (checksum: string): Buffer => {
  const encoding = checksum.length === 28 ? 'base64' : 'hex';
  return Buffer.from(checksum, encoding);
};
export const fromMaybeArray = <T>(param: T | T[]) => (Array.isArray(param) ? param[0] : param);
export function validateSyncOrReject<T extends object>(cls: new () => T, obj: any): T {
const dto = plainToInstance(cls, obj, { excludeExtraneousValues: true });
const errors = validateSync(dto);
if (errors.length === 0) {
return dto;
}
const constraints = [];
for (const error of errors) {
if (error.constraints) {
constraints.push(...Object.values(error.constraints));
}
if (!error.children) {
continue;
}
for (const child of error.children) {
if (child.constraints) {
constraints.push(...Object.values(child.constraints));
}
}
}
throw new BadRequestException(constraints);
}

View file

@ -45,5 +45,9 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
upsertMetadata: vitest.fn(),
getMetadataByKey: vitest.fn(),
deleteMetadataByKey: vitest.fn(),
getCompletionMetadata: vitest.fn(),
createWithMetadata: vitest.fn(),
removeAndDecrementQuota: vitest.fn(),
setComplete: vitest.fn(),
};
};

View file

@ -20,6 +20,7 @@ export const newDatabaseRepositoryMock = (): Mocked<RepositoryInterface<Database
prewarm: vitest.fn(),
runMigrations: vitest.fn(),
withLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
withUuidLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
tryLock: vitest.fn(),
isBusy: vitest.fn(),
wait: vitest.fn(),

View file

@ -51,6 +51,7 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
readFile: vitest.fn(),
createFile: vitest.fn(),
createWriteStream: vitest.fn(),
createOrAppendWriteStream: vitest.fn(),
createOrOverwriteFile: vitest.fn(),
existsSync: vitest.fn(),
overwriteFile: vitest.fn(),
@ -58,6 +59,7 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
unlinkDir: vitest.fn().mockResolvedValue(true),
removeEmptyDirs: vitest.fn(),
checkFileExists: vitest.fn(),
mkdir: vitest.fn(),
mkdirSync: vitest.fn(),
checkDiskUsage: vitest.fn(),
readdir: vitest.fn(),