dto refactor

add logging

handle metadata
mertalev 2025-09-29 18:09:06 -04:00
parent 6f61bf04e4
commit 4ed92f5df5
13 changed files with 795 additions and 370 deletions


@ -33,6 +33,7 @@ describe('/upload (RUFH compliance)', () => {
const { status, headers } = await request(app) const { status, headers } = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`)
.set('Upload-Complete', '?1') .set('Upload-Complete', '?1')
@ -50,6 +51,7 @@ describe('/upload (RUFH compliance)', () => {
const { status, headers } = await request(app) const { status, headers } = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(partialContent).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(partialContent).digest('base64')}:`)
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
@ -71,18 +73,21 @@ describe('/upload (RUFH compliance)', () => {
const { headers } = await request(app) const { headers } = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`)
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
.send(content); .send(content);
expect(headers['location']).toBeDefined();
uploadResource = headers['location']; uploadResource = headers['location'];
}); });
it('should retrieve upload offset with HEAD request', async () => { it('should retrieve upload offset with HEAD request', async () => {
const { status, headers } = await request(baseUrl) const { status, headers } = await request(baseUrl)
.head(uploadResource) .head(uploadResource)
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(status).toBe(204); expect(status).toBe(204);
expect(headers['upload-offset']).toBe('512'); expect(headers['upload-offset']).toBe('512');
@ -94,7 +99,8 @@ describe('/upload (RUFH compliance)', () => {
it('should return 400 for non-UUID upload resource', async () => { it('should return 400 for non-UUID upload resource', async () => {
const { status } = await request(app) const { status } = await request(app)
.head('/upload/nonexistent') .head('/upload/nonexistent')
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(status).toBe(400); expect(status).toBe(400);
}); });
@ -102,7 +108,8 @@ describe('/upload (RUFH compliance)', () => {
it('should return 404 for non-existent upload resource', async () => { it('should return 404 for non-existent upload resource', async () => {
const { status } = await request(app) const { status } = await request(app)
.head('/upload/4feacf6f-830f-46c8-8140-2b3da67070c0') .head('/upload/4feacf6f-830f-46c8-8140-2b3da67070c0')
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(status).toBe(404); expect(status).toBe(404);
}); });
@ -119,6 +126,7 @@ describe('/upload (RUFH compliance)', () => {
const response = await request(app) const response = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(fullContent).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(fullContent).digest('base64')}:`)
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
@ -128,9 +136,10 @@ describe('/upload (RUFH compliance)', () => {
}); });
it('should append data with correct offset', async () => { it('should append data with correct offset', async () => {
-      const { status, headers } = await request(baseUrl)
+      const { status, headers, body } = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', chunks[0].length.toString()) .set('Upload-Offset', chunks[0].length.toString())
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
@ -141,7 +150,8 @@ describe('/upload (RUFH compliance)', () => {
const headResponse = await request(baseUrl) const headResponse = await request(baseUrl)
.head(uploadResource) .head(uploadResource)
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(headResponse.headers['upload-offset']).toBe('1250'); expect(headResponse.headers['upload-offset']).toBe('1250');
}); });
@ -152,6 +162,7 @@ describe('/upload (RUFH compliance)', () => {
const { status, headers, body } = await request(baseUrl) const { status, headers, body } = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', wrongOffset.toString()) .set('Upload-Offset', wrongOffset.toString())
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
@ -159,16 +170,20 @@ describe('/upload (RUFH compliance)', () => {
expect(status).toBe(409); expect(status).toBe(409);
expect(headers['upload-offset']).toBe('1250'); expect(headers['upload-offset']).toBe('1250');
-      expect(body.type).toBe('https://iana.org/assignments/http-problem-types#mismatching-upload-offset');
-      expect(body['expected-offset']).toBe(1250);
-      expect(body['provided-offset']).toBe(wrongOffset);
+      expect(body).toEqual({
+        type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
+        title: 'offset from request does not match offset of resource',
+        'expected-offset': 1250,
+        'provided-offset': wrongOffset,
+      });
}); });
it('should complete upload with Upload-Complete: ?1', async () => { it('should complete upload with Upload-Complete: ?1', async () => {
// Get current offset first // Get current offset first
const headResponse = await request(baseUrl) const headResponse = await request(baseUrl)
.head(uploadResource) .head(uploadResource)
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
const offset = parseInt(headResponse.headers['upload-offset']); const offset = parseInt(headResponse.headers['upload-offset']);
expect(offset).toBe(1250); expect(offset).toBe(1250);
@ -176,6 +191,7 @@ describe('/upload (RUFH compliance)', () => {
const { status, headers } = await request(baseUrl) const { status, headers } = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', offset.toString()) .set('Upload-Offset', offset.toString())
.set('Upload-Complete', '?1') .set('Upload-Complete', '?1')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
@ -186,17 +202,21 @@ describe('/upload (RUFH compliance)', () => {
expect(headers['upload-offset']).toBe('2750'); expect(headers['upload-offset']).toBe('2750');
}); });
-    it('should reject append to completed upload when offset is right', async () => {
+    it('should reject append to completed upload', async () => {
const { status, body } = await request(baseUrl) const { status, body } = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', '2750') .set('Upload-Offset', '2750')
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
.send(randomBytes(100)); .send(randomBytes(100));
expect(status).toBe(400); expect(status).toBe(400);
-      expect(body.type).toBe('https://iana.org/assignments/http-problem-types#completed-upload');
+      expect(body).toEqual({
+        type: 'https://iana.org/assignments/http-problem-types#completed-upload',
+        title: 'upload is already completed',
+      });
}); });
}); });
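The two assertions above pin down the exact RFC 9457 problem-details bodies returned for an offset mismatch and for an append to an already-completed upload. A rough TypeScript sketch of those shapes, inferred only from the expectations in these tests (the server-side response code is not part of this excerpt):

```typescript
// Shapes inferred from the test assertions above; field names and types are
// taken directly from the expected bodies, everything else is illustrative.
interface MismatchingUploadOffsetProblem {
  type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset';
  title: string;
  'expected-offset': number;
  'provided-offset': number;
}

interface CompletedUploadProblem {
  type: 'https://iana.org/assignments/http-problem-types#completed-upload';
  title: string;
}
```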
@ -209,6 +229,7 @@ describe('/upload (RUFH compliance)', () => {
const response = await request(app) const response = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`)
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
@ -227,7 +248,8 @@ describe('/upload (RUFH compliance)', () => {
// Verify resource is no longer accessible // Verify resource is no longer accessible
const headResponse = await request(baseUrl) const headResponse = await request(baseUrl)
.head(uploadResource) .head(uploadResource)
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(headResponse.status).toBe(404); expect(headResponse.status).toBe(404);
}); });
@ -243,6 +265,7 @@ describe('/upload (RUFH compliance)', () => {
const initialResponse = await request(app) const initialResponse = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(totalContent).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(totalContent).digest('base64')}:`)
.set('Upload-Complete', '?0') // Indicate incomplete .set('Upload-Complete', '?0') // Indicate incomplete
@ -254,7 +277,8 @@ describe('/upload (RUFH compliance)', () => {
// Check offset after interruption // Check offset after interruption
const offsetResponse = await request(baseUrl) const offsetResponse = await request(baseUrl)
.head(uploadResource) .head(uploadResource)
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(offsetResponse.headers['upload-offset']).toBe('2000'); expect(offsetResponse.headers['upload-offset']).toBe('2000');
@ -263,6 +287,7 @@ describe('/upload (RUFH compliance)', () => {
const resumeResponse = await request(baseUrl) const resumeResponse = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', '2000') .set('Upload-Offset', '2000')
.set('Upload-Complete', '?1') .set('Upload-Complete', '?1')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
@ -281,6 +306,7 @@ describe('/upload (RUFH compliance)', () => {
const createResponse = await request(app) const createResponse = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${hash.digest('base64')}:`) .set('Repr-Digest', `sha=:${hash.digest('base64')}:`)
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
@ -293,6 +319,7 @@ describe('/upload (RUFH compliance)', () => {
let response = await request(baseUrl) let response = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', currentOffset.toString()) .set('Upload-Offset', currentOffset.toString())
.set('Upload-Complete', '?0') .set('Upload-Complete', '?0')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
@ -304,7 +331,8 @@ describe('/upload (RUFH compliance)', () => {
// Verify offset // Verify offset
const offsetCheck = await request(baseUrl) const offsetCheck = await request(baseUrl)
.head(uploadResource) .head(uploadResource)
-        .set('Authorization', `Bearer ${user.accessToken}`);
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .set('Upload-Draft-Interop-Version', '8');
expect(offsetCheck.headers['upload-offset']).toBe('5000'); expect(offsetCheck.headers['upload-offset']).toBe('5000');
@ -312,6 +340,7 @@ describe('/upload (RUFH compliance)', () => {
response = await request(baseUrl) response = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('Upload-Offset', currentOffset.toString()) .set('Upload-Offset', currentOffset.toString())
.set('Upload-Complete', '?1') .set('Upload-Complete', '?1')
.set('Content-Type', 'application/partial-upload') .set('Content-Type', 'application/partial-upload')
@ -329,6 +358,7 @@ describe('/upload (RUFH compliance)', () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.post('/upload') .post('/upload')
.set('Authorization', `Bearer ${user.accessToken}`) .set('Authorization', `Bearer ${user.accessToken}`)
.set('Upload-Draft-Interop-Version', '8')
.set('X-Immich-Asset-Data', base64Metadata) .set('X-Immich-Asset-Data', base64Metadata)
.set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`)
.set('Upload-Complete', '?1') .set('Upload-Complete', '?1')
@ -337,7 +367,10 @@ describe('/upload (RUFH compliance)', () => {
.send(content); .send(content);
expect(status).toBe(400); expect(status).toBe(400);
-      expect(body.type).toBe('https://iana.org/assignments/http-problem-types#inconsistent-upload-length');
+      expect(body).toEqual({
+        type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
+        title: 'inconsistent length values for upload',
+      });
}); });
}); });
@ -348,27 +381,8 @@ describe('/upload (RUFH compliance)', () => {
.set('Authorization', `Bearer ${user.accessToken}`); .set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(204); expect(status).toBe(204);
-      expect(headers['upload-limit']).toBeDefined();
-      const limits = parseUploadLimit(headers['upload-limit']);
-      expect(limits).toHaveProperty('min-size');
+      expect(headers['upload-limit']).toEqual('min-size=0');
    });
}); });
}); });
-// Helper function to parse Upload-Limit header
-function parseUploadLimit(headerValue: string): Record<string, number> {
-  const limits: Record<string, number> = {};
-  if (!headerValue) return limits;
-
-  // Parse structured field dictionary format
-  const pairs = headerValue.split(',').map((p) => p.trim());
-  for (const pair of pairs) {
-    const [key, value] = pair.split('=');
-    if (key && value) {
-      limits[key] = parseInt(value, 10);
-    }
-  }
-
-  return limits;
-}
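The deleted helper treated `Upload-Limit` as a plain comma-separated list, but the header is an RFC 8941 structured-field dictionary; the server-side DTOs in this commit parse such fields with the `structured-headers` package. A minimal sketch of reading `Upload-Limit` that way, using the header value the test above expects (the parsing code itself is illustrative, not part of this diff):

```typescript
// Sketch: parse Upload-Limit as an RFC 8941 dictionary with structured-headers,
// the same parser the new upload DTOs use for Repr-Digest.
import { parseDictionary } from 'structured-headers';

const uploadLimit = parseDictionary('min-size=0');
// Each dictionary entry is a [value, parameters] pair.
const minSize = uploadLimit.get('min-size')?.[0]; // -> 0
console.log(`Server minimum upload size: ${String(minSize)} bytes`);
```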


@ -137,12 +137,15 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
-  Future<Response> getUploadStatusWithHttpInfo(String id, { String? key, String? slug, }) async {
+  Future<Response> getUploadStatusWithHttpInfo(String draftUploadInteropVersion, String id, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload/{id}' final apiPath = r'/upload/{id}'
.replaceAll('{id}', id); .replaceAll('{id}', id);
@ -161,6 +164,8 @@ class UploadApi {
queryParams.addAll(_queryParams('', 'slug', slug)); queryParams.addAll(_queryParams('', 'slug', slug));
} }
headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion);
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -179,13 +184,16 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
-  Future<void> getUploadStatus(String id, { String? key, String? slug, }) async {
-    final response = await getUploadStatusWithHttpInfo(id, key: key, slug: slug, );
+  Future<void> getUploadStatus(String draftUploadInteropVersion, String id, { String? key, String? slug, }) async {
+    final response = await getUploadStatusWithHttpInfo(draftUploadInteropVersion, id, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }
@ -197,12 +205,24 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
-  Future<Response> resumeUploadWithHttpInfo(String id, { String? key, String? slug, }) async {
+  Future<Response> resumeUploadWithHttpInfo(String contentLength, String draftUploadInteropVersion, String id, String uploadComplete, String uploadOffset, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload/{id}' final apiPath = r'/upload/{id}'
.replaceAll('{id}', id); .replaceAll('{id}', id);
@ -221,6 +241,11 @@ class UploadApi {
queryParams.addAll(_queryParams('', 'slug', slug)); queryParams.addAll(_queryParams('', 'slug', slug));
} }
headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion);
headerParams[r'upload-complete'] = parameterToString(uploadComplete);
headerParams[r'upload-offset'] = parameterToString(uploadOffset);
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -239,13 +264,25 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
-  Future<void> resumeUpload(String id, { String? key, String? slug, }) async {
-    final response = await resumeUploadWithHttpInfo(id, key: key, slug: slug, );
+  Future<void> resumeUpload(String contentLength, String draftUploadInteropVersion, String id, String uploadComplete, String uploadOffset, { String? key, String? slug, }) async {
+    final response = await resumeUploadWithHttpInfo(contentLength, draftUploadInteropVersion, id, uploadComplete, uploadOffset, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }
@ -257,10 +294,25 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] reprDigest (required):
/// Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.
///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] xImmichAssetData (required):
/// Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.
///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
-  Future<Response> startUploadWithHttpInfo({ String? key, String? slug, }) async {
+  Future<Response> startUploadWithHttpInfo(String contentLength, String draftUploadInteropVersion, String reprDigest, String uploadComplete, String xImmichAssetData, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload'; final apiPath = r'/upload';
@ -278,6 +330,12 @@ class UploadApi {
queryParams.addAll(_queryParams('', 'slug', slug)); queryParams.addAll(_queryParams('', 'slug', slug));
} }
headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion);
headerParams[r'repr-digest'] = parameterToString(reprDigest);
headerParams[r'upload-complete'] = parameterToString(uploadComplete);
headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData);
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -296,11 +354,26 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] contentLength (required):
/// Non-negative size of the request body in bytes.
///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] reprDigest (required):
/// Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.
///
/// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
///
/// * [String] xImmichAssetData (required):
/// Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.
///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
-  Future<void> startUpload({ String? key, String? slug, }) async {
-    final response = await startUploadWithHttpInfo( key: key, slug: slug, );
+  Future<void> startUpload(String contentLength, String draftUploadInteropVersion, String reprDigest, String uploadComplete, String xImmichAssetData, { String? key, String? slug, }) async {
+    final response = await startUploadWithHttpInfo(contentLength, draftUploadInteropVersion, reprDigest, uploadComplete, xImmichAssetData, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }

mobile/set_test.dart (new file, 207 lines)

@ -0,0 +1,207 @@
enum BackupSelection {
// Used to sort albums based on the backupSelection
// selected -> none -> excluded
// Do not change the order of these values
selected,
none,
excluded,
}
class LocalAlbum {
final String id;
final String name;
final DateTime updatedAt;
final bool isIosSharedAlbum;
final int assetCount;
final BackupSelection backupSelection;
final String? linkedRemoteAlbumId;
const LocalAlbum({
required this.id,
required this.name,
required this.updatedAt,
this.assetCount = 0,
this.backupSelection = BackupSelection.none,
this.isIosSharedAlbum = false,
this.linkedRemoteAlbumId,
});
LocalAlbum copyWith({
String? id,
String? name,
DateTime? updatedAt,
int? assetCount,
BackupSelection? backupSelection,
bool? isIosSharedAlbum,
String? linkedRemoteAlbumId,
}) {
return LocalAlbum(
id: id ?? this.id,
name: name ?? this.name,
updatedAt: updatedAt ?? this.updatedAt,
assetCount: assetCount ?? this.assetCount,
backupSelection: backupSelection ?? this.backupSelection,
isIosSharedAlbum: isIosSharedAlbum ?? this.isIosSharedAlbum,
linkedRemoteAlbumId: linkedRemoteAlbumId ?? this.linkedRemoteAlbumId,
);
}
@override
bool operator ==(Object other) {
if (other is! LocalAlbum) return false;
if (identical(this, other)) return true;
return other.id == id &&
other.name == name &&
other.updatedAt == updatedAt &&
other.assetCount == assetCount &&
other.backupSelection == backupSelection &&
other.isIosSharedAlbum == isIosSharedAlbum &&
other.linkedRemoteAlbumId == linkedRemoteAlbumId;
}
@override
int get hashCode {
return id.hashCode ^
name.hashCode ^
updatedAt.hashCode ^
assetCount.hashCode ^
backupSelection.hashCode ^
isIosSharedAlbum.hashCode ^
linkedRemoteAlbumId.hashCode;
}
@override
String toString() {
return '''LocalAlbum: {
id: $id,
name: $name,
updatedAt: $updatedAt,
assetCount: $assetCount,
backupSelection: $backupSelection,
isIosSharedAlbum: $isIosSharedAlbum
linkedRemoteAlbumId: $linkedRemoteAlbumId,
}''';
}
}
int square(int num) {
return num * num;
}
@pragma('vm:never-inline')
List<LocalAlbum> getAlbums() {
final updatedAt = DateTime.now();
final selection = BackupSelection.values;
return List.generate(100000, (i) {
return LocalAlbum(id: i.toString(), name: '', updatedAt: updatedAt, backupSelection: selection[i % 3]);
});
}
@pragma('vm:never-inline')
List<LocalAlbum> setAlbum1(List<LocalAlbum> albums, LocalAlbum album) {
final newAlbums = List.filled(albums.length, LocalAlbum(id: '', name: '', updatedAt: DateTime.now()));
newAlbums.setAll(0, albums);
for (int i = 0; i < newAlbums.length; i++) {
final currentAlbum = newAlbums[i];
if (currentAlbum.id == album.id) {
newAlbums[i] = currentAlbum.copyWith(backupSelection: BackupSelection.selected);
break;
}
}
return newAlbums;
}
@pragma('vm:never-inline')
List<LocalAlbum> setAlbum2(List<LocalAlbum> albums, LocalAlbum album) {
final newAlbums = List.filled(albums.length, LocalAlbum(id: '', name: '', updatedAt: DateTime.now()));
for (int i = 0; i < newAlbums.length; i++) {
final currentAlbum = newAlbums[i];
newAlbums[i] = currentAlbum.id == album.id ? currentAlbum.copyWith(backupSelection: BackupSelection.selected) : currentAlbum;
}
return newAlbums;
}
@pragma('vm:never-inline')
List<LocalAlbum> setAlbum3(List<LocalAlbum> albums, LocalAlbum album) {
final newAlbums = albums.toList(growable: false);
for (int i = 0; i < newAlbums.length; i++) {
final currentAlbum = newAlbums[i];
if (currentAlbum.id == album.id) {
newAlbums[i] = currentAlbum.copyWith(backupSelection: BackupSelection.selected);
break;
}
}
return newAlbums;
}
@pragma('vm:never-inline')
Set<String> toSet1(List<LocalAlbum> albums) {
return albums.map((album) => album.id).toSet();
}
@pragma('vm:never-inline')
Set<String> toSet2(List<LocalAlbum> albums) {
final ids = <String>{};
for (final album in albums) {
ids.add(album.id);
}
return ids;
}
@pragma('vm:never-inline')
Set<String> toSet3(List<LocalAlbum> albums) {
return Set.unmodifiable(albums.map((album) => album.id));
}
@pragma('vm:never-inline')
Set<String> toSet4(List<LocalAlbum> albums) {
final ids = <String>{};
for (int i = 0; i < albums.length; i++) {
final id = albums[i].id;
ids.add(id);
}
return ids;
}
@pragma('vm:never-inline')
List<LocalAlbum> toFiltered1(List<LocalAlbum> albums, BackupSelection selection) {
return albums.where((album) => album.backupSelection == selection).toList(growable: false);
}
@pragma('vm:never-inline')
List<LocalAlbum> toFiltered2(List<LocalAlbum> albums, BackupSelection selection) {
final filtered = <LocalAlbum>[];
for (final album in albums) {
if (album.backupSelection == selection) {
filtered.add(album);
}
}
return filtered;
}
@pragma('vm:never-inline')
List<LocalAlbum> toFiltered3(List<LocalAlbum> albums, BackupSelection selection) {
final filtered = <LocalAlbum>[];
for (int i = 0; i < albums.length; i++) {
final album = albums[i];
if (album.backupSelection == selection) {
filtered.add(album);
}
}
return filtered;
}
late Set<String> ids;
late List<LocalAlbum> localAlbums;
void main(List<String> args) {
final albums = getAlbums();
// final album = LocalAlbum(id: '50000', name: '', updatedAt: DateTime.now());
final stopwatch = Stopwatch()..start();
// localAlbums = setAlbum3(albums, album);
// ids = toSet1(albums);
localAlbums = toFiltered2(albums, BackupSelection.selected);
stopwatch.stop();
print('Elapsed time: ${(stopwatch.elapsedMicroseconds / 1000).toStringAsFixed(2)}ms');
}


@ -9419,6 +9419,24 @@
"post": { "post": {
"operationId": "startUpload", "operationId": "startUpload",
"parameters": [ "parameters": [
{
"name": "content-length",
"in": "header",
"description": "Non-negative size of the request body in bytes.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "draft-upload-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "key", "name": "key",
"required": false, "required": false,
@ -9427,6 +9445,15 @@
"type": "string" "type": "string"
} }
}, },
{
"name": "repr-digest",
"in": "header",
"description": "Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "slug", "name": "slug",
"required": false, "required": false,
@ -9434,6 +9461,24 @@
"schema": { "schema": {
"type": "string" "type": "string"
} }
},
{
"name": "upload-complete",
"in": "header",
"description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "x-immich-asset-data",
"in": "header",
"description": "Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.",
"required": true,
"schema": {
"type": "string"
}
} }
], ],
"responses": { "responses": {
@ -9514,6 +9559,15 @@
"head": { "head": {
"operationId": "getUploadStatus", "operationId": "getUploadStatus",
"parameters": [ "parameters": [
{
"name": "draft-upload-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "id", "name": "id",
"required": true, "required": true,
@ -9565,6 +9619,24 @@
"patch": { "patch": {
"operationId": "resumeUpload", "operationId": "resumeUpload",
"parameters": [ "parameters": [
{
"name": "content-length",
"in": "header",
"description": "Non-negative size of the request body in bytes.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "draft-upload-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "id", "name": "id",
"required": true, "required": true,
@ -9589,6 +9661,24 @@
"schema": { "schema": {
"type": "string" "type": "string"
} }
},
{
"name": "upload-complete",
"in": "header",
"description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "upload-offset",
"in": "header",
"description": "Non-negative byte offset indicating the starting position of the data in the request body within the entire file.",
"required": true,
"schema": {
"type": "string"
}
} }
], ],
"responses": { "responses": {


@ -4536,16 +4536,28 @@ export function getUploadOptions({ key, slug }: {
/** /**
* This endpoint requires the `asset.upload` permission. * This endpoint requires the `asset.upload` permission.
*/ */
-export function startUpload({ key, slug }: {
+export function startUpload({ contentLength, draftUploadInteropVersion, key, reprDigest, slug, uploadComplete, xImmichAssetData }: {
+    contentLength: string;
+    draftUploadInteropVersion: string;
     key?: string;
+    reprDigest: string;
     slug?: string;
+    uploadComplete: string;
+    xImmichAssetData: string;
 }, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/upload${QS.query(QS.explode({ return oazapfts.ok(oazapfts.fetchText(`/upload${QS.query(QS.explode({
key, key,
slug slug
}))}`, { }))}`, {
...opts, ...opts,
-        method: "POST"
+        method: "POST",
headers: oazapfts.mergeHeaders(opts?.headers, {
"content-length": contentLength,
"draft-upload-interop-version": draftUploadInteropVersion,
"repr-digest": reprDigest,
"upload-complete": uploadComplete,
"x-immich-asset-data": xImmichAssetData
})
})); }));
} }
/** /**
@ -4567,7 +4579,8 @@ export function cancelUpload({ id, key, slug }: {
/** /**
* This endpoint requires the `asset.upload` permission. * This endpoint requires the `asset.upload` permission.
*/ */
-export function getUploadStatus({ id, key, slug }: {
+export function getUploadStatus({ draftUploadInteropVersion, id, key, slug }: {
draftUploadInteropVersion: string;
id: string; id: string;
key?: string; key?: string;
slug?: string; slug?: string;
@ -4577,23 +4590,36 @@ export function getUploadStatus({ id, key, slug }: {
slug slug
}))}`, { }))}`, {
...opts, ...opts,
-        method: "HEAD"
+        method: "HEAD",
headers: oazapfts.mergeHeaders(opts?.headers, {
"draft-upload-interop-version": draftUploadInteropVersion
})
})); }));
} }
/** /**
* This endpoint requires the `asset.upload` permission. * This endpoint requires the `asset.upload` permission.
*/ */
-export function resumeUpload({ id, key, slug }: {
+export function resumeUpload({ contentLength, draftUploadInteropVersion, id, key, slug, uploadComplete, uploadOffset }: {
contentLength: string;
draftUploadInteropVersion: string;
id: string; id: string;
key?: string; key?: string;
slug?: string; slug?: string;
uploadComplete: string;
uploadOffset: string;
}, opts?: Oazapfts.RequestOpts) { }, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({ return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
key, key,
slug slug
}))}`, { }))}`, {
...opts, ...opts,
-        method: "PATCH"
+        method: "PATCH",
headers: oazapfts.mergeHeaders(opts?.headers, {
"content-length": contentLength,
"draft-upload-interop-version": draftUploadInteropVersion,
"upload-complete": uploadComplete,
"upload-offset": uploadOffset
})
})); }));
} }
/** /**

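For comparison with the generated wrapper above, here is a bare `fetch` version of the same create-upload request, using the header names from the OpenAPI spec and values in the style of the e2e tests. The access token, the `/api` prefix, and the metadata fields are placeholders, not values taken from this diff:

```typescript
// Placeholder values throughout; only the header names and formats come from
// the spec and tests shown in this commit.
declare const accessToken: string; // assumed to be obtained elsewhere

const content = new TextEncoder().encode('hello world');
const sha1 = new Uint8Array(await crypto.subtle.digest('SHA-1', content));
const sha1Base64 = btoa(String.fromCharCode(...sha1));

const assetData = {
  filename: 'hello.txt',
  deviceAssetId: 'web-hello.txt',
  deviceId: 'web',
  fileCreatedAt: new Date().toISOString(),
  fileModifiedAt: new Date().toISOString(),
};

const response = await fetch('/api/upload', {
  method: 'POST',
  body: content,
  headers: {
    Authorization: `Bearer ${accessToken}`,
    'Upload-Draft-Interop-Version': '8',
    'Upload-Complete': '?1',
    'Repr-Digest': `sha=:${sha1Base64}:`,
    'X-Immich-Asset-Data': btoa(JSON.stringify(assetData)),
    'Content-Type': 'application/octet-stream',
  },
});
console.log(response.status, response.headers.get('Location'));
```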

@ -1,12 +1,35 @@
-import { Controller, Delete, Head, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
-import { ApiTags } from '@nestjs/swagger';
-import { Request, Response } from 'express';
+import { BadRequestException, Controller, Delete, Head, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
+import { ApiHeader, ApiTags } from '@nestjs/swagger';
+import { plainToInstance } from 'class-transformer';
+import { validateSync } from 'class-validator';
+import { Response } from 'express';
+import { IncomingHttpHeaders } from 'node:http';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { Permission } from 'src/enum';
-import { Auth, Authenticated } from 'src/middleware/auth.guard';
+import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto, UploadHeader } from 'src/dtos/upload.dto';
+import { ImmichHeader, Permission } from 'src/enum';
+import { Auth, Authenticated, AuthenticatedRequest } from 'src/middleware/auth.guard';
 import { AssetUploadService } from 'src/services/asset-upload.service';
 import { UUIDParamDto } from 'src/validation';
const apiInteropVersion = {
name: UploadHeader.InteropVersion,
description: `Indicates the version of the RUFH protocol supported by the client.`,
required: true,
};
const apiUploadComplete = {
name: UploadHeader.UploadComplete,
description:
'Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.',
required: true,
};
const apiContentLength = {
name: UploadHeader.ContentLength,
description: 'Non-negative size of the request body in bytes.',
required: true,
};
 @ApiTags('Upload')
 @Controller('upload')
 export class AssetUploadController {
@ -14,36 +37,73 @@ export class AssetUploadController {
   @Post()
   @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
-  startUpload(@Auth() auth: AuthDto, @Req() request: Request, @Res() response: Response): Promise<void> {
-    return this.service.startUpload(auth, request, response);
+  @ApiHeader({
+    name: ImmichHeader.AssetData,
+    description:
+      'Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.',
+    required: true,
+  })
+  @ApiHeader({
+    name: UploadHeader.ReprDigest,
+    description:
+      'Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.',
+    required: true,
+  })
+  @ApiHeader(apiInteropVersion)
+  @ApiHeader(apiUploadComplete)
+  @ApiHeader(apiContentLength)
+  startUpload(@Req() req: AuthenticatedRequest, @Res() res: Response): Promise<void> {
+    const dto = this.getDto(StartUploadDto, req.headers);
+    console.log('Starting upload with dto:', JSON.stringify(dto));
+    return this.service.startUpload(req, res, dto);
   }

   @Patch(':id')
   @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
-  resumeUpload(
-    @Auth() auth: AuthDto,
-    @Param() { id }: UUIDParamDto,
-    @Req() request: Request,
-    @Res() response: Response,
-  ): Promise<void> {
-    return this.service.resumeUpload(auth, id, request, response);
+  @ApiHeader({
+    name: UploadHeader.UploadOffset,
+    description:
+      'Non-negative byte offset indicating the starting position of the data in the request body within the entire file.',
+    required: true,
+  })
+  @ApiHeader(apiInteropVersion)
+  @ApiHeader(apiUploadComplete)
+  @ApiHeader(apiContentLength)
+  resumeUpload(@Req() req: AuthenticatedRequest, @Res() res: Response, @Param() { id }: UUIDParamDto) {
+    const dto = this.getDto(ResumeUploadDto, req.headers);
+    console.log('Resuming upload with dto:', JSON.stringify(dto));
+    return this.service.resumeUpload(req, res, id, dto);
   }

   @Delete(':id')
   @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
-  cancelUpload(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Res() response: Response): Promise<void> {
-    return this.service.cancelUpload(auth, id, response);
+  cancelUpload(@Auth() auth: AuthDto, @Res() res: Response, @Param() { id }: UUIDParamDto) {
+    return this.service.cancelUpload(auth, id, res);
   }

   @Head(':id')
   @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
-  getUploadStatus(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Res() response: Response): Promise<void> {
-    return this.service.getUploadStatus(auth, id, response);
+  @ApiHeader(apiInteropVersion)
+  getUploadStatus(@Req() req: AuthenticatedRequest, @Res() res: Response, @Param() { id }: UUIDParamDto) {
+    const dto = this.getDto(GetUploadStatusDto, req.headers);
+    console.log('Getting upload status with dto:', JSON.stringify(dto));
+    return this.service.getUploadStatus(req.auth, res, id, dto);
   }

   @Options()
   @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
-  getUploadOptions(@Res() response: Response): Promise<void> {
-    return this.service.getUploadOptions(response);
+  getUploadOptions(@Res() res: Response) {
+    return this.service.getUploadOptions(res);
   }
+
+  private getDto<T extends object>(cls: new () => T, headers: IncomingHttpHeaders): T {
+    const dto = plainToInstance(cls, headers, { excludeExtraneousValues: true });
+    const errors = validateSync(dto);
+    if (errors.length > 0) {
+      const constraints = errors.map((e) => (e.constraints ? Object.values(e.constraints).join(', ') : '')).join('; ');
+      console.warn('Upload DTO validation failed:', JSON.stringify(errors, null, 2));
+      throw new BadRequestException(constraints);
+    }
+    return dto;
+  }
 }
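The `getDto` helper above leans on class-transformer's `@Expose({ name })` to pick specific entries out of the raw `IncomingHttpHeaders` object and on `excludeExtraneousValues` to drop everything else before class-validator runs. A self-contained sketch of that mapping pattern with a made-up DTO (not one of the real upload DTOs):

```typescript
// Minimal standalone illustration of the headers-to-DTO pattern; the DTO and
// header values here are invented for the example.
import 'reflect-metadata';
import { Expose, Type, plainToInstance } from 'class-transformer';
import { IsInt, Min, validateSync } from 'class-validator';

class ExampleHeadersDto {
  @Expose({ name: 'upload-draft-interop-version' })
  @Min(3)
  @IsInt()
  @Type(() => Number)
  version!: number;
}

const headers = { 'upload-draft-interop-version': '8', host: 'example.test' };
// Only @Expose'd properties survive; 'host' is discarded.
const dto = plainToInstance(ExampleHeadersDto, headers, { excludeExtraneousValues: true });
const errors = validateSync(dto);
console.log(dto.version, errors.length); // 8 0
```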


@ -1,133 +1,159 @@
-import { Type } from 'class-transformer';
-import { IsEnum, IsInt, IsNotEmpty, IsObject, IsString, IsUUID, ValidateNested } from 'class-validator';
-import { AssetMediaCreateDto } from 'src/dtos/asset-media.dto';
-
-export enum TusdHookRequestType {
-  PreCreate = 'pre-create',
-  PreFinish = 'pre-finish',
-}
-
-export enum TusdHookStorageType {
-  FileStore = 'filestore',
-}
-
-export class TusdStorageDto {
-  @IsEnum(TusdHookStorageType)
-  Type!: string;
-
-  @IsString()
-  @IsNotEmpty()
-  Path!: string;
-
-  @IsString()
-  @IsNotEmpty()
-  InfoPath!: string;
-}
-
-export class UploadAssetDataDto extends AssetMediaCreateDto {
-  @IsString()
-  @IsNotEmpty()
-  declare filename: string;
-}
-
-export class TusdMetaDataDto {
-  @IsString()
-  @IsNotEmpty()
-  declare AssetData: string; // base64-encoded JSON string of UploadAssetDataDto
-}
-
-export class TusdPreCreateUploadDto {
-  @IsInt()
-  Size!: number;
-}
-
-export class TusdPreFinishUploadDto {
-  @IsUUID()
-  ID!: string;
-
-  @IsInt()
-  Size!: number;
-
-  @Type(() => TusdMetaDataDto)
-  @ValidateNested()
-  @IsObject()
-  MetaData!: TusdMetaDataDto;
-
-  @Type(() => TusdStorageDto)
-  @ValidateNested()
-  @IsObject()
-  Storage!: TusdStorageDto;
-}
-
-export class TusdHttpRequestDto {
-  @IsString()
-  @IsNotEmpty()
-  Method!: string;
-
-  @IsString()
-  @IsNotEmpty()
-  URI!: string;
-
-  @IsObject()
-  Header!: Record<string, string[]>;
-}
-
-export class TusdPreCreateEventDto {
-  @Type(() => TusdPreCreateUploadDto)
-  @ValidateNested()
-  @IsObject()
-  Upload!: TusdPreCreateUploadDto;
-
-  @Type(() => TusdHttpRequestDto)
-  @ValidateNested()
-  @IsObject()
-  HTTPRequest!: TusdHttpRequestDto;
-}
-
-export class TusdPreFinishEventDto {
-  @Type(() => TusdPreFinishUploadDto)
-  @ValidateNested()
-  @IsObject()
-  Upload!: TusdPreFinishUploadDto;
-
-  @Type(() => TusdHttpRequestDto)
-  @ValidateNested()
-  @IsObject()
-  HTTPRequest!: TusdHttpRequestDto;
-}
-
-export class TusdHookRequestDto {
-  @IsEnum(TusdHookRequestType)
-  Type!: TusdHookRequestType;
-
-  @IsObject()
-  Event!: TusdPreCreateEventDto | TusdPreFinishEventDto;
-}
-
-export class TusdHttpResponseDto {
-  StatusCode!: number;
-  Body?: string;
-  Header?: Record<string, string>;
-}
-
-export class TusdChangeFileInfoStorageDto {
-  Path?: string;
-}
-
-export class TusdChangeFileInfoDto {
-  ID?: string;
-  MetaData?: TusdMetaDataDto;
-  Storage?: TusdChangeFileInfoStorageDto;
-}
-
-export class TusdHookResponseDto {
-  HTTPResponse?: TusdHttpResponseDto;
-  RejectUpload?: boolean;
-  ChangeFileInfo?: TusdChangeFileInfoDto;
-}
+import { BadRequestException } from '@nestjs/common';
+import { Expose, plainToInstance, Transform, Type } from 'class-transformer';
+import { Equals, IsArray, IsEnum, IsInt, IsNotEmpty, IsString, Min, ValidateIf, ValidateNested } from 'class-validator';
+import { AssetMetadataUpsertItemDto } from 'src/dtos/asset.dto';
+import { AssetVisibility, ImmichHeader } from 'src/enum';
+import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateUUID } from 'src/validation';
+import { parseDictionary } from 'structured-headers';
+
+export class UploadAssetDataDto {
+  @IsNotEmpty()
+  @IsString()
+  deviceAssetId!: string;
+
+  @IsNotEmpty()
+  @IsString()
+  deviceId!: string;
+
+  @ValidateDate()
+  fileCreatedAt!: Date;
+
+  @ValidateDate()
+  fileModifiedAt!: Date;
+
+  @Optional()
+  @IsString()
+  duration?: string;
+
+  @IsString()
+  @IsNotEmpty()
+  filename!: string;
+
+  @ValidateBoolean({ optional: true })
+  isFavorite?: boolean;
+
+  @ValidateEnum({ enum: AssetVisibility, name: 'AssetVisibility', optional: true })
+  visibility?: AssetVisibility;
+
+  @ValidateUUID({ optional: true })
+  livePhotoVideoId?: string;
+
+  @Transform(({ value }) => {
+    try {
+      const json = JSON.parse(value);
+      const items = Array.isArray(json) ? json : [json];
+      return items.map((item) => plainToInstance(AssetMetadataUpsertItemDto, item));
+    } catch {
+      throw new BadRequestException(['metadata must be valid JSON']);
+    }
+  })
+  @Optional()
+  @ValidateNested({ each: true })
+  @IsArray()
+  metadata!: AssetMetadataUpsertItemDto[];
+}
+
+export enum StructuredBoolean {
+  False = '?0',
+  True = '?1',
+}
+
+export enum UploadHeader {
+  UploadOffset = 'upload-offset',
+  ContentLength = 'content-length',
+  UploadLength = 'upload-length',
+  UploadComplete = 'upload-complete',
+  UploadIncomplete = 'upload-incomplete',
+  InteropVersion = 'upload-draft-interop-version',
+  ReprDigest = 'repr-digest',
+}
+
+class BaseRufhHeadersDto {
+  @Expose({ name: UploadHeader.InteropVersion })
+  @Min(3)
+  @IsInt()
+  @Type(() => Number)
+  version!: number;
+}
+
+export class BaseUploadHeadersDto extends BaseRufhHeadersDto {
+  @Expose({ name: UploadHeader.ContentLength })
+  @Min(0)
+  @IsInt()
+  @Type(() => Number)
+  contentLength!: number;
+
+  @Expose({ name: UploadHeader.UploadComplete })
+  @ValidateIf((o) => o.requestInterop !== null && o.requestInterop! <= 3)
+  @IsEnum(StructuredBoolean)
+  uploadComplete!: StructuredBoolean;
+
+  @Expose({ name: UploadHeader.UploadIncomplete })
+  @ValidateIf((o) => o.requestInterop === null || o.requestInterop! > 3)
+  @IsEnum(StructuredBoolean)
+  uploadIncomplete!: StructuredBoolean;
+
+  @Expose({ name: UploadHeader.UploadLength })
+  @Min(0)
+  @IsInt()
+  @Type(() => Number)
+  @Optional()
+  uploadLength?: number;
+
+  get isComplete(): boolean {
+    if (this.version <= 3) {
+      return this.uploadIncomplete === StructuredBoolean.False;
+    }
+    return this.uploadComplete === StructuredBoolean.True;
+  }
+}
+
+export class StartUploadDto extends BaseUploadHeadersDto {
+  @Expose({ name: ImmichHeader.AssetData })
+  // @ValidateNested()
+  // @IsObject()
+  @Type(() => UploadAssetDataDto)
+  @Transform(({ value }) => {
+    if (!value) {
+      return null;
+    }
+    const json = Buffer.from(value, 'base64').toString('utf-8');
+    try {
+      return JSON.parse(json);
+    } catch {
+      throw new BadRequestException(`${ImmichHeader.AssetData} must be valid base64-encoded JSON`);
+    }
+  })
+  assetData!: UploadAssetDataDto;
+
+  @Expose({ name: UploadHeader.ReprDigest })
+  @Transform(({ value }) => {
+    if (!value) {
+      return null;
+    }
+    const checksum = parseDictionary(value).get('sha')?.[0];
+    if (checksum instanceof ArrayBuffer) {
+      return Buffer.from(checksum);
+    }
+    throw new BadRequestException(`Invalid ${UploadHeader.ReprDigest} header`);
+  })
+  checksum!: Buffer;
+}
+
+export class ResumeUploadDto extends BaseUploadHeadersDto {
+  @Expose({ name: 'content-type' })
+  @ValidateIf((o) => o.requestInterop !== null && o.requestInterop >= 6)
+  @Equals('application/partial-upload')
+  contentType!: number | null;
+
+  @Expose({ name: UploadHeader.UploadOffset })
+  @Min(0)
+  @IsInt()
+  @Type(() => Number)
+  @Optional()
+  uploadOffset!: number | null;
+}
+
+export class GetUploadStatusDto extends BaseRufhHeadersDto {}
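The `Repr-Digest` transform in `StartUploadDto` relies on `structured-headers` representing byte sequences as `ArrayBuffer`s. A small standalone sketch of the same round trip, building the header the way the e2e tests do and recovering the SHA-1 bytes (illustrative only, not code from this diff):

```typescript
// Sketch: produce a Repr-Digest value and parse it back into the raw checksum.
import { createHash } from 'node:crypto';
import { parseDictionary } from 'structured-headers';

const content = Buffer.from('hello');
const header = `sha=:${createHash('sha1').update(content).digest('base64')}:`;

// Dictionary members are [value, parameters] pairs; byte sequences come back
// as ArrayBuffer, which is what the DTO's instanceof check expects.
const entry = parseDictionary(header).get('sha')?.[0];
const checksum = entry instanceof ArrayBuffer ? Buffer.from(entry) : undefined;
console.log(checksum?.equals(createHash('sha1').update(content).digest())); // true
```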


@ -16,7 +16,7 @@ export class AssetUploadInterceptor implements NestInterceptor {
const res = context.switchToHttp().getResponse<Response<AssetMediaResponseDto>>(); const res = context.switchToHttp().getResponse<Response<AssetMediaResponseDto>>();
const checksum = fromMaybeArray(req.headers[ImmichHeader.Checksum]); const checksum = fromMaybeArray(req.headers[ImmichHeader.Checksum]);
-    const response = await this.service.getUploadAssetIdByChecksum(req.user, checksum);
+    const response = await this.service.getUploadAssetIdByChecksum(req.auth, checksum);
if (response) { if (response) {
res.status(200); res.status(200);
return of({ status: AssetMediaStatus.DUPLICATE, id: response.id }); return of({ status: AssetMediaStatus.DUPLICATE, id: response.id });


@ -46,7 +46,7 @@ export const Authenticated = (options: AuthenticatedOptions = {}): MethodDecorat
}; };
export const Auth = createParamDecorator((data, context: ExecutionContext): AuthDto => { export const Auth = createParamDecorator((data, context: ExecutionContext): AuthDto => {
-  return context.switchToHttp().getRequest<AuthenticatedRequest>().user;
+  return context.switchToHttp().getRequest<AuthenticatedRequest>().auth;
}); });
export const FileResponse = () => export const FileResponse = () =>
@ -68,11 +68,11 @@ export const GetLoginDetails = createParamDecorator((data, context: ExecutionCon
}); });
export interface AuthRequest extends Request { export interface AuthRequest extends Request {
-  user?: AuthDto;
+  auth?: AuthDto;
} }
export interface AuthenticatedRequest extends Request { export interface AuthenticatedRequest extends Request {
-  user: AuthDto;
+  auth: AuthDto;
} }
@Injectable() @Injectable()
@ -99,7 +99,7 @@ export class AuthGuard implements CanActivate {
} = { sharedLink: false, admin: false, ...options }; } = { sharedLink: false, admin: false, ...options };
const request = context.switchToHttp().getRequest<AuthRequest>(); const request = context.switchToHttp().getRequest<AuthRequest>();
-    request.user = await this.authService.authenticate({
+    request.auth = await this.authService.authenticate({
headers: request.headers, headers: request.headers,
queryParams: request.query as Record<string, string>, queryParams: request.query as Record<string, string>,
metadata: { adminRoute, sharedLinkRoute, permission, uri: request.path }, metadata: { adminRoute, sharedLinkRoute, permission, uri: request.path },


@ -255,6 +255,18 @@ export class AssetRepository {
return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow(); return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow();
} }
createWithMetadata(asset: Insertable<AssetTable> & { id: string }, metadata?: AssetMetadataItem[]) {
if (!metadata || metadata.length === 0) {
return this.db.insertInto('asset').values(asset).execute();
}
return this.db
.with('asset', (qb) => qb.insertInto('asset').values(asset).returning('id'))
.insertInto('asset_metadata')
.values(metadata.map(({ key, value }) => ({ assetId: asset.id, key, value })))
.execute();
}
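`createWithMetadata` folds the asset insert and its metadata inserts into a single statement by running the parent insert inside a CTE. A generic Kysely sketch of that pattern with a hypothetical two-table schema (not Immich's actual tables or connection setup):

```typescript
// Hypothetical schema and connection; only the CTE-plus-insert shape mirrors
// the repository method above.
import { Kysely } from 'kysely';

interface Database {
  parent: { id: string; name: string };
  child: { parentId: string; key: string; value: string };
}

declare const db: Kysely<Database>; // assume an initialized instance

async function createParentWithChildren(
  parent: { id: string; name: string },
  children: { key: string; value: string }[],
) {
  if (children.length === 0) {
    return db.insertInto('parent').values(parent).execute();
  }
  // The parent row is inserted in the WITH clause, the child rows in the outer
  // insert, so both land in one round trip to the database.
  return db
    .with('parent_insert', (qb) => qb.insertInto('parent').values(parent).returning('id'))
    .insertInto('child')
    .values(children.map(({ key, value }) => ({ parentId: parent.id, key, value })))
    .execute();
}
```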
getCompletionMetadata(assetId: string, ownerId: string) { getCompletionMetadata(assetId: string, ownerId: string) {
return this.db return this.db
.selectFrom('asset') .selectFrom('asset')


@ -62,15 +62,11 @@ export class StorageRepository {
} }
createWriteStream(filepath: string): Writable { createWriteStream(filepath: string): Writable {
-    return createWriteStream(filepath, { flags: 'w' });
+    return createWriteStream(filepath, { flags: 'w', highWaterMark: 1024 * 1024 });
   }

-  overwriteWriteStream(filepath: string, offset = 0): Writable {
-    return createWriteStream(filepath, { flags: 'r+', start: offset });
-  }
-
   createOrAppendWriteStream(filepath: string): Writable {
-    return createWriteStream(filepath, { flags: 'a' });
+    return createWriteStream(filepath, { flags: 'a', highWaterMark: 1024 * 1024 });
   }
createOrOverwriteFile(filepath: string, buffer: Buffer) { createOrOverwriteFile(filepath: string, buffer: Buffer) {


@@ -1,70 +1,65 @@
 import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common';
-import { plainToInstance } from 'class-transformer';
-import { validateSync } from 'class-validator';
-import { Request, Response } from 'express';
+import { Response } from 'express';
 import { createHash } from 'node:crypto';
 import { extname, join } from 'node:path';
 import { setTimeout } from 'node:timers/promises';
 import { StorageCore } from 'src/cores/storage.core';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { UploadAssetDataDto } from 'src/dtos/upload.dto';
-import { AssetStatus, AssetType, AssetVisibility, ImmichHeader, JobName, StorageFolder } from 'src/enum';
+import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/upload.dto';
+import { AssetStatus, AssetType, AssetVisibility, JobName, StorageFolder } from 'src/enum';
+import { AuthenticatedRequest } from 'src/middleware/auth.guard';
 import { BaseService } from 'src/services/base.service';
 import { isAssetChecksumConstraint } from 'src/utils/database';
 import { mimeTypes } from 'src/utils/mime-types';
-import { parseDictionary } from 'structured-headers';

-const MAX_INTEROP_VERSION = 8;
+export const MAX_RUFH_INTEROP_VERSION = 8;

 @Injectable()
 export class AssetUploadService extends BaseService {
-  async startUpload(auth: AuthDto, request: Request, response: Response): Promise<void> {
-    const headers = request.headers;
-    const requestInterop = this.getNumberHeader(headers, 'upload-draft-interop-version');
-    const contentLength = this.requireContentLength(headers);
-    const isComplete = this.requireUploadComplete(headers, requestInterop);
-    const metadata = this.requireAssetData(headers);
-    const checksumHeader = this.requireChecksum(headers);
-    const uploadLength = this.getNumberHeader(headers, 'upload-length');
-    if (isComplete && uploadLength !== null && uploadLength !== contentLength) {
+  async startUpload(req: AuthenticatedRequest, response: Response, dto: StartUploadDto): Promise<void> {
+    this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
+    const { isComplete, assetData, uploadLength, contentLength, version } = dto;
+    if (isComplete && uploadLength !== undefined && uploadLength !== contentLength) {
       return this.sendInconsistentLengthProblem(response);
     }
     const assetId = this.cryptoRepository.randomUUID();
-    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId);
-    const extension = extname(metadata.filename);
+    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, req.auth.user.id, assetId);
+    const extension = extname(assetData.filename);
     const path = join(folder, `${assetId}${extension}`);
     const type = mimeTypes.assetType(path);
     if (type === AssetType.Other) {
-      throw new BadRequestException(`${metadata.filename} is an unsupported file type`);
+      throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
     }
-    this.validateQuota(auth, uploadLength ?? contentLength);
+    this.validateQuota(req.auth, uploadLength ?? contentLength);
     try {
-      await this.assetRepository.create({
-        id: assetId,
-        ownerId: auth.user.id,
-        libraryId: null,
-        checksum: checksumHeader,
-        originalPath: path,
-        deviceAssetId: metadata.deviceAssetId,
-        deviceId: metadata.deviceId,
-        fileCreatedAt: metadata.fileCreatedAt,
-        fileModifiedAt: metadata.fileModifiedAt,
-        localDateTime: metadata.fileCreatedAt,
-        type: mimeTypes.assetType(path),
-        isFavorite: metadata.isFavorite,
-        duration: metadata.duration || null,
-        visibility: metadata.visibility || AssetVisibility.Timeline,
-        originalFileName: metadata.filename,
-        status: AssetStatus.Partial,
-      });
+      await this.assetRepository.createWithMetadata(
+        {
+          id: assetId,
+          ownerId: req.auth.user.id,
+          libraryId: null,
+          checksum: dto.checksum,
+          originalPath: path,
+          deviceAssetId: assetData.deviceAssetId,
+          deviceId: assetData.deviceId,
+          fileCreatedAt: assetData.fileCreatedAt,
+          fileModifiedAt: assetData.fileModifiedAt,
+          localDateTime: assetData.fileCreatedAt,
+          type: mimeTypes.assetType(path),
+          isFavorite: assetData.isFavorite,
+          duration: assetData.duration || null,
+          visibility: assetData.visibility || AssetVisibility.Timeline,
+          originalFileName: assetData.filename,
+          status: AssetStatus.Partial,
+        },
+        assetData.metadata,
+      );
     } catch (error: any) {
       if (isAssetChecksumConstraint(error)) {
-        const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksumHeader);
+        const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(req.auth.user.id, dto.checksum);
         if (!duplicate) {
           throw new InternalServerErrorException('Error locating duplicate for checksum constraint');
         }
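For orientation, a hypothetical client call that exercises this creation path might look like the sketch below. The route, header names, and metadata fields are inferred from the handler and the DTO usage above; they are not taken from the real Immich client and may differ in detail.

// Hypothetical client sketch (not part of this commit). Assumed: the collection
// endpoint is /api/upload (the handler returns Location: /api/upload/<assetId>),
// the metadata header is the base64 JSON behind ImmichHeader.AssetData, and the
// checksum travels as an HTTP structured-field dictionary in Repr-Digest.
import { createHash } from 'node:crypto';

async function startUploadSketch(baseUrl: string, accessToken: string, file: Buffer) {
  const assetData = {
    filename: 'IMG_0001.jpg',
    deviceAssetId: 'device-asset-1',
    deviceId: 'my-device',
    fileCreatedAt: new Date().toISOString(),
    fileModifiedAt: new Date().toISOString(),
    isFavorite: false,
  };
  const sha1 = createHash('sha1').update(file).digest('base64');

  const response = await fetch(`${baseUrl}/api/upload`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Content-Type': 'application/octet-stream',
      'Upload-Draft-Interop-Version': '8',
      'Upload-Complete': '?1', // structured-field boolean: whole file in one request
      'Upload-Length': String(file.byteLength),
      'Repr-Digest': `sha=:${sha1}:`, // sha-1 digest as a byte sequence
      // base64-encoded JSON metadata; header name assumed here
      'X-Immich-Asset-Data': Buffer.from(JSON.stringify(assetData)).toString('base64'),
    },
    body: new Uint8Array(file),
  });
  return response.headers.get('Location'); // e.g. /api/upload/<assetId>
}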
@@ -82,8 +77,8 @@ export class AssetUploadService extends BaseService {
        }
      }
      const location = `/api/upload/${assetId}`;
-     if (requestInterop !== null && requestInterop >= 3 && requestInterop <= MAX_INTEROP_VERSION) {
-       this.sendInterimResponse(response, location, requestInterop);
+     if (version <= MAX_RUFH_INTEROP_VERSION) {
+       this.sendInterimResponse(response, location, version);
      }
      await this.storageRepository.mkdir(folder);
@@ -92,7 +87,7 @@ export class AssetUploadService extends BaseService {
      if (isComplete) {
        const hash = createHash('sha1');
-       request.on('data', (chunk: Buffer) => hash.update(chunk));
+       req.on('data', (chunk: Buffer) => hash.update(chunk));
        writeStream.on('finish', () => (checksumBuffer = hash.digest()));
      }
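The hash is fed chunk by chunk as the body streams through, so the full file never has to sit in memory; the digest is read only once the write stream finishes. A self-contained sketch of that pattern, with illustrative names rather than the service's actual API:

// Minimal sketch of the incremental-hash-while-writing pattern (assumed names).
import { createHash } from 'node:crypto';
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';

function hashWhileWriting(source: Readable, destinationPath: string): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const hash = createHash('sha1');
    const sink = createWriteStream(destinationPath, { flags: 'a' });
    source.on('data', (chunk: Buffer) => hash.update(chunk)); // hash without buffering the body
    source.pipe(sink);
    sink.on('finish', () => resolve(hash.digest())); // digest available once all bytes are flushed
    sink.on('error', reject);
    source.on('error', reject);
  });
}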
@@ -103,22 +98,24 @@ export class AssetUploadService extends BaseService {
        }
      });
-     writeStream.on('finish', () => {
+     writeStream.on('finish', async () => {
        if (!isComplete) {
          return response.status(201).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
        }
        this.logger.log(`Finished upload to ${path}`);
-       if (checksumHeader.compare(checksumBuffer!) !== 0) {
+       if (dto.checksum.compare(checksumBuffer!) !== 0) {
          return this.sendChecksumMismatchResponse(response, assetId, path);
        }
-       this.setCompleteHeader(response, requestInterop, true);
-       response.status(200).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
-       return this.onComplete({ assetId, path, size: contentLength, fileModifiedAt: metadata.fileModifiedAt });
+       try {
+         await this.onComplete({ assetId, path, size: contentLength, fileModifiedAt: assetData.fileModifiedAt });
+       } finally {
+         this.setCompleteHeader(response, dto.version, true);
+         response.status(200).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
+       }
      });
-     request.on('error', (error) => {
+     req.on('error', (error) => {
        this.logger.error(`Failed to read request body: ${error.message}`);
        writeStream.end();
        if (!response.headersSent) {
@@ -127,45 +124,44 @@ export class AssetUploadService extends BaseService {
      });
      let receivedLength = 0;
-     request.on('data', (chunk: Buffer) => {
+     req.on('data', (chunk: Buffer) => {
        if (receivedLength + chunk.length > contentLength) {
          writeStream.destroy();
-         request.destroy();
+         req.destroy();
          response.status(400).send('Received more data than specified in content-length');
-         return this.removeAsset(assetId, path);
+         return this.onCancel(assetId, path);
        }
        receivedLength += chunk.length;
        if (!writeStream.write(chunk)) {
-         request.pause();
-         writeStream.once('drain', () => request.resume());
+         req.pause();
+         writeStream.once('drain', () => req.resume());
        }
      });
-     request.on('end', () => {
+     req.on('end', () => {
        if (receivedLength === contentLength) {
          return writeStream.end();
        }
        this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${assetId}`);
        writeStream.destroy();
-       this.removeAsset(assetId, path);
+       this.onCancel(assetId, path);
      });
    }
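The data/end handlers above implement manual backpressure: a write() that returns false pauses the request until the file stream emits 'drain', and the byte count is checked against Content-Length as data arrives. A self-contained sketch of that loop, with assumed names:

// Sketch of the backpressure loop used in both upload handlers (assumed names).
import { IncomingMessage } from 'node:http';
import { Writable } from 'node:stream';

function pipeWithBackpressure(req: IncomingMessage, sink: Writable, expectedLength: number, onTooLarge: () => void) {
  let received = 0;
  req.on('data', (chunk: Buffer) => {
    if (received + chunk.length > expectedLength) {
      sink.destroy();
      req.destroy();
      return onTooLarge(); // more bytes than Content-Length promised
    }
    received += chunk.length;
    if (!sink.write(chunk)) {
      req.pause(); // sink buffer is full
      sink.once('drain', () => req.resume()); // resume once it flushes
    }
  });
  req.on('end', () => sink.end());
}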
-  async resumeUpload(auth: AuthDto, assetId: string, request: Request, response: Response): Promise<void> {
-    const headers = request.headers;
-    const requestInterop = this.getNumberHeader(headers, 'upload-draft-interop-version');
-    const isComplete = this.requireUploadComplete(headers, requestInterop);
-    const contentLength = this.requireContentLength(headers);
-    const providedOffset = this.getNumberHeader(headers, 'upload-offset');
-    const uploadLength = this.getNumberHeader(headers, 'upload-length');
-    const contentType = headers['content-type'];
-    if (requestInterop && requestInterop >= 6 && contentType !== 'application/partial-upload') {
+  resumeUpload(req: AuthenticatedRequest, response: Response, id: string, dto: ResumeUploadDto): Promise<void> {
+    this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
+    const { isComplete, uploadLength, uploadOffset, contentLength, version } = dto;
+    if (isComplete && uploadLength !== undefined && uploadLength !== contentLength) {
+      this.sendInconsistentLengthProblem(response);
+      return Promise.resolve();
+    }
+    if (version && version >= 6 && req.headers['content-type'] !== 'application/partial-upload') {
       throw new BadRequestException('Content-Type must be application/partial-upload for PATCH requests');
     }
-    await this.databaseRepository.withUuidLock(assetId, async () => {
-      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
+    return this.databaseRepository.withUuidLock(id, async () => {
+      const asset = await this.assetRepository.getCompletionMetadata(id, req.auth.user.id);
       if (!asset) {
         response.status(404).send('Asset not found');
         return;
@@ -174,30 +170,30 @@ export class AssetUploadService extends BaseService {
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompletedProblem(response);
      }
-     if (providedOffset === null) {
+     if (uploadOffset === null) {
        throw new BadRequestException('Missing Upload-Offset header');
      }
      const { path } = asset;
      const expectedOffset = await this.getCurrentOffset(path);
-     if (expectedOffset !== providedOffset) {
-       this.setCompleteHeader(response, requestInterop, false);
-       return this.sendOffsetMismatchProblem(response, expectedOffset, providedOffset);
+     if (expectedOffset !== uploadOffset) {
+       this.setCompleteHeader(response, version, false);
+       return this.sendOffsetMismatchProblem(response, expectedOffset, uploadOffset);
      }
-     const newLength = providedOffset + contentLength;
+     const newLength = uploadOffset + contentLength;
      // If upload length is provided, validate we're not exceeding it
-     if (uploadLength !== null && newLength > uploadLength) {
+     if (uploadLength !== undefined && newLength > uploadLength) {
        response.status(400).send('Upload would exceed declared length');
        return;
      }
-     this.validateQuota(auth, newLength);
+     this.validateQuota(req.auth, newLength);
      // Empty PATCH without Upload-Complete
      if (contentLength === 0 && !isComplete) {
-       this.setCompleteHeader(response, requestInterop, false);
+       this.setCompleteHeader(response, version, false);
        response.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
        return;
      }
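From the client side, resuming against this handler would look roughly like the sketch below: probe the upload resource for its current offset, then PATCH the remaining bytes from that offset with the application/partial-upload content type the handler enforces for interop version 6 and above. Endpoint shape and header handling are inferred from the code, not confirmed against the real client.

// Hypothetical resume-flow sketch (not part of this commit).
async function resumeUploadSketch(uploadUrl: string, accessToken: string, file: Buffer) {
  // Ask the server where the partial file currently ends.
  const probe = await fetch(uploadUrl, {
    method: 'HEAD',
    headers: { Authorization: `Bearer ${accessToken}`, 'Upload-Draft-Interop-Version': '8' },
  });
  const offset = Number(probe.headers.get('Upload-Offset') ?? 0);

  // Send only the bytes the server does not have yet.
  const rest = file.subarray(offset);
  return await fetch(uploadUrl, {
    method: 'PATCH',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Upload-Draft-Interop-Version': '8',
      'Content-Type': 'application/partial-upload', // required for interop version >= 6
      'Upload-Offset': String(offset), // must match the server's current offset
      'Upload-Complete': '?1', // this PATCH carries the final bytes
    },
    body: new Uint8Array(rest),
  });
}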
@@ -215,45 +211,47 @@ export class AssetUploadService extends BaseService {
      writeStream.on('finish', async () => {
        const currentOffset = await this.getCurrentOffset(path);
        if (!isComplete) {
-         this.setCompleteHeader(response, requestInterop, false);
+         this.setCompleteHeader(response, version, false);
          return response.status(204).setHeader('Upload-Offset', currentOffset.toString()).send();
        }
        this.logger.log(`Finished upload to ${path}`);
        const checksum = await this.cryptoRepository.hashFile(path);
        if (asset.checksum.compare(checksum) !== 0) {
-         return this.sendChecksumMismatchResponse(response, assetId, path);
+         return this.sendChecksumMismatchResponse(response, id, path);
        }
-       this.setCompleteHeader(response, requestInterop, true);
-       response.status(200).setHeader('Upload-Offset', currentOffset.toString()).send();
-       await this.onComplete({ assetId, path, size: currentOffset, fileModifiedAt: asset.fileModifiedAt });
+       try {
+         await this.onComplete({ assetId: id, path, size: currentOffset, fileModifiedAt: asset.fileModifiedAt });
+       } finally {
+         this.setCompleteHeader(response, version, true);
+         response.status(200).setHeader('Upload-Offset', currentOffset.toString()).send();
+       }
      });
-     request.on('data', (chunk: Buffer) => {
+     req.on('data', (chunk: Buffer) => {
        if (receivedLength + chunk.length > contentLength) {
          this.logger.error(`Received more data than specified in content-length for upload to ${path}`);
          writeStream.destroy();
-         request.destroy();
+         req.destroy();
          response.status(400).send('Received more data than specified in content-length');
-         return this.removeAsset(assetId, path);
+         return this.onCancel(id, path);
        }
        receivedLength += chunk.length;
        if (!writeStream.write(chunk)) {
-         request.pause();
-         writeStream.once('drain', () => request.resume());
+         req.pause();
+         writeStream.once('drain', () => req.resume());
        }
      });
-     request.on('end', () => {
+     req.on('end', () => {
        if (receivedLength === contentLength) {
          return writeStream.end();
        }
-       this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${assetId}`);
+       this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${id}`);
        writeStream.destroy();
-       return this.removeAsset(assetId, path);
+       return this.onCancel(id, path);
      });
    });
  }
@@ -267,13 +265,13 @@ export class AssetUploadService extends BaseService {
      if (asset.status !== AssetStatus.Partial) {
        return this.sendAlreadyCompletedProblem(response);
      }
-     await this.removeAsset(assetId, asset.path);
+     await this.onCancel(assetId, asset.path);
      response.status(204).send();
    }

-  async getUploadStatus(auth: AuthDto, assetId: string, response: Response) {
-    return this.databaseRepository.withUuidLock(assetId, async () => {
-      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
+  async getUploadStatus(auth: AuthDto, response: Response, id: string, { version }: GetUploadStatusDto) {
+    return this.databaseRepository.withUuidLock(id, async () => {
+      const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
      if (!asset) {
        response.status(404).send('Asset not found');
        return;
@@ -282,8 +280,7 @@ export class AssetUploadService extends BaseService {
      const offset = await this.getCurrentOffset(asset.path);
      const isComplete = asset.status !== AssetStatus.Partial;
-     const requestInterop = this.getNumberHeader(response.req.headers, 'upload-draft-interop-version');
-     this.setCompleteHeader(response, requestInterop, isComplete);
+     this.setCompleteHeader(response, version, isComplete);
      response
        .status(204)
        .setHeader('Upload-Offset', offset.toString())
@@ -299,13 +296,19 @@ export class AssetUploadService extends BaseService {
   private async onComplete(data: { assetId: string; path: string; size: number; fileModifiedAt: Date }): Promise<void> {
     const { assetId, path, size, fileModifiedAt } = data;
+    this.logger.debug('Completing upload for asset', assetId);
     const jobData = { name: JobName.AssetExtractMetadata, data: { id: assetId, source: 'upload' } } as const;
     await this.withRetry(() => this.assetRepository.setCompleteWithSize(assetId, size));
+    try {
+      await this.withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
+    } catch (error: any) {
+      this.logger.error(`Failed to update times for ${path}: ${error.message}`);
+    }
     await this.withRetry(() => this.jobRepository.queue(jobData));
-    await this.withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
   }

-  private async removeAsset(assetId: string, path: string): Promise<void> {
+  private async onCancel(assetId: string, path: string): Promise<void> {
+    this.logger.debug('Cancelling upload for asset', assetId);
     await this.withRetry(() => this.storageRepository.unlink(path));
     await this.withRetry(() => this.assetRepository.remove({ id: assetId }));
   }
@@ -324,14 +327,14 @@ export class AssetUploadService extends BaseService {
   private sendInconsistentLengthProblem(response: Response): void {
     response.status(400).contentType('application/problem+json').send({
-      type: `https://iana.org/assignments/http-problem-types#inconsistent-upload-length`,
+      type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
       title: 'inconsistent length values for upload',
     });
   }

   private sendAlreadyCompletedProblem(response: Response): void {
     response.status(400).contentType('application/problem+json').send({
-      type: `https://iana.org/assignments/http-problem-types#completed-upload`,
+      type: 'https://iana.org/assignments/http-problem-types#completed-upload',
       title: 'upload is already completed',
     });
   }
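sendOffsetMismatchProblem, which resumeUpload calls when the provided Upload-Offset does not match the file on disk, is outside this diff. Following the two problem helpers above and the resumable-uploads draft's registered problem type, it plausibly resembles the sketch below; the type URI, status code, and member names are assumptions, not taken from this commit.

// Plausible sketch of the offset-mismatch helper (assumed shape, not from this commit).
import { Response } from 'express';

function sendOffsetMismatchProblemSketch(response: Response, expected: number, provided: number): void {
  response.status(409).contentType('application/problem+json').send({
    type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
    title: 'offset from request does not match offset of resource',
    'expected-offset': expected,
    'provided-offset': provided,
  });
}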
@@ -348,47 +351,7 @@ export class AssetUploadService extends BaseService {
   private sendChecksumMismatchResponse(response: Response, assetId: string, path: string): Promise<void> {
     this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
     response.status(460).send('Checksum mismatch');
-    return this.removeAsset(assetId, path);
+    return this.onCancel(assetId, path);
   }

-  private requireUploadComplete(headers: Request['headers'], interopVersion: number | null): boolean {
-    if (interopVersion !== null && interopVersion <= 3) {
-      const value = headers['upload-incomplete'] as string | undefined;
-      if (value === undefined) {
-        throw new BadRequestException('Missing Upload-Incomplete header');
-      }
-      return value === '?0';
-    }
-    const value = headers['upload-complete'] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException('Missing Upload-Complete header');
-    }
-    return value === '?1';
-  }
-
-  private getNumberHeader(headers: Request['headers'], name: string): number | null {
-    const value = headers[name] as string | undefined;
-    if (value === undefined) {
-      return null;
-    }
-    const number = parseInt(value, 10);
-    if (!isFinite(number) || number < 0) {
-      throw new BadRequestException(`Invalid ${name} header`);
-    }
-    return number;
-  }
-
-  private requireContentLength(headers: Request['headers']): number {
-    const value = headers['content-length'] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException('Missing Content-Length header');
-    }
-    const length = parseInt(value, 10);
-    if (!isFinite(length) || length < 0) {
-      throw new BadRequestException('Invalid Content-Length header');
-    }
-    return length;
-  }

   private async withRetry<T>(operation: () => Promise<T>, retries: number = 2, delay: number = 100): Promise<T> {
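The body of withRetry sits outside this hunk; given the signature shown above and the node:timers/promises import at the top of the file, a minimal implementation could look like this sketch (illustrative, not necessarily the service's exact code).

// Minimal retry helper sketch matching the signature above (assumed body).
import { setTimeout } from 'node:timers/promises';

async function withRetrySketch<T>(operation: () => Promise<T>, retries: number = 2, delay: number = 100): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (attempt < retries) {
        await setTimeout(delay); // brief pause before retrying
      }
    }
  }
  throw lastError;
}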
@@ -428,48 +391,6 @@ export class AssetUploadService extends BaseService {
      }
    }

-  private requireChecksum(headers: Request['headers']): Buffer {
-    const value = headers['repr-digest'] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException(`Missing 'repr-digest' header`);
-    }
-    const sha1Item = parseDictionary(value).get('sha');
-    if (!sha1Item) {
-      throw new BadRequestException(`Missing 'sha' in 'repr-digest' header`);
-    }
-    const checksum = sha1Item[0];
-    if (!(checksum instanceof ArrayBuffer)) {
-      throw new BadRequestException(`Invalid 'sha' in 'repr-digest' header`);
-    }
-    return Buffer.from(checksum);
-  }
-
-  private requireAssetData(headers: Request['headers']): UploadAssetDataDto {
-    const value = headers[ImmichHeader.AssetData] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException(`Missing ${ImmichHeader.AssetData} header`);
-    }
-    let assetData: any;
-    try {
-      assetData = JSON.parse(Buffer.from(value, 'base64').toString('utf8'));
-    } catch {
-      throw new BadRequestException(`${ImmichHeader.AssetData} header is not valid base64-encoded JSON`);
-    }
-    const dto = plainToInstance(UploadAssetDataDto, assetData);
-    const errors = validateSync(dto, { whitelist: true });
-    if (errors.length > 0) {
-      const formatted = errors.map((e) => (e.constraints ? Object.values(e.constraints).join(', ') : ''));
-      throw new BadRequestException(`Invalid ${ImmichHeader.AssetData} header: ${formatted.join('; ')}`);
-    }
-    return dto;
-  }

   private setCompleteHeader(response: Response, interopVersion: number | null, isComplete: boolean): void {
     if (!interopVersion) {
       return;


@@ -192,7 +192,7 @@ export function mapToUploadFile(file: ImmichFile): UploadFile {
 }

 export const asUploadRequest = (request: AuthRequest, file: Express.Multer.File): UploadRequest => {
   return {
-    auth: request.user || null,
+    auth: request.auth || null,
     body: request.body,
     fieldName: file.fieldname as UploadFieldName,
     file: mapToUploadFile(file as ImmichFile),