diff --git a/e2e/src/api/specs/asset-upload.e2e-spec.ts b/e2e/src/api/specs/asset-upload.e2e-spec.ts index 6776fb7771..f367bda0ff 100644 --- a/e2e/src/api/specs/asset-upload.e2e-spec.ts +++ b/e2e/src/api/specs/asset-upload.e2e-spec.ts @@ -33,6 +33,7 @@ describe('/upload (RUFH compliance)', () => { const { status, headers } = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Upload-Complete', '?1') @@ -50,6 +51,7 @@ describe('/upload (RUFH compliance)', () => { const { status, headers } = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(partialContent).digest('base64')}:`) .set('Upload-Complete', '?0') @@ -71,18 +73,21 @@ describe('/upload (RUFH compliance)', () => { const { headers } = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Upload-Complete', '?0') .send(content); + expect(headers['location']).toBeDefined(); uploadResource = headers['location']; }); it('should retrieve upload offset with HEAD request', async () => { const { status, headers } = await request(baseUrl) .head(uploadResource) - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); expect(status).toBe(204); expect(headers['upload-offset']).toBe('512'); @@ -94,7 +99,8 @@ describe('/upload (RUFH compliance)', () => { it('should return 400 for non-UUID upload resource', async () => { const { status } = await request(app) .head('/upload/nonexistent') - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); expect(status).toBe(400); }); @@ -102,7 +108,8 @@ describe('/upload (RUFH compliance)', () => { it('should return 404 for non-existent upload resource', async () => { const { status } = await request(app) .head('/upload/4feacf6f-830f-46c8-8140-2b3da67070c0') - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); expect(status).toBe(404); }); @@ -119,6 +126,7 @@ describe('/upload (RUFH compliance)', () => { const response = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(fullContent).digest('base64')}:`) .set('Upload-Complete', '?0') @@ -128,9 +136,10 @@ describe('/upload (RUFH compliance)', () => { }); it('should append data with correct offset', async () => { - const { status, headers } = await request(baseUrl) + const { status, headers, body } = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', chunks[0].length.toString()) .set('Upload-Complete', '?0') .set('Content-Type', 'application/partial-upload') @@ -141,7 +150,8 @@ describe('/upload 
(RUFH compliance)', () => { const headResponse = await request(baseUrl) .head(uploadResource) - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); expect(headResponse.headers['upload-offset']).toBe('1250'); }); @@ -152,6 +162,7 @@ describe('/upload (RUFH compliance)', () => { const { status, headers, body } = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', wrongOffset.toString()) .set('Upload-Complete', '?0') .set('Content-Type', 'application/partial-upload') @@ -159,16 +170,20 @@ describe('/upload (RUFH compliance)', () => { expect(status).toBe(409); expect(headers['upload-offset']).toBe('1250'); - expect(body.type).toBe('https://iana.org/assignments/http-problem-types#mismatching-upload-offset'); - expect(body['expected-offset']).toBe(1250); - expect(body['provided-offset']).toBe(wrongOffset); + expect(body).toEqual({ + type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset', + title: 'offset from request does not match offset of resource', + 'expected-offset': 1250, + 'provided-offset': wrongOffset, + }); }); it('should complete upload with Upload-Complete: ?1', async () => { // Get current offset first const headResponse = await request(baseUrl) .head(uploadResource) - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); const offset = parseInt(headResponse.headers['upload-offset']); expect(offset).toBe(1250); @@ -176,6 +191,7 @@ describe('/upload (RUFH compliance)', () => { const { status, headers } = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', offset.toString()) .set('Upload-Complete', '?1') .set('Content-Type', 'application/partial-upload') @@ -186,17 +202,21 @@ describe('/upload (RUFH compliance)', () => { expect(headers['upload-offset']).toBe('2750'); }); - it('should reject append to completed upload when offset is right', async () => { + it('should reject append to completed upload', async () => { const { status, body } = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', '2750') .set('Upload-Complete', '?0') .set('Content-Type', 'application/partial-upload') .send(randomBytes(100)); expect(status).toBe(400); - expect(body.type).toBe('https://iana.org/assignments/http-problem-types#completed-upload'); + expect(body).toEqual({ + type: 'https://iana.org/assignments/http-problem-types#completed-upload', + title: 'upload is already completed', + }); }); }); @@ -209,6 +229,7 @@ describe('/upload (RUFH compliance)', () => { const response = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Upload-Complete', '?0') @@ -227,7 +248,8 @@ describe('/upload (RUFH compliance)', () => { // Verify resource is no longer accessible const headResponse = await request(baseUrl) .head(uploadResource) - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + 
.set('Upload-Draft-Interop-Version', '8'); expect(headResponse.status).toBe(404); }); @@ -243,6 +265,7 @@ describe('/upload (RUFH compliance)', () => { const initialResponse = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(totalContent).digest('base64')}:`) .set('Upload-Complete', '?0') // Indicate incomplete @@ -254,7 +277,8 @@ describe('/upload (RUFH compliance)', () => { // Check offset after interruption const offsetResponse = await request(baseUrl) .head(uploadResource) - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); expect(offsetResponse.headers['upload-offset']).toBe('2000'); @@ -263,6 +287,7 @@ describe('/upload (RUFH compliance)', () => { const resumeResponse = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', '2000') .set('Upload-Complete', '?1') .set('Content-Type', 'application/partial-upload') @@ -281,6 +306,7 @@ describe('/upload (RUFH compliance)', () => { const createResponse = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${hash.digest('base64')}:`) .set('Upload-Complete', '?0') @@ -293,6 +319,7 @@ describe('/upload (RUFH compliance)', () => { let response = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', currentOffset.toString()) .set('Upload-Complete', '?0') .set('Content-Type', 'application/partial-upload') @@ -304,7 +331,8 @@ describe('/upload (RUFH compliance)', () => { // Verify offset const offsetCheck = await request(baseUrl) .head(uploadResource) - .set('Authorization', `Bearer ${user.accessToken}`); + .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8'); expect(offsetCheck.headers['upload-offset']).toBe('5000'); @@ -312,6 +340,7 @@ describe('/upload (RUFH compliance)', () => { response = await request(baseUrl) .patch(uploadResource) .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('Upload-Offset', currentOffset.toString()) .set('Upload-Complete', '?1') .set('Content-Type', 'application/partial-upload') @@ -329,6 +358,7 @@ describe('/upload (RUFH compliance)', () => { const { status, body } = await request(app) .post('/upload') .set('Authorization', `Bearer ${user.accessToken}`) + .set('Upload-Draft-Interop-Version', '8') .set('X-Immich-Asset-Data', base64Metadata) .set('Repr-Digest', `sha=:${createHash('sha1').update(content).digest('base64')}:`) .set('Upload-Complete', '?1') @@ -337,7 +367,10 @@ describe('/upload (RUFH compliance)', () => { .send(content); expect(status).toBe(400); - expect(body.type).toBe('https://iana.org/assignments/http-problem-types#inconsistent-upload-length'); + expect(body).toEqual({ + type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length', + title: 'inconsistent length values for upload', + }); }); }); @@ -348,27 +381,8 @@ describe('/upload (RUFH compliance)', () => { .set('Authorization', `Bearer ${user.accessToken}`); expect(status).toBe(204); - 
expect(headers['upload-limit']).toBeDefined(); - - const limits = parseUploadLimit(headers['upload-limit']); - expect(limits).toHaveProperty('min-size'); + expect(headers['upload-limit']).toEqual('min-size=0'); }); }); }); -// Helper function to parse Upload-Limit header -function parseUploadLimit(headerValue: string): Record { - const limits: Record = {}; - if (!headerValue) return limits; - - // Parse structured field dictionary format - const pairs = headerValue.split(',').map((p) => p.trim()); - for (const pair of pairs) { - const [key, value] = pair.split('='); - if (key && value) { - limits[key] = parseInt(value, 10); - } - } - - return limits; -} diff --git a/mobile/openapi/lib/api/upload_api.dart b/mobile/openapi/lib/api/upload_api.dart index b7e04827f3..60b8956a4d 100644 --- a/mobile/openapi/lib/api/upload_api.dart +++ b/mobile/openapi/lib/api/upload_api.dart @@ -137,12 +137,15 @@ class UploadApi { /// /// Parameters: /// + /// * [String] draftUploadInteropVersion (required): + /// Indicates the version of the RUFH protocol supported by the client. + /// /// * [String] id (required): /// /// * [String] key: /// /// * [String] slug: - Future getUploadStatusWithHttpInfo(String id, { String? key, String? slug, }) async { + Future getUploadStatusWithHttpInfo(String draftUploadInteropVersion, String id, { String? key, String? slug, }) async { // ignore: prefer_const_declarations final apiPath = r'/upload/{id}' .replaceAll('{id}', id); @@ -161,6 +164,8 @@ class UploadApi { queryParams.addAll(_queryParams('', 'slug', slug)); } + headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion); + const contentTypes = []; @@ -179,13 +184,16 @@ class UploadApi { /// /// Parameters: /// + /// * [String] draftUploadInteropVersion (required): + /// Indicates the version of the RUFH protocol supported by the client. + /// /// * [String] id (required): /// /// * [String] key: /// /// * [String] slug: - Future getUploadStatus(String id, { String? key, String? slug, }) async { - final response = await getUploadStatusWithHttpInfo(id, key: key, slug: slug, ); + Future getUploadStatus(String draftUploadInteropVersion, String id, { String? key, String? slug, }) async { + final response = await getUploadStatusWithHttpInfo(draftUploadInteropVersion, id, key: key, slug: slug, ); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -197,12 +205,24 @@ class UploadApi { /// /// Parameters: /// + /// * [String] contentLength (required): + /// Non-negative size of the request body in bytes. + /// + /// * [String] draftUploadInteropVersion (required): + /// Indicates the version of the RUFH protocol supported by the client. + /// /// * [String] id (required): /// + /// * [String] uploadComplete (required): + /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. + /// + /// * [String] uploadOffset (required): + /// Non-negative byte offset indicating the starting position of the data in the request body within the entire file. + /// /// * [String] key: /// /// * [String] slug: - Future resumeUploadWithHttpInfo(String id, { String? key, String? slug, }) async { + Future resumeUploadWithHttpInfo(String contentLength, String draftUploadInteropVersion, String id, String uploadComplete, String uploadOffset, { String? key, String? 
slug, }) async { // ignore: prefer_const_declarations final apiPath = r'/upload/{id}' .replaceAll('{id}', id); @@ -221,6 +241,11 @@ class UploadApi { queryParams.addAll(_queryParams('', 'slug', slug)); } + headerParams[r'content-length'] = parameterToString(contentLength); + headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion); + headerParams[r'upload-complete'] = parameterToString(uploadComplete); + headerParams[r'upload-offset'] = parameterToString(uploadOffset); + const contentTypes = []; @@ -239,13 +264,25 @@ class UploadApi { /// /// Parameters: /// + /// * [String] contentLength (required): + /// Non-negative size of the request body in bytes. + /// + /// * [String] draftUploadInteropVersion (required): + /// Indicates the version of the RUFH protocol supported by the client. + /// /// * [String] id (required): /// + /// * [String] uploadComplete (required): + /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. + /// + /// * [String] uploadOffset (required): + /// Non-negative byte offset indicating the starting position of the data in the request body within the entire file. + /// /// * [String] key: /// /// * [String] slug: - Future resumeUpload(String id, { String? key, String? slug, }) async { - final response = await resumeUploadWithHttpInfo(id, key: key, slug: slug, ); + Future resumeUpload(String contentLength, String draftUploadInteropVersion, String id, String uploadComplete, String uploadOffset, { String? key, String? slug, }) async { + final response = await resumeUploadWithHttpInfo(contentLength, draftUploadInteropVersion, id, uploadComplete, uploadOffset, key: key, slug: slug, ); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -257,10 +294,25 @@ class UploadApi { /// /// Parameters: /// + /// * [String] contentLength (required): + /// Non-negative size of the request body in bytes. + /// + /// * [String] draftUploadInteropVersion (required): + /// Indicates the version of the RUFH protocol supported by the client. + /// + /// * [String] reprDigest (required): + /// Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity. + /// + /// * [String] uploadComplete (required): + /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. + /// + /// * [String] xImmichAssetData (required): + /// Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored. + /// /// * [String] key: /// /// * [String] slug: - Future startUploadWithHttpInfo({ String? key, String? slug, }) async { + Future startUploadWithHttpInfo(String contentLength, String draftUploadInteropVersion, String reprDigest, String uploadComplete, String xImmichAssetData, { String? key, String? 
slug, }) async { // ignore: prefer_const_declarations final apiPath = r'/upload'; @@ -278,6 +330,12 @@ class UploadApi { queryParams.addAll(_queryParams('', 'slug', slug)); } + headerParams[r'content-length'] = parameterToString(contentLength); + headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion); + headerParams[r'repr-digest'] = parameterToString(reprDigest); + headerParams[r'upload-complete'] = parameterToString(uploadComplete); + headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData); + const contentTypes = []; @@ -296,11 +354,26 @@ class UploadApi { /// /// Parameters: /// + /// * [String] contentLength (required): + /// Non-negative size of the request body in bytes. + /// + /// * [String] draftUploadInteropVersion (required): + /// Indicates the version of the RUFH protocol supported by the client. + /// + /// * [String] reprDigest (required): + /// Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity. + /// + /// * [String] uploadComplete (required): + /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. + /// + /// * [String] xImmichAssetData (required): + /// Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored. + /// /// * [String] key: /// /// * [String] slug: - Future startUpload({ String? key, String? slug, }) async { - final response = await startUploadWithHttpInfo( key: key, slug: slug, ); + Future startUpload(String contentLength, String draftUploadInteropVersion, String reprDigest, String uploadComplete, String xImmichAssetData, { String? key, String? slug, }) async { + final response = await startUploadWithHttpInfo(contentLength, draftUploadInteropVersion, reprDigest, uploadComplete, xImmichAssetData, key: key, slug: slug, ); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } diff --git a/mobile/set_test.dart b/mobile/set_test.dart new file mode 100644 index 0000000000..8ce4b0f7fe --- /dev/null +++ b/mobile/set_test.dart @@ -0,0 +1,207 @@ +enum BackupSelection { + // Used to sort albums based on the backupSelection + // selected -> none -> excluded + // Do not change the order of these values + selected, + none, + excluded, +} + +class LocalAlbum { + final String id; + final String name; + final DateTime updatedAt; + final bool isIosSharedAlbum; + + final int assetCount; + final BackupSelection backupSelection; + final String? linkedRemoteAlbumId; + + const LocalAlbum({ + required this.id, + required this.name, + required this.updatedAt, + this.assetCount = 0, + this.backupSelection = BackupSelection.none, + this.isIosSharedAlbum = false, + this.linkedRemoteAlbumId, + }); + + LocalAlbum copyWith({ + String? id, + String? name, + DateTime? updatedAt, + int? assetCount, + BackupSelection? backupSelection, + bool? isIosSharedAlbum, + String? linkedRemoteAlbumId, + }) { + return LocalAlbum( + id: id ?? this.id, + name: name ?? this.name, + updatedAt: updatedAt ?? this.updatedAt, + assetCount: assetCount ?? this.assetCount, + backupSelection: backupSelection ?? this.backupSelection, + isIosSharedAlbum: isIosSharedAlbum ?? this.isIosSharedAlbum, + linkedRemoteAlbumId: linkedRemoteAlbumId ?? this.linkedRemoteAlbumId, + ); + } + + @override + bool operator ==(Object other) { + if (other is! 
LocalAlbum) return false; + if (identical(this, other)) return true; + + return other.id == id && + other.name == name && + other.updatedAt == updatedAt && + other.assetCount == assetCount && + other.backupSelection == backupSelection && + other.isIosSharedAlbum == isIosSharedAlbum && + other.linkedRemoteAlbumId == linkedRemoteAlbumId; + } + + @override + int get hashCode { + return id.hashCode ^ + name.hashCode ^ + updatedAt.hashCode ^ + assetCount.hashCode ^ + backupSelection.hashCode ^ + isIosSharedAlbum.hashCode ^ + linkedRemoteAlbumId.hashCode; + } + + @override + String toString() { + return '''LocalAlbum: { +id: $id, +name: $name, +updatedAt: $updatedAt, +assetCount: $assetCount, +backupSelection: $backupSelection, +isIosSharedAlbum: $isIosSharedAlbum +linkedRemoteAlbumId: $linkedRemoteAlbumId, +}'''; + } +} + +int square(int num) { + return num * num; +} + +@pragma('vm:never-inline') +List getAlbums() { + final updatedAt = DateTime.now(); + final selection = BackupSelection.values; + return List.generate(100000, (i) { + return LocalAlbum(id: i.toString(), name: '', updatedAt: updatedAt, backupSelection: selection[i % 3]); + }); +} + +@pragma('vm:never-inline') +List setAlbum1(List albums, LocalAlbum album) { + final newAlbums = List.filled(albums.length, LocalAlbum(id: '', name: '', updatedAt: DateTime.now())); + newAlbums.setAll(0, albums); + for (int i = 0; i < newAlbums.length; i++) { + final currentAlbum = newAlbums[i]; + if (currentAlbum.id == album.id) { + newAlbums[i] = currentAlbum.copyWith(backupSelection: BackupSelection.selected); + break; + } + } + return newAlbums; +} + +@pragma('vm:never-inline') +List setAlbum2(List albums, LocalAlbum album) { + final newAlbums = List.filled(albums.length, LocalAlbum(id: '', name: '', updatedAt: DateTime.now())); + for (int i = 0; i < newAlbums.length; i++) { + final currentAlbum = newAlbums[i]; + newAlbums[i] = currentAlbum.id == album.id ? 
currentAlbum.copyWith(backupSelection: BackupSelection.selected) : currentAlbum; + } + return newAlbums; +} + +@pragma('vm:never-inline') +List setAlbum3(List albums, LocalAlbum album) { + final newAlbums = albums.toList(growable: false); + for (int i = 0; i < newAlbums.length; i++) { + final currentAlbum = newAlbums[i]; + if (currentAlbum.id == album.id) { + newAlbums[i] = currentAlbum.copyWith(backupSelection: BackupSelection.selected); + break; + } + } + return newAlbums; +} + +@pragma('vm:never-inline') +Set toSet1(List albums) { + return albums.map((album) => album.id).toSet(); +} + +@pragma('vm:never-inline') +Set toSet2(List albums) { + final ids = {}; + for (final album in albums) { + ids.add(album.id); + } + return ids; +} + +@pragma('vm:never-inline') +Set toSet3(List albums) { + return Set.unmodifiable(albums.map((album) => album.id)); +} + +@pragma('vm:never-inline') +Set toSet4(List albums) { + final ids = {}; + for (int i = 0; i < albums.length; i++) { + final id = albums[i].id; + ids.add(id); + } + return ids; +} + +@pragma('vm:never-inline') +List toFiltered1(List albums, BackupSelection selection) { + return albums.where((album) => album.backupSelection == selection).toList(growable: false); +} + +@pragma('vm:never-inline') +List toFiltered2(List albums, BackupSelection selection) { + final filtered = []; + for (final album in albums) { + if (album.backupSelection == selection) { + filtered.add(album); + } + } + return filtered; +} + +@pragma('vm:never-inline') +List toFiltered3(List albums, BackupSelection selection) { + final filtered = []; + for (int i = 0; i < albums.length; i++) { + final album = albums[i]; + if (album.backupSelection == selection) { + filtered.add(album); + } + } + return filtered; +} + +late Set ids; +late List localAlbums; +void main(List args) { + final albums = getAlbums(); + // final album = LocalAlbum(id: '50000', name: '', updatedAt: DateTime.now()); + final stopwatch = Stopwatch()..start(); + // localAlbums = setAlbum3(albums, album); + // ids = toSet1(albums); + localAlbums = toFiltered2(albums, BackupSelection.selected); + stopwatch.stop(); + print('Elapsed time: ${(stopwatch.elapsedMicroseconds / 1000).toStringAsFixed(2)}ms'); +} diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index c5f2c26665..e87418a25a 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -9271,6 +9271,24 @@ "post": { "operationId": "startUpload", "parameters": [ + { + "name": "content-length", + "in": "header", + "description": "Non-negative size of the request body in bytes.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "draft-upload-interop-version", + "in": "header", + "description": "Indicates the version of the RUFH protocol supported by the client.", + "required": true, + "schema": { + "type": "string" + } + }, { "name": "key", "required": false, @@ -9279,6 +9297,15 @@ "type": "string" } }, + { + "name": "repr-digest", + "in": "header", + "description": "Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.", + "required": true, + "schema": { + "type": "string" + } + }, { "name": "slug", "required": false, @@ -9286,6 +9313,24 @@ "schema": { "type": "string" } + }, + { + "name": "upload-complete", + "in": "header", + "description": "Structured boolean indicating whether this request completes the file. 
Use Upload-Incomplete instead for version <= 3.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "x-immich-asset-data", + "in": "header", + "description": "Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { @@ -9366,6 +9411,15 @@ "head": { "operationId": "getUploadStatus", "parameters": [ + { + "name": "draft-upload-interop-version", + "in": "header", + "description": "Indicates the version of the RUFH protocol supported by the client.", + "required": true, + "schema": { + "type": "string" + } + }, { "name": "id", "required": true, @@ -9417,6 +9471,24 @@ "patch": { "operationId": "resumeUpload", "parameters": [ + { + "name": "content-length", + "in": "header", + "description": "Non-negative size of the request body in bytes.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "draft-upload-interop-version", + "in": "header", + "description": "Indicates the version of the RUFH protocol supported by the client.", + "required": true, + "schema": { + "type": "string" + } + }, { "name": "id", "required": true, @@ -9441,6 +9513,24 @@ "schema": { "type": "string" } + }, + { + "name": "upload-complete", + "in": "header", + "description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "upload-offset", + "in": "header", + "description": "Non-negative byte offset indicating the starting position of the data in the request body within the entire file.", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index d641e0f02e..e18870be15 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -4435,16 +4435,28 @@ export function getUploadOptions({ key, slug }: { /** * This endpoint requires the `asset.upload` permission. */ -export function startUpload({ key, slug }: { +export function startUpload({ contentLength, draftUploadInteropVersion, key, reprDigest, slug, uploadComplete, xImmichAssetData }: { + contentLength: string; + draftUploadInteropVersion: string; key?: string; + reprDigest: string; slug?: string; + uploadComplete: string; + xImmichAssetData: string; }, opts?: Oazapfts.RequestOpts) { return oazapfts.ok(oazapfts.fetchText(`/upload${QS.query(QS.explode({ key, slug }))}`, { ...opts, - method: "POST" + method: "POST", + headers: oazapfts.mergeHeaders(opts?.headers, { + "content-length": contentLength, + "draft-upload-interop-version": draftUploadInteropVersion, + "repr-digest": reprDigest, + "upload-complete": uploadComplete, + "x-immich-asset-data": xImmichAssetData + }) })); } /** @@ -4466,7 +4478,8 @@ export function cancelUpload({ id, key, slug }: { /** * This endpoint requires the `asset.upload` permission. 
*/ -export function getUploadStatus({ id, key, slug }: { +export function getUploadStatus({ draftUploadInteropVersion, id, key, slug }: { + draftUploadInteropVersion: string; id: string; key?: string; slug?: string; @@ -4476,23 +4489,36 @@ export function getUploadStatus({ id, key, slug }: { slug }))}`, { ...opts, - method: "HEAD" + method: "HEAD", + headers: oazapfts.mergeHeaders(opts?.headers, { + "draft-upload-interop-version": draftUploadInteropVersion + }) })); } /** * This endpoint requires the `asset.upload` permission. */ -export function resumeUpload({ id, key, slug }: { +export function resumeUpload({ contentLength, draftUploadInteropVersion, id, key, slug, uploadComplete, uploadOffset }: { + contentLength: string; + draftUploadInteropVersion: string; id: string; key?: string; slug?: string; + uploadComplete: string; + uploadOffset: string; }, opts?: Oazapfts.RequestOpts) { return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({ key, slug }))}`, { ...opts, - method: "PATCH" + method: "PATCH", + headers: oazapfts.mergeHeaders(opts?.headers, { + "content-length": contentLength, + "draft-upload-interop-version": draftUploadInteropVersion, + "upload-complete": uploadComplete, + "upload-offset": uploadOffset + }) })); } /** diff --git a/server/src/controllers/asset-upload.controller.ts b/server/src/controllers/asset-upload.controller.ts index 24f36414e9..827c981fa1 100644 --- a/server/src/controllers/asset-upload.controller.ts +++ b/server/src/controllers/asset-upload.controller.ts @@ -1,12 +1,35 @@ -import { Controller, Delete, Head, Options, Param, Patch, Post, Req, Res } from '@nestjs/common'; -import { ApiTags } from '@nestjs/swagger'; -import { Request, Response } from 'express'; +import { BadRequestException, Controller, Delete, Head, Options, Param, Patch, Post, Req, Res } from '@nestjs/common'; +import { ApiHeader, ApiTags } from '@nestjs/swagger'; +import { plainToInstance } from 'class-transformer'; +import { validateSync } from 'class-validator'; +import { Response } from 'express'; +import { IncomingHttpHeaders } from 'node:http'; import { AuthDto } from 'src/dtos/auth.dto'; -import { Permission } from 'src/enum'; -import { Auth, Authenticated } from 'src/middleware/auth.guard'; +import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto, UploadHeader } from 'src/dtos/upload.dto'; +import { ImmichHeader, Permission } from 'src/enum'; +import { Auth, Authenticated, AuthenticatedRequest } from 'src/middleware/auth.guard'; import { AssetUploadService } from 'src/services/asset-upload.service'; import { UUIDParamDto } from 'src/validation'; +const apiInteropVersion = { + name: UploadHeader.InteropVersion, + description: `Indicates the version of the RUFH protocol supported by the client.`, + required: true, +}; + +const apiUploadComplete = { + name: UploadHeader.UploadComplete, + description: + 'Structured boolean indicating whether this request completes the file. 
Use Upload-Incomplete instead for version <= 3.', + required: true, +}; + +const apiContentLength = { + name: UploadHeader.ContentLength, + description: 'Non-negative size of the request body in bytes.', + required: true, +}; + @ApiTags('Upload') @Controller('upload') export class AssetUploadController { @@ -14,36 +37,73 @@ export class AssetUploadController { @Post() @Authenticated({ sharedLink: true, permission: Permission.AssetUpload }) - startUpload(@Auth() auth: AuthDto, @Req() request: Request, @Res() response: Response): Promise { - return this.service.startUpload(auth, request, response); + @ApiHeader({ + name: ImmichHeader.AssetData, + description: + 'Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.', + required: true, + }) + @ApiHeader({ + name: UploadHeader.ReprDigest, + description: + 'Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.', + required: true, + }) + @ApiHeader(apiInteropVersion) + @ApiHeader(apiUploadComplete) + @ApiHeader(apiContentLength) + startUpload(@Req() req: AuthenticatedRequest, @Res() res: Response): Promise { + const dto = this.getDto(StartUploadDto, req.headers); + console.log('Starting upload with dto:', JSON.stringify(dto)); + return this.service.startUpload(req, res, dto); } @Patch(':id') @Authenticated({ sharedLink: true, permission: Permission.AssetUpload }) - resumeUpload( - @Auth() auth: AuthDto, - @Param() { id }: UUIDParamDto, - @Req() request: Request, - @Res() response: Response, - ): Promise { - return this.service.resumeUpload(auth, id, request, response); + @ApiHeader({ + name: UploadHeader.UploadOffset, + description: + 'Non-negative byte offset indicating the starting position of the data in the request body within the entire file.', + required: true, + }) + @ApiHeader(apiInteropVersion) + @ApiHeader(apiUploadComplete) + @ApiHeader(apiContentLength) + resumeUpload(@Req() req: AuthenticatedRequest, @Res() res: Response, @Param() { id }: UUIDParamDto) { + const dto = this.getDto(ResumeUploadDto, req.headers); + console.log('Resuming upload with dto:', JSON.stringify(dto)); + return this.service.resumeUpload(req, res, id, dto); } @Delete(':id') @Authenticated({ sharedLink: true, permission: Permission.AssetUpload }) - cancelUpload(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Res() response: Response): Promise { - return this.service.cancelUpload(auth, id, response); + cancelUpload(@Auth() auth: AuthDto, @Res() res: Response, @Param() { id }: UUIDParamDto) { + return this.service.cancelUpload(auth, id, res); } @Head(':id') @Authenticated({ sharedLink: true, permission: Permission.AssetUpload }) - getUploadStatus(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Res() response: Response): Promise { - return this.service.getUploadStatus(auth, id, response); + @ApiHeader(apiInteropVersion) + getUploadStatus(@Req() req: AuthenticatedRequest, @Res() res: Response, @Param() { id }: UUIDParamDto) { + const dto = this.getDto(GetUploadStatusDto, req.headers); + console.log('Getting upload status with dto:', JSON.stringify(dto)); + return this.service.getUploadStatus(req.auth, res, id, dto); } @Options() @Authenticated({ sharedLink: true, permission: Permission.AssetUpload }) - getUploadOptions(@Res() response: Response): Promise { - return this.service.getUploadOptions(response); + getUploadOptions(@Res() res: Response) { + return this.service.getUploadOptions(res); 
+ } + + private getDto(cls: new () => T, headers: IncomingHttpHeaders): T { + const dto = plainToInstance(cls, headers, { excludeExtraneousValues: true }); + const errors = validateSync(dto); + if (errors.length > 0) { + const constraints = errors.map((e) => (e.constraints ? Object.values(e.constraints).join(', ') : '')).join('; '); + console.warn('Upload DTO validation failed:', JSON.stringify(errors, null, 2)); + throw new BadRequestException(constraints); + } + return dto; } } diff --git a/server/src/dtos/upload.dto.ts b/server/src/dtos/upload.dto.ts index d90ceeeb18..936e2e6d28 100644 --- a/server/src/dtos/upload.dto.ts +++ b/server/src/dtos/upload.dto.ts @@ -1,133 +1,159 @@ -import { Type } from 'class-transformer'; -import { IsEnum, IsInt, IsNotEmpty, IsObject, IsString, IsUUID, ValidateNested } from 'class-validator'; -import { AssetMediaCreateDto } from 'src/dtos/asset-media.dto'; +import { BadRequestException } from '@nestjs/common'; +import { Expose, plainToInstance, Transform, Type } from 'class-transformer'; +import { Equals, IsArray, IsEnum, IsInt, IsNotEmpty, IsString, Min, ValidateIf, ValidateNested } from 'class-validator'; +import { AssetMetadataUpsertItemDto } from 'src/dtos/asset.dto'; +import { AssetVisibility, ImmichHeader } from 'src/enum'; +import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateUUID } from 'src/validation'; +import { parseDictionary } from 'structured-headers'; -export enum TusdHookRequestType { - PreCreate = 'pre-create', - PreFinish = 'pre-finish', -} +export class UploadAssetDataDto { + @IsNotEmpty() + @IsString() + deviceAssetId!: string; -export enum TusdHookStorageType { - FileStore = 'filestore', -} + @IsNotEmpty() + @IsString() + deviceId!: string; -export class TusdStorageDto { - @IsEnum(TusdHookStorageType) - Type!: string; + @ValidateDate() + fileCreatedAt!: Date; + + @ValidateDate() + fileModifiedAt!: Date; + + @Optional() + @IsString() + duration?: string; @IsString() @IsNotEmpty() - Path!: string; + filename!: string; - @IsString() - @IsNotEmpty() - InfoPath!: string; + @ValidateBoolean({ optional: true }) + isFavorite?: boolean; + + @ValidateEnum({ enum: AssetVisibility, name: 'AssetVisibility', optional: true }) + visibility?: AssetVisibility; + + @ValidateUUID({ optional: true }) + livePhotoVideoId?: string; + + @Transform(({ value }) => { + try { + const json = JSON.parse(value); + const items = Array.isArray(json) ? 
json : [json]; + return items.map((item) => plainToInstance(AssetMetadataUpsertItemDto, item)); + } catch { + throw new BadRequestException(['metadata must be valid JSON']); + } + }) + @Optional() + @ValidateNested({ each: true }) + @IsArray() + metadata!: AssetMetadataUpsertItemDto[]; } -export class UploadAssetDataDto extends AssetMediaCreateDto { - @IsString() - @IsNotEmpty() - declare filename: string; +export enum StructuredBoolean { + False = '?0', + True = '?1', } -export class TusdMetaDataDto { - @IsString() - @IsNotEmpty() - declare AssetData: string; // base64-encoded JSON string of UploadAssetDataDto +export enum UploadHeader { + UploadOffset = 'upload-offset', + ContentLength = 'content-length', + UploadLength = 'upload-length', + UploadComplete = 'upload-complete', + UploadIncomplete = 'upload-incomplete', + InteropVersion = 'upload-draft-interop-version', + ReprDigest = 'repr-digest', } -export class TusdPreCreateUploadDto { +class BaseRufhHeadersDto { + @Expose({ name: UploadHeader.InteropVersion }) + @Min(3) @IsInt() - Size!: number; + @Type(() => Number) + version!: number; } -export class TusdPreFinishUploadDto { - @IsUUID() - ID!: string; - +export class BaseUploadHeadersDto extends BaseRufhHeadersDto { + @Expose({ name: UploadHeader.ContentLength }) + @Min(0) @IsInt() - Size!: number; + @Type(() => Number) + contentLength!: number; - @Type(() => TusdMetaDataDto) - @ValidateNested() - @IsObject() - MetaData!: TusdMetaDataDto; + @Expose({ name: UploadHeader.UploadComplete }) + @ValidateIf((o) => o.requestInterop !== null && o.requestInterop! <= 3) + @IsEnum(StructuredBoolean) + uploadComplete!: StructuredBoolean; - @Type(() => TusdStorageDto) - @ValidateNested() - @IsObject() - Storage!: TusdStorageDto; + @Expose({ name: UploadHeader.UploadIncomplete }) + @ValidateIf((o) => o.requestInterop === null || o.requestInterop! 
> 3) + @IsEnum(StructuredBoolean) + uploadIncomplete!: StructuredBoolean; + + @Expose({ name: UploadHeader.UploadLength }) + @Min(0) + @IsInt() + @Type(() => Number) + @Optional() + uploadLength?: number; + + get isComplete(): boolean { + if (this.version <= 3) { + return this.uploadIncomplete === StructuredBoolean.False; + } + return this.uploadComplete === StructuredBoolean.True; + } } -export class TusdHttpRequestDto { - @IsString() - @IsNotEmpty() - Method!: string; +export class StartUploadDto extends BaseUploadHeadersDto { + @Expose({ name: ImmichHeader.AssetData }) + // @ValidateNested() + // @IsObject() + @Type(() => UploadAssetDataDto) + @Transform(({ value }) => { + if (!value) { + return null; + } - @IsString() - @IsNotEmpty() - URI!: string; + const json = Buffer.from(value, 'base64').toString('utf-8'); + try { + return JSON.parse(json); + } catch { + throw new BadRequestException(`${ImmichHeader.AssetData} must be valid base64-encoded JSON`); + } + }) + assetData!: UploadAssetDataDto; - @IsObject() - Header!: Record; + @Expose({ name: UploadHeader.ReprDigest }) + @Transform(({ value }) => { + if (!value) { + return null; + } + + const checksum = parseDictionary(value).get('sha')?.[0]; + if (checksum instanceof ArrayBuffer) { + return Buffer.from(checksum); + } + throw new BadRequestException(`Invalid ${UploadHeader.ReprDigest} header`); + }) + checksum!: Buffer; } -export class TusdPreCreateEventDto { - @Type(() => TusdPreCreateUploadDto) - @ValidateNested() - @IsObject() - Upload!: TusdPreCreateUploadDto; +export class ResumeUploadDto extends BaseUploadHeadersDto { + @Expose({ name: 'content-type' }) + @ValidateIf((o) => o.requestInterop !== null && o.requestInterop >= 6) + @Equals('application/partial-upload') + contentType!: number | null; - @Type(() => TusdHttpRequestDto) - @ValidateNested() - @IsObject() - HTTPRequest!: TusdHttpRequestDto; + @Expose({ name: UploadHeader.UploadOffset }) + @Min(0) + @IsInt() + @Type(() => Number) + @Optional() + uploadOffset!: number | null; } -export class TusdPreFinishEventDto { - @Type(() => TusdPreFinishUploadDto) - @ValidateNested() - @IsObject() - Upload!: TusdPreFinishUploadDto; - - @Type(() => TusdHttpRequestDto) - @ValidateNested() - @IsObject() - HTTPRequest!: TusdHttpRequestDto; -} - -export class TusdHookRequestDto { - @IsEnum(TusdHookRequestType) - Type!: TusdHookRequestType; - - @IsObject() - Event!: TusdPreCreateEventDto | TusdPreFinishEventDto; -} - -export class TusdHttpResponseDto { - StatusCode!: number; - - Body?: string; - - Header?: Record; -} - -export class TusdChangeFileInfoStorageDto { - Path?: string; -} - -export class TusdChangeFileInfoDto { - ID?: string; - - MetaData?: TusdMetaDataDto; - - Storage?: TusdChangeFileInfoStorageDto; -} - -export class TusdHookResponseDto { - HTTPResponse?: TusdHttpResponseDto; - - RejectUpload?: boolean; - - ChangeFileInfo?: TusdChangeFileInfoDto; -} +export class GetUploadStatusDto extends BaseRufhHeadersDto {} diff --git a/server/src/middleware/asset-upload.interceptor.ts b/server/src/middleware/asset-upload.interceptor.ts index 0f1eaa4ce5..7763dbaac7 100644 --- a/server/src/middleware/asset-upload.interceptor.ts +++ b/server/src/middleware/asset-upload.interceptor.ts @@ -16,7 +16,7 @@ export class AssetUploadInterceptor implements NestInterceptor { const res = context.switchToHttp().getResponse>(); const checksum = fromMaybeArray(req.headers[ImmichHeader.Checksum]); - const response = await this.service.getUploadAssetIdByChecksum(req.user, checksum); + const response = await 
this.service.getUploadAssetIdByChecksum(req.auth, checksum); if (response) { res.status(200); return of({ status: AssetMediaStatus.DUPLICATE, id: response.id }); diff --git a/server/src/middleware/auth.guard.ts b/server/src/middleware/auth.guard.ts index 8af7bf7fb3..09fd4d5e02 100644 --- a/server/src/middleware/auth.guard.ts +++ b/server/src/middleware/auth.guard.ts @@ -46,7 +46,7 @@ export const Authenticated = (options: AuthenticatedOptions = {}): MethodDecorat }; export const Auth = createParamDecorator((data, context: ExecutionContext): AuthDto => { - return context.switchToHttp().getRequest().user; + return context.switchToHttp().getRequest().auth; }); export const FileResponse = () => @@ -67,11 +67,11 @@ export const GetLoginDetails = createParamDecorator((data, context: ExecutionCon }); export interface AuthRequest extends Request { - user?: AuthDto; + auth?: AuthDto; } export interface AuthenticatedRequest extends Request { - user: AuthDto; + auth: AuthDto; } @Injectable() @@ -99,7 +99,7 @@ export class AuthGuard implements CanActivate { } = { sharedLink: false, admin: false, ...options }; const request = context.switchToHttp().getRequest(); - request.user = await this.authService.authenticate({ + request.auth = await this.authService.authenticate({ headers: request.headers, queryParams: request.query as Record, metadata: { adminRoute, sharedLinkRoute, permission, uri: request.path }, diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index e311f92bbe..3b4aea42c5 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -253,6 +253,18 @@ export class AssetRepository { return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow(); } + createWithMetadata(asset: Insertable & { id: string }, metadata?: AssetMetadataItem[]) { + if (!metadata || metadata.length === 0) { + return this.db.insertInto('asset').values(asset).execute(); + } + + return this.db + .with('asset', (qb) => qb.insertInto('asset').values(asset).returning('id')) + .insertInto('asset_metadata') + .values(metadata.map(({ key, value }) => ({ assetId: asset.id, key, value }))) + .execute(); + } + getCompletionMetadata(assetId: string, ownerId: string) { return this.db .selectFrom('asset') diff --git a/server/src/repositories/storage.repository.ts b/server/src/repositories/storage.repository.ts index 9ece4fc722..0425d3a9d4 100644 --- a/server/src/repositories/storage.repository.ts +++ b/server/src/repositories/storage.repository.ts @@ -62,15 +62,11 @@ export class StorageRepository { } createWriteStream(filepath: string): Writable { - return createWriteStream(filepath, { flags: 'w' }); - } - - overwriteWriteStream(filepath: string, offset = 0): Writable { - return createWriteStream(filepath, { flags: 'r+', start: offset }); + return createWriteStream(filepath, { flags: 'w', highWaterMark: 1024 * 1024 }); } createOrAppendWriteStream(filepath: string): Writable { - return createWriteStream(filepath, { flags: 'a' }); + return createWriteStream(filepath, { flags: 'a', highWaterMark: 1024 * 1024 }); } createOrOverwriteFile(filepath: string, buffer: Buffer) { diff --git a/server/src/services/asset-upload.service.ts b/server/src/services/asset-upload.service.ts index bc61590368..9a552cd7c8 100644 --- a/server/src/services/asset-upload.service.ts +++ b/server/src/services/asset-upload.service.ts @@ -1,70 +1,65 @@ import { BadRequestException, Injectable, InternalServerErrorException } from 
'@nestjs/common'; -import { plainToInstance } from 'class-transformer'; -import { validateSync } from 'class-validator'; -import { Request, Response } from 'express'; +import { Response } from 'express'; import { createHash } from 'node:crypto'; import { extname, join } from 'node:path'; import { setTimeout } from 'node:timers/promises'; import { StorageCore } from 'src/cores/storage.core'; import { AuthDto } from 'src/dtos/auth.dto'; -import { UploadAssetDataDto } from 'src/dtos/upload.dto'; -import { AssetStatus, AssetType, AssetVisibility, ImmichHeader, JobName, StorageFolder } from 'src/enum'; +import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/upload.dto'; +import { AssetStatus, AssetType, AssetVisibility, JobName, StorageFolder } from 'src/enum'; +import { AuthenticatedRequest } from 'src/middleware/auth.guard'; import { BaseService } from 'src/services/base.service'; import { isAssetChecksumConstraint } from 'src/utils/database'; import { mimeTypes } from 'src/utils/mime-types'; -import { parseDictionary } from 'structured-headers'; -const MAX_INTEROP_VERSION = 8; +export const MAX_RUFH_INTEROP_VERSION = 8; @Injectable() export class AssetUploadService extends BaseService { - async startUpload(auth: AuthDto, request: Request, response: Response): Promise { - const headers = request.headers; - const requestInterop = this.getNumberHeader(headers, 'upload-draft-interop-version'); - const contentLength = this.requireContentLength(headers); - const isComplete = this.requireUploadComplete(headers, requestInterop); - const metadata = this.requireAssetData(headers); - const checksumHeader = this.requireChecksum(headers); - const uploadLength = this.getNumberHeader(headers, 'upload-length'); - - if (isComplete && uploadLength !== null && uploadLength !== contentLength) { + async startUpload(req: AuthenticatedRequest, response: Response, dto: StartUploadDto): Promise { + this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`); + const { isComplete, assetData, uploadLength, contentLength, version } = dto; + if (isComplete && uploadLength !== undefined && uploadLength !== contentLength) { return this.sendInconsistentLengthProblem(response); } const assetId = this.cryptoRepository.randomUUID(); - const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId); - const extension = extname(metadata.filename); + const folder = StorageCore.getNestedFolder(StorageFolder.Upload, req.auth.user.id, assetId); + const extension = extname(assetData.filename); const path = join(folder, `${assetId}${extension}`); const type = mimeTypes.assetType(path); if (type === AssetType.Other) { - throw new BadRequestException(`${metadata.filename} is an unsupported file type`); + throw new BadRequestException(`${assetData.filename} is an unsupported file type`); } - this.validateQuota(auth, uploadLength ?? contentLength); + this.validateQuota(req.auth, uploadLength ?? 
contentLength); try { - await this.assetRepository.create({ - id: assetId, - ownerId: auth.user.id, - libraryId: null, - checksum: checksumHeader, - originalPath: path, - deviceAssetId: metadata.deviceAssetId, - deviceId: metadata.deviceId, - fileCreatedAt: metadata.fileCreatedAt, - fileModifiedAt: metadata.fileModifiedAt, - localDateTime: metadata.fileCreatedAt, - type: mimeTypes.assetType(path), - isFavorite: metadata.isFavorite, - duration: metadata.duration || null, - visibility: metadata.visibility || AssetVisibility.Timeline, - originalFileName: metadata.filename, - status: AssetStatus.Partial, - }); + await this.assetRepository.createWithMetadata( + { + id: assetId, + ownerId: req.auth.user.id, + libraryId: null, + checksum: dto.checksum, + originalPath: path, + deviceAssetId: assetData.deviceAssetId, + deviceId: assetData.deviceId, + fileCreatedAt: assetData.fileCreatedAt, + fileModifiedAt: assetData.fileModifiedAt, + localDateTime: assetData.fileCreatedAt, + type: mimeTypes.assetType(path), + isFavorite: assetData.isFavorite, + duration: assetData.duration || null, + visibility: assetData.visibility || AssetVisibility.Timeline, + originalFileName: assetData.filename, + status: AssetStatus.Partial, + }, + assetData.metadata, + ); } catch (error: any) { if (isAssetChecksumConstraint(error)) { - const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksumHeader); + const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(req.auth.user.id, dto.checksum); if (!duplicate) { throw new InternalServerErrorException('Error locating duplicate for checksum constraint'); } @@ -82,8 +77,8 @@ export class AssetUploadService extends BaseService { } const location = `/api/upload/${assetId}`; - if (requestInterop !== null && requestInterop >= 3 && requestInterop <= MAX_INTEROP_VERSION) { - this.sendInterimResponse(response, location, requestInterop); + if (version <= MAX_RUFH_INTEROP_VERSION) { + this.sendInterimResponse(response, location, version); } await this.storageRepository.mkdir(folder); @@ -92,7 +87,7 @@ export class AssetUploadService extends BaseService { if (isComplete) { const hash = createHash('sha1'); - request.on('data', (chunk: Buffer) => hash.update(chunk)); + req.on('data', (chunk: Buffer) => hash.update(chunk)); writeStream.on('finish', () => (checksumBuffer = hash.digest())); } @@ -103,22 +98,24 @@ export class AssetUploadService extends BaseService { } }); - writeStream.on('finish', () => { + writeStream.on('finish', async () => { if (!isComplete) { return response.status(201).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send(); } this.logger.log(`Finished upload to ${path}`); - if (checksumHeader.compare(checksumBuffer!) !== 0) { + if (dto.checksum.compare(checksumBuffer!) 
!== 0) {
          return this.sendChecksumMismatchResponse(response, assetId, path);
        }
-        this.setCompleteHeader(response, requestInterop, true);
-        response.status(200).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
-
-        return this.onComplete({ assetId, path, size: contentLength, fileModifiedAt: metadata.fileModifiedAt });
+        try {
+          await this.onComplete({ assetId, path, size: contentLength, fileModifiedAt: assetData.fileModifiedAt });
+        } finally {
+          this.setCompleteHeader(response, dto.version, true);
+          response.status(200).setHeader('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
+        }
     });

-    request.on('error', (error) => {
+    req.on('error', (error) => {
       this.logger.error(`Failed to read request body: ${error.message}`);
       writeStream.end();
       if (!response.headersSent) {
@@ -127,45 +124,44 @@ export class AssetUploadService extends BaseService {
     });

     let receivedLength = 0;
-    request.on('data', (chunk: Buffer) => {
+    req.on('data', (chunk: Buffer) => {
       if (receivedLength + chunk.length > contentLength) {
         writeStream.destroy();
-        request.destroy();
+        req.destroy();
         response.status(400).send('Received more data than specified in content-length');
-        return this.removeAsset(assetId, path);
+        return this.onCancel(assetId, path);
       }
       receivedLength += chunk.length;
       if (!writeStream.write(chunk)) {
-        request.pause();
-        writeStream.once('drain', () => request.resume());
+        req.pause();
+        writeStream.once('drain', () => req.resume());
       }
     });

-    request.on('end', () => {
+    req.on('end', () => {
       if (receivedLength === contentLength) {
         return writeStream.end();
       }
       this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${assetId}`);
       writeStream.destroy();
-      this.removeAsset(assetId, path);
+      this.onCancel(assetId, path);
     });
   }

-  async resumeUpload(auth: AuthDto, assetId: string, request: Request, response: Response): Promise<void> {
-    const headers = request.headers;
-    const requestInterop = this.getNumberHeader(headers, 'upload-draft-interop-version');
-    const isComplete = this.requireUploadComplete(headers, requestInterop);
-    const contentLength = this.requireContentLength(headers);
-    const providedOffset = this.getNumberHeader(headers, 'upload-offset');
-    const uploadLength = this.getNumberHeader(headers, 'upload-length');
+  resumeUpload(req: AuthenticatedRequest, response: Response, id: string, dto: ResumeUploadDto): Promise<void> {
+    this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
+    const { isComplete, uploadLength, uploadOffset, contentLength, version } = dto;
+    if (isComplete && uploadLength !== undefined && uploadLength !== contentLength) {
+      this.sendInconsistentLengthProblem(response);
+      return Promise.resolve();
+    }

-    const contentType = headers['content-type'];
-    if (requestInterop && requestInterop >= 6 && contentType !== 'application/partial-upload') {
+    if (version && version >= 6 && req.headers['content-type'] !== 'application/partial-upload') {
       throw new BadRequestException('Content-Type must be application/partial-upload for PATCH requests');
     }

-    await this.databaseRepository.withUuidLock(assetId, async () => {
-      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
+    return this.databaseRepository.withUuidLock(id, async () => {
+      const asset = await this.assetRepository.getCompletionMetadata(id, req.auth.user.id);
       if (!asset) {
         response.status(404).send('Asset not found');
         return;
@@ -174,30 +170,30 @@ export class AssetUploadService extends BaseService {
       if (asset.status !== AssetStatus.Partial) {
         return this.sendAlreadyCompletedProblem(response);
       }
-      if (providedOffset === null) {
+      if (uploadOffset === null) {
         throw new BadRequestException('Missing Upload-Offset header');
       }

       const { path } = asset;
       const expectedOffset = await this.getCurrentOffset(path);
-      if (expectedOffset !== providedOffset) {
-        this.setCompleteHeader(response, requestInterop, false);
-        return this.sendOffsetMismatchProblem(response, expectedOffset, providedOffset);
+      if (expectedOffset !== uploadOffset) {
+        this.setCompleteHeader(response, version, false);
+        return this.sendOffsetMismatchProblem(response, expectedOffset, uploadOffset);
       }

-      const newLength = providedOffset + contentLength;
+      const newLength = uploadOffset + contentLength;
       // If upload length is provided, validate we're not exceeding it
-      if (uploadLength !== null && newLength > uploadLength) {
+      if (uploadLength !== undefined && newLength > uploadLength) {
         response.status(400).send('Upload would exceed declared length');
         return;
       }

-      this.validateQuota(auth, newLength);
+      this.validateQuota(req.auth, newLength);

       // Empty PATCH without Upload-Complete
       if (contentLength === 0 && !isComplete) {
-        this.setCompleteHeader(response, requestInterop, false);
+        this.setCompleteHeader(response, version, false);
         response.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
         return;
       }

@@ -215,45 +211,47 @@ export class AssetUploadService extends BaseService {

       writeStream.on('finish', async () => {
         const currentOffset = await this.getCurrentOffset(path);
         if (!isComplete) {
-          this.setCompleteHeader(response, requestInterop, false);
+          this.setCompleteHeader(response, version, false);
           return response.status(204).setHeader('Upload-Offset', currentOffset.toString()).send();
         }

         this.logger.log(`Finished upload to ${path}`);
         const checksum = await this.cryptoRepository.hashFile(path);
         if (asset.checksum.compare(checksum) !== 0) {
-          return this.sendChecksumMismatchResponse(response, assetId, path);
+          return this.sendChecksumMismatchResponse(response, id, path);
         }
-        this.setCompleteHeader(response, requestInterop, true);
-        response.status(200).setHeader('Upload-Offset', currentOffset.toString()).send();
-
-        await this.onComplete({ assetId, path, size: currentOffset, fileModifiedAt: asset.fileModifiedAt });
+        try {
+          await this.onComplete({ assetId: id, path, size: currentOffset, fileModifiedAt: asset.fileModifiedAt });
+        } finally {
+          this.setCompleteHeader(response, version, true);
+          response.status(200).setHeader('Upload-Offset', currentOffset.toString()).send();
+        }
       });

-      request.on('data', (chunk: Buffer) => {
+      req.on('data', (chunk: Buffer) => {
         if (receivedLength + chunk.length > contentLength) {
           this.logger.error(`Received more data than specified in content-length for upload to ${path}`);
           writeStream.destroy();
-          request.destroy();
+          req.destroy();
           response.status(400).send('Received more data than specified in content-length');
-          return this.removeAsset(assetId, path);
+          return this.onCancel(id, path);
         }
         receivedLength += chunk.length;
         if (!writeStream.write(chunk)) {
-          request.pause();
-          writeStream.once('drain', () => request.resume());
+          req.pause();
+          writeStream.once('drain', () => req.resume());
         }
       });

-      request.on('end', () => {
+      req.on('end', () => {
         if (receivedLength === contentLength) {
           return writeStream.end();
         }
-        this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${assetId}`);
+        this.logger.error(`Received ${receivedLength} bytes when expecting ${contentLength} for ${id}`);
         writeStream.destroy();
-        return this.removeAsset(assetId, path);
+        return this.onCancel(id, path);
       });
     });
   }

@@ -267,13 +265,13 @@ export class AssetUploadService extends BaseService {
     if (asset.status !== AssetStatus.Partial) {
       return this.sendAlreadyCompletedProblem(response);
     }
-    await this.removeAsset(assetId, asset.path);
+    await this.onCancel(assetId, asset.path);
     response.status(204).send();
   }

-  async getUploadStatus(auth: AuthDto, assetId: string, response: Response) {
-    return this.databaseRepository.withUuidLock(assetId, async () => {
-      const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
+  async getUploadStatus(auth: AuthDto, response: Response, id: string, { version }: GetUploadStatusDto) {
+    return this.databaseRepository.withUuidLock(id, async () => {
+      const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
       if (!asset) {
         response.status(404).send('Asset not found');
         return;
@@ -282,8 +280,7 @@ export class AssetUploadService extends BaseService {

       const offset = await this.getCurrentOffset(asset.path);
       const isComplete = asset.status !== AssetStatus.Partial;
-      const requestInterop = this.getNumberHeader(response.req.headers, 'upload-draft-interop-version');
-      this.setCompleteHeader(response, requestInterop, isComplete);
+      this.setCompleteHeader(response, version, isComplete);
       response
         .status(204)
         .setHeader('Upload-Offset', offset.toString())
@@ -299,13 +296,19 @@ export class AssetUploadService extends BaseService {
   private async onComplete(data: { assetId: string; path: string; size: number; fileModifiedAt: Date }): Promise<void> {
     const { assetId, path, size, fileModifiedAt } = data;
+    this.logger.debug('Completing upload for asset', assetId);
     const jobData = { name: JobName.AssetExtractMetadata, data: { id: assetId, source: 'upload' } } as const;
     await this.withRetry(() => this.assetRepository.setCompleteWithSize(assetId, size));
+    try {
+      await this.withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
+    } catch (error: any) {
+      this.logger.error(`Failed to update times for ${path}: ${error.message}`);
+    }
     await this.withRetry(() => this.jobRepository.queue(jobData));
-    await this.withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
   }

-  private async removeAsset(assetId: string, path: string): Promise<void> {
+  private async onCancel(assetId: string, path: string): Promise<void> {
+    this.logger.debug('Cancelling upload for asset', assetId);
     await this.withRetry(() => this.storageRepository.unlink(path));
     await this.withRetry(() => this.assetRepository.remove({ id: assetId }));
   }

@@ -324,14 +327,14 @@ export class AssetUploadService extends BaseService {
   private sendInconsistentLengthProblem(response: Response): void {
     response.status(400).contentType('application/problem+json').send({
-      type: `https://iana.org/assignments/http-problem-types#inconsistent-upload-length`,
+      type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
       title: 'inconsistent length values for upload',
     });
   }

   private sendAlreadyCompletedProblem(response: Response): void {
     response.status(400).contentType('application/problem+json').send({
-      type: `https://iana.org/assignments/http-problem-types#completed-upload`,
+      type: 'https://iana.org/assignments/http-problem-types#completed-upload',
       title: 'upload is already completed',
     });
   }

@@ -348,47 +351,7 @@ export class AssetUploadService extends BaseService {
   private sendChecksumMismatchResponse(response: Response, assetId: string, path: string): Promise<void> {
     this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
     response.status(460).send('Checksum mismatch');
-    return this.removeAsset(assetId, path);
-  }
-
-  private requireUploadComplete(headers: Request['headers'], interopVersion: number | null): boolean {
-    if (interopVersion !== null && interopVersion <= 3) {
-      const value = headers['upload-incomplete'] as string | undefined;
-      if (value === undefined) {
-        throw new BadRequestException('Missing Upload-Incomplete header');
-      }
-      return value === '?0';
-    }
-
-    const value = headers['upload-complete'] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException('Missing Upload-Complete header');
-    }
-    return value === '?1';
-  }
-
-  private getNumberHeader(headers: Request['headers'], name: string): number | null {
-    const value = headers[name] as string | undefined;
-    if (value === undefined) {
-      return null;
-    }
-    const number = parseInt(value, 10);
-    if (!isFinite(number) || number < 0) {
-      throw new BadRequestException(`Invalid ${name} header`);
-    }
-    return number;
-  }
-
-  private requireContentLength(headers: Request['headers']): number {
-    const value = headers['content-length'] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException('Missing Content-Length header');
-    }
-    const length = parseInt(value, 10);
-    if (!isFinite(length) || length < 0) {
-      throw new BadRequestException('Invalid Content-Length header');
-    }
-    return length;
+    return this.onCancel(assetId, path);
   }

   private async withRetry<T>(operation: () => Promise<T>, retries: number = 2, delay: number = 100): Promise<T> {
@@ -428,48 +391,6 @@ export class AssetUploadService extends BaseService {
     }
   }

-  private requireChecksum(headers: Request['headers']): Buffer {
-    const value = headers['repr-digest'] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException(`Missing 'repr-digest' header`);
-    }
-
-    const sha1Item = parseDictionary(value).get('sha');
-    if (!sha1Item) {
-      throw new BadRequestException(`Missing 'sha' in 'repr-digest' header`);
-    }
-
-    const checksum = sha1Item[0];
-    if (!(checksum instanceof ArrayBuffer)) {
-      throw new BadRequestException(`Invalid 'sha' in 'repr-digest' header`);
-    }
-
-    return Buffer.from(checksum);
-  }
-
-  private requireAssetData(headers: Request['headers']): UploadAssetDataDto {
-    const value = headers[ImmichHeader.AssetData] as string | undefined;
-    if (value === undefined) {
-      throw new BadRequestException(`Missing ${ImmichHeader.AssetData} header`);
-    }
-
-    let assetData: any;
-    try {
-      assetData = JSON.parse(Buffer.from(value, 'base64').toString('utf8'));
-    } catch {
-      throw new BadRequestException(`${ImmichHeader.AssetData} header is not valid base64-encoded JSON`);
-    }
-
-    const dto = plainToInstance(UploadAssetDataDto, assetData);
-    const errors = validateSync(dto, { whitelist: true });
-    if (errors.length > 0) {
-      const formatted = errors.map((e) => (e.constraints ? Object.values(e.constraints).join(', ') : ''));
-      throw new BadRequestException(`Invalid ${ImmichHeader.AssetData} header: ${formatted.join('; ')}`);
-    }
-
-    return dto;
-  }
-
   private setCompleteHeader(response: Response, interopVersion: number | null, isComplete: boolean): void {
     if (!interopVersion) {
       return;
diff --git a/server/src/utils/asset.util.ts b/server/src/utils/asset.util.ts
index 629b3bf819..aab3c86bdb 100644
--- a/server/src/utils/asset.util.ts
+++ b/server/src/utils/asset.util.ts
@@ -192,7 +192,7 @@ export function mapToUploadFile(file: ImmichFile): UploadFile {

 export const asUploadRequest = (request: AuthRequest, file: Express.Multer.File): UploadRequest => {
   return {
-    auth: request.user || null,
+    auth: request.auth || null,
     body: request.body,
     fieldName: file.fieldname as UploadFieldName,
     file: mapToUploadFile(file as ImmichFile),