This commit is contained in:
mertalev 2025-10-06 23:32:20 -04:00
parent de117ebe7a
commit e3e8da168f
No known key found for this signature in database
GPG key ID: DF6ABC77AAD98C95
15 changed files with 317 additions and 468 deletions

View file

@ -171,7 +171,7 @@ describe('/upload', () => {
.send(partialContent); .send(partialContent);
expect(status).toBe(201); expect(status).toBe(201);
expect(headers['location']).toMatch(/^\/api\/upload\/[a-zA-Z0-9\-]+$/); expect(headers['location']).toMatch(/^\/api\/upload\/[a-zA-Z0-9-]+$/);
expect(headers['upload-complete']).toBe('?0'); expect(headers['upload-complete']).toBe('?0');
}); });
@ -190,7 +190,7 @@ describe('/upload', () => {
.send(partialContent); .send(partialContent);
expect(status).toBe(201); expect(status).toBe(201);
expect(headers['location']).toMatch(/^\/api\/upload\/[a-zA-Z0-9\-]+$/); expect(headers['location']).toMatch(/^\/api\/upload\/[a-zA-Z0-9-]+$/);
expect(headers['upload-incomplete']).toBe('?1'); expect(headers['upload-incomplete']).toBe('?1');
}); });
@ -247,7 +247,7 @@ describe('/upload', () => {
expect(firstRequest.status).toBe(201); expect(firstRequest.status).toBe(201);
expect(firstRequest.headers['upload-complete']).toBe('?0'); expect(firstRequest.headers['upload-complete']).toBe('?0');
expect(firstRequest.headers['location']).toMatch(/^\/api\/upload\/[a-zA-Z0-9\-]+$/); expect(firstRequest.headers['location']).toMatch(/^\/api\/upload\/[a-zA-Z0-9-]+$/);
const secondRequest = await request(app) const secondRequest = await request(app)
.post('/upload') .post('/upload')
@ -330,9 +330,9 @@ describe('/upload', () => {
const assetData = makeAssetData({ filename: '8bit-sRGB.jxl' }); const assetData = makeAssetData({ filename: '8bit-sRGB.jxl' });
const fullContent = await readFile(join(testAssetDir, 'formats/jxl/8bit-sRGB.jxl')); const fullContent = await readFile(join(testAssetDir, 'formats/jxl/8bit-sRGB.jxl'));
chunks = [ chunks = [
fullContent.subarray(0, 10000), fullContent.subarray(0, 10_000),
fullContent.subarray(10000, 100000), fullContent.subarray(10_000, 100_000),
fullContent.subarray(100000, fullContent.length), fullContent.subarray(100_000, fullContent.length),
]; ];
checksum = createHash('sha1').update(fullContent).digest('base64'); checksum = createHash('sha1').update(fullContent).digest('base64');
const response = await request(app) const response = await request(app)
@ -431,8 +431,8 @@ describe('/upload', () => {
expect(body).toEqual({ expect(body).toEqual({
type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset', type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
title: 'offset from request does not match offset of resource', title: 'offset from request does not match offset of resource',
'expected-offset': 100000, 'expected-offset': 100_000,
'provided-offset': 10000, 'provided-offset': 10_000,
}); });
}); });
@ -442,8 +442,8 @@ describe('/upload', () => {
.set('Authorization', `Bearer ${admin.accessToken}`) .set('Authorization', `Bearer ${admin.accessToken}`)
.set('Upload-Draft-Interop-Version', '8'); .set('Upload-Draft-Interop-Version', '8');
const offset = parseInt(headResponse.headers['upload-offset']); const offset = Number.parseInt(headResponse.headers['upload-offset']);
expect(offset).toBe(100000); expect(offset).toBe(100_000);
const { status, headers, body } = await request(baseUrl) const { status, headers, body } = await request(baseUrl)
.patch(uploadResource) .patch(uploadResource)
@ -534,7 +534,9 @@ describe('/upload', () => {
it('should handle multiple interruptions and resumptions', async () => { it('should handle multiple interruptions and resumptions', async () => {
const chunks = [randomBytes(2000), randomBytes(3000), randomBytes(5000)]; const chunks = [randomBytes(2000), randomBytes(3000), randomBytes(5000)];
const hash = createHash('sha1'); const hash = createHash('sha1');
chunks.forEach((chunk) => hash.update(chunk)); for (const chunk of chunks) {
hash.update(chunk);
}
const createResponse = await request(app) const createResponse = await request(app)
.post('/upload') .post('/upload')

View file

@ -76,16 +76,8 @@ class UploadApi {
} }
} }
/// This endpoint requires the `asset.upload` permission. /// Performs an HTTP 'OPTIONS /upload' operation and returns the [Response].
/// Future<Response> getUploadOptionsWithHttpInfo() async {
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> getUploadOptionsWithHttpInfo({ String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload'; final apiPath = r'/upload';
@ -96,13 +88,6 @@ class UploadApi {
final headerParams = <String, String>{}; final headerParams = <String, String>{};
final formParams = <String, String>{}; final formParams = <String, String>{};
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
if (slug != null) {
queryParams.addAll(_queryParams('', 'slug', slug));
}
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -117,15 +102,8 @@ class UploadApi {
); );
} }
/// This endpoint requires the `asset.upload` permission. Future<void> getUploadOptions() async {
/// final response = await getUploadOptionsWithHttpInfo();
/// Parameters:
///
/// * [String] key:
///
/// * [String] slug:
Future<void> getUploadOptions({ String? key, String? slug, }) async {
final response = await getUploadOptionsWithHttpInfo( key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }
@ -137,15 +115,15 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
Future<Response> getUploadStatusWithHttpInfo(String draftUploadInteropVersion, String id, { String? key, String? slug, }) async { Future<Response> getUploadStatusWithHttpInfo(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload/{id}' final apiPath = r'/upload/{id}'
.replaceAll('{id}', id); .replaceAll('{id}', id);
@ -164,7 +142,7 @@ class UploadApi {
queryParams.addAll(_queryParams('', 'slug', slug)); queryParams.addAll(_queryParams('', 'slug', slug));
} }
headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion); headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -184,16 +162,16 @@ class UploadApi {
/// ///
/// Parameters: /// Parameters:
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
Future<void> getUploadStatus(String draftUploadInteropVersion, String id, { String? key, String? slug, }) async { Future<void> getUploadStatus(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
final response = await getUploadStatusWithHttpInfo(draftUploadInteropVersion, id, key: key, slug: slug, ); final response = await getUploadStatusWithHttpInfo(id, uploadDraftInteropVersion, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }
@ -208,21 +186,21 @@ class UploadApi {
/// * [String] contentLength (required): /// * [String] contentLength (required):
/// Non-negative size of the request body in bytes. /// Non-negative size of the request body in bytes.
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] uploadComplete (required): /// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
/// ///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] uploadOffset (required): /// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file. /// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
/// ///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
Future<Response> resumeUploadWithHttpInfo(String contentLength, String draftUploadInteropVersion, String id, String uploadComplete, String uploadOffset, { String? key, String? slug, }) async { Future<Response> resumeUploadWithHttpInfo(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload/{id}' final apiPath = r'/upload/{id}'
.replaceAll('{id}', id); .replaceAll('{id}', id);
@ -242,8 +220,8 @@ class UploadApi {
} }
headerParams[r'content-length'] = parameterToString(contentLength); headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion);
headerParams[r'upload-complete'] = parameterToString(uploadComplete); headerParams[r'upload-complete'] = parameterToString(uploadComplete);
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
headerParams[r'upload-offset'] = parameterToString(uploadOffset); headerParams[r'upload-offset'] = parameterToString(uploadOffset);
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -267,25 +245,33 @@ class UploadApi {
/// * [String] contentLength (required): /// * [String] contentLength (required):
/// Non-negative size of the request body in bytes. /// Non-negative size of the request body in bytes.
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] id (required): /// * [String] id (required):
/// ///
/// * [String] uploadComplete (required): /// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
/// ///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] uploadOffset (required): /// * [String] uploadOffset (required):
/// Non-negative byte offset indicating the starting position of the data in the request body within the entire file. /// Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
/// ///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
Future<void> resumeUpload(String contentLength, String draftUploadInteropVersion, String id, String uploadComplete, String uploadOffset, { String? key, String? slug, }) async { Future<Object?> resumeUpload(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
final response = await resumeUploadWithHttpInfo(contentLength, draftUploadInteropVersion, id, uploadComplete, uploadOffset, key: key, slug: slug, ); final response = await resumeUploadWithHttpInfo(contentLength, id, uploadComplete, uploadDraftInteropVersion, uploadOffset, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'Object',) as Object;
}
return null;
} }
/// This endpoint requires the `asset.upload` permission. /// This endpoint requires the `asset.upload` permission.
@ -297,22 +283,22 @@ class UploadApi {
/// * [String] contentLength (required): /// * [String] contentLength (required):
/// Non-negative size of the request body in bytes. /// Non-negative size of the request body in bytes.
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] reprDigest (required): /// * [String] reprDigest (required):
/// Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity. /// RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
/// ///
/// * [String] uploadComplete (required): /// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
/// ///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] xImmichAssetData (required): /// * [String] xImmichAssetData (required):
/// Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored. /// RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - icloud-id (string, optional): iCloud identifier for assets from iOS devices
/// ///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
Future<Response> startUploadWithHttpInfo(String contentLength, String draftUploadInteropVersion, String reprDigest, String uploadComplete, String xImmichAssetData, { String? key, String? slug, }) async { Future<Response> startUploadWithHttpInfo(String contentLength, String reprDigest, String uploadComplete, String uploadDraftInteropVersion, String xImmichAssetData, { String? key, String? slug, }) async {
// ignore: prefer_const_declarations // ignore: prefer_const_declarations
final apiPath = r'/upload'; final apiPath = r'/upload';
@ -331,9 +317,9 @@ class UploadApi {
} }
headerParams[r'content-length'] = parameterToString(contentLength); headerParams[r'content-length'] = parameterToString(contentLength);
headerParams[r'draft-upload-interop-version'] = parameterToString(draftUploadInteropVersion);
headerParams[r'repr-digest'] = parameterToString(reprDigest); headerParams[r'repr-digest'] = parameterToString(reprDigest);
headerParams[r'upload-complete'] = parameterToString(uploadComplete); headerParams[r'upload-complete'] = parameterToString(uploadComplete);
headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData); headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData);
const contentTypes = <String>[]; const contentTypes = <String>[];
@ -357,25 +343,33 @@ class UploadApi {
/// * [String] contentLength (required): /// * [String] contentLength (required):
/// Non-negative size of the request body in bytes. /// Non-negative size of the request body in bytes.
/// ///
/// * [String] draftUploadInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] reprDigest (required): /// * [String] reprDigest (required):
/// Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity. /// RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
/// ///
/// * [String] uploadComplete (required): /// * [String] uploadComplete (required):
/// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3. /// Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
/// ///
/// * [String] uploadDraftInteropVersion (required):
/// Indicates the version of the RUFH protocol supported by the client.
///
/// * [String] xImmichAssetData (required): /// * [String] xImmichAssetData (required):
/// Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored. /// RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - icloud-id (string, optional): iCloud identifier for assets from iOS devices
/// ///
/// * [String] key: /// * [String] key:
/// ///
/// * [String] slug: /// * [String] slug:
Future<void> startUpload(String contentLength, String draftUploadInteropVersion, String reprDigest, String uploadComplete, String xImmichAssetData, { String? key, String? slug, }) async { Future<Object?> startUpload(String contentLength, String reprDigest, String uploadComplete, String uploadDraftInteropVersion, String xImmichAssetData, { String? key, String? slug, }) async {
final response = await startUploadWithHttpInfo(contentLength, draftUploadInteropVersion, reprDigest, uploadComplete, xImmichAssetData, key: key, slug: slug, ); final response = await startUploadWithHttpInfo(contentLength, reprDigest, uploadComplete, uploadDraftInteropVersion, xImmichAssetData, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) { if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response)); throw ApiException(response.statusCode, await _decodeBodyBytes(response));
} }
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'Object',) as Object;
}
return null;
} }
} }

View file

@ -1,207 +0,0 @@
// Backup state of a local album. The declaration order is significant:
// it is relied on for sorting (see note below), so new values must be
// appended, never inserted.
enum BackupSelection {
  // Used to sort albums based on the backupSelection
  // selected -> none -> excluded
  // Do not change the order of these values
  selected,
  none,
  excluded,
}
/// A device-local photo album together with its backup configuration.
///
/// Instances are immutable; derive modified copies with [copyWith].
class LocalAlbum {
  final String id;
  final String name;
  final DateTime updatedAt;
  // NOTE(review): presumably iOS shared albums need special backup
  // handling — confirm against the platform code that populates this.
  final bool isIosSharedAlbum;
  final int assetCount;
  final BackupSelection backupSelection;
  // Id of the remote (server-side) album this album is linked to, if any.
  final String? linkedRemoteAlbumId;

  const LocalAlbum({
    required this.id,
    required this.name,
    required this.updatedAt,
    this.assetCount = 0,
    this.backupSelection = BackupSelection.none,
    this.isIosSharedAlbum = false,
    this.linkedRemoteAlbumId,
  });

  /// Returns a copy with the given fields replaced.
  ///
  /// NOTE(review): because of the `?? this.x` pattern, [linkedRemoteAlbumId]
  /// cannot be reset to null via copyWith; callers needing to clear it must
  /// construct a new [LocalAlbum] directly.
  LocalAlbum copyWith({
    String? id,
    String? name,
    DateTime? updatedAt,
    int? assetCount,
    BackupSelection? backupSelection,
    bool? isIosSharedAlbum,
    String? linkedRemoteAlbumId,
  }) {
    return LocalAlbum(
      id: id ?? this.id,
      name: name ?? this.name,
      updatedAt: updatedAt ?? this.updatedAt,
      assetCount: assetCount ?? this.assetCount,
      backupSelection: backupSelection ?? this.backupSelection,
      isIosSharedAlbum: isIosSharedAlbum ?? this.isIosSharedAlbum,
      linkedRemoteAlbumId: linkedRemoteAlbumId ?? this.linkedRemoteAlbumId,
    );
  }

  @override
  bool operator ==(Object other) {
    // Cheap identity check first (the original ran the type test first).
    if (identical(this, other)) return true;
    if (other is! LocalAlbum) return false;
    return other.id == id &&
        other.name == name &&
        other.updatedAt == updatedAt &&
        other.assetCount == assetCount &&
        other.backupSelection == backupSelection &&
        other.isIosSharedAlbum == isIosSharedAlbum &&
        other.linkedRemoteAlbumId == linkedRemoteAlbumId;
  }

  @override
  int get hashCode =>
      // Object.hash mixes the fields properly. The original XOR fold
      // collides whenever two instances merely permute equal field values
      // (e.g. id/name swapped), which degrades hash-set performance.
      Object.hash(id, name, updatedAt, assetCount, backupSelection,
          isIosSharedAlbum, linkedRemoteAlbumId);

  @override
  String toString() {
    // Fixed: the original omitted the comma after isIosSharedAlbum.
    return '''LocalAlbum: {
  id: $id,
  name: $name,
  updatedAt: $updatedAt,
  assetCount: $assetCount,
  backupSelection: $backupSelection,
  isIosSharedAlbum: $isIosSharedAlbum,
  linkedRemoteAlbumId: $linkedRemoteAlbumId,
}''';
  }
}
/// Returns [num] multiplied by itself.
int square(int num) => num * num;
/// Builds 100000 synthetic albums cycling through the three
/// [BackupSelection] values; serves as the benchmark input set.
@pragma('vm:never-inline')
List<LocalAlbum> getAlbums() {
  final now = DateTime.now();
  final selections = BackupSelection.values;
  final albums = <LocalAlbum>[];
  for (var index = 0; index < 100000; index++) {
    albums.add(LocalAlbum(
      id: '$index',
      name: '',
      updatedAt: now,
      backupSelection: selections[index % 3],
    ));
  }
  return albums;
}
// Benchmark variant 1: fixed-length copy via List.filled + setAll, then
// replace the first album whose id matches. The specific construction
// strategy is the thing being timed (see main), so the code form is
// deliberate — do not "simplify".
@pragma('vm:never-inline')
List<LocalAlbum> setAlbum1(List<LocalAlbum> albums, LocalAlbum album) {
  // Fill with a throwaway placeholder, then overwrite with the source list.
  final newAlbums = List.filled(albums.length, LocalAlbum(id: '', name: '', updatedAt: DateTime.now()));
  newAlbums.setAll(0, albums);
  for (int i = 0; i < newAlbums.length; i++) {
    final currentAlbum = newAlbums[i];
    if (currentAlbum.id == album.id) {
      newAlbums[i] = currentAlbum.copyWith(backupSelection: BackupSelection.selected);
      break; // only the first match is updated
    }
  }
  return newAlbums;
}
// Benchmark variant 2: single pass with a conditional expression and no
// early exit (every matching album is replaced, unlike setAlbum1/3 which
// stop at the first match).
@pragma('vm:never-inline')
List<LocalAlbum> setAlbum2(List<LocalAlbum> albums, LocalAlbum album) {
  final newAlbums = List.filled(albums.length, LocalAlbum(id: '', name: '', updatedAt: DateTime.now()));
  for (int i = 0; i < newAlbums.length; i++) {
    // BUG FIX: read from the source list. The original read newAlbums[i]
    // (the placeholder), so the input albums were never copied into the
    // result — the function returned a list of empty placeholders.
    final currentAlbum = albums[i];
    newAlbums[i] = currentAlbum.id == album.id
        ? currentAlbum.copyWith(backupSelection: BackupSelection.selected)
        : currentAlbum;
  }
  return newAlbums;
}
// Benchmark variant 3: fixed-length copy via toList(growable: false), then
// replace the first match. Construction strategy is intentionally distinct
// from setAlbum1/2 for timing comparison.
@pragma('vm:never-inline')
List<LocalAlbum> setAlbum3(List<LocalAlbum> albums, LocalAlbum album) {
  final newAlbums = albums.toList(growable: false);
  for (int i = 0; i < newAlbums.length; i++) {
    final currentAlbum = newAlbums[i];
    if (currentAlbum.id == album.id) {
      newAlbums[i] = currentAlbum.copyWith(backupSelection: BackupSelection.selected);
      break; // only the first match is updated
    }
  }
  return newAlbums;
}
// Benchmark variant 1: id extraction via map().toSet(). Each toSetN variant
// deliberately uses a different construction strategy; keep the form as-is.
@pragma('vm:never-inline')
Set<String> toSet1(List<LocalAlbum> albums) {
  return albums.map((album) => album.id).toSet();
}
// Benchmark variant 2: for-in loop with Set.add.
@pragma('vm:never-inline')
Set<String> toSet2(List<LocalAlbum> albums) {
  final ids = <String>{};
  for (final album in albums) {
    ids.add(album.id);
  }
  return ids;
}
// Benchmark variant 3: Set.unmodifiable over a mapped iterable. Note this
// variant (unlike toSet1/2/4) returns an unmodifiable set.
@pragma('vm:never-inline')
Set<String> toSet3(List<LocalAlbum> albums) {
  return Set.unmodifiable(albums.map((album) => album.id));
}
// Benchmark variant 4: index-based loop with Set.add.
@pragma('vm:never-inline')
Set<String> toSet4(List<LocalAlbum> albums) {
  final ids = <String>{};
  for (int i = 0; i < albums.length; i++) {
    final id = albums[i].id;
    ids.add(id);
  }
  return ids;
}
// Benchmark variant 1: where().toList. Note this returns a fixed-length
// list, unlike toFiltered2/3 which return growable lists.
@pragma('vm:never-inline')
List<LocalAlbum> toFiltered1(List<LocalAlbum> albums, BackupSelection selection) {
  return albums.where((album) => album.backupSelection == selection).toList(growable: false);
}
// Benchmark variant 2: for-in loop appending to a growable list.
@pragma('vm:never-inline')
List<LocalAlbum> toFiltered2(List<LocalAlbum> albums, BackupSelection selection) {
  final filtered = <LocalAlbum>[];
  for (final album in albums) {
    if (album.backupSelection == selection) {
      filtered.add(album);
    }
  }
  return filtered;
}
// Benchmark variant 3: index-based loop appending to a growable list.
@pragma('vm:never-inline')
List<LocalAlbum> toFiltered3(List<LocalAlbum> albums, BackupSelection selection) {
  final filtered = <LocalAlbum>[];
  for (int i = 0; i < albums.length; i++) {
    final album = albums[i];
    if (album.backupSelection == selection) {
      filtered.add(album);
    }
  }
  return filtered;
}
// Results are stored in top-level `late` variables so the computed value
// escapes the timed call and the VM cannot eliminate the work.
late Set<String> ids;
late List<LocalAlbum> localAlbums;

// Ad-hoc micro-benchmark driver: builds 100k albums, times exactly one of
// the candidate implementations above, and prints elapsed milliseconds.
// Swap the commented-out lines to benchmark a different variant.
void main(List<String> args) {
  final albums = getAlbums();
  // final album = LocalAlbum(id: '50000', name: '', updatedAt: DateTime.now());
  final stopwatch = Stopwatch()..start();
  // localAlbums = setAlbum3(albums, album);
  // ids = toSet1(albums);
  localAlbums = toFiltered2(albums, BackupSelection.selected);
  stopwatch.stop();
  print('Elapsed time: ${(stopwatch.elapsedMicroseconds / 1000).toStringAsFixed(2)}ms');
}

View file

@ -9376,45 +9376,15 @@
"/upload": { "/upload": {
"options": { "options": {
"operationId": "getUploadOptions", "operationId": "getUploadOptions",
"parameters": [ "parameters": [],
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
}
],
"responses": { "responses": {
"200": { "204": {
"description": "" "description": ""
} }
}, },
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [ "tags": [
"Upload" "Upload"
], ]
"x-immich-permission": "asset.upload",
"description": "This endpoint requires the `asset.upload` permission."
}, },
"post": { "post": {
"operationId": "startUpload", "operationId": "startUpload",
@ -9428,15 +9398,6 @@
"type": "string" "type": "string"
} }
}, },
{
"name": "draft-upload-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "key", "name": "key",
"required": false, "required": false,
@ -9448,7 +9409,7 @@
{ {
"name": "repr-digest", "name": "repr-digest",
"in": "header", "in": "header",
"description": "Structured dictionary containing an SHA-1 checksum used to detect duplicate files and validate data integrity.", "description": "RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.",
"required": true, "required": true,
"schema": { "schema": {
"type": "string" "type": "string"
@ -9471,10 +9432,19 @@
"type": "string" "type": "string"
} }
}, },
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "x-immich-asset-data", "name": "x-immich-asset-data",
"in": "header", "in": "header",
"description": "Base64-encoded JSON of asset metadata. The expected content is the same as AssetMediaCreateDto, except that `filename` is required and `sidecarData` is ignored.", "description": "RFC 9651 structured dictionary containing asset metadata with the following keys:\n- device-asset-id (string, required): Unique device asset identifier\n- device-id (string, required): Device identifier\n- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp\n- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp\n- filename (string, required): Original filename\n- is-favorite (boolean, optional): Favorite status\n- icloud-id (string, optional): iCloud identifier for assets from iOS devices",
"required": true, "required": true,
"schema": { "schema": {
"type": "string" "type": "string"
@ -9482,6 +9452,16 @@
} }
], ],
"responses": { "responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadOkDto"
}
}
},
"description": ""
},
"201": { "201": {
"description": "" "description": ""
} }
@ -9559,15 +9539,6 @@
"head": { "head": {
"operationId": "getUploadStatus", "operationId": "getUploadStatus",
"parameters": [ "parameters": [
{
"name": "draft-upload-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "id", "name": "id",
"required": true, "required": true,
@ -9592,6 +9563,15 @@
"schema": { "schema": {
"type": "string" "type": "string"
} }
},
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
} }
], ],
"responses": { "responses": {
@ -9628,15 +9608,6 @@
"type": "string" "type": "string"
} }
}, },
{
"name": "draft-upload-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "id", "name": "id",
"required": true, "required": true,
@ -9671,6 +9642,15 @@
"type": "string" "type": "string"
} }
}, },
{
"name": "upload-draft-interop-version",
"in": "header",
"description": "Indicates the version of the RUFH protocol supported by the client.",
"required": true,
"schema": {
"type": "string"
}
},
{ {
"name": "upload-offset", "name": "upload-offset",
"in": "header", "in": "header",
@ -9683,6 +9663,13 @@
], ],
"responses": { "responses": {
"200": { "200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadOkDto"
}
}
},
"description": "" "description": ""
} }
}, },
@ -18071,6 +18058,10 @@
}, },
"type": "object" "type": "object"
}, },
"UploadOkDto": {
"properties": {},
"type": "object"
},
"UsageByUserDto": { "UsageByUserDto": {
"properties": { "properties": {
"photos": { "photos": {

View file

@ -1654,6 +1654,7 @@ export type TimeBucketsResponseDto = {
export type TrashResponseDto = { export type TrashResponseDto = {
count: number; count: number;
}; };
export type UploadOkDto = {};
export type UserUpdateMeDto = { export type UserUpdateMeDto = {
avatarColor?: (UserAvatarColor) | null; avatarColor?: (UserAvatarColor) | null;
email?: string; email?: string;
@ -4518,17 +4519,8 @@ export function restoreAssets({ bulkIdsDto }: {
body: bulkIdsDto body: bulkIdsDto
}))); })));
} }
/** export function getUploadOptions(opts?: Oazapfts.RequestOpts) {
* This endpoint requires the `asset.upload` permission. return oazapfts.ok(oazapfts.fetchText("/upload", {
*/
export function getUploadOptions({ key, slug }: {
key?: string;
slug?: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/upload${QS.query(QS.explode({
key,
slug
}))}`, {
...opts, ...opts,
method: "OPTIONS" method: "OPTIONS"
})); }));
@ -4536,16 +4528,21 @@ export function getUploadOptions({ key, slug }: {
/** /**
* This endpoint requires the `asset.upload` permission. * This endpoint requires the `asset.upload` permission.
*/ */
export function startUpload({ contentLength, draftUploadInteropVersion, key, reprDigest, slug, uploadComplete, xImmichAssetData }: { export function startUpload({ contentLength, key, reprDigest, slug, uploadComplete, uploadDraftInteropVersion, xImmichAssetData }: {
contentLength: string; contentLength: string;
draftUploadInteropVersion: string;
key?: string; key?: string;
reprDigest: string; reprDigest: string;
slug?: string; slug?: string;
uploadComplete: string; uploadComplete: string;
uploadDraftInteropVersion: string;
xImmichAssetData: string; xImmichAssetData: string;
}, opts?: Oazapfts.RequestOpts) { }, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/upload${QS.query(QS.explode({ return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: UploadOkDto;
} | {
status: 201;
}>(`/upload${QS.query(QS.explode({
key, key,
slug slug
}))}`, { }))}`, {
@ -4553,9 +4550,9 @@ export function startUpload({ contentLength, draftUploadInteropVersion, key, rep
method: "POST", method: "POST",
headers: oazapfts.mergeHeaders(opts?.headers, { headers: oazapfts.mergeHeaders(opts?.headers, {
"content-length": contentLength, "content-length": contentLength,
"draft-upload-interop-version": draftUploadInteropVersion,
"repr-digest": reprDigest, "repr-digest": reprDigest,
"upload-complete": uploadComplete, "upload-complete": uploadComplete,
"upload-draft-interop-version": uploadDraftInteropVersion,
"x-immich-asset-data": xImmichAssetData "x-immich-asset-data": xImmichAssetData
}) })
})); }));
@ -4579,11 +4576,11 @@ export function cancelUpload({ id, key, slug }: {
/** /**
* This endpoint requires the `asset.upload` permission. * This endpoint requires the `asset.upload` permission.
*/ */
export function getUploadStatus({ draftUploadInteropVersion, id, key, slug }: { export function getUploadStatus({ id, key, slug, uploadDraftInteropVersion }: {
draftUploadInteropVersion: string;
id: string; id: string;
key?: string; key?: string;
slug?: string; slug?: string;
uploadDraftInteropVersion: string;
}, opts?: Oazapfts.RequestOpts) { }, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({ return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
key, key,
@ -4592,23 +4589,26 @@ export function getUploadStatus({ draftUploadInteropVersion, id, key, slug }: {
...opts, ...opts,
method: "HEAD", method: "HEAD",
headers: oazapfts.mergeHeaders(opts?.headers, { headers: oazapfts.mergeHeaders(opts?.headers, {
"draft-upload-interop-version": draftUploadInteropVersion "upload-draft-interop-version": uploadDraftInteropVersion
}) })
})); }));
} }
/** /**
* This endpoint requires the `asset.upload` permission. * This endpoint requires the `asset.upload` permission.
*/ */
export function resumeUpload({ contentLength, draftUploadInteropVersion, id, key, slug, uploadComplete, uploadOffset }: { export function resumeUpload({ contentLength, id, key, slug, uploadComplete, uploadDraftInteropVersion, uploadOffset }: {
contentLength: string; contentLength: string;
draftUploadInteropVersion: string;
id: string; id: string;
key?: string; key?: string;
slug?: string; slug?: string;
uploadComplete: string; uploadComplete: string;
uploadDraftInteropVersion: string;
uploadOffset: string; uploadOffset: string;
}, opts?: Oazapfts.RequestOpts) { }, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({ return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: UploadOkDto;
}>(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
key, key,
slug slug
}))}`, { }))}`, {
@ -4616,8 +4616,8 @@ export function resumeUpload({ contentLength, draftUploadInteropVersion, id, key
method: "PATCH", method: "PATCH",
headers: oazapfts.mergeHeaders(opts?.headers, { headers: oazapfts.mergeHeaders(opts?.headers, {
"content-length": contentLength, "content-length": contentLength,
"draft-upload-interop-version": draftUploadInteropVersion,
"upload-complete": uploadComplete, "upload-complete": uploadComplete,
"upload-draft-interop-version": uploadDraftInteropVersion,
"upload-offset": uploadOffset "upload-offset": uploadOffset
}) })
})); }));

View file

@ -1,4 +1,4 @@
import { createHash, randomUUID } from 'crypto'; import { createHash, randomUUID } from 'node:crypto';
import { AssetUploadController } from 'src/controllers/asset-upload.controller'; import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetUploadService } from 'src/services/asset-upload.service'; import { AssetUploadService } from 'src/services/asset-upload.service';
import { serializeDictionary } from 'structured-headers'; import { serializeDictionary } from 'structured-headers';
@ -31,10 +31,22 @@ describe(AssetUploadController.name, () => {
beforeEach(() => { beforeEach(() => {
service.resetAllMocks(); service.resetAllMocks();
service.startUpload.mockImplementation(async (auth, req, res, dto) => void res.send()); service.startUpload.mockImplementation((_, __, res, ___) => {
service.resumeUpload.mockImplementation(async (auth, req, res, id, dto) => void res.send()); res.send();
service.cancelUpload.mockImplementation(async (auth, id, res) => void res.send()); return Promise.resolve();
service.getUploadStatus.mockImplementation(async (auth, res, id, dto) => void res.send()); });
service.resumeUpload.mockImplementation((_, __, res, ___, ____) => {
res.send();
return Promise.resolve();
});
service.cancelUpload.mockImplementation((_, __, res) => {
res.send();
return Promise.resolve();
});
service.getUploadStatus.mockImplementation((_, res, __, ___) => {
res.send();
return Promise.resolve();
});
ctx.reset(); ctx.reset();
buffer = Buffer.from(randomUUID()); buffer = Buffer.from(randomUUID());
@ -217,7 +229,7 @@ describe(AssetUploadController.name, () => {
}); });
it('should accept Upload-Incomplete header for version 3', async () => { it('should accept Upload-Incomplete header for version 3', async () => {
const { status, body } = await request(ctx.getHttpServer()) const { status } = await request(ctx.getHttpServer())
.post('/upload') .post('/upload')
.set('Upload-Draft-Interop-Version', '3') .set('Upload-Draft-Interop-Version', '3')
.set('X-Immich-Asset-Data', makeAssetData()) .set('X-Immich-Asset-Data', makeAssetData())
@ -428,7 +440,7 @@ describe(AssetUploadController.name, () => {
}); });
it('should validate UUID parameter', async () => { it('should validate UUID parameter', async () => {
const { status, body } = await request(ctx.getHttpServer()) const { status } = await request(ctx.getHttpServer())
.head('/upload/invalid-uuid') .head('/upload/invalid-uuid')
.set('Upload-Draft-Interop-Version', '8'); .set('Upload-Draft-Interop-Version', '8');

View file

@ -56,7 +56,6 @@ export class AssetUploadController {
- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp
- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp
- filename (string, required): Original filename - filename (string, required): Original filename
- duration (string, optional): Duration for video assets
- is-favorite (boolean, optional): Favorite status - is-favorite (boolean, optional): Favorite status
- icloud-id (string, optional): iCloud identifier for assets from iOS devices`, - icloud-id (string, optional): iCloud identifier for assets from iOS devices`,
required: true, required: true,

View file

@ -20,10 +20,6 @@ export class UploadAssetDataDto {
@ValidateDate() @ValidateDate()
fileModifiedAt!: Date; fileModifiedAt!: Date;
@Optional()
@IsString()
duration?: string;
@IsString() @IsString()
@IsNotEmpty() @IsNotEmpty()
filename!: string; filename!: string;
@ -105,7 +101,7 @@ export class StartUploadDto extends BaseUploadHeadersDto {
isFavorite: dict.get('is-favorite')?.[0], isFavorite: dict.get('is-favorite')?.[0],
iCloudId: dict.get('icloud-id')?.[0], iCloudId: dict.get('icloud-id')?.[0],
}); });
} catch (error: any) { } catch {
throw new BadRequestException(`${ImmichHeader.AssetData} must be a valid structured dictionary`); throw new BadRequestException(`${ImmichHeader.AssetData} must be a valid structured dictionary`);
} }
}) })
@ -156,4 +152,4 @@ export class GetUploadStatusDto extends BaseRufhHeadersDto {}
export class UploadOkDto { export class UploadOkDto {
id!: string; id!: string;
} }

View file

@ -14,6 +14,7 @@ from
left join "smart_search" on "asset"."id" = "smart_search"."assetId" left join "smart_search" on "asset"."id" = "smart_search"."assetId"
where where
"asset"."id" = $1::uuid "asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit limit
$2 $2
@ -40,6 +41,7 @@ from
"asset" "asset"
where where
"asset"."id" = $1::uuid "asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit limit
$2 $2
@ -52,6 +54,7 @@ from
"asset" "asset"
where where
"asset"."id" = $1::uuid "asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit limit
$2 $2
@ -78,7 +81,8 @@ from
"asset" "asset"
inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id" inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
where where
"asset"."deletedAt" is null "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "asset"."visibility" != $1 and "asset"."visibility" != $1
and ( and (
"asset_job_status"."previewAt" is null "asset_job_status"."previewAt" is null
@ -110,6 +114,7 @@ from
"asset" "asset"
where where
"asset"."id" = $1 "asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getForGenerateThumbnailJob -- AssetJobRepository.getForGenerateThumbnailJob
select select
@ -141,6 +146,7 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId" inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where where
"asset"."id" = $1 "asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getForMetadataExtraction -- AssetJobRepository.getForMetadataExtraction
select select
@ -178,6 +184,7 @@ from
"asset" "asset"
where where
"asset"."id" = $1 "asset"."id" = $1
and "asset"."status" != 'partial'
-- AssetJobRepository.getAlbumThumbnailFiles -- AssetJobRepository.getAlbumThumbnailFiles
select select
@ -198,7 +205,8 @@ from
inner join "smart_search" on "asset"."id" = "smart_search"."assetId" inner join "smart_search" on "asset"."id" = "smart_search"."assetId"
inner join "asset_job_status" as "job_status" on "job_status"."assetId" = "asset"."id" inner join "asset_job_status" as "job_status" on "job_status"."assetId" = "asset"."id"
where where
"asset"."deletedAt" is null "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline') and "asset"."visibility" in ('archive', 'timeline')
and "job_status"."duplicatesDetectedAt" is null and "job_status"."duplicatesDetectedAt" is null
@ -210,6 +218,7 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id" inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where where
"asset"."visibility" != $1 "asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null and "job_status"."previewAt" is not null
and not exists ( and not exists (
@ -244,6 +253,7 @@ from
"asset" "asset"
where where
"asset"."id" = $2 "asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.getForDetectFacesJob -- AssetJobRepository.getForDetectFacesJob
select select
@ -284,6 +294,7 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId" inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where where
"asset"."id" = $2 "asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.getForOcr -- AssetJobRepository.getForOcr
select select
@ -385,6 +396,7 @@ from
) as "stacked_assets" on "stack"."id" is not null ) as "stacked_assets" on "stack"."id" is not null
where where
"asset"."id" = $2 "asset"."id" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForVideoConversion -- AssetJobRepository.streamForVideoConversion
select select
@ -398,6 +410,7 @@ where
or "asset"."encodedVideoPath" = $2 or "asset"."encodedVideoPath" = $2
) )
and "asset"."visibility" != $3 and "asset"."visibility" != $3
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null and "asset"."deletedAt" is null
-- AssetJobRepository.getForVideoConversion -- AssetJobRepository.getForVideoConversion
@ -411,6 +424,7 @@ from
where where
"asset"."id" = $1 "asset"."id" = $1
and "asset"."type" = $2 and "asset"."type" = $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForMetadataExtraction -- AssetJobRepository.streamForMetadataExtraction
select select
@ -423,6 +437,7 @@ where
"asset_job_status"."metadataExtractedAt" is null "asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null or "asset_job_status"."assetId" is null
) )
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null and "asset"."deletedAt" is null
-- AssetJobRepository.getForStorageTemplateJob -- AssetJobRepository.getForStorageTemplateJob
@ -443,7 +458,8 @@ from
"asset" "asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId" inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where where
"asset"."deletedAt" is null "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "asset"."id" = $1 and "asset"."id" = $1
-- AssetJobRepository.streamForStorageTemplateJob -- AssetJobRepository.streamForStorageTemplateJob
@ -464,7 +480,8 @@ from
"asset" "asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId" inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where where
"asset"."deletedAt" is null "asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.streamForDeletedJob -- AssetJobRepository.streamForDeletedJob
select select
@ -474,6 +491,7 @@ from
"asset" "asset"
where where
"asset"."deletedAt" <= $1 "asset"."deletedAt" <= $1
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForSidecar -- AssetJobRepository.streamForSidecar
select select
@ -486,6 +504,7 @@ where
or "asset"."sidecarPath" is null or "asset"."sidecarPath" is null
) )
and "asset"."visibility" != $2 and "asset"."visibility" != $2
and "asset"."status" != 'partial'
-- AssetJobRepository.streamForDetectFacesJob -- AssetJobRepository.streamForDetectFacesJob
select select
@ -495,8 +514,10 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id" inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where where
"asset"."visibility" != $1 "asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null and "job_status"."previewAt" is not null
and "asset"."status" != 'partial'
order by order by
"asset"."fileCreatedAt" desc "asset"."fileCreatedAt" desc
@ -517,4 +538,14 @@ select
from from
"asset" "asset"
where where
"asset"."deletedAt" is null "asset"."status" != 'partial'
and "asset"."deletedAt" is null
-- AssetJobRepository.streamForPartialAssetCleanupJob
select
"id"
from
"asset"
where
"asset"."status" = 'partial'
and "asset"."createdAt" < $1

View file

@ -46,6 +46,56 @@ where
"assetId" = $1 "assetId" = $1
and "key" = $2 and "key" = $2
-- AssetRepository.getCompletionMetadata
select
"originalPath" as "path",
"status",
"fileModifiedAt",
"createdAt",
"checksum",
"fileSizeInByte" as "size"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"id" = $1
and "ownerId" = $2
-- AssetRepository.setComplete
update "asset"
set
"status" = $1,
"visibility" = $2
where
"id" = $3
and "status" = 'partial'
-- AssetRepository.removeAndDecrementQuota
with
"asset_exif" as (
select
"fileSizeInByte"
from
"asset_exif"
where
"assetId" = $1
),
"asset" as (
delete from "asset"
where
"id" = $2
returning
"ownerId"
)
update "user"
set
"quotaUsageInBytes" = "quotaUsageInBytes" - "fileSizeInByte"
from
"asset_exif",
"asset"
where
"user"."id" = "asset"."ownerId"
-- AssetRepository.getByDayOfYear -- AssetRepository.getByDayOfYear
with with
"res" as ( "res" as (
@ -258,7 +308,9 @@ where
-- AssetRepository.getUploadAssetIdByChecksum -- AssetRepository.getUploadAssetIdByChecksum
select select
"id" "id",
"status",
"createdAt"
from from
"asset" "asset"
where where

View file

@ -279,6 +279,7 @@ export class AssetRepository {
.execute(); .execute();
} }
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
getCompletionMetadata(assetId: string, ownerId: string) { getCompletionMetadata(assetId: string, ownerId: string) {
return this.db return this.db
.selectFrom('asset') .selectFrom('asset')
@ -289,6 +290,7 @@ export class AssetRepository {
.executeTakeFirst(); .executeTakeFirst();
} }
@GenerateSql({ params: [DummyValue.UUID] })
async setComplete(assetId: string) { async setComplete(assetId: string) {
await this.db await this.db
.updateTable('asset') .updateTable('asset')
@ -298,6 +300,7 @@ export class AssetRepository {
.execute(); .execute();
} }
@GenerateSql({ params: [DummyValue.UUID] })
async removeAndDecrementQuota(id: string): Promise<void> { async removeAndDecrementQuota(id: string): Promise<void> {
await this.db await this.db
.with('asset_exif', (qb) => qb.selectFrom('asset_exif').where('assetId', '=', id).select('fileSizeInByte')) .with('asset_exif', (qb) => qb.selectFrom('asset_exif').where('assetId', '=', id).select('fileSizeInByte'))

View file

@ -160,7 +160,7 @@ export class StorageRepository {
} }
} }
mkdir(filepath: string): Promise<String | undefined> { mkdir(filepath: string): Promise<string | undefined> {
return fs.mkdir(filepath, { recursive: true }); return fs.mkdir(filepath, { recursive: true });
} }

View file

@ -4,6 +4,6 @@ export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TYPE "assets_status_enum" ADD VALUE IF NOT EXISTS 'partial'`.execute(db); await sql`ALTER TYPE "assets_status_enum" ADD VALUE IF NOT EXISTS 'partial'`.execute(db);
} }
export async function down(db: Kysely<any>): Promise<void> { export async function down(): Promise<void> {
// Cannot remove enum values in PostgreSQL // Cannot remove enum values in PostgreSQL
} }

View file

@ -24,7 +24,7 @@ describe(AssetUploadService.name, () => {
fileCreatedAt: new Date('2025-01-01T00:00:00Z'), fileCreatedAt: new Date('2025-01-01T00:00:00Z'),
fileModifiedAt: new Date('2025-01-01T12:00:00Z'), fileModifiedAt: new Date('2025-01-01T12:00:00Z'),
isFavorite: false, isFavorite: false,
iCloudId: '' iCloudId: '',
}, },
checksum: Buffer.from('checksum'), checksum: Buffer.from('checksum'),
uploadLength: 1024, uploadLength: 1024,
@ -167,6 +167,7 @@ describe(AssetUploadService.name, () => {
(checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT; (checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;
mocks.asset.createWithMetadata.mockRejectedValue(checksumError); mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
// eslint-disable-next-line unicorn/no-useless-undefined
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(undefined); mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(undefined);
await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException); await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
@ -197,29 +198,6 @@ describe(AssetUploadService.name, () => {
]); ]);
}); });
it('should include duration for video assets', async () => {
const videoDto = {
...mockDto,
assetData: {
...mockDto.assetData,
filename: 'video.mp4',
duration: '00:05:30',
},
};
mocks.crypto.randomUUID.mockReturnValue(factory.uuid());
await sut.onStart(authStub.user1, videoDto);
expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
expect.objectContaining({
duration: '00:05:30',
}),
expect.anything(),
undefined,
);
});
it('should set isFavorite when true', async () => { it('should set isFavorite when true', async () => {
const favoriteDto = { const favoriteDto = {
...mockDto, ...mockDto,
@ -327,6 +305,7 @@ describe(AssetUploadService.name, () => {
const staleAssets = [{ id: factory.uuid() }, { id: factory.uuid() }, { id: factory.uuid() }]; const staleAssets = [{ id: factory.uuid() }, { id: factory.uuid() }, { id: factory.uuid() }];
mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue( mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
// eslint-disable-next-line @typescript-eslint/require-await
(async function* () { (async function* () {
for (const asset of staleAssets) { for (const asset of staleAssets) {
yield asset; yield asset;
@ -339,16 +318,17 @@ describe(AssetUploadService.name, () => {
expect(mocks.assetJob.streamForPartialAssetCleanupJob).toHaveBeenCalledWith(expect.any(Date)); expect(mocks.assetJob.streamForPartialAssetCleanupJob).toHaveBeenCalledWith(expect.any(Date));
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.PartialAssetCleanup, data: staleAssets[0] }, { name: JobName.PartialAssetCleanup, data: staleAssets[0] },
{ name: JobName.PartialAssetCleanup, data: staleAssets[1] }, { name: JobName.PartialAssetCleanup, data: staleAssets[1] },
{ name: JobName.PartialAssetCleanup, data: staleAssets[2] }, { name: JobName.PartialAssetCleanup, data: staleAssets[2] },
]); ]);
}); });
it('should batch cleanup jobs', async () => { it('should batch cleanup jobs', async () => {
const assets = Array.from({ length: 1500 }, () => ({ id: factory.uuid() })); const assets = Array.from({ length: 1500 }, () => ({ id: factory.uuid() }));
mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue( mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
// eslint-disable-next-line @typescript-eslint/require-await
(async function* () { (async function* () {
for (const asset of assets) { for (const asset of assets) {
yield asset; yield asset;
@ -376,6 +356,7 @@ describe(AssetUploadService.name, () => {
const path = `/upload/${assetId}/file.jpg`; const path = `/upload/${assetId}/file.jpg`;
it('should skip if asset not found', async () => { it('should skip if asset not found', async () => {
// eslint-disable-next-line unicorn/no-useless-undefined
mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue(undefined); mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue(undefined);
const result = await sut.removeStaleUpload({ id: assetId }); const result = await sut.removeStaleUpload({ id: assetId });

View file

@ -36,7 +36,7 @@ export class AssetUploadService extends BaseService {
const asset = await this.onStart(auth, dto); const asset = await this.onStart(auth, dto);
if (asset.isDuplicate) { if (asset.isDuplicate) {
if (asset.status !== AssetStatus.Partial) { if (asset.status !== AssetStatus.Partial) {
return this.sendAlreadyCompletedProblem(res); return this.sendAlreadyCompleted(res);
} }
const location = `/api/upload/${asset.id}`; const location = `/api/upload/${asset.id}`;
@ -49,7 +49,7 @@ export class AssetUploadService extends BaseService {
} }
if (isComplete && uploadLength !== contentLength) { if (isComplete && uploadLength !== contentLength) {
return this.sendInconsistentLengthProblem(res); return this.sendInconsistentLength(res);
} }
const location = `/api/upload/${asset.id}`; const location = `/api/upload/${asset.id}`;
@ -66,25 +66,19 @@ export class AssetUploadService extends BaseService {
req.on('data', (data: Buffer) => hash.update(data)); req.on('data', (data: Buffer) => hash.update(data));
writeStream.on('finish', () => (checksumBuffer = hash.digest())); writeStream.on('finish', () => (checksumBuffer = hash.digest()));
} }
await new Promise((resolve, reject) => writeStream.on('finish', resolve).on('close', reject));
this.setCompleteHeader(res, dto.version, isComplete);
if (!isComplete) {
res.status(201).set('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
return;
}
this.logger.log(`Finished upload to ${asset.path}`);
if (dto.checksum.compare(checksumBuffer!) !== 0) {
return await this.sendChecksumMismatch(res, asset.id, asset.path);
}
writeStream.on('finish', () => { await this.onComplete(metadata);
this.setCompleteHeader(res, dto.version, isComplete); res.status(200).send({ id: asset.id });
if (!isComplete) {
return res.status(201).set('Location', location).setHeader('Upload-Limit', 'min-size=0').send();
}
this.logger.log(`Finished upload to ${asset.path}`);
if (dto.checksum.compare(checksumBuffer!) !== 0) {
return this.sendChecksumMismatchResponse(res, asset.id, asset.path);
}
this.onComplete(metadata)
.then(() => res.status(200).send({ id: asset.id }))
.catch((error) => {
this.logger.error(`Failed to complete upload for ${asset.id}: ${error.message}`);
res.status(500).send();
});
});
await new Promise((resolve) => writeStream.on('close', resolve));
} }
resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> { resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
@ -100,16 +94,16 @@ export class AssetUploadService extends BaseService {
const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData; const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData;
if (status !== AssetStatus.Partial) { if (status !== AssetStatus.Partial) {
return this.sendAlreadyCompletedProblem(res); return this.sendAlreadyCompleted(res);
} }
if (uploadLength && size && size !== uploadLength) { if (uploadLength && size && size !== uploadLength) {
return this.sendInconsistentLengthProblem(res); return this.sendInconsistentLength(res);
} }
const expectedOffset = await this.getCurrentOffset(path); const expectedOffset = await this.getCurrentOffset(path);
if (expectedOffset !== uploadOffset) { if (expectedOffset !== uploadOffset) {
return this.sendOffsetMismatchProblem(res, expectedOffset, uploadOffset); return this.sendOffsetMismatch(res, expectedOffset, uploadOffset);
} }
const newLength = uploadOffset + contentLength; const newLength = uploadOffset + contentLength;
@ -123,28 +117,29 @@ export class AssetUploadService extends BaseService {
return; return;
} }
const metadata = { id, path, size: contentLength, fileModifiedAt: fileModifiedAt }; const metadata = { id, path, size: contentLength, fileModifiedAt };
const writeStream = this.pipe(req, res, metadata); const writeStream = this.pipe(req, res, metadata);
writeStream.on('finish', async () => { await new Promise((resolve, reject) => writeStream.on('finish', resolve).on('close', reject));
this.setCompleteHeader(res, version, isComplete); this.setCompleteHeader(res, version, isComplete);
const currentOffset = await this.getCurrentOffset(path); if (!isComplete) {
if (!isComplete) {
return res.status(204).setHeader('Upload-Offset', currentOffset.toString()).send();
}
this.logger.log(`Finished upload to ${path}`);
const checksum = await this.cryptoRepository.hashFile(path);
if (providedChecksum.compare(checksum) !== 0) {
return this.sendChecksumMismatchResponse(res, id, path);
}
try { try {
await this.onComplete(metadata); const offset = await this.getCurrentOffset(path);
} finally { res.status(204).setHeader('Upload-Offset', offset.toString()).send();
res.status(200).send({ id }); } catch {
this.logger.error(`Failed to get current offset for ${path} after write`);
res.status(500).send();
} }
}); return;
await new Promise((resolve) => writeStream.on('close', resolve)); }
this.logger.log(`Finished upload to ${path}`);
const checksum = await this.cryptoRepository.hashFile(path);
if (providedChecksum.compare(checksum) !== 0) {
return await this.sendChecksumMismatch(res, id, path);
}
await this.onComplete(metadata);
res.status(200).send({ id });
}); });
} }
@ -156,7 +151,7 @@ export class AssetUploadService extends BaseService {
return; return;
} }
if (asset.status !== AssetStatus.Partial) { if (asset.status !== AssetStatus.Partial) {
return this.sendAlreadyCompletedProblem(res); return this.sendAlreadyCompleted(res);
} }
await this.onCancel(assetId, asset.path); await this.onCancel(assetId, asset.path);
res.status(204).send(); res.status(204).send();
@ -250,9 +245,8 @@ export class AssetUploadService extends BaseService {
fileCreatedAt: assetData.fileCreatedAt, fileCreatedAt: assetData.fileCreatedAt,
fileModifiedAt: assetData.fileModifiedAt, fileModifiedAt: assetData.fileModifiedAt,
localDateTime: assetData.fileCreatedAt, localDateTime: assetData.fileCreatedAt,
type: type, type,
isFavorite: assetData.isFavorite, isFavorite: assetData.isFavorite,
duration: assetData.duration || null,
visibility: AssetVisibility.Hidden, visibility: AssetVisibility.Hidden,
originalFileName: assetData.filename, originalFileName: assetData.filename,
status: AssetStatus.Partial, status: AssetStatus.Partial,
@ -280,7 +274,7 @@ export class AssetUploadService extends BaseService {
async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) { async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) {
this.logger.debug('Completing upload for asset', id); this.logger.debug('Completing upload for asset', id);
const jobData = { name: JobName.AssetExtractMetadata, data: { id: id, source: 'upload' } } as const; const jobData = { name: JobName.AssetExtractMetadata, data: { id, source: 'upload' } } as const;
await withRetry(() => this.assetRepository.setComplete(id)); await withRetry(() => this.assetRepository.setComplete(id));
try { try {
await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt)); await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
@ -317,9 +311,10 @@ export class AssetUploadService extends BaseService {
if (receivedLength + data.length > size) { if (receivedLength + data.length > size) {
writeStream.destroy(); writeStream.destroy();
req.destroy(); req.destroy();
return this.onCancel(id, path).finally(() => void this.onCancel(id, path)
res.status(400).send('Received more data than specified in content-length'), .catch((error: any) => this.logger.error(`Failed to remove ${id} after too much data: ${error.message}`))
); .finally(() => res.status(400).send('Received more data than specified in content-length'));
return;
} }
receivedLength += data.length; receivedLength += data.length;
if (!writeStream.write(data)) { if (!writeStream.write(data)) {
@ -333,9 +328,9 @@ export class AssetUploadService extends BaseService {
return writeStream.end(); return writeStream.end();
} }
writeStream.destroy(); writeStream.destroy();
this.onCancel(id, path).finally(() => void this.onCancel(id, path)
res.status(400).send(`Received ${receivedLength} bytes when expecting ${size}`), .catch((error: any) => this.logger.error(`Failed to remove ${id} after unexpected length: ${error.message}`))
); .finally(() => res.status(400).send(`Received ${receivedLength} bytes when expecting ${size}`));
}); });
return writeStream; return writeStream;
@ -353,21 +348,21 @@ export class AssetUploadService extends BaseService {
} }
} }
private sendInconsistentLengthProblem(res: Response): void { private sendInconsistentLength(res: Response): void {
res.status(400).contentType('application/problem+json').send({ res.status(400).contentType('application/problem+json').send({
type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length', type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
title: 'inconsistent length values for upload', title: 'inconsistent length values for upload',
}); });
} }
private sendAlreadyCompletedProblem(res: Response): void { private sendAlreadyCompleted(res: Response): void {
res.status(400).contentType('application/problem+json').send({ res.status(400).contentType('application/problem+json').send({
type: 'https://iana.org/assignments/http-problem-types#completed-upload', type: 'https://iana.org/assignments/http-problem-types#completed-upload',
title: 'upload is already completed', title: 'upload is already completed',
}); });
} }
private sendOffsetMismatchProblem(res: Response, expected: number, actual: number): void { private sendOffsetMismatch(res: Response, expected: number, actual: number): void {
res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({ res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({
type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset', type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
title: 'offset from request does not match offset of resource', title: 'offset from request does not match offset of resource',
@ -376,7 +371,7 @@ export class AssetUploadService extends BaseService {
}); });
} }
private sendChecksumMismatchResponse(res: Response, assetId: string, path: string): Promise<void> { private sendChecksumMismatch(res: Response, assetId: string, path: string) {
this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`); this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
res.status(460).send('File on server does not match provided checksum'); res.status(460).send('File on server does not match provided checksum');
return this.onCancel(assetId, path); return this.onCancel(assetId, path);