feat(server): separate face clustering job (#5598)

* separate facial clustering job

* update api

* fixed some tests

* invert clustering

* hdbscan

* update api

* remove commented code

* wip dbscan

* cleanup

removed cluster endpoint

remove commented code

* fixes

updated tests

minor fixes and formatting

fixed queuing

refinements

* scale search range based on library size

* defer non-core faces

* optimizations

removed unused query option

* assign faces individually for correctness

fixed unit tests

remove unused method

* don't select face embedding

update sql

linting

fixed ml typing

* updated job mock

* paginate people query

* select face embeddings because typeorm

* fix setting face detection concurrency

* update sql

formatting

linting

* simplify logic

remove unused imports

* more specific delete signature

* more accurate typing for face stubs

* add migration

formatting

* chore: better typing

* don't select embedding by default

remove unused import

* updated sql

* use normal try/catch

* stricter concurrency typing and enforcement

* update api

* update job concurrency panel to show disabled queues

formatting

* check jobId in queueAll

fix tests

* remove outdated comment

* better facial recognition icon

* wording

wording

formatting

* fixed tests

* fix

* formatting & sql

* try to fix sql check

* more detailed description

* update sql

* formatting

* wording

* update `minFaces` description

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
This commit is contained in:
Mert 2024-01-18 00:08:48 -05:00 committed by GitHub
parent 44873b4224
commit 68f52818ae
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
57 changed files with 1081 additions and 631 deletions

View file

@ -204,16 +204,20 @@ export class AuditService {
}
}
const people = await this.personRepository.getAll();
for (const { id, thumbnailPath } of people) {
track(thumbnailPath);
const entity = { entityId: id, entityType: PathEntityType.PERSON };
if (thumbnailPath && !hasFile(thumbFiles, thumbnailPath)) {
orphans.push({ ...entity, pathType: PersonPathType.FACE, pathValue: thumbnailPath });
const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.personRepository.getAll(pagination),
);
for await (const people of personPagination) {
for (const { id, thumbnailPath } of people) {
track(thumbnailPath);
const entity = { entityId: id, entityType: PathEntityType.PERSON };
if (thumbnailPath && !hasFile(thumbFiles, thumbnailPath)) {
orphans.push({ ...entity, pathType: PersonPathType.FACE, pathValue: thumbnailPath });
}
}
}
this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${people.length} people`);
this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${people.length} people`);
}
const extras: string[] = [];
for (const file of allFiles) {

View file

@ -2,7 +2,8 @@ export enum QueueName {
THUMBNAIL_GENERATION = 'thumbnailGeneration',
METADATA_EXTRACTION = 'metadataExtraction',
VIDEO_CONVERSION = 'videoConversion',
RECOGNIZE_FACES = 'recognizeFaces',
FACE_DETECTION = 'faceDetection',
FACIAL_RECOGNITION = 'facialRecognition',
SMART_SEARCH = 'smartSearch',
BACKGROUND_TASK = 'backgroundTask',
STORAGE_TEMPLATE_MIGRATION = 'storageTemplateMigration',
@ -12,6 +13,11 @@ export enum QueueName {
LIBRARY = 'library',
}
export type ConcurrentQueueName = Exclude<
QueueName,
QueueName.STORAGE_TEMPLATE_MIGRATION | QueueName.FACIAL_RECOGNITION
>;
export enum JobCommand {
START = 'start',
PAUSE = 'pause',
@ -57,9 +63,10 @@ export enum JobName {
// facial recognition
PERSON_CLEANUP = 'person-cleanup',
PERSON_DELETE = 'person-delete',
QUEUE_RECOGNIZE_FACES = 'queue-recognize-faces',
RECOGNIZE_FACES = 'recognize-faces',
QUEUE_FACE_DETECTION = 'queue-face-detection',
FACE_DETECTION = 'face-detection',
QUEUE_FACIAL_RECOGNITION = 'queue-facial-recognition',
FACIAL_RECOGNITION = 'facial-recognition',
// library management
LIBRARY_SCAN = 'library-refresh',
@ -95,7 +102,6 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.DELETE_FILES]: QueueName.BACKGROUND_TASK,
[JobName.CLEAN_OLD_AUDIT_LOGS]: QueueName.BACKGROUND_TASK,
[JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
[JobName.PERSON_DELETE]: QueueName.BACKGROUND_TASK,
[JobName.USER_SYNC_USAGE]: QueueName.BACKGROUND_TASK,
// conversion
@ -124,8 +130,10 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.MIGRATE_PERSON]: QueueName.MIGRATION,
// facial recognition
[JobName.QUEUE_RECOGNIZE_FACES]: QueueName.RECOGNIZE_FACES,
[JobName.RECOGNIZE_FACES]: QueueName.RECOGNIZE_FACES,
[JobName.QUEUE_FACE_DETECTION]: QueueName.FACE_DETECTION,
[JobName.FACE_DETECTION]: QueueName.FACE_DETECTION,
[JobName.QUEUE_FACIAL_RECOGNITION]: QueueName.FACIAL_RECOGNITION,
[JobName.FACIAL_RECOGNITION]: QueueName.FACIAL_RECOGNITION,
// clip
[JobName.QUEUE_ENCODE_CLIP]: QueueName.SMART_SEARCH,

View file

@ -75,7 +75,10 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
[QueueName.SEARCH]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.RECOGNIZE_FACES]!: JobStatusDto;
[QueueName.FACE_DETECTION]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.FACIAL_RECOGNITION]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.SIDECAR]!: JobStatusDto;

View file

@ -35,3 +35,7 @@ export interface ISidecarWriteJob extends IEntityJob {
latitude?: number;
longitude?: number;
}
export interface IDeferrableJob extends IEntityJob {
deferred?: boolean;
}

View file

@ -104,7 +104,8 @@ describe(JobService.name, () => {
[QueueName.MIGRATION]: expectedJobStatus,
[QueueName.THUMBNAIL_GENERATION]: expectedJobStatus,
[QueueName.VIDEO_CONVERSION]: expectedJobStatus,
[QueueName.RECOGNIZE_FACES]: expectedJobStatus,
[QueueName.FACE_DETECTION]: expectedJobStatus,
[QueueName.FACIAL_RECOGNITION]: expectedJobStatus,
[QueueName.SIDECAR]: expectedJobStatus,
[QueueName.LIBRARY]: expectedJobStatus,
});
@ -189,12 +190,20 @@ describe(JobService.name, () => {
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } });
});
it('should handle a start recognize faces command', async () => {
it('should handle a start face detection command', async () => {
jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.RECOGNIZE_FACES, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.FACE_DETECTION, { command: JobCommand.START, force: false });
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_RECOGNIZE_FACES, data: { force: false } });
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACE_DETECTION, data: { force: false } });
});
it('should handle a start facial recognition command', async () => {
jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.FACIAL_RECOGNITION, { command: JobCommand.START, force: false });
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } });
});
it('should throw a bad request when an invalid queue is used', async () => {
@ -224,7 +233,7 @@ describe(JobService.name, () => {
[QueueName.BACKGROUND_TASK]: { concurrency: 10 },
[QueueName.SMART_SEARCH]: { concurrency: 10 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 10 },
[QueueName.RECOGNIZE_FACES]: { concurrency: 10 },
[QueueName.FACE_DETECTION]: { concurrency: 10 },
[QueueName.SEARCH]: { concurrency: 10 },
[QueueName.SIDECAR]: { concurrency: 10 },
[QueueName.LIBRARY]: { concurrency: 10 },
@ -237,7 +246,7 @@ describe(JobService.name, () => {
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.BACKGROUND_TASK, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SMART_SEARCH, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.RECOGNIZE_FACES, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.FACE_DETECTION, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SIDECAR, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.LIBRARY, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.MIGRATION, 10);
@ -280,7 +289,7 @@ describe(JobService.name, () => {
JobName.GENERATE_WEBP_THUMBNAIL,
JobName.GENERATE_THUMBHASH_THUMBNAIL,
JobName.ENCODE_CLIP,
JobName.RECOGNIZE_FACES,
JobName.FACE_DETECTION,
],
},
{
@ -289,7 +298,7 @@ describe(JobService.name, () => {
JobName.GENERATE_WEBP_THUMBNAIL,
JobName.GENERATE_THUMBHASH_THUMBNAIL,
JobName.ENCODE_CLIP,
JobName.RECOGNIZE_FACES,
JobName.FACE_DETECTION,
JobName.VIDEO_CONVERSION,
],
},
@ -299,7 +308,7 @@ describe(JobService.name, () => {
JobName.GENERATE_WEBP_THUMBNAIL,
JobName.GENERATE_THUMBHASH_THUMBNAIL,
JobName.ENCODE_CLIP,
JobName.RECOGNIZE_FACES,
JobName.FACE_DETECTION,
JobName.VIDEO_CONVERSION,
],
},
@ -308,7 +317,11 @@ describe(JobService.name, () => {
jobs: [],
},
{
item: { name: JobName.RECOGNIZE_FACES, data: { id: 'asset-1' } },
item: { name: JobName.FACE_DETECTION, data: { id: 'asset-1' } },
jobs: [JobName.QUEUE_FACIAL_RECOGNITION],
},
{
item: { name: JobName.FACIAL_RECOGNITION, data: { id: 'asset-1' } },
jobs: [],
},
];
@ -355,7 +368,12 @@ describe(JobService.name, () => {
configKey: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED,
},
{
queue: QueueName.RECOGNIZE_FACES,
queue: QueueName.FACE_DETECTION,
feature: FeatureFlag.FACIAL_RECOGNITION,
configKey: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_ENABLED,
},
{
queue: QueueName.FACIAL_RECOGNITION,
feature: FeatureFlag.FACIAL_RECOGNITION,
configKey: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_ENABLED,
},

View file

@ -14,7 +14,7 @@ import {
QueueCleanType,
} from '../repositories';
import { FeatureFlag, SystemConfigCore } from '../system-config/system-config.core';
import { JobCommand, JobName, QueueName } from './job.constants';
import { ConcurrentQueueName, JobCommand, JobName, QueueName } from './job.constants';
import { AllJobStatusResponseDto, JobCommandDto, JobStatusDto } from './job.dto';
@Injectable()
@ -108,9 +108,13 @@ export class JobService {
case QueueName.THUMBNAIL_GENERATION:
return this.jobRepository.queue({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force } });
case QueueName.RECOGNIZE_FACES:
case QueueName.FACE_DETECTION:
await this.configCore.requireFeature(FeatureFlag.FACIAL_RECOGNITION);
return this.jobRepository.queue({ name: JobName.QUEUE_RECOGNIZE_FACES, data: { force } });
return this.jobRepository.queue({ name: JobName.QUEUE_FACE_DETECTION, data: { force } });
case QueueName.FACIAL_RECOGNITION:
await this.configCore.requireFeature(FeatureFlag.FACIAL_RECOGNITION);
return this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force } });
case QueueName.LIBRARY:
return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force } });
@ -124,7 +128,8 @@ export class JobService {
const config = await this.configCore.getConfig();
for (const queueName of Object.values(QueueName)) {
let concurrency = 1;
if (queueName !== QueueName.STORAGE_TEMPLATE_MIGRATION) {
if (this.isConcurrentQueue(queueName)) {
concurrency = config.job[queueName].concurrency;
}
@ -145,10 +150,10 @@ export class JobService {
}
this.configCore.config$.subscribe((config) => {
this.logger.log(`Updating queue concurrency settings`);
this.logger.debug(`Updating queue concurrency settings`);
for (const queueName of Object.values(QueueName)) {
let concurrency = 1;
if (queueName !== QueueName.STORAGE_TEMPLATE_MIGRATION) {
if (this.isConcurrentQueue(queueName)) {
concurrency = config.job[queueName].concurrency;
}
this.logger.debug(`Setting ${queueName} concurrency to ${concurrency}`);
@ -157,6 +162,10 @@ export class JobService {
});
}
private isConcurrentQueue(name: QueueName): name is ConcurrentQueueName {
return ![QueueName.FACIAL_RECOGNITION, QueueName.STORAGE_TEMPLATE_MIGRATION].includes(name);
}
async handleNightlyJobs() {
await this.jobRepository.queueAll([
{ name: JobName.ASSET_DELETION_CHECK },
@ -217,7 +226,7 @@ export class JobService {
{ name: JobName.GENERATE_WEBP_THUMBNAIL, data: item.data },
{ name: JobName.GENERATE_THUMBHASH_THUMBNAIL, data: item.data },
{ name: JobName.ENCODE_CLIP, data: item.data },
{ name: JobName.RECOGNIZE_FACES, data: item.data },
{ name: JobName.FACE_DETECTION, data: item.data },
];
const [asset] = await this.assetRepository.getByIds([item.data.id]);
@ -244,6 +253,12 @@ export class JobService {
if (asset && asset.isVisible) {
this.communicationRepository.send(ClientEvent.UPLOAD_SUCCESS, asset.ownerId, mapAsset(asset));
}
break;
}
case JobName.FACE_DETECTION: {
await this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: item.data });
break;
}
}
}

View file

@ -70,7 +70,10 @@ describe(MediaService.name, () => {
items: [assetStub.image],
hasNextPage: false,
});
personMock.getAll.mockResolvedValue([personStub.newThumbnail]);
personMock.getAll.mockResolvedValue({
items: [personStub.newThumbnail],
hasNextPage: false,
});
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);
await sut.handleQueueGenerateThumbnails({ force: true });
@ -84,8 +87,7 @@ describe(MediaService.name, () => {
},
]);
expect(personMock.getAll).toHaveBeenCalled();
expect(personMock.getAllWithoutThumbnail).not.toHaveBeenCalled();
expect(personMock.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, {});
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.GENERATE_PERSON_THUMBNAIL,
@ -99,7 +101,10 @@ describe(MediaService.name, () => {
items: [assetStub.image],
hasNextPage: false,
});
personMock.getAllWithoutThumbnail.mockResolvedValue([personStub.noThumbnail]);
personMock.getAll.mockResolvedValue({
items: [personStub.noThumbnail],
hasNextPage: false,
});
personMock.getRandomFace.mockResolvedValue(faceStub.face1);
await sut.handleQueueGenerateThumbnails({ force: false });
@ -107,8 +112,7 @@ describe(MediaService.name, () => {
expect(assetMock.getAll).not.toHaveBeenCalled();
expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.THUMBNAIL);
expect(personMock.getAll).not.toHaveBeenCalled();
expect(personMock.getAllWithoutThumbnail).toHaveBeenCalled();
expect(personMock.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { where: { thumbnailPath: '' } });
expect(personMock.getRandomFace).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
@ -125,7 +129,10 @@ describe(MediaService.name, () => {
items: [assetStub.noResizePath],
hasNextPage: false,
});
personMock.getAllWithoutThumbnail.mockResolvedValue([]);
personMock.getAll.mockResolvedValue({
items: [],
hasNextPage: false,
});
await sut.handleQueueGenerateThumbnails({ force: false });
@ -138,8 +145,7 @@ describe(MediaService.name, () => {
},
]);
expect(personMock.getAll).not.toHaveBeenCalled();
expect(personMock.getAllWithoutThumbnail).toHaveBeenCalled();
expect(personMock.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { where: { thumbnailPath: '' } });
});
it('should queue all assets with missing webp path', async () => {
@ -147,7 +153,10 @@ describe(MediaService.name, () => {
items: [assetStub.noWebpPath],
hasNextPage: false,
});
personMock.getAllWithoutThumbnail.mockResolvedValue([]);
personMock.getAll.mockResolvedValue({
items: [],
hasNextPage: false,
});
await sut.handleQueueGenerateThumbnails({ force: false });
@ -160,8 +169,7 @@ describe(MediaService.name, () => {
},
]);
expect(personMock.getAll).not.toHaveBeenCalled();
expect(personMock.getAllWithoutThumbnail).toHaveBeenCalled();
expect(personMock.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { where: { thumbnailPath: '' } });
});
it('should queue all assets with missing thumbhash', async () => {
@ -169,7 +177,10 @@ describe(MediaService.name, () => {
items: [assetStub.noThumbhash],
hasNextPage: false,
});
personMock.getAllWithoutThumbnail.mockResolvedValue([]);
personMock.getAll.mockResolvedValue({
items: [],
hasNextPage: false,
});
await sut.handleQueueGenerateThumbnails({ force: false });
@ -182,8 +193,7 @@ describe(MediaService.name, () => {
},
]);
expect(personMock.getAll).not.toHaveBeenCalled();
expect(personMock.getAllWithoutThumbnail).toHaveBeenCalled();
expect(personMock.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { where: { thumbnailPath: '' } });
});
});
@ -394,7 +404,10 @@ describe(MediaService.name, () => {
items: [assetStub.video],
hasNextPage: false,
});
personMock.getAll.mockResolvedValue([]);
personMock.getAll.mockResolvedValue({
items: [],
hasNextPage: false,
});
await sut.handleQueueVideoConversion({ force: true });

View file

@ -93,20 +93,24 @@ export class MediaService {
await this.jobRepository.queueAll(jobs);
}
const people = force ? await this.personRepository.getAll() : await this.personRepository.getAllWithoutThumbnail();
const jobs: JobItem[] = [];
for (const person of people) {
if (!person.faceAssetId) {
const face = await this.personRepository.getRandomFace(person.id);
if (!face) {
continue;
const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.personRepository.getAll(pagination, { where: force ? undefined : { thumbnailPath: '' } }),
);
for await (const people of personPagination) {
for (const person of people) {
if (!person.faceAssetId) {
const face = await this.personRepository.getRandomFace(person.id);
if (!face) {
continue;
}
await this.personRepository.update({ id: person.id, faceAssetId: face.assetId });
}
await this.personRepository.update({ id: person.id, faceAssetId: face.assetId });
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
}
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
}
await this.jobRepository.queueAll(jobs);
@ -131,11 +135,16 @@ export class MediaService {
);
}
const people = await this.personRepository.getAll();
await this.jobRepository.queueAll(
people.map((person) => ({ name: JobName.MIGRATE_PERSON, data: { id: person.id } })),
const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.personRepository.getAll(pagination),
);
for await (const people of personPagination) {
await this.jobRepository.queueAll(
people.map((person) => ({ name: JobName.MIGRATE_PERSON, data: { id: person.id } })),
);
}
return true;
}

View file

@ -18,10 +18,12 @@ import {
newSystemConfigRepositoryMock,
personStub,
} from '@test';
import { IsNull } from 'typeorm';
import { BulkIdErrorReason } from '../asset';
import { CacheControl, ImmichFileResponse } from '../domain.util';
import { JobName } from '../job';
import {
FaceSearchResult,
IAssetRepository,
ICryptoRepository,
IJobRepository,
@ -120,7 +122,7 @@ describe(PersonService.name, () => {
people: [responseDto],
});
expect(personMock.getAllForUser).toHaveBeenCalledWith(authStub.admin.user.id, {
minimumFaceCount: 1,
minimumFaceCount: 3,
withHidden: false,
});
});
@ -132,7 +134,7 @@ describe(PersonService.name, () => {
people: [responseDto],
});
expect(personMock.getAllForUser).toHaveBeenCalledWith(authStub.admin.user.id, {
minimumFaceCount: 1,
minimumFaceCount: 3,
withHidden: false,
});
});
@ -153,7 +155,7 @@ describe(PersonService.name, () => {
],
});
expect(personMock.getAllForUser).toHaveBeenCalledWith(authStub.admin.user.id, {
minimumFaceCount: 1,
minimumFaceCount: 3,
withHidden: true,
});
});
@ -516,51 +518,22 @@ describe(PersonService.name, () => {
});
});
describe('handlePersonDelete', () => {
it('should stop if a person has not be found', async () => {
personMock.getById.mockResolvedValue(null);
await expect(sut.handlePersonDelete({ id: 'person-1' })).resolves.toBe(false);
expect(personMock.update).not.toHaveBeenCalled();
expect(storageMock.unlink).not.toHaveBeenCalled();
});
it('should delete a person', async () => {
personMock.getById.mockResolvedValue(personStub.primaryPerson);
await expect(sut.handlePersonDelete({ id: 'person-1' })).resolves.toBe(true);
expect(personMock.delete).toHaveBeenCalledWith(personStub.primaryPerson);
expect(storageMock.unlink).toHaveBeenCalledWith(personStub.primaryPerson.thumbnailPath);
});
});
describe('handlePersonDelete', () => {
it('should delete person', async () => {
personMock.getById.mockResolvedValue(personStub.withName);
await sut.handlePersonDelete({ id: personStub.withName.id });
expect(personMock.delete).toHaveBeenCalledWith(personStub.withName);
expect(storageMock.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
});
});
describe('handlePersonCleanup', () => {
it('should delete people without faces', async () => {
personMock.getAllWithoutFaces.mockResolvedValue([personStub.noName]);
await sut.handlePersonCleanup();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.PERSON_DELETE, data: { id: personStub.noName.id } },
]);
expect(personMock.delete).toHaveBeenCalledWith([personStub.noName]);
expect(storageMock.unlink).toHaveBeenCalledWith(personStub.noName.thumbnailPath);
});
});
describe('handleQueueRecognizeFaces', () => {
describe('handleQueueDetectFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
await expect(sut.handleQueueDetectFaces({})).resolves.toBe(true);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
@ -571,12 +544,13 @@ describe(PersonService.name, () => {
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({});
await sut.handleQueueDetectFaces({});
expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.RECOGNIZE_FACES,
name: JobName.FACE_DETECTION,
data: { id: assetStub.image.id },
},
]);
@ -587,39 +561,133 @@ describe(PersonService.name, () => {
items: [assetStub.image],
hasNextPage: false,
});
personMock.getAll.mockResolvedValue([personStub.withName]);
personMock.deleteAll.mockResolvedValue(5);
personMock.getAll.mockResolvedValue({
items: [personStub.withName],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({ force: true });
await sut.handleQueueDetectFaces({ force: true });
expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.RECOGNIZE_FACES,
name: JobName.FACE_DETECTION,
data: { id: assetStub.image.id },
},
]);
});
it('should delete existing people and faces if forced', async () => {
personMock.getAll.mockResolvedValue({
items: [faceStub.face1.person],
hasNextPage: false,
});
personMock.getAllFaces.mockResolvedValue({
items: [faceStub.face1],
hasNextPage: false,
});
assetMock.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueDetectFaces({ force: true });
expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.PERSON_DELETE,
data: { id: personStub.withName.id },
name: JobName.FACE_DETECTION,
data: { id: assetStub.image.id },
},
]);
expect(personMock.delete).toHaveBeenCalledWith([faceStub.face1.person]);
expect(storageMock.unlink).toHaveBeenCalledWith(faceStub.face1.person.thumbnailPath);
});
});
describe('handleRecognizeFaces', () => {
describe('handleQueueRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleRecognizeFaces({ id: 'foo' })).resolves.toBe(true);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
it('should queue missing assets', async () => {
personMock.getAllFaces.mockResolvedValue({
items: [faceStub.face1],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({});
expect(personMock.getAllFaces).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { where: { personId: IsNull() } });
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
data: { id: faceStub.face1.id, deferred: false },
},
]);
});
it('should queue all assets', async () => {
personMock.getAll.mockResolvedValue({
items: [],
hasNextPage: false,
});
personMock.getAllFaces.mockResolvedValue({
items: [faceStub.face1],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({ force: true });
expect(personMock.getAllFaces).toHaveBeenCalledWith({ skip: 0, take: 1000 }, {});
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
data: { id: faceStub.face1.id, deferred: false },
},
]);
});
it('should delete existing people and faces if forced', async () => {
personMock.getAll.mockResolvedValue({
items: [faceStub.face1.person],
hasNextPage: false,
});
personMock.getAllFaces.mockResolvedValue({
items: [faceStub.face1],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({ force: true });
expect(personMock.getAllFaces).toHaveBeenCalledWith({ skip: 0, take: 1000 }, {});
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
data: { id: faceStub.face1.id, deferred: false },
},
]);
expect(personMock.delete).toHaveBeenCalledWith([faceStub.face1.person]);
expect(storageMock.unlink).toHaveBeenCalledWith(faceStub.face1.person.thumbnailPath);
});
});
describe('handleDetectFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
it('should skip when no resize path', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleRecognizeFaces({ id: assetStub.noResizePath.id });
await sut.handleDetectFaces({ id: assetStub.noResizePath.id });
expect(machineLearningMock.detectFaces).not.toHaveBeenCalled();
});
@ -636,7 +704,7 @@ describe(PersonService.name, () => {
],
},
]);
await sut.handleRecognizeFaces({ id: assetStub.noResizePath.id });
await sut.handleDetectFaces({ id: assetStub.noResizePath.id });
expect(machineLearningMock.detectFaces).not.toHaveBeenCalled();
});
@ -645,7 +713,7 @@ describe(PersonService.name, () => {
machineLearningMock.detectFaces.mockResolvedValue([]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(machineLearningMock.detectFaces).toHaveBeenCalledWith(
'http://immich-machine-learning:3003',
{
@ -655,7 +723,7 @@ describe(PersonService.name, () => {
enabled: true,
maxDistance: 0.6,
minScore: 0.7,
minFaces: 1,
minFaces: 3,
modelName: 'buffalo_l',
},
);
@ -670,37 +738,13 @@ describe(PersonService.name, () => {
expect(assetMock.upsertJobStatus.mock.calls[0][0].facesRecognizedAt?.getTime()).toBeGreaterThan(start);
});
it('should match existing people', async () => {
it('should create a face with no person', async () => {
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
smartInfoMock.searchFaces.mockResolvedValue([faceStub.face1]);
smartInfoMock.searchFaces.mockResolvedValue([{ face: faceStub.face1, distance: 0.7 }]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(personMock.createFace).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
});
it('should create a new person', async () => {
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
smartInfoMock.searchFaces.mockResolvedValue([]);
personMock.create.mockResolvedValue(personStub.noName);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
personMock.createFace.mockResolvedValue(faceStub.primaryFace1);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
expect(personMock.create).toHaveBeenCalledWith({ ownerId: assetStub.image.ownerId });
expect(personMock.createFace).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
@ -710,8 +754,130 @@ describe(PersonService.name, () => {
imageHeight: 500,
imageWidth: 400,
});
expect(personMock.reassignFace).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
});
describe('handleRecognizeFaces', () => {
it('should return false if face does not exist', async () => {
personMock.getFaceByIdWithAssets.mockResolvedValue(null);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(false);
expect(personMock.reassignFaces).not.toHaveBeenCalled();
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.createFace).not.toHaveBeenCalled();
});
it('should return true if face already has an assigned person', async () => {
personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.face1);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(true);
expect(personMock.reassignFaces).not.toHaveBeenCalled();
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.createFace).not.toHaveBeenCalled();
});
it('should match existing person', async () => {
if (!faceStub.primaryFace1.person) {
throw new Error('faceStub.primaryFace1.person is null');
}
const faces = [
{ face: faceStub.noPerson1, distance: 0.0 },
{ face: faceStub.primaryFace1, distance: 0.2 },
{ face: faceStub.noPerson2, distance: 0.3 },
{ face: faceStub.face1, distance: 0.4 },
] as FaceSearchResult[];
configMock.load.mockResolvedValue([
{ key: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_MIN_FACES, value: 1 },
]);
smartInfoMock.searchFaces.mockResolvedValue(faces);
personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.noPerson1);
personMock.create.mockResolvedValue(faceStub.primaryFace1.person);
await sut.handleRecognizeFaces({ id: faceStub.noPerson1.id });
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.reassignFaces).toHaveBeenCalledTimes(1);
expect(personMock.reassignFaces).toHaveBeenCalledWith({
faceIds: expect.arrayContaining([faceStub.noPerson1.id]),
newPersonId: faceStub.primaryFace1.person.id,
});
expect(personMock.reassignFaces).toHaveBeenCalledWith({
faceIds: expect.not.arrayContaining([faceStub.face1.id]),
newPersonId: faceStub.primaryFace1.person.id,
});
});
it('should create a new person if the face is a core point with no person', async () => {
const faces = [
{ face: faceStub.noPerson1, distance: 0.0 },
{ face: faceStub.noPerson2, distance: 0.3 },
] as FaceSearchResult[];
configMock.load.mockResolvedValue([
{ key: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_MIN_FACES, value: 1 },
]);
smartInfoMock.searchFaces.mockResolvedValue(faces);
personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.noPerson1);
personMock.create.mockResolvedValue(personStub.withName);
await sut.handleRecognizeFaces({ id: faceStub.noPerson1.id });
expect(personMock.create).toHaveBeenCalledWith({
ownerId: faceStub.noPerson1.asset.ownerId,
faceAssetId: faceStub.noPerson1.id,
});
expect(personMock.reassignFaces).toHaveBeenCalledWith({
faceIds: [faceStub.noPerson1.id],
newPersonId: personStub.withName.id,
});
});
it('should defer non-core faces to end of queue', async () => {
// Only one match but minFaces=2: the face is NOT a core point, so instead of
// assigning it immediately, the job re-queues itself with `deferred: true`.
const faces = [{ face: faceStub.noPerson1, distance: 0.0 }] as FaceSearchResult[];
configMock.load.mockResolvedValue([
{ key: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_MIN_FACES, value: 2 },
]);
smartInfoMock.searchFaces.mockResolvedValue(faces);
personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.noPerson1);
personMock.create.mockResolvedValue(personStub.withName);
await sut.handleRecognizeFaces({ id: faceStub.noPerson1.id });
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.FACIAL_RECOGNITION,
data: { id: faceStub.noPerson1.id, deferred: true },
});
// Deferral happens before the second (hasPerson) search and before any
// person creation/assignment side effects.
expect(smartInfoMock.searchFaces).toHaveBeenCalledTimes(1);
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
it('should not assign person to non-core face with no matching person', async () => {
// A deferred non-core face whose follow-up search (hasPerson: true) returns
// nothing must end the job with no person created and no reassignment.
const faces = [{ face: faceStub.noPerson1, distance: 0.0 }] as FaceSearchResult[];
configMock.load.mockResolvedValue([
{ key: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_MIN_FACES, value: 2 },
]);
// First search: the initial neighbor query; second search: the person-only query -> empty.
smartInfoMock.searchFaces.mockResolvedValueOnce(faces).mockResolvedValueOnce([]);
personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.noPerson1);
personMock.create.mockResolvedValue(personStub.withName);
await sut.handleRecognizeFaces({ id: faceStub.noPerson1.id, deferred: true });
// Already deferred — it must not be re-queued a second time.
expect(jobMock.queue).not.toHaveBeenCalled();
expect(smartInfoMock.searchFaces).toHaveBeenCalledTimes(2);
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
});
describe('handleGeneratePersonThumbnail', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
@ -822,7 +988,6 @@ describe(PersonService.name, () => {
it('should require person.write and person.merge permission', async () => {
personMock.getById.mockResolvedValueOnce(personStub.primaryPerson);
personMock.getById.mockResolvedValueOnce(personStub.mergePerson);
personMock.delete.mockResolvedValue(personStub.mergePerson);
await expect(sut.mergePerson(authStub.admin, 'person-1', { ids: ['person-2'] })).rejects.toBeInstanceOf(
BadRequestException,
@ -837,7 +1002,6 @@ describe(PersonService.name, () => {
it('should merge two people without smart merge', async () => {
personMock.getById.mockResolvedValueOnce(personStub.primaryPerson);
personMock.getById.mockResolvedValueOnce(personStub.mergePerson);
personMock.delete.mockResolvedValue(personStub.mergePerson);
accessMock.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-1']));
accessMock.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-2']));
@ -852,17 +1016,12 @@ describe(PersonService.name, () => {
expect(personMock.update).not.toHaveBeenCalled();
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.PERSON_DELETE,
data: { id: personStub.mergePerson.id },
});
expect(accessMock.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
});
it('should merge two people with smart merge', async () => {
personMock.getById.mockResolvedValueOnce(personStub.randomPerson);
personMock.getById.mockResolvedValueOnce(personStub.primaryPerson);
personMock.delete.mockResolvedValue(personStub.primaryPerson);
personMock.update.mockResolvedValue({ ...personStub.randomPerson, name: personStub.primaryPerson.name });
accessMock.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-3']));
accessMock.person.checkOwnerAccess.mockResolvedValueOnce(new Set(['person-1']));
@ -881,10 +1040,7 @@ describe(PersonService.name, () => {
name: personStub.primaryPerson.name,
});
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.PERSON_DELETE,
data: { id: personStub.primaryPerson.id },
});
expect(personMock.delete).toHaveBeenCalledWith([personStub.primaryPerson]);
expect(accessMock.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
});
@ -954,7 +1110,7 @@ describe(PersonService.name, () => {
boundingBoxX2: 1,
boundingBoxY1: 0,
boundingBoxY2: 1,
id: 'assetFaceId',
id: faceStub.face1.id,
imageHeight: 1024,
imageWidth: 1024,
person: mapPerson(personStub.withName),

View file

@ -2,12 +2,13 @@ import { PersonEntity } from '@app/infra/entities';
import { PersonPathType } from '@app/infra/entities/move.entity';
import { ImmichLogger } from '@app/infra/logger';
import { BadRequestException, Inject, Injectable, NotFoundException } from '@nestjs/common';
import { IsNull } from 'typeorm';
import { AccessCore, Permission } from '../access';
import { AssetResponseDto, BulkIdErrorReason, BulkIdResponseDto, mapAsset } from '../asset';
import { AuthDto } from '../auth';
import { mimeTypes } from '../domain.constant';
import { CacheControl, ImmichFileResponse, usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import { IBaseJob, IDeferrableJob, IEntityJob, JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from '../job';
import { FACE_THUMBNAIL_SIZE } from '../media';
import {
CropOptions,
@ -249,64 +250,63 @@ export class PersonService {
return results;
}
async handlePersonDelete({ id }: IEntityJob) {
const person = await this.repository.getById(id);
if (!person) {
return false;
}
/** Deletes the given people: removes their thumbnails from disk (in parallel), then the DB rows. */
private async delete(people: PersonEntity[]) {
  const removeThumbnail = (person: PersonEntity) => this.storageRepository.unlink(person.thumbnailPath);
  await Promise.all(people.map(removeThumbnail));
  await this.repository.delete(people);
  this.logger.debug(`Deleted ${people.length} people`);
}
try {
await this.repository.delete(person);
await this.storageRepository.unlink(person.thumbnailPath);
} catch (error: Error | any) {
this.logger.error(`Unable to delete person: ${error}`, error?.stack);
}
private async deleteAllPeople() {
const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.repository.getAll(pagination),
);
return true;
for await (const people of personPagination) {
await this.delete(people); // deletes thumbnails too
}
}
async handlePersonCleanup() {
const people = await this.repository.getAllWithoutFaces();
for (const person of people) {
this.logger.debug(`Person ${person.name || person.id} no longer has any faces, deleting.`);
}
await this.jobRepository.queueAll(
people.map((person) => ({ name: JobName.PERSON_DELETE, data: { id: person.id } })),
);
await this.delete(people);
return true;
}
async handleQueueRecognizeFaces({ force }: IBaseJob) {
async handleQueueDetectFaces({ force }: IBaseJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}
if (force) {
await this.deleteAllPeople();
await this.repository.deleteAllFaces();
}
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { order: 'DESC' })
? this.assetRepository.getAll(pagination, {
order: 'DESC',
withFaces: true,
withPeople: false,
withSmartInfo: false,
withSmartSearch: false,
withExif: false,
withStacked: false,
})
: this.assetRepository.getWithout(pagination, WithoutProperty.FACES);
});
if (force) {
const people = await this.repository.getAll();
await this.jobRepository.queueAll(
people.map((person) => ({ name: JobName.PERSON_DELETE, data: { id: person.id } })),
);
this.logger.debug(`Deleted ${people.length} people`);
}
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.RECOGNIZE_FACES, data: { id: asset.id } })),
assets.map((asset) => ({ name: JobName.FACE_DETECTION, data: { id: asset.id } })),
);
}
return true;
}
async handleRecognizeFaces({ id }: IEntityJob) {
async handleDetectFaces({ id }: IEntityJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
@ -315,7 +315,7 @@ export class PersonService {
const relations = {
exifInfo: true,
faces: {
person: true,
person: false,
},
};
const [asset] = await this.assetRepository.getByIds([id], relations);
@ -332,38 +332,19 @@ export class PersonService {
this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `vector(${face.embedding.length})` })));
for (const { embedding, ...rest } of faces) {
const matches = await this.smartInfoRepository.searchFaces({
userIds: [asset.ownerId],
embedding,
numResults: 1,
maxDistance: machineLearning.facialRecognition.maxDistance,
});
let personId = matches[0]?.personId || null;
let newPerson: PersonEntity | null = null;
if (!personId) {
this.logger.debug('No matches, creating a new person.');
newPerson = await this.repository.create({ ownerId: asset.ownerId });
personId = newPerson.id;
}
const face = await this.repository.createFace({
for (const face of faces) {
const mappedFace = {
assetId: asset.id,
personId,
embedding,
imageHeight: rest.imageHeight,
imageWidth: rest.imageWidth,
boundingBoxX1: rest.boundingBox.x1,
boundingBoxX2: rest.boundingBox.x2,
boundingBoxY1: rest.boundingBox.y1,
boundingBoxY2: rest.boundingBox.y2,
});
embedding: face.embedding,
imageHeight: face.imageHeight,
imageWidth: face.imageWidth,
boundingBoxX1: face.boundingBox.x1,
boundingBoxX2: face.boundingBox.x2,
boundingBoxY1: face.boundingBox.y1,
boundingBoxY2: face.boundingBox.y2,
};
if (newPerson) {
await this.repository.update({ id: personId, faceAssetId: face.id });
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } });
}
await this.repository.createFace(mappedFace);
}
await this.assetRepository.upsertJobStatus({
@ -374,6 +355,98 @@ export class PersonService {
return true;
}
/**
 * Queues a FACIAL_RECOGNITION job for every relevant face.
 *
 * Waits for thumbnail generation and face detection to drain first so that
 * embeddings are complete. With `force`, all existing people are deleted and
 * every face is re-clustered; otherwise only faces with no person are queued.
 */
async handleQueueRecognizeFaces({ force }: IBaseJob) {
  const { machineLearning } = await this.configCore.getConfig();
  if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
    return true;
  }

  await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);

  if (force) {
    await this.deleteAllPeople();
  }

  // `where: undefined` selects all faces (force); otherwise only unassigned ones.
  const where = force ? undefined : { personId: IsNull() };
  const facePages = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
    this.repository.getAllFaces(pagination, { where }),
  );

  for await (const faces of facePages) {
    const jobs = faces.map(({ id }) => ({ name: JobName.FACIAL_RECOGNITION, data: { id, deferred: false } }));
    await this.jobRepository.queueAll(jobs);
  }

  return true;
}
/**
 * Assigns a single detected face to a person using DBSCAN-style clustering.
 *
 * A face with at least `minFaces` neighbors within `maxDistance` is a "core"
 * point and may seed a new person; non-core faces are deferred once so that
 * core faces are clustered first, then attach to an existing person if possible.
 */
async handleRecognizeFaces({ id, deferred }: IDeferrableJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}
// Only id/personId/embedding are selected — the embedding is excluded by default.
const face = await this.repository.getFaceByIdWithAssets(
id,
{ person: true, asset: true },
{ id: true, personId: true, embedding: true },
);
if (!face) {
this.logger.warn(`Face ${id} not found`);
return false;
}
if (face.personId) {
// Nothing to do — idempotent for already-clustered faces.
this.logger.debug(`Face ${id} already has a person assigned`);
return true;
}
// Neighbor query: up to `minFaces` matches within `maxDistance`.
const matches = await this.smartInfoRepository.searchFaces({
userIds: [face.asset.ownerId],
embedding: face.embedding,
maxDistance: machineLearning.facialRecognition.maxDistance,
numResults: machineLearning.facialRecognition.minFaces,
});
this.logger.debug(`Face ${id} has ${matches.length} match${matches.length != 1 ? 'es' : ''}`);
const isCore = matches.length >= machineLearning.facialRecognition.minFaces;
if (!isCore && !deferred) {
// Push non-core faces to the back of the queue so core faces cluster first.
this.logger.debug(`Deferring non-core face ${id} for later processing`);
await this.jobRepository.queue({ name: JobName.FACIAL_RECOGNITION, data: { id, deferred: true } });
return true;
}
let personId = matches.find((match) => match.face.personId)?.face.personId; // `matches` also includes the face itself
if (!personId) {
// None of the neighbors has a person yet — search specifically for the
// nearest face that already belongs to one.
const matchWithPerson = await this.smartInfoRepository.searchFaces({
userIds: [face.asset.ownerId],
embedding: face.embedding,
maxDistance: machineLearning.facialRecognition.maxDistance,
numResults: 1,
hasPerson: true,
});
if (matchWithPerson.length > 0) {
personId = matchWithPerson[0].face.personId;
}
}
if (isCore && !personId) {
// Core point with no existing cluster: seed a new person from this face.
this.logger.log(`Creating new person for face ${id}`);
const newPerson = await this.repository.create({ ownerId: face.asset.ownerId, faceAssetId: face.id });
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } });
personId = newPerson.id;
}
if (personId) {
this.logger.debug(`Assigning face ${id} to person ${personId}`);
await this.repository.reassignFaces({ faceIds: [id], newPersonId: personId });
}
// Non-core, deferred faces with no nearby person fall through unassigned.
return true;
}
async handlePersonMigration({ id }: IEntityJob) {
const person = await this.repository.getById(id);
if (!person) {
@ -499,7 +572,7 @@ export class PersonService {
this.logger.log(`Merging ${mergeName} into ${primaryName}`);
await this.repository.reassignFaces(mergeData);
await this.jobRepository.queue({ name: JobName.PERSON_DELETE, data: { id: mergePerson.id } });
await this.delete([mergePerson]);
this.logger.log(`Merged ${mergeName} into ${primaryName}`);
results.push({ id: mergeId, success: true });

View file

@ -1,6 +1,6 @@
import { SearchExploreItem } from '@app/domain';
import { AssetEntity, AssetJobStatusEntity, AssetType, ExifEntity } from '@app/infra/entities';
import { FindOptionsRelations } from 'typeorm';
import { FindOptionsRelations, FindOptionsSelect } from 'typeorm';
import { Paginated, PaginationOptions } from '../domain.util';
export type AssetStats = Record<AssetType, number>;
@ -33,6 +33,9 @@ export interface AssetSearchOptions {
withStacked?: boolean;
withExif?: boolean;
withPeople?: boolean;
withSmartInfo?: boolean;
withSmartSearch?: boolean;
withFaces?: boolean;
createdBefore?: Date;
createdAfter?: Date;
@ -93,6 +96,7 @@ export enum WithoutProperty {
CLIP_ENCODING = 'clip-embedding',
OBJECT_TAGS = 'object-tags',
FACES = 'faces',
PERSON = 'person',
SIDECAR = 'sidecar',
}
@ -168,7 +172,11 @@ export const IAssetRepository = 'IAssetRepository';
export interface IAssetRepository {
create(asset: AssetCreate): Promise<AssetEntity>;
getByDate(ownerId: string, date: Date): Promise<AssetEntity[]>;
getByIds(ids: string[], relations?: FindOptionsRelations<AssetEntity>): Promise<AssetEntity[]>;
getByIds(
ids: string[],
relations?: FindOptionsRelations<AssetEntity>,
select?: FindOptionsSelect<AssetEntity>,
): Promise<AssetEntity[]>;
getByDayOfYear(ownerId: string, monthDay: MonthDay): Promise<AssetEntity[]>;
getByChecksum(userId: string, checksum: Buffer): Promise<AssetEntity | null>;
getByAlbumId(pagination: PaginationOptions, albumId: string): Paginated<AssetEntity>;

View file

@ -3,6 +3,7 @@ import { JobName, QueueName } from '../job/job.constants';
import {
IAssetDeletionJob,
IBaseJob,
IDeferrableJob,
IDeleteFilesJob,
IEntityJob,
ILibraryFileJob,
@ -63,11 +64,12 @@ export type JobItem =
| { name: JobName.SIDECAR_SYNC; data: IEntityJob }
| { name: JobName.SIDECAR_WRITE; data: ISidecarWriteJob }
// Recognize Faces
| { name: JobName.QUEUE_RECOGNIZE_FACES; data: IBaseJob }
| { name: JobName.RECOGNIZE_FACES; data: IEntityJob }
// Facial Recognition
| { name: JobName.QUEUE_FACE_DETECTION; data: IBaseJob }
| { name: JobName.FACE_DETECTION; data: IEntityJob }
| { name: JobName.QUEUE_FACIAL_RECOGNITION; data: IBaseJob }
| { name: JobName.FACIAL_RECOGNITION; data: IDeferrableJob }
| { name: JobName.GENERATE_PERSON_THUMBNAIL; data: IEntityJob }
| { name: JobName.PERSON_DELETE; data: IEntityJob }
// Clip Embedding
| { name: JobName.QUEUE_ENCODE_CLIP; data: IBaseJob }
@ -111,4 +113,5 @@ export interface IJobRepository {
clear(name: QueueName, type: QueueCleanType): Promise<string[]>;
getQueueStatus(name: QueueName): Promise<QueueStatus>;
getJobCounts(name: QueueName): Promise<JobCounts>;
waitForQueueCompletion(...queues: QueueName[]): Promise<void>;
}

View file

@ -1,4 +1,6 @@
import { AssetEntity, AssetFaceEntity, PersonEntity } from '@app/infra/entities';
import { FindManyOptions, FindOptionsRelations, FindOptionsSelect } from 'typeorm';
import { Paginated, PaginationOptions } from '../domain.util';
export const IPersonRepository = 'IPersonRepository';
@ -17,7 +19,8 @@ export interface AssetFaceId {
}
export interface UpdateFacesData {
oldPersonId: string;
oldPersonId?: string;
faceIds?: string[];
newPersonId: string;
}
@ -26,8 +29,7 @@ export interface PersonStatistics {
}
export interface IPersonRepository {
getAll(): Promise<PersonEntity[]>;
getAllWithoutThumbnail(): Promise<PersonEntity[]>;
getAll(pagination: PaginationOptions, options?: FindManyOptions<PersonEntity>): Paginated<PersonEntity>;
getAllForUser(userId: string, options: PersonSearchOptions): Promise<PersonEntity[]>;
getAllWithoutFaces(): Promise<PersonEntity[]>;
getById(personId: string): Promise<PersonEntity | null>;
@ -35,19 +37,23 @@ export interface IPersonRepository {
getAssets(personId: string): Promise<AssetEntity[]>;
reassignFaces(data: UpdateFacesData): Promise<number>;
create(entity: Partial<PersonEntity>): Promise<PersonEntity>;
update(entity: Partial<PersonEntity>): Promise<PersonEntity>;
delete(entity: PersonEntity): Promise<PersonEntity | null>;
deleteAll(): Promise<number>;
getStatistics(personId: string): Promise<PersonStatistics>;
getAllFaces(): Promise<AssetFaceEntity[]>;
createFace(entity: Partial<AssetFaceEntity>): Promise<void>;
delete(entities: PersonEntity[]): Promise<void>;
deleteAll(): Promise<void>;
deleteAllFaces(): Promise<void>;
getAllFaces(pagination: PaginationOptions, options?: FindManyOptions<AssetFaceEntity>): Paginated<AssetFaceEntity>;
getFaceById(id: string): Promise<AssetFaceEntity>;
getFaceByIdWithAssets(
id: string,
relations?: FindOptionsRelations<AssetFaceEntity>,
select?: FindOptionsSelect<AssetFaceEntity>,
): Promise<AssetFaceEntity | null>;
getFaces(assetId: string): Promise<AssetFaceEntity[]>;
getFacesByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]>;
getRandomFace(personId: string): Promise<AssetFaceEntity | null>;
createFace(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity>;
getFaces(assetId: string): Promise<AssetFaceEntity[]>;
getStatistics(personId: string): Promise<PersonStatistics>;
reassignFace(assetFaceId: string, newPersonId: string): Promise<number>;
getFaceById(id: string): Promise<AssetFaceEntity>;
getFaceByIdWithAssets(id: string): Promise<AssetFaceEntity | null>;
reassignFaces(data: UpdateFacesData): Promise<number>;
update(entity: Partial<PersonEntity>): Promise<PersonEntity>;
}

View file

@ -7,14 +7,23 @@ export type Embedding = number[];
export interface EmbeddingSearch {
userIds: string[];
embedding: Embedding;
numResults: number;
maxDistance?: number;
numResults?: number;
withArchived?: boolean;
}
export interface FaceEmbeddingSearch extends EmbeddingSearch {
maxDistance?: number;
hasPerson?: boolean;
}
export interface FaceSearchResult {
face: AssetFaceEntity;
distance: number;
}
export interface ISmartInfoRepository {
init(modelName: string): Promise<void>;
searchCLIP(search: EmbeddingSearch): Promise<AssetEntity[]>;
searchFaces(search: EmbeddingSearch): Promise<AssetFaceEntity[]>;
searchFaces(search: FaceEmbeddingSearch): Promise<FaceSearchResult[]>;
upsert(smartInfo: Partial<SmartInfoEntity>, embedding?: Embedding): Promise<void>;
}

View file

@ -30,14 +30,14 @@ export class RecognitionConfig extends ModelConfig {
@Min(0)
@Max(1)
@Type(() => Number)
@ApiProperty({ type: 'integer' })
@ApiProperty({ type: 'number', format: 'float' })
minScore!: number;
@IsNumber()
@Min(0)
@Max(2)
@Type(() => Number)
@ApiProperty({ type: 'integer' })
@ApiProperty({ type: 'number', format: 'float' })
maxDistance!: number;
@IsNumber()

View file

@ -1,6 +1,5 @@
import { ImmichLogger } from '@app/infra/logger';
import { Inject, Injectable } from '@nestjs/common';
import { setTimeout } from 'timers/promises';
import { usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from '../job';
import {
@ -34,13 +33,7 @@ export class SmartInfoService {
async init() {
await this.jobRepository.pause(QueueName.SMART_SEARCH);
let { isActive } = await this.jobRepository.getQueueStatus(QueueName.SMART_SEARCH);
while (isActive) {
this.logger.verbose('Waiting for CLIP encoding queue to stop...');
await setTimeout(1000).then(async () => {
({ isActive } = await this.jobRepository.getQueueStatus(QueueName.SMART_SEARCH));
});
}
await this.jobRepository.waitForQueueCompletion(QueueName.SMART_SEARCH);
const { machineLearning } = await this.configCore.getConfig();

View file

@ -1,7 +1,7 @@
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsInt, IsObject, IsPositive, ValidateNested } from 'class-validator';
import { QueueName } from '../../job';
import { ConcurrentQueueName, QueueName } from '../../job';
export class JobSettingsDto {
@IsInt()
@ -10,9 +10,7 @@ export class JobSettingsDto {
concurrency!: number;
}
export class SystemConfigJobDto
implements Record<Exclude<QueueName, QueueName.STORAGE_TEMPLATE_MIGRATION>, JobSettingsDto>
{
export class SystemConfigJobDto implements Record<ConcurrentQueueName, JobSettingsDto> {
@ApiProperty({ type: JobSettingsDto })
@ValidateNested()
@IsObject()
@ -59,7 +57,7 @@ export class SystemConfigJobDto
@ValidateNested()
@IsObject()
@Type(() => JobSettingsDto)
[QueueName.RECOGNIZE_FACES]!: JobSettingsDto;
[QueueName.FACE_DETECTION]!: JobSettingsDto;
@ApiProperty({ type: JobSettingsDto })
@ValidateNested()

View file

@ -49,7 +49,7 @@ export const defaults = Object.freeze<SystemConfig>({
[QueueName.BACKGROUND_TASK]: { concurrency: 5 },
[QueueName.SMART_SEARCH]: { concurrency: 2 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
[QueueName.RECOGNIZE_FACES]: { concurrency: 2 },
[QueueName.FACE_DETECTION]: { concurrency: 2 },
[QueueName.SEARCH]: { concurrency: 5 },
[QueueName.SIDECAR]: { concurrency: 5 },
[QueueName.LIBRARY]: { concurrency: 5 },
@ -73,7 +73,7 @@ export const defaults = Object.freeze<SystemConfig>({
modelName: 'buffalo_l',
minScore: 0.7,
maxDistance: 0.6,
minFaces: 1,
minFaces: 3,
},
},
map: {

View file

@ -30,7 +30,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
[QueueName.BACKGROUND_TASK]: { concurrency: 5 },
[QueueName.SMART_SEARCH]: { concurrency: 2 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
[QueueName.RECOGNIZE_FACES]: { concurrency: 2 },
[QueueName.FACE_DETECTION]: { concurrency: 2 },
[QueueName.SEARCH]: { concurrency: 5 },
[QueueName.SIDECAR]: { concurrency: 5 },
[QueueName.LIBRARY]: { concurrency: 5 },
@ -73,7 +73,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
modelName: 'buffalo_l',
minScore: 0.7,
maxDistance: 0.6,
minFaces: 1,
minFaces: 3,
},
},
map: {

View file

@ -15,7 +15,7 @@ export class AssetFaceEntity {
personId!: string | null;
@Index('face_index', { synchronize: false })
@Column({ type: 'float4', array: true, select: false })
@Column({ type: 'float4', array: true, select: false, transformer: { from: (v) => JSON.parse(v), to: (v) => v } })
embedding!: number[];
@Column({ default: 0, type: 'int' })
@ -39,6 +39,10 @@ export class AssetFaceEntity {
@ManyToOne(() => AssetEntity, (asset) => asset.faces, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
asset!: AssetEntity;
@ManyToOne(() => PersonEntity, (person) => person.faces, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: true })
@ManyToOne(() => PersonEntity, (person) => person.faces, {
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
nullable: true,
})
person!: PersonEntity | null;
}

View file

@ -1,4 +1,4 @@
import { QueueName } from '@app/domain';
import { ConcurrentQueueName } from '@app/domain';
import { Column, Entity, PrimaryColumn } from 'typeorm';
@Entity('system_config')
@ -35,7 +35,7 @@ export enum SystemConfigKey {
JOB_THUMBNAIL_GENERATION_CONCURRENCY = 'job.thumbnailGeneration.concurrency',
JOB_METADATA_EXTRACTION_CONCURRENCY = 'job.metadataExtraction.concurrency',
JOB_VIDEO_CONVERSION_CONCURRENCY = 'job.videoConversion.concurrency',
JOB_RECOGNIZE_FACES_CONCURRENCY = 'job.recognizeFaces.concurrency',
JOB_FACE_DETECTION_CONCURRENCY = 'job.faceDetection.concurrency',
JOB_CLIP_ENCODING_CONCURRENCY = 'job.smartSearch.concurrency',
JOB_BACKGROUND_TASK_CONCURRENCY = 'job.backgroundTask.concurrency',
JOB_STORAGE_TEMPLATE_MIGRATION_CONCURRENCY = 'job.storageTemplateMigration.concurrency',
@ -176,7 +176,7 @@ export interface SystemConfig {
accel: TranscodeHWAccel;
tonemap: ToneMapping;
};
job: Record<Exclude<QueueName, QueueName.STORAGE_TEMPLATE_MIGRATION>, { concurrency: number }>;
job: Record<ConcurrentQueueName, { concurrency: number }>;
logging: {
enabled: boolean;
level: LogLevel;

View file

@ -1,6 +1,6 @@
import { Paginated, PaginationOptions } from '@app/domain';
import _ from 'lodash';
import { Between, FindOneOptions, LessThanOrEqual, MoreThanOrEqual, ObjectLiteral, Repository } from 'typeorm';
import { Between, FindManyOptions, LessThanOrEqual, MoreThanOrEqual, ObjectLiteral, Repository } from 'typeorm';
import { chunks, setUnion } from '../domain/domain.util';
import { DATABASE_PARAMETER_CHUNK_SIZE } from './infra.util';
@ -21,14 +21,19 @@ export function OptionalBetween<T>(from?: T, to?: T) {
export async function paginate<Entity extends ObjectLiteral>(
repository: Repository<Entity>,
paginationOptions: PaginationOptions,
searchOptions?: FindOneOptions<Entity>,
searchOptions?: FindManyOptions<Entity>,
): Paginated<Entity> {
const items = await repository.find({
...searchOptions,
// Take one more item to check if there's a next page
take: paginationOptions.take + 1,
skip: paginationOptions.skip,
});
const items = await repository.find(
_.omitBy(
{
...searchOptions,
// Take one more item to check if there's a next page
take: paginationOptions.take + 1,
skip: paginationOptions.skip,
},
_.isUndefined,
),
);
const hasNextPage = items.length > paginationOptions.take;
items.splice(paginationOptions.take);

View file

@ -0,0 +1,24 @@
import { MigrationInterface, QueryRunner } from "typeorm"
/**
 * Changes the asset_faces -> person foreign key from ON DELETE CASCADE to
 * ON DELETE SET NULL, so deleting a person orphans its faces (allowing them
 * to be re-clustered) instead of deleting the face rows.
 */
export class SetAssetFaceNullOnPersonDelete1704943345360 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
ALTER TABLE "asset_faces"
DROP CONSTRAINT "FK_95ad7106dd7b484275443f580f9",
ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9"
FOREIGN KEY ("personId") REFERENCES "person"("id")
ON DELETE SET NULL ON UPDATE CASCADE
`);
}

// Restores the previous CASCADE behavior.
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
ALTER TABLE "asset_faces"
DROP CONSTRAINT "FK_95ad7106dd7b484275443f580f9",
ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9"
FOREIGN KEY ("personId") REFERENCES "person"("id")
ON DELETE CASCADE ON UPDATE CASCADE
`);
}
}

View file

@ -25,7 +25,18 @@ import { InjectRepository } from '@nestjs/typeorm';
import _ from 'lodash';
import { DateTime } from 'luxon';
import path from 'path';
import { And, Brackets, FindOptionsRelations, FindOptionsWhere, In, IsNull, LessThan, Not, Repository } from 'typeorm';
import {
And,
Brackets,
FindOptionsRelations,
FindOptionsSelect,
FindOptionsWhere,
In,
IsNull,
LessThan,
Not,
Repository,
} from 'typeorm';
import { AssetEntity, AssetJobStatusEntity, AssetType, ExifEntity, SmartInfoEntity } from '../entities';
import { DummyValue, GenerateSql } from '../infra.util';
import { Chunked, ChunkedArray, OptionalBetween, paginate } from '../infra.utils';
@ -103,6 +114,7 @@ export class AssetRepository implements IAssetRepository {
withExif: _withExif,
withStacked,
withPeople,
withSmartInfo,
order,
} = options;
@ -174,6 +186,10 @@ export class AssetRepository implements IAssetRepository {
builder.leftJoinAndSelect('asset.stack', 'stack');
}
if (withSmartInfo) {
builder.leftJoinAndSelect('asset.smartInfo', 'smartInfo');
}
if (withDeleted) {
builder.withDeleted();
}
@ -250,7 +266,11 @@ export class AssetRepository implements IAssetRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
@ChunkedArray()
getByIds(ids: string[], relations?: FindOptionsRelations<AssetEntity>): Promise<AssetEntity[]> {
getByIds(
ids: string[],
relations?: FindOptionsRelations<AssetEntity>,
select?: FindOptionsSelect<AssetEntity>,
): Promise<AssetEntity[]> {
if (!relations) {
relations = {
exifInfo: true,
@ -262,9 +282,11 @@ export class AssetRepository implements IAssetRepository {
stack: true,
};
}
return this.repository.find({
where: { id: In(ids) },
relations,
select,
withDeleted: true,
});
}
@ -325,12 +347,11 @@ export class AssetRepository implements IAssetRepository {
deletedAt: options.trashedBefore ? And(Not(IsNull()), LessThan(options.trashedBefore)) : undefined,
},
relations: {
exifInfo: true,
smartInfo: true,
tags: true,
faces: {
person: true,
},
exifInfo: options.withExif !== false,
smartInfo: options.withSmartInfo !== false,
tags: options.withSmartInfo !== false,
faces: options.withFaces !== false,
smartSearch: options.withSmartInfo === true,
},
withDeleted: options.withDeleted ?? !!options.trashedBefore,
order: {
@ -519,6 +540,20 @@ export class AssetRepository implements IAssetRepository {
};
break;
case WithoutProperty.PERSON:
relations = {
faces: true,
};
where = {
resizePath: Not(IsNull()),
isVisible: true,
faces: {
assetId: Not(IsNull()),
personId: IsNull(),
},
};
break;
case WithoutProperty.SIDECAR:
where = [
{ sidecarPath: IsNull(), isVisible: true },

View file

@ -64,7 +64,15 @@ export class FilesystemProvider implements IStorageRepository {
}
async unlink(file: string) {
await fs.unlink(file);
try {
await fs.unlink(file);
} catch (err) {
if ((err as NodeJS.ErrnoException)?.code === 'ENOENT') {
this.logger.warn(`File ${file} does not exist.`);
} else {
throw err;
}
}
}
stat = fs.stat;

View file

@ -15,6 +15,7 @@ import { ModuleRef } from '@nestjs/core';
import { SchedulerRegistry } from '@nestjs/schedule';
import { Job, JobsOptions, Processor, Queue, Worker, WorkerOptions } from 'bullmq';
import { CronJob, CronTime } from 'cron';
import { setTimeout } from 'timers/promises';
import { bullConfig } from '../infra.config';
@Injectable()
@ -121,26 +122,47 @@ export class JobRepository implements IJobRepository {
return;
}
const itemsByQueue = items.reduce<Record<string, JobItem[]>>((acc, item) => {
const promises = [];
const itemsByQueue = {} as Record<string, (JobItem & { data: any; options: JobsOptions | undefined })[]>;
for (const item of items) {
const queueName = JOBS_TO_QUEUE[item.name];
acc[queueName] = acc[queueName] || [];
acc[queueName].push(item);
return acc;
}, {});
for (const [queueName, items] of Object.entries(itemsByQueue)) {
const queue = this.getQueue(queueName as QueueName);
const jobs = items.map((item) => ({
const job = {
name: item.name,
data: (item as { data?: any })?.data || {},
data: item.data || {},
options: this.getJobOptions(item) || undefined,
}));
await queue.addBulk(jobs);
} as JobItem & { data: any; options: JobsOptions | undefined };
if (job.options?.jobId) {
// need to use add() instead of addBulk() for jobId deduplication
promises.push(this.getQueue(queueName).add(item.name, item.data, job.options));
} else {
itemsByQueue[queueName] = itemsByQueue[queueName] || [];
itemsByQueue[queueName].push(job);
}
}
for (const [queueName, jobs] of Object.entries(itemsByQueue)) {
const queue = this.getQueue(queueName as QueueName);
promises.push(queue.addBulk(jobs));
}
await Promise.all(promises);
}
async queue(item: JobItem): Promise<void> {
await this.queueAll([item]);
return this.queueAll([item]);
}
/**
 * Blocks until none of the given queues has an active job.
 *
 * Bug fix: the log/sleep block previously sat AFTER the `do…while`, so the
 * loop busy-polled `getQueueStatus` with no delay and the trailing block ran
 * once with `activeQueue` guaranteed to be `undefined`. The 1s backoff and
 * the verbose log now run inside the loop, only while a queue is still active.
 */
async waitForQueueCompletion(...queues: QueueName[]): Promise<void> {
  let activeQueue: QueueStatus | undefined;
  do {
    const statuses = await Promise.all(queues.map((name) => this.getQueueStatus(name)));
    activeQueue = statuses.find((status) => status.isActive);
    if (activeQueue) {
      // Back off before re-polling so we don't hammer the queue backend.
      this.logger.verbose(`Waiting for ${activeQueue} queue to stop...`);
      await setTimeout(1000);
    }
  } while (activeQueue);
}
private getJobOptions(item: JobItem): JobsOptions | null {
@ -149,6 +171,8 @@ export class JobRepository implements IJobRepository {
return { jobId: item.data.id };
case JobName.GENERATE_PERSON_THUMBNAIL:
return { priority: 1 };
case JobName.QUEUE_FACIAL_RECOGNITION:
return { jobId: JobName.QUEUE_FACIAL_RECOGNITION };
default:
return null;

View file

@ -16,7 +16,7 @@ const errorPrefix = 'Machine learning request';
@Injectable()
export class MachineLearningRepository implements IMachineLearningRepository {
private async post<T>(url: string, input: TextModelInput | VisionModelInput, config: ModelConfig): Promise<T> {
private async predict<T>(url: string, input: TextModelInput | VisionModelInput, config: ModelConfig): Promise<T> {
const formData = await this.getFormData(input, config);
const res = await fetch(`${url}/predict`, { method: 'POST', body: formData }).catch((error: Error | any) => {
@ -31,11 +31,11 @@ export class MachineLearningRepository implements IMachineLearningRepository {
}
detectFaces(url: string, input: VisionModelInput, config: RecognitionConfig): Promise<DetectFaceResult[]> {
return this.post<DetectFaceResult[]>(url, input, { ...config, modelType: ModelType.FACIAL_RECOGNITION });
return this.predict<DetectFaceResult[]>(url, input, { ...config, modelType: ModelType.FACIAL_RECOGNITION });
}
encodeImage(url: string, input: VisionModelInput, config: CLIPConfig): Promise<number[]> {
return this.post<number[]>(url, input, {
return this.predict<number[]>(url, input, {
...config,
modelType: ModelType.CLIP,
mode: CLIPMode.VISION,
@ -43,7 +43,11 @@ export class MachineLearningRepository implements IMachineLearningRepository {
}
encodeText(url: string, input: TextModelInput, config: CLIPConfig): Promise<number[]> {
return this.post<number[]>(url, input, { ...config, modelType: ModelType.CLIP, mode: CLIPMode.TEXT } as CLIPConfig);
return this.predict<number[]>(url, input, {
...config,
modelType: ModelType.CLIP,
mode: CLIPMode.TEXT,
} as CLIPConfig);
}
async getFormData(input: TextModelInput | VisionModelInput, config: ModelConfig): Promise<FormData> {

View file

@ -1,16 +1,19 @@
import {
AssetFaceId,
IPersonRepository,
Paginated,
PaginationOptions,
PersonNameSearchOptions,
PersonSearchOptions,
PersonStatistics,
UpdateFacesData,
} from '@app/domain';
import { InjectRepository } from '@nestjs/typeorm';
import { In, Repository } from 'typeorm';
import _ from 'lodash';
import { FindManyOptions, FindOptionsRelations, FindOptionsSelect, In, Repository } from 'typeorm';
import { AssetEntity, AssetFaceEntity, PersonEntity } from '../entities';
import { DummyValue, GenerateSql } from '../infra.util';
import { Chunked, ChunkedArray, asVector } from '../infra.utils';
import { ChunkedArray, asVector, paginate } from '../infra.utils';
export class PersonRepository implements IPersonRepository {
constructor(
@ -19,64 +22,44 @@ export class PersonRepository implements IPersonRepository {
@InjectRepository(AssetFaceEntity) private assetFaceRepository: Repository<AssetFaceEntity>,
) {}
/**
* Before reassigning faces, delete potential key violations
*/
async prepareReassignFaces({ oldPersonId, newPersonId }: UpdateFacesData): Promise<string[]> {
const results = await this.assetFaceRepository
.createQueryBuilder('face')
.select('face."assetId"')
.where(`face."personId" IN (:...ids)`, { ids: [oldPersonId, newPersonId] })
.groupBy('face."assetId"')
.having('COUNT(face."personId") > 1')
.getRawMany();
const assetIds = results.map(({ assetId }) => assetId);
await this.deletePersonFromAssets(oldPersonId, assetIds);
return assetIds;
}
@Chunked({ paramIndex: 1 })
async deletePersonFromAssets(personId: string, assetIds: string[]): Promise<void> {
await this.assetFaceRepository.delete({ personId: personId, assetId: In(assetIds) });
}
@GenerateSql({ params: [{ oldPersonId: DummyValue.UUID, newPersonId: DummyValue.UUID }] })
async reassignFaces({ oldPersonId, newPersonId }: UpdateFacesData): Promise<number> {
async reassignFaces({ oldPersonId, faceIds, newPersonId }: UpdateFacesData): Promise<number> {
const result = await this.assetFaceRepository
.createQueryBuilder()
.update()
.set({ personId: newPersonId })
.where({ personId: oldPersonId })
.where(
_.omitBy(
{ personId: oldPersonId ? oldPersonId : undefined, id: faceIds ? In(faceIds) : undefined },
_.isUndefined,
),
)
.execute();
return result.affected ?? 0;
}
delete(entity: PersonEntity): Promise<PersonEntity | null> {
return this.personRepository.remove(entity);
async delete(entities: PersonEntity[]): Promise<void> {
await this.personRepository.remove(entities);
}
async deleteAll(): Promise<number> {
const people = await this.personRepository.find();
await this.personRepository.remove(people);
return people.length;
async deleteAll(): Promise<void> {
await this.personRepository.delete({});
}
@GenerateSql()
getAllFaces(): Promise<AssetFaceEntity[]> {
return this.assetFaceRepository.find({ relations: { asset: true }, withDeleted: true });
async deleteAllFaces(): Promise<void> {
await this.assetFaceRepository.delete({});
}
@GenerateSql()
getAll(): Promise<PersonEntity[]> {
return this.personRepository.find();
getAllFaces(
pagination: PaginationOptions,
options: FindManyOptions<AssetFaceEntity> = {},
): Paginated<AssetFaceEntity> {
return paginate(this.assetFaceRepository, pagination, options);
}
@GenerateSql()
getAllWithoutThumbnail(): Promise<PersonEntity[]> {
return this.personRepository.findBy({ thumbnailPath: '' });
getAll(pagination: PaginationOptions, options: FindManyOptions<PersonEntity> = {}): Paginated<PersonEntity> {
return paginate(this.personRepository, pagination, options);
}
@GenerateSql({ params: [DummyValue.UUID] })
@ -133,14 +116,25 @@ export class PersonRepository implements IPersonRepository {
}
@GenerateSql({ params: [DummyValue.UUID] })
getFaceByIdWithAssets(id: string): Promise<AssetFaceEntity | null> {
return this.assetFaceRepository.findOne({
where: { id },
relations: {
person: true,
asset: true,
},
});
getFaceByIdWithAssets(
id: string,
relations: FindOptionsRelations<AssetFaceEntity>,
select: FindOptionsSelect<AssetFaceEntity>,
): Promise<AssetFaceEntity | null> {
return this.assetFaceRepository.findOne(
_.omitBy(
{
where: { id },
relations: {
...relations,
person: true,
asset: true,
},
select,
},
_.isUndefined,
),
);
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
@ -221,15 +215,11 @@ export class PersonRepository implements IPersonRepository {
return this.personRepository.save(entity);
}
async createFace(entity: AssetFaceEntity): Promise<AssetFaceEntity> {
if (!entity.personId) {
throw new Error('Person ID is required to create a face');
}
async createFace(entity: AssetFaceEntity): Promise<void> {
if (!entity.embedding) {
throw new Error('Embedding is required to create a face');
}
await this.assetFaceRepository.insert({ ...entity, embedding: () => asVector(entity.embedding, true) });
return this.assetFaceRepository.findOneByOrFail({ assetId: entity.assetId, personId: entity.personId });
}
async update(entity: Partial<PersonEntity>): Promise<PersonEntity> {

View file

@ -1,4 +1,4 @@
import { Embedding, EmbeddingSearch, ISmartInfoRepository } from '@app/domain';
import { Embedding, EmbeddingSearch, FaceEmbeddingSearch, FaceSearchResult, ISmartInfoRepository } from '@app/domain';
import { getCLIPModelInfo } from '@app/domain/smart-info/smart-info.constant';
import { AssetEntity, AssetFaceEntity, SmartInfoEntity, SmartSearchEntity } from '@app/infra/entities';
import { ImmichLogger } from '@app/infra/logger';
@ -44,32 +44,33 @@ export class SmartInfoRepository implements ISmartInfoRepository {
params: [{ userIds: [DummyValue.UUID], embedding: Array.from({ length: 512 }, Math.random), numResults: 100 }],
})
async searchCLIP({ userIds, embedding, numResults, withArchived }: EmbeddingSearch): Promise<AssetEntity[]> {
if (!isValidInteger(numResults, { min: 1 })) {
throw new Error(`Invalid value for 'numResults': ${numResults}`);
}
let results: AssetEntity[] = [];
await this.assetRepository.manager.transaction(async (manager) => {
await manager.query(`SET LOCAL vectors.k = '${numResults}'`);
await manager.query(`SET LOCAL vectors.enable_prefilter = on`);
const query = manager
let query = manager
.createQueryBuilder(AssetEntity, 'a')
.innerJoin('a.smartSearch', 's')
.leftJoinAndSelect('a.exifInfo', 'e')
.where('a.ownerId IN (:...userIds )')
.andWhere('a.isVisible = true');
.orderBy('s.embedding <=> :embedding')
.setParameters({ userIds, embedding: asVector(embedding) });
if (!withArchived) {
query.andWhere('a.isArchived = false');
}
query.andWhere('a.isVisible = true').andWhere('a.fileCreatedAt < NOW()');
results = await query
.andWhere('a.fileCreatedAt < NOW()')
.leftJoinAndSelect('a.exifInfo', 'e')
.orderBy('s.embedding <=> :embedding')
.setParameters({ userIds, embedding: asVector(embedding) })
.limit(numResults)
.getMany();
if (numResults) {
if (!isValidInteger(numResults, { min: 1 })) {
throw new Error(`Invalid value for 'numResults': ${numResults}`);
}
query = query.limit(numResults);
await manager.query(`SET LOCAL vectors.k = '${numResults}'`);
}
results = await query.getMany();
});
return results;
@ -85,22 +86,38 @@ export class SmartInfoRepository implements ISmartInfoRepository {
},
],
})
async searchFaces({ userIds, embedding, numResults, maxDistance }: EmbeddingSearch): Promise<AssetFaceEntity[]> {
if (!isValidInteger(numResults, { min: 1 })) {
throw new Error(`Invalid value for 'numResults': ${numResults}`);
}
let results: AssetFaceEntity[] = [];
async searchFaces({
userIds,
embedding,
numResults,
maxDistance,
hasPerson,
}: FaceEmbeddingSearch): Promise<FaceSearchResult[]> {
let results: Array<AssetFaceEntity & { distance: number }> = [];
await this.assetRepository.manager.transaction(async (manager) => {
await manager.query(`SET LOCAL vectors.k = '${numResults}'`);
const cte = manager
await manager.query(`SET LOCAL vectors.enable_prefilter = on`);
let cte = manager
.createQueryBuilder(AssetFaceEntity, 'faces')
.select('1 + (faces.embedding <=> :embedding)', 'distance')
.innerJoin('faces.asset', 'asset')
.where('asset.ownerId IN (:...userIds )')
.orderBy('1 + (faces.embedding <=> :embedding)')
.setParameters({ userIds, embedding: asVector(embedding) })
.limit(numResults);
.setParameters({ userIds, embedding: asVector(embedding) });
if (numResults) {
if (!isValidInteger(numResults, { min: 1 })) {
throw new Error(`Invalid value for 'numResults': ${numResults}`);
}
cte = cte.limit(numResults);
if (numResults > 64) {
// setting k too low messes with prefilter recall
await manager.query(`SET LOCAL vectors.k = '${numResults}'`);
}
}
if (hasPerson) {
cte = cte.andWhere('faces."personId" IS NOT NULL');
}
this.faceColumns.forEach((col) => cte.addSelect(`faces.${col}`, col));
@ -113,7 +130,10 @@ export class SmartInfoRepository implements ISmartInfoRepository {
.getRawMany();
});
return this.assetFaceRepository.create(results);
return results.map((row) => ({
face: this.assetFaceRepository.create(row),
distance: row.distance,
}));
}
async upsert(smartInfo: Partial<SmartInfoEntity>, embedding?: Embedding): Promise<void> {

View file

@ -7,80 +7,6 @@ SET
WHERE
"personId" = $2
-- PersonRepository.getAllFaces
SELECT
"AssetFaceEntity"."id" AS "AssetFaceEntity_id",
"AssetFaceEntity"."assetId" AS "AssetFaceEntity_assetId",
"AssetFaceEntity"."personId" AS "AssetFaceEntity_personId",
"AssetFaceEntity"."imageWidth" AS "AssetFaceEntity_imageWidth",
"AssetFaceEntity"."imageHeight" AS "AssetFaceEntity_imageHeight",
"AssetFaceEntity"."boundingBoxX1" AS "AssetFaceEntity_boundingBoxX1",
"AssetFaceEntity"."boundingBoxY1" AS "AssetFaceEntity_boundingBoxY1",
"AssetFaceEntity"."boundingBoxX2" AS "AssetFaceEntity_boundingBoxX2",
"AssetFaceEntity"."boundingBoxY2" AS "AssetFaceEntity_boundingBoxY2",
"AssetFaceEntity__AssetFaceEntity_asset"."id" AS "AssetFaceEntity__AssetFaceEntity_asset_id",
"AssetFaceEntity__AssetFaceEntity_asset"."deviceAssetId" AS "AssetFaceEntity__AssetFaceEntity_asset_deviceAssetId",
"AssetFaceEntity__AssetFaceEntity_asset"."ownerId" AS "AssetFaceEntity__AssetFaceEntity_asset_ownerId",
"AssetFaceEntity__AssetFaceEntity_asset"."libraryId" AS "AssetFaceEntity__AssetFaceEntity_asset_libraryId",
"AssetFaceEntity__AssetFaceEntity_asset"."deviceId" AS "AssetFaceEntity__AssetFaceEntity_asset_deviceId",
"AssetFaceEntity__AssetFaceEntity_asset"."type" AS "AssetFaceEntity__AssetFaceEntity_asset_type",
"AssetFaceEntity__AssetFaceEntity_asset"."originalPath" AS "AssetFaceEntity__AssetFaceEntity_asset_originalPath",
"AssetFaceEntity__AssetFaceEntity_asset"."resizePath" AS "AssetFaceEntity__AssetFaceEntity_asset_resizePath",
"AssetFaceEntity__AssetFaceEntity_asset"."webpPath" AS "AssetFaceEntity__AssetFaceEntity_asset_webpPath",
"AssetFaceEntity__AssetFaceEntity_asset"."thumbhash" AS "AssetFaceEntity__AssetFaceEntity_asset_thumbhash",
"AssetFaceEntity__AssetFaceEntity_asset"."encodedVideoPath" AS "AssetFaceEntity__AssetFaceEntity_asset_encodedVideoPath",
"AssetFaceEntity__AssetFaceEntity_asset"."createdAt" AS "AssetFaceEntity__AssetFaceEntity_asset_createdAt",
"AssetFaceEntity__AssetFaceEntity_asset"."updatedAt" AS "AssetFaceEntity__AssetFaceEntity_asset_updatedAt",
"AssetFaceEntity__AssetFaceEntity_asset"."deletedAt" AS "AssetFaceEntity__AssetFaceEntity_asset_deletedAt",
"AssetFaceEntity__AssetFaceEntity_asset"."fileCreatedAt" AS "AssetFaceEntity__AssetFaceEntity_asset_fileCreatedAt",
"AssetFaceEntity__AssetFaceEntity_asset"."localDateTime" AS "AssetFaceEntity__AssetFaceEntity_asset_localDateTime",
"AssetFaceEntity__AssetFaceEntity_asset"."fileModifiedAt" AS "AssetFaceEntity__AssetFaceEntity_asset_fileModifiedAt",
"AssetFaceEntity__AssetFaceEntity_asset"."isFavorite" AS "AssetFaceEntity__AssetFaceEntity_asset_isFavorite",
"AssetFaceEntity__AssetFaceEntity_asset"."isArchived" AS "AssetFaceEntity__AssetFaceEntity_asset_isArchived",
"AssetFaceEntity__AssetFaceEntity_asset"."isExternal" AS "AssetFaceEntity__AssetFaceEntity_asset_isExternal",
"AssetFaceEntity__AssetFaceEntity_asset"."isReadOnly" AS "AssetFaceEntity__AssetFaceEntity_asset_isReadOnly",
"AssetFaceEntity__AssetFaceEntity_asset"."isOffline" AS "AssetFaceEntity__AssetFaceEntity_asset_isOffline",
"AssetFaceEntity__AssetFaceEntity_asset"."checksum" AS "AssetFaceEntity__AssetFaceEntity_asset_checksum",
"AssetFaceEntity__AssetFaceEntity_asset"."duration" AS "AssetFaceEntity__AssetFaceEntity_asset_duration",
"AssetFaceEntity__AssetFaceEntity_asset"."isVisible" AS "AssetFaceEntity__AssetFaceEntity_asset_isVisible",
"AssetFaceEntity__AssetFaceEntity_asset"."livePhotoVideoId" AS "AssetFaceEntity__AssetFaceEntity_asset_livePhotoVideoId",
"AssetFaceEntity__AssetFaceEntity_asset"."originalFileName" AS "AssetFaceEntity__AssetFaceEntity_asset_originalFileName",
"AssetFaceEntity__AssetFaceEntity_asset"."sidecarPath" AS "AssetFaceEntity__AssetFaceEntity_asset_sidecarPath",
"AssetFaceEntity__AssetFaceEntity_asset"."stackParentId" AS "AssetFaceEntity__AssetFaceEntity_asset_stackParentId"
FROM
"asset_faces" "AssetFaceEntity"
LEFT JOIN "assets" "AssetFaceEntity__AssetFaceEntity_asset" ON "AssetFaceEntity__AssetFaceEntity_asset"."id" = "AssetFaceEntity"."assetId"
-- PersonRepository.getAll
SELECT
"PersonEntity"."id" AS "PersonEntity_id",
"PersonEntity"."createdAt" AS "PersonEntity_createdAt",
"PersonEntity"."updatedAt" AS "PersonEntity_updatedAt",
"PersonEntity"."ownerId" AS "PersonEntity_ownerId",
"PersonEntity"."name" AS "PersonEntity_name",
"PersonEntity"."birthDate" AS "PersonEntity_birthDate",
"PersonEntity"."thumbnailPath" AS "PersonEntity_thumbnailPath",
"PersonEntity"."faceAssetId" AS "PersonEntity_faceAssetId",
"PersonEntity"."isHidden" AS "PersonEntity_isHidden"
FROM
"person" "PersonEntity"
-- PersonRepository.getAllWithoutThumbnail
SELECT
"PersonEntity"."id" AS "PersonEntity_id",
"PersonEntity"."createdAt" AS "PersonEntity_createdAt",
"PersonEntity"."updatedAt" AS "PersonEntity_updatedAt",
"PersonEntity"."ownerId" AS "PersonEntity_ownerId",
"PersonEntity"."name" AS "PersonEntity_name",
"PersonEntity"."birthDate" AS "PersonEntity_birthDate",
"PersonEntity"."thumbnailPath" AS "PersonEntity_thumbnailPath",
"PersonEntity"."faceAssetId" AS "PersonEntity_faceAssetId",
"PersonEntity"."isHidden" AS "PersonEntity_isHidden"
FROM
"person" "PersonEntity"
WHERE
("PersonEntity"."thumbnailPath" = $1)
-- PersonRepository.getAllForUser
SELECT
"person"."id" AS "person_id",

View file

@ -2,10 +2,10 @@
-- SmartInfoRepository.searchCLIP
START TRANSACTION
SET
LOCAL vectors.k = '100'
SET
LOCAL vectors.enable_prefilter = on
SET
LOCAL vectors.k = '100'
SELECT
"a"."id" AS "a_id",
"a"."deviceAssetId" AS "a_deviceAssetId",
@ -70,8 +70,8 @@ FROM
WHERE
(
"a"."ownerId" IN ($1)
AND "a"."isVisible" = true
AND "a"."isArchived" = false
AND "a"."isVisible" = true
AND "a"."fileCreatedAt" < NOW()
)
AND ("a"."deletedAt" IS NULL)
@ -83,6 +83,8 @@ COMMIT
-- SmartInfoRepository.searchFaces
START TRANSACTION
SET
LOCAL vectors.enable_prefilter = on
SET
LOCAL vectors.k = '100'
WITH

View file

@ -62,11 +62,12 @@ export class AppService {
[JobName.QUEUE_METADATA_EXTRACTION]: (data) => this.metadataService.handleQueueMetadataExtraction(data),
[JobName.METADATA_EXTRACTION]: (data) => this.metadataService.handleMetadataExtraction(data),
[JobName.LINK_LIVE_PHOTOS]: (data) => this.metadataService.handleLivePhotoLinking(data),
[JobName.QUEUE_RECOGNIZE_FACES]: (data) => this.personService.handleQueueRecognizeFaces(data),
[JobName.RECOGNIZE_FACES]: (data) => this.personService.handleRecognizeFaces(data),
[JobName.QUEUE_FACE_DETECTION]: (data) => this.personService.handleQueueDetectFaces(data),
[JobName.FACE_DETECTION]: (data) => this.personService.handleDetectFaces(data),
[JobName.QUEUE_FACIAL_RECOGNITION]: (data) => this.personService.handleQueueRecognizeFaces(data),
[JobName.FACIAL_RECOGNITION]: (data) => this.personService.handleRecognizeFaces(data),
[JobName.GENERATE_PERSON_THUMBNAIL]: (data) => this.personService.handleGeneratePersonThumbnail(data),
[JobName.PERSON_CLEANUP]: () => this.personService.handlePersonCleanup(),
[JobName.PERSON_DELETE]: (data) => this.personService.handlePersonDelete(data),
[JobName.QUEUE_SIDECAR]: (data) => this.metadataService.handleQueueSidecar(data),
[JobName.SIDECAR_DISCOVERY]: (data) => this.metadataService.handleSidecarDiscovery(data),
[JobName.SIDECAR_SYNC]: () => this.metadataService.handleSidecarSync(),