refactor: rename clip -> smart search (#6713)

Mert 2024-01-29 09:51:22 -05:00 committed by GitHub
parent e5a70329c9
commit ae7f174948
34 changed files with 162 additions and 119 deletions

@@ -80,9 +80,9 @@ export enum JobName {
   DELETE_FILES = 'delete-files',
   CLEAN_OLD_AUDIT_LOGS = 'clean-old-audit-logs',
 
-  // clip
-  QUEUE_ENCODE_CLIP = 'queue-clip-encode',
-  ENCODE_CLIP = 'clip-encode',
+  // smart search
+  QUEUE_SMART_SEARCH = 'queue-smart-search',
+  SMART_SEARCH = 'smart-search',
 
   // XMP sidecars
   QUEUE_SIDECAR = 'queue-sidecar',
@@ -135,9 +135,9 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
   [JobName.QUEUE_FACIAL_RECOGNITION]: QueueName.FACIAL_RECOGNITION,
   [JobName.FACIAL_RECOGNITION]: QueueName.FACIAL_RECOGNITION,
 
-  // clip
-  [JobName.QUEUE_ENCODE_CLIP]: QueueName.SMART_SEARCH,
-  [JobName.ENCODE_CLIP]: QueueName.SMART_SEARCH,
+  // smart search
+  [JobName.QUEUE_SMART_SEARCH]: QueueName.SMART_SEARCH,
+  [JobName.SMART_SEARCH]: QueueName.SMART_SEARCH,
 
   // XMP sidecars
   [JobName.QUEUE_SIDECAR]: QueueName.SIDECAR,
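
Both the queue-all trigger and the per-asset job map to the same queue, so dispatch stays a plain table lookup. A one-line illustration using the mapping above:

// Resolving a renamed job's queue through JOBS_TO_QUEUE.
const queue = JOBS_TO_QUEUE[JobName.SMART_SEARCH]; // QueueName.SMART_SEARCH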

@@ -159,12 +159,12 @@ describe(JobService.name, () => {
       expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
     });
 
-    it('should handle a start clip encoding command', async () => {
+    it('should handle a start smart search command', async () => {
       jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
 
       await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false });
 
-      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_ENCODE_CLIP, data: { force: false } });
+      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SMART_SEARCH, data: { force: false } });
     });
 
     it('should handle a start metadata extraction command', async () => {
@@ -289,7 +289,7 @@ describe(JobService.name, () => {
         jobs: [
           JobName.GENERATE_WEBP_THUMBNAIL,
           JobName.GENERATE_THUMBHASH_THUMBNAIL,
-          JobName.ENCODE_CLIP,
+          JobName.SMART_SEARCH,
           JobName.FACE_DETECTION,
         ],
       },
@@ -298,7 +298,7 @@ describe(JobService.name, () => {
         jobs: [
           JobName.GENERATE_WEBP_THUMBNAIL,
           JobName.GENERATE_THUMBHASH_THUMBNAIL,
-          JobName.ENCODE_CLIP,
+          JobName.SMART_SEARCH,
           JobName.FACE_DETECTION,
           JobName.VIDEO_CONVERSION,
         ],
@@ -308,13 +308,13 @@ describe(JobService.name, () => {
         jobs: [
           JobName.GENERATE_WEBP_THUMBNAIL,
           JobName.GENERATE_THUMBHASH_THUMBNAIL,
-          JobName.ENCODE_CLIP,
+          JobName.SMART_SEARCH,
           JobName.FACE_DETECTION,
           JobName.VIDEO_CONVERSION,
         ],
       },
       {
-        item: { name: JobName.ENCODE_CLIP, data: { id: 'asset-1' } },
+        item: { name: JobName.SMART_SEARCH, data: { id: 'asset-1' } },
         jobs: [],
       },
       {
@@ -365,7 +365,7 @@ describe(JobService.name, () => {
     const featureTests: Array<{ queue: QueueName; feature: FeatureFlag; configKey: SystemConfigKey }> = [
       {
         queue: QueueName.SMART_SEARCH,
-        feature: FeatureFlag.CLIP_ENCODE,
+        feature: FeatureFlag.SMART_SEARCH,
         configKey: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED,
       },
       {

@@ -95,8 +95,8 @@ export class JobService {
         return this.jobRepository.queue({ name: JobName.QUEUE_MIGRATION });
 
       case QueueName.SMART_SEARCH:
-        await this.configCore.requireFeature(FeatureFlag.CLIP_ENCODE);
-        return this.jobRepository.queue({ name: JobName.QUEUE_ENCODE_CLIP, data: { force } });
+        await this.configCore.requireFeature(FeatureFlag.SMART_SEARCH);
+        return this.jobRepository.queue({ name: JobName.QUEUE_SMART_SEARCH, data: { force } });
 
       case QueueName.METADATA_EXTRACTION:
         return this.jobRepository.queue({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force } });
@@ -226,7 +226,7 @@ export class JobService {
         const jobs: JobItem[] = [
           { name: JobName.GENERATE_WEBP_THUMBNAIL, data: item.data },
           { name: JobName.GENERATE_THUMBHASH_THUMBNAIL, data: item.data },
-          { name: JobName.ENCODE_CLIP, data: item.data },
+          { name: JobName.SMART_SEARCH, data: item.data },
           { name: JobName.FACE_DETECTION, data: item.data },
         ];

@@ -93,7 +93,7 @@ export enum WithoutProperty {
   THUMBNAIL = 'thumbnail',
   ENCODED_VIDEO = 'encoded-video',
   EXIF = 'exif',
-  CLIP_ENCODING = 'clip-embedding',
+  SMART_SEARCH = 'smart-search',
   OBJECT_TAGS = 'object-tags',
   FACES = 'faces',
   PERSON = 'person',

@@ -71,9 +71,9 @@ export type JobItem =
   | { name: JobName.FACIAL_RECOGNITION; data: IDeferrableJob }
   | { name: JobName.GENERATE_PERSON_THUMBNAIL; data: IEntityJob }
 
-  // Clip Embedding
-  | { name: JobName.QUEUE_ENCODE_CLIP; data: IBaseJob }
-  | { name: JobName.ENCODE_CLIP; data: IEntityJob }
+  // Smart Search
+  | { name: JobName.QUEUE_SMART_SEARCH; data: IBaseJob }
+  | { name: JobName.SMART_SEARCH; data: IEntityJob }
 
   // Filesystem
   | { name: JobName.DELETE_FILES; data: IDeleteFilesJob }
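
The JobItem union keeps each payload typed by job name, so the renamed entries still pair QUEUE_SMART_SEARCH with IBaseJob and SMART_SEARCH with IEntityJob. A quick sketch (the asset id is a placeholder):

// Typed payloads via the JobItem union above; 'asset-1' is a placeholder id.
const trigger: JobItem = { name: JobName.QUEUE_SMART_SEARCH, data: { force: true } };
const perAsset: JobItem = { name: JobName.SMART_SEARCH, data: { id: 'asset-1' } };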

@@ -1,7 +1,7 @@
 import { AssetType } from '@app/infra/entities';
 
 export enum SearchStrategy {
-  CLIP = 'CLIP',
+  SMART = 'SMART',
   TEXT = 'TEXT',
 }

@@ -14,6 +14,12 @@ export class SearchDto {
   @Optional()
   query?: string;
 
+  @IsBoolean()
+  @Optional()
+  @Transform(toBoolean)
+  smart?: boolean;
+
+  /** @deprecated */
   @IsBoolean()
   @Optional()
   @Transform(toBoolean)
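
Callers opt in through the new smart flag rather than the deprecated clip flag. A minimal client-side sketch, assuming a server at http://localhost:2283, a /api/search route, and x-api-key auth (all three are assumptions, not part of this diff):

// Hypothetical request; only the `q` and `smart` parameter names come from this diff.
const params = new URLSearchParams({ q: 'sunset on the beach', smart: 'true' });
const response = await fetch(`http://localhost:2283/api/search?${params}`, {
  headers: { 'x-api-key': process.env.IMMICH_API_KEY ?? '' },
});
const results = await response.json();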

@@ -180,14 +180,14 @@ describe(SearchService.name, () => {
       expect(assetMock.searchMetadata).not.toHaveBeenCalled();
     });
 
-    it('should throw an error if clip is requested but disabled', async () => {
+    it.each([
+      { key: SystemConfigKey.MACHINE_LEARNING_ENABLED },
+      { key: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED },
+    ])('should throw an error if clip is requested but disabled', async ({ key }) => {
       const dto: SearchDto = { q: 'test query', clip: true };
-      configMock.load
-        .mockResolvedValueOnce([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }])
-        .mockResolvedValueOnce([{ key: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED, value: false }]);
+      configMock.load.mockResolvedValue([{ key, value: false }]);
 
-      await expect(sut.search(authStub.user1, dto)).rejects.toThrow('CLIP is not enabled');
-      await expect(sut.search(authStub.user1, dto)).rejects.toThrow('CLIP is not enabled');
+      await expect(sut.search(authStub.user1, dto)).rejects.toThrow('Smart search is not enabled');
     });
   });
 });

@@ -56,23 +56,26 @@ export class SearchService {
   }
 
   async search(auth: AuthDto, dto: SearchDto): Promise<SearchResponseDto> {
     await this.configCore.requireFeature(FeatureFlag.SEARCH);
     const { machineLearning } = await this.configCore.getConfig();
     const query = dto.q || dto.query;
     if (!query) {
       throw new Error('Missing query');
     }
 
-    const hasClip = machineLearning.enabled && machineLearning.clip.enabled;
-    if (dto.clip && !hasClip) {
-      throw new Error('CLIP is not enabled');
+    let strategy = SearchStrategy.TEXT;
+    if (dto.smart || dto.clip) {
+      await this.configCore.requireFeature(FeatureFlag.SMART_SEARCH);
+      strategy = SearchStrategy.SMART;
     }
 
-    const strategy = dto.clip ? SearchStrategy.CLIP : SearchStrategy.TEXT;
     const userIds = await this.getUserIdsToSearch(auth);
     const withArchived = dto.withArchived || false;
 
     let assets: AssetEntity[] = [];
 
     switch (strategy) {
-      case SearchStrategy.CLIP:
+      case SearchStrategy.SMART:
         const embedding = await this.machineLearning.encodeText(
           machineLearning.url,
           { text: query },

@@ -93,7 +93,7 @@ export class ServerConfigDto {
 }
 
 export class ServerFeaturesDto implements FeatureFlags {
-  clipEncode!: boolean;
+  smartSearch!: boolean;
   configFile!: boolean;
   facialRecognition!: boolean;
   map!: boolean;
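
Clients that previously read clipEncode from the features payload now read smartSearch. A sketch of the corresponding check, assuming the payload is served from a /api/server-info/features route (the route name is an assumption):

// Hypothetical feature probe; only the `smartSearch` field name comes from this diff.
const features = await fetch('http://localhost:2283/api/server-info/features').then((res) => res.json());
if (!features.smartSearch) {
  console.warn('Smart search is disabled on this server');
}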

@@ -174,7 +174,7 @@ describe(ServerInfoService.name, () => {
   describe('getFeatures', () => {
     it('should respond the server features', async () => {
       await expect(sut.getFeatures()).resolves.toEqual({
-        clipEncode: true,
+        smartSearch: true,
         facialRecognition: true,
         map: true,
         reverseGeocoding: true,

@@ -69,8 +69,8 @@ describe(SmartInfoService.name, () => {
       await sut.handleQueueEncodeClip({ force: false });
 
-      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.ENCODE_CLIP, data: { id: assetStub.image.id } }]);
-      expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.CLIP_ENCODING);
+      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }]);
+      expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.SMART_SEARCH);
     });
 
     it('should queue all the assets', async () => {
@@ -81,7 +81,7 @@ describe(SmartInfoService.name, () => {
       await sut.handleQueueEncodeClip({ force: true });
 
-      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.ENCODE_CLIP, data: { id: assetStub.image.id } }]);
+      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }]);
       expect(assetMock.getAll).toHaveBeenCalled();
     });
   });

@@ -53,11 +53,13 @@ export class SmartInfoService {
     const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
       return force
         ? this.assetRepository.getAll(pagination)
-        : this.assetRepository.getWithout(pagination, WithoutProperty.CLIP_ENCODING);
+        : this.assetRepository.getWithout(pagination, WithoutProperty.SMART_SEARCH);
     });
 
     for await (const assets of assetPagination) {
-      await this.jobRepository.queueAll(assets.map((asset) => ({ name: JobName.ENCODE_CLIP, data: { id: asset.id } })));
+      await this.jobRepository.queueAll(
+        assets.map((asset) => ({ name: JobName.SMART_SEARCH, data: { id: asset.id } })),
+      );
     }
 
     return true;

@@ -136,7 +136,7 @@ export const defaults = Object.freeze<SystemConfig>({
 });
 
 export enum FeatureFlag {
-  CLIP_ENCODE = 'clipEncode',
+  SMART_SEARCH = 'smartSearch',
   FACIAL_RECOGNITION = 'facialRecognition',
   MAP = 'map',
   REVERSE_GEOCODING = 'reverseGeocoding',
@@ -178,8 +178,8 @@ export class SystemConfigCore {
     const hasFeature = await this.hasFeature(feature);
     if (!hasFeature) {
       switch (feature) {
-        case FeatureFlag.CLIP_ENCODE:
-          throw new BadRequestException('Clip encoding is not enabled');
+        case FeatureFlag.SMART_SEARCH:
+          throw new BadRequestException('Smart search is not enabled');
         case FeatureFlag.FACIAL_RECOGNITION:
           throw new BadRequestException('Facial recognition is not enabled');
         case FeatureFlag.SIDECAR:
@@ -208,7 +208,7 @@ export class SystemConfigCore {
     const mlEnabled = config.machineLearning.enabled;
 
     return {
-      [FeatureFlag.CLIP_ENCODE]: mlEnabled && config.machineLearning.clip.enabled,
+      [FeatureFlag.SMART_SEARCH]: mlEnabled && config.machineLearning.clip.enabled,
       [FeatureFlag.FACIAL_RECOGNITION]: mlEnabled && config.machineLearning.facialRecognition.enabled,
       [FeatureFlag.MAP]: config.map.enabled,
       [FeatureFlag.REVERSE_GEOCODING]: config.reverseGeocoding.enabled,
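
Note the renamed flag still derives from the unchanged machineLearning.clip.enabled config value, so only public names move in this refactor. Guarding work behind the flag follows the same pattern as the JobService hunk above; a sketch (the standalone wrapper function is illustrative):

// Illustrative guard; FeatureFlag and JobName values are the renamed ones from this diff.
async function queueSmartSearchJobs(configCore: SystemConfigCore, jobRepository: IJobRepository) {
  await configCore.requireFeature(FeatureFlag.SMART_SEARCH); // throws BadRequestException when disabled
  await jobRepository.queue({ name: JobName.QUEUE_SMART_SEARCH, data: { force: false } });
}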

@@ -501,7 +501,7 @@ export class AssetRepository implements IAssetRepository {
         };
         break;
 
-      case WithoutProperty.CLIP_ENCODING:
+      case WithoutProperty.SMART_SEARCH:
         relations = {
           smartSearch: true,
         };

@@ -46,8 +46,8 @@ export class AppService {
       [JobName.USER_DELETE_CHECK]: () => this.userService.handleUserDeleteCheck(),
       [JobName.USER_DELETION]: (data) => this.userService.handleUserDelete(data),
       [JobName.USER_SYNC_USAGE]: () => this.userService.handleUserSyncUsage(),
-      [JobName.QUEUE_ENCODE_CLIP]: (data) => this.smartInfoService.handleQueueEncodeClip(data),
-      [JobName.ENCODE_CLIP]: (data) => this.smartInfoService.handleEncodeClip(data),
+      [JobName.QUEUE_SMART_SEARCH]: (data) => this.smartInfoService.handleQueueEncodeClip(data),
+      [JobName.SMART_SEARCH]: (data) => this.smartInfoService.handleEncodeClip(data),
       [JobName.STORAGE_TEMPLATE_MIGRATION]: () => this.storageTemplateService.handleMigration(),
       [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE]: (data) => this.storageTemplateService.handleMigrationSingle(data),
       [JobName.QUEUE_MIGRATION]: () => this.mediaService.handleQueueMigration(),