feat(server,ml): remove image tagging (#5903)

* remove image tagging

* updated lock

* fixed tests, improved logging

* be nice

* fixed tests
Mert authored on 2023-12-20 20:47:56 -05:00, committed by GitHub
parent 154292242f
commit 092a23fd7f
65 changed files with 988 additions and 2930 deletions


@@ -2,7 +2,6 @@ export enum QueueName {
   THUMBNAIL_GENERATION = 'thumbnailGeneration',
   METADATA_EXTRACTION = 'metadataExtraction',
   VIDEO_CONVERSION = 'videoConversion',
-  OBJECT_TAGGING = 'objectTagging',
   RECOGNIZE_FACES = 'recognizeFaces',
   SMART_SEARCH = 'smartSearch',
   BACKGROUND_TASK = 'backgroundTask',
@@ -55,10 +54,6 @@ export enum JobName {
   MIGRATE_ASSET = 'migrate-asset',
   MIGRATE_PERSON = 'migrate-person',
 
-  // object tagging
-  QUEUE_OBJECT_TAGGING = 'queue-object-tagging',
-  CLASSIFY_IMAGE = 'classify-image',
-
   // facial recognition
   PERSON_CLEANUP = 'person-cleanup',
   PERSON_DELETE = 'person-delete',
@@ -126,10 +121,6 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
   [JobName.MIGRATE_ASSET]: QueueName.MIGRATION,
   [JobName.MIGRATE_PERSON]: QueueName.MIGRATION,
 
-  // object tagging
-  [JobName.QUEUE_OBJECT_TAGGING]: QueueName.OBJECT_TAGGING,
-  [JobName.CLASSIFY_IMAGE]: QueueName.OBJECT_TAGGING,
-
   // facial recognition
   [JobName.QUEUE_RECOGNIZE_FACES]: QueueName.RECOGNIZE_FACES,
   [JobName.RECOGNIZE_FACES]: QueueName.RECOGNIZE_FACES,

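Because JOBS_TO_QUEUE is declared as Record<JobName, QueueName>, the mapping is total over the enum: deleting the OBJECT_TAGGING members makes the compiler reject every map, DTO, and handler table that still mentions them, which is part of why this removal fans out across 65 files. A minimal illustration of that exhaustiveness check (toy members, not the real enums):

    enum JobName {
      MIGRATE_ASSET = 'migrate-asset',
      PERSON_CLEANUP = 'person-cleanup',
    }

    enum QueueName {
      MIGRATION = 'migration',
      BACKGROUND_TASK = 'backgroundTask',
    }

    // Record<JobName, QueueName> requires exactly one entry per JobName member;
    // an entry for a deleted member (or a missing entry) fails to compile.
    const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
      [JobName.MIGRATE_ASSET]: QueueName.MIGRATION,
      [JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
    };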

@@ -59,9 +59,6 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
   @ApiProperty({ type: JobStatusDto })
   [QueueName.VIDEO_CONVERSION]!: JobStatusDto;
 
-  @ApiProperty({ type: JobStatusDto })
-  [QueueName.OBJECT_TAGGING]!: JobStatusDto;
-
   @ApiProperty({ type: JobStatusDto })
   [QueueName.SMART_SEARCH]!: JobStatusDto;


@@ -99,7 +99,6 @@ describe(JobService.name, () => {
         [QueueName.BACKGROUND_TASK]: expectedJobStatus,
         [QueueName.SMART_SEARCH]: expectedJobStatus,
         [QueueName.METADATA_EXTRACTION]: expectedJobStatus,
-        [QueueName.OBJECT_TAGGING]: expectedJobStatus,
         [QueueName.SEARCH]: expectedJobStatus,
         [QueueName.STORAGE_TEMPLATE_MIGRATION]: expectedJobStatus,
         [QueueName.MIGRATION]: expectedJobStatus,
@@ -157,17 +156,6 @@ describe(JobService.name, () => {
       expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
     });
 
-    it('should handle a start object tagging command', async () => {
-      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
-      configMock.load.mockResolvedValue([
-        { key: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED, value: true },
-      ]);
-
-      await sut.handleCommand(QueueName.OBJECT_TAGGING, { command: JobCommand.START, force: false });
-
-      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_OBJECT_TAGGING, data: { force: false } });
-    });
-
     it('should handle a start clip encoding command', async () => {
       jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
@@ -234,7 +222,6 @@ describe(JobService.name, () => {
         [QueueName.BACKGROUND_TASK]: { concurrency: 10 },
         [QueueName.SMART_SEARCH]: { concurrency: 10 },
         [QueueName.METADATA_EXTRACTION]: { concurrency: 10 },
-        [QueueName.OBJECT_TAGGING]: { concurrency: 10 },
         [QueueName.RECOGNIZE_FACES]: { concurrency: 10 },
         [QueueName.SEARCH]: { concurrency: 10 },
         [QueueName.SIDECAR]: { concurrency: 10 },
@@ -249,7 +236,6 @@ describe(JobService.name, () => {
       expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.BACKGROUND_TASK, 10);
       expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SMART_SEARCH, 10);
       expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION, 10);
-      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.OBJECT_TAGGING, 10);
       expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.RECOGNIZE_FACES, 10);
       expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SIDECAR, 10);
       expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.LIBRARY, 10);
@@ -292,7 +278,6 @@ describe(JobService.name, () => {
         item: { name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: 'asset-1' } },
         jobs: [
           JobName.GENERATE_WEBP_THUMBNAIL,
-          JobName.CLASSIFY_IMAGE,
           JobName.ENCODE_CLIP,
           JobName.RECOGNIZE_FACES,
           JobName.GENERATE_THUMBHASH_THUMBNAIL,
@@ -302,7 +287,6 @@ describe(JobService.name, () => {
         item: { name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: 'asset-1', source: 'upload' } },
         jobs: [
           JobName.GENERATE_WEBP_THUMBNAIL,
-          JobName.CLASSIFY_IMAGE,
           JobName.ENCODE_CLIP,
           JobName.RECOGNIZE_FACES,
           JobName.GENERATE_THUMBHASH_THUMBNAIL,
@@ -312,7 +296,6 @@ describe(JobService.name, () => {
       {
         item: { name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: 'asset-live-image', source: 'upload' } },
         jobs: [
-          JobName.CLASSIFY_IMAGE,
           JobName.GENERATE_WEBP_THUMBNAIL,
           JobName.RECOGNIZE_FACES,
           JobName.GENERATE_THUMBHASH_THUMBNAIL,
@@ -320,10 +303,6 @@ describe(JobService.name, () => {
           JobName.VIDEO_CONVERSION,
         ],
       },
-      {
-        item: { name: JobName.CLASSIFY_IMAGE, data: { id: 'asset-1' } },
-        jobs: [],
-      },
       {
         item: { name: JobName.ENCODE_CLIP, data: { id: 'asset-1' } },
         jobs: [],
@@ -371,11 +350,6 @@ describe(JobService.name, () => {
         feature: FeatureFlag.CLIP_ENCODE,
         configKey: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED,
       },
-      {
-        queue: QueueName.OBJECT_TAGGING,
-        feature: FeatureFlag.TAG_IMAGE,
-        configKey: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED,
-      },
       {
         queue: QueueName.RECOGNIZE_FACES,
         feature: FeatureFlag.FACIAL_RECOGNITION,


@@ -94,10 +94,6 @@ export class JobService {
       case QueueName.MIGRATION:
         return this.jobRepository.queue({ name: JobName.QUEUE_MIGRATION });
 
-      case QueueName.OBJECT_TAGGING:
-        await this.configCore.requireFeature(FeatureFlag.TAG_IMAGE);
-        return this.jobRepository.queue({ name: JobName.QUEUE_OBJECT_TAGGING, data: { force } });
-
       case QueueName.SMART_SEARCH:
         await this.configCore.requireFeature(FeatureFlag.CLIP_ENCODE);
         return this.jobRepository.queue({ name: JobName.QUEUE_ENCODE_CLIP, data: { force } });
@@ -209,7 +205,6 @@ export class JobService {
       case JobName.GENERATE_JPEG_THUMBNAIL: {
         await this.jobRepository.queue({ name: JobName.GENERATE_WEBP_THUMBNAIL, data: item.data });
         await this.jobRepository.queue({ name: JobName.GENERATE_THUMBHASH_THUMBNAIL, data: item.data });
-        await this.jobRepository.queue({ name: JobName.CLASSIFY_IMAGE, data: item.data });
         await this.jobRepository.queue({ name: JobName.ENCODE_CLIP, data: item.data });
         await this.jobRepository.queue({ name: JobName.RECOGNIZE_FACES, data: item.data });

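The surviving branches of handleCommand keep the gating pattern visible above: each ML-backed queue first awaits requireFeature and only then enqueues its top-level job. A reduced sketch of that flow (interfaces trimmed for illustration; the string literals are stand-ins, the real values live in the JobName and QueueName enums):

    interface IJobRepository {
      queue(item: { name: string; data?: { force?: boolean } }): Promise<void>;
    }

    // requireFeature is expected to throw BadRequestException when the
    // feature flag is off (see the SystemConfigCore hunk further down).
    async function handleStartCommand(
      queue: 'smartSearch' | 'recognizeFaces',
      force: boolean,
      requireFeature: (flag: string) => Promise<void>,
      jobRepository: IJobRepository,
    ): Promise<void> {
      switch (queue) {
        case 'smartSearch':
          await requireFeature('clipEncode');
          return jobRepository.queue({ name: 'queue-clip-encode', data: { force } });
        case 'recognizeFaces':
          await requireFeature('facialRecognition');
          return jobRepository.queue({ name: 'queue-recognize-faces', data: { force } });
      }
    }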

@@ -62,10 +62,6 @@ export type JobItem =
   | { name: JobName.SIDECAR_SYNC; data: IEntityJob }
   | { name: JobName.SIDECAR_WRITE; data: ISidecarWriteJob }
 
-  // Object Tagging
-  | { name: JobName.QUEUE_OBJECT_TAGGING; data: IBaseJob }
-  | { name: JobName.CLASSIFY_IMAGE; data: IEntityJob }
-
   // Recognize Faces
   | { name: JobName.QUEUE_RECOGNIZE_FACES; data: IBaseJob }
   | { name: JobName.RECOGNIZE_FACES; data: IEntityJob }


@@ -1,4 +1,4 @@
-import { ClassificationConfig, CLIPConfig, RecognitionConfig } from '../smart-info/dto';
+import { CLIPConfig, RecognitionConfig } from '../smart-info/dto';
 
 export const IMachineLearningRepository = 'IMachineLearningRepository';
@@ -26,7 +26,6 @@ export interface DetectFaceResult {
 }
 
 export enum ModelType {
-  IMAGE_CLASSIFICATION = 'image-classification',
   FACIAL_RECOGNITION = 'facial-recognition',
   CLIP = 'clip',
 }
@@ -37,7 +36,6 @@ export enum CLIPMode {
 }
 
 export interface IMachineLearningRepository {
-  classifyImage(url: string, input: VisionModelInput, config: ClassificationConfig): Promise<string[]>;
   encodeImage(url: string, input: VisionModelInput, config: CLIPConfig): Promise<number[]>;
   encodeText(url: string, input: TextModelInput, config: CLIPConfig): Promise<number[]>;
   detectFaces(url: string, input: VisionModelInput, config: RecognitionConfig): Promise<DetectFaceResult[]>;

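With classification gone, the machine-learning contract narrows to CLIP embeddings and face detection. Restated as a self-contained sketch (the input and config types here are abridged stand-ins for the real DTOs):

    interface VisionModelInput { imagePath: string }
    interface TextModelInput { text: string }
    interface ModelConfig { enabled: boolean; modelName: string }
    type CLIPConfig = ModelConfig;        // the real DTO also carries a CLIPMode
    type RecognitionConfig = ModelConfig; // the real DTO also carries minScore, etc.
    interface DetectFaceResult { score: number; embedding: number[] }

    interface IMachineLearningRepository {
      encodeImage(url: string, input: VisionModelInput, config: CLIPConfig): Promise<number[]>;
      encodeText(url: string, input: TextModelInput, config: CLIPConfig): Promise<number[]>;
      detectFaces(url: string, input: VisionModelInput, config: RecognitionConfig): Promise<DetectFaceResult[]>;
    }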

@@ -100,5 +100,4 @@ export class ServerFeaturesDto implements FeatureFlags {
   passwordLogin!: boolean;
   sidecar!: boolean;
   search!: boolean;
-  tagImage!: boolean;
 }


@@ -171,7 +171,6 @@ describe(ServerInfoService.name, () => {
       passwordLogin: true,
       search: true,
       sidecar: true,
-      tagImage: false,
       configFile: false,
       trash: true,
     });


@@ -18,15 +18,6 @@ export class ModelConfig {
   modelType?: ModelType;
 }
 
-export class ClassificationConfig extends ModelConfig {
-  @IsNumber()
-  @Min(0)
-  @Max(1)
-  @Type(() => Number)
-  @ApiProperty({ type: 'integer' })
-  minScore!: number;
-}
-
 export class CLIPConfig extends ModelConfig {
   @IsEnum(CLIPMode)
   @Optional()


@@ -47,107 +47,6 @@ describe(SmartInfoService.name, () => {
     expect(sut).toBeDefined();
   });
 
-  describe('handleQueueObjectTagging', () => {
-    beforeEach(async () => {
-      configMock.load.mockResolvedValue([
-        { key: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED, value: true },
-      ]);
-    });
-
-    it('should do nothing if machine learning is disabled', async () => {
-      configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
-
-      await sut.handleQueueObjectTagging({});
-
-      expect(assetMock.getAll).not.toHaveBeenCalled();
-      expect(assetMock.getWithout).not.toHaveBeenCalled();
-    });
-
-    it('should queue the assets without tags', async () => {
-      configMock.load.mockResolvedValue([
-        { key: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED, value: true },
-      ]);
-      assetMock.getWithout.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });
-
-      await sut.handleQueueObjectTagging({ force: false });
-
-      expect(jobMock.queue.mock.calls).toEqual([[{ name: JobName.CLASSIFY_IMAGE, data: { id: assetStub.image.id } }]]);
-      expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.OBJECT_TAGS);
-    });
-
-    it('should queue all the assets', async () => {
-      configMock.load.mockResolvedValue([
-        { key: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED, value: true },
-      ]);
-      assetMock.getAll.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });
-
-      await sut.handleQueueObjectTagging({ force: true });
-
-      expect(jobMock.queue.mock.calls).toEqual([[{ name: JobName.CLASSIFY_IMAGE, data: { id: assetStub.image.id } }]]);
-      expect(assetMock.getAll).toHaveBeenCalled();
-    });
-  });
-
-  describe('handleClassifyImage', () => {
-    it('should do nothing if machine learning is disabled', async () => {
-      configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
-
-      await sut.handleClassifyImage({ id: '123' });
-
-      expect(machineMock.classifyImage).not.toHaveBeenCalled();
-      expect(assetMock.getByIds).not.toHaveBeenCalled();
-    });
-
-    it('should skip assets without a resize path', async () => {
-      const asset = { resizePath: '' } as AssetEntity;
-      assetMock.getByIds.mockResolvedValue([asset]);
-
-      await sut.handleClassifyImage({ id: asset.id });
-
-      expect(smartMock.upsert).not.toHaveBeenCalled();
-      expect(machineMock.classifyImage).not.toHaveBeenCalled();
-    });
-
-    it('should save the returned tags', async () => {
-      configMock.load.mockResolvedValue([
-        { key: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED, value: true },
-      ]);
-      machineMock.classifyImage.mockResolvedValue(['tag1', 'tag2', 'tag3']);
-
-      await sut.handleClassifyImage({ id: asset.id });
-
-      expect(machineMock.classifyImage).toHaveBeenCalledWith(
-        'http://immich-machine-learning:3003',
-        {
-          imagePath: 'path/to/resize.ext',
-        },
-        { enabled: true, minScore: 0.9, modelName: 'microsoft/resnet-50' },
-      );
-      expect(smartMock.upsert).toHaveBeenCalledWith({
-        assetId: 'asset-1',
-        tags: ['tag1', 'tag2', 'tag3'],
-      });
-    });
-
-    it('should always overwrite old tags', async () => {
-      configMock.load.mockResolvedValue([
-        { key: SystemConfigKey.MACHINE_LEARNING_CLASSIFICATION_ENABLED, value: true },
-      ]);
-      machineMock.classifyImage.mockResolvedValue([]);
-
-      await sut.handleClassifyImage({ id: asset.id });
-
-      expect(machineMock.classifyImage).toHaveBeenCalled();
-      expect(smartMock.upsert).toHaveBeenCalled();
-    });
-  });
-
   describe('handleQueueEncodeClip', () => {
     it('should do nothing if machine learning is disabled', async () => {
       configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);


@@ -46,48 +46,6 @@ export class SmartInfoService {
     await this.jobRepository.resume(QueueName.SMART_SEARCH);
   }
 
-  async handleQueueObjectTagging({ force }: IBaseJob) {
-    const { machineLearning } = await this.configCore.getConfig();
-    if (!machineLearning.enabled || !machineLearning.classification.enabled) {
-      return true;
-    }
-
-    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
-      return force
-        ? this.assetRepository.getAll(pagination)
-        : this.assetRepository.getWithout(pagination, WithoutProperty.OBJECT_TAGS);
-    });
-
-    for await (const assets of assetPagination) {
-      for (const asset of assets) {
-        await this.jobRepository.queue({ name: JobName.CLASSIFY_IMAGE, data: { id: asset.id } });
-      }
-    }
-
-    return true;
-  }
-
-  async handleClassifyImage({ id }: IEntityJob) {
-    const { machineLearning } = await this.configCore.getConfig();
-    if (!machineLearning.enabled || !machineLearning.classification.enabled) {
-      return true;
-    }
-
-    const [asset] = await this.assetRepository.getByIds([id]);
-
-    if (!asset.resizePath) {
-      return false;
-    }
-
-    const tags = await this.machineLearning.classifyImage(
-      machineLearning.url,
-      { imagePath: asset.resizePath },
-      machineLearning.classification,
-    );
-    await this.repository.upsert({ assetId: asset.id, tags });
-
-    return true;
-  }
-
   async handleQueueEncodeClip({ force }: IBaseJob) {
     const { machineLearning } = await this.configCore.getConfig();
     if (!machineLearning.enabled || !machineLearning.clip.enabled) {

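The deleted handlers followed the same paginate-and-enqueue shape that the surviving handleQueueEncodeClip keeps. A sketch of that control flow under stated assumptions (the declare stubs stand in for the real @app/domain types, and the string literals are illustrative, not the actual enum values):

    type Pagination = { take: number; skip: number };
    type Page<T> = { items: T[]; hasNextPage: boolean };
    declare function usePagination<T>(
      size: number,
      load: (pagination: Pagination) => Promise<Page<T>>,
    ): AsyncIterable<T[]>;
    declare const assetRepository: {
      getAll(p: Pagination): Promise<Page<{ id: string }>>;
      getWithout(p: Pagination, missing: string): Promise<Page<{ id: string }>>;
    };
    declare const jobRepository: {
      queue(item: { name: string; data: { id: string } }): Promise<void>;
    };

    // force=true re-queues every asset; otherwise only assets still missing
    // CLIP embeddings are selected, one encode job enqueued per asset.
    async function handleQueueEncodeClip(force: boolean): Promise<boolean> {
      const pages = usePagination(1000, (pagination) =>
        force ? assetRepository.getAll(pagination) : assetRepository.getWithout(pagination, 'clip-encoding'),
      );
      for await (const assets of pages) {
        for (const asset of assets) {
          await jobRepository.queue({ name: 'clip-encode', data: { id: asset.id } });
        }
      }
      return true;
    }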

@@ -29,12 +29,6 @@ export class SystemConfigJobDto implements Record<QueueName, JobSettingsDto> {
   @Type(() => JobSettingsDto)
   [QueueName.VIDEO_CONVERSION]!: JobSettingsDto;
 
-  @ApiProperty({ type: JobSettingsDto })
-  @ValidateNested()
-  @IsObject()
-  @Type(() => JobSettingsDto)
-  [QueueName.OBJECT_TAGGING]!: JobSettingsDto;
-
   @ApiProperty({ type: JobSettingsDto })
   @ValidateNested()
   @IsObject()


@@ -1,4 +1,4 @@
-import { ClassificationConfig, CLIPConfig, RecognitionConfig } from '@app/domain';
+import { CLIPConfig, RecognitionConfig } from '@app/domain';
 import { Type } from 'class-transformer';
 import { IsBoolean, IsObject, IsUrl, ValidateIf, ValidateNested } from 'class-validator';
@@ -10,11 +10,6 @@ export class SystemConfigMachineLearningDto {
   @ValidateIf((dto) => dto.enabled)
   url!: string;
 
-  @Type(() => ClassificationConfig)
-  @ValidateNested()
-  @IsObject()
-  classification!: ClassificationConfig;
-
   @Type(() => CLIPConfig)
   @ValidateNested()
   @IsObject()


@@ -49,7 +49,6 @@ export const defaults = Object.freeze<SystemConfig>({
     [QueueName.BACKGROUND_TASK]: { concurrency: 5 },
     [QueueName.SMART_SEARCH]: { concurrency: 2 },
     [QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
-    [QueueName.OBJECT_TAGGING]: { concurrency: 2 },
     [QueueName.RECOGNIZE_FACES]: { concurrency: 2 },
     [QueueName.SEARCH]: { concurrency: 5 },
     [QueueName.SIDECAR]: { concurrency: 5 },
@@ -66,11 +65,6 @@ export const defaults = Object.freeze<SystemConfig>({
   machineLearning: {
     enabled: process.env.IMMICH_MACHINE_LEARNING_ENABLED !== 'false',
     url: process.env.IMMICH_MACHINE_LEARNING_URL || 'http://immich-machine-learning:3003',
-    classification: {
-      enabled: false,
-      modelName: 'microsoft/resnet-50',
-      minScore: 0.9,
-    },
     clip: {
       enabled: true,
       modelName: 'ViT-B-32__openai',
@@ -137,7 +131,6 @@ export const defaults = Object.freeze<SystemConfig>({
 export enum FeatureFlag {
   CLIP_ENCODE = 'clipEncode',
   FACIAL_RECOGNITION = 'facialRecognition',
-  TAG_IMAGE = 'tagImage',
   MAP = 'map',
   REVERSE_GEOCODING = 'reverseGeocoding',
   SIDECAR = 'sidecar',
@@ -182,8 +175,6 @@ export class SystemConfigCore {
         throw new BadRequestException('Clip encoding is not enabled');
       case FeatureFlag.FACIAL_RECOGNITION:
         throw new BadRequestException('Facial recognition is not enabled');
-      case FeatureFlag.TAG_IMAGE:
-        throw new BadRequestException('Image tagging is not enabled');
       case FeatureFlag.SIDECAR:
         throw new BadRequestException('Sidecar is not enabled');
       case FeatureFlag.SEARCH:
@@ -212,7 +203,6 @@ export class SystemConfigCore {
     return {
       [FeatureFlag.CLIP_ENCODE]: mlEnabled && config.machineLearning.clip.enabled,
       [FeatureFlag.FACIAL_RECOGNITION]: mlEnabled && config.machineLearning.facialRecognition.enabled,
-      [FeatureFlag.TAG_IMAGE]: mlEnabled && config.machineLearning.classification.enabled,
       [FeatureFlag.MAP]: config.map.enabled,
       [FeatureFlag.REVERSE_GEOCODING]: config.reverseGeocoding.enabled,
       [FeatureFlag.SIDECAR]: true,
@@ -245,10 +235,7 @@ export class SystemConfigCore {
       _.set(config, key, value);
     }
 
-    const errors = await validate(plainToInstance(SystemConfigDto, config), {
-      forbidNonWhitelisted: true,
-      forbidUnknownValues: true,
-    });
+    const errors = await validate(plainToInstance(SystemConfigDto, config));
     if (errors.length > 0) {
       this.logger.error('Validation error', errors);
       if (configFilePath) {
@@ -334,13 +321,13 @@
       }
 
       if (!_.isEmpty(file)) {
-        throw new Error(`Unknown keys found: ${JSON.stringify(file)}`);
+        this.logger.warn(`Unknown keys found: ${JSON.stringify(file, null, 2)}`);
       }
 
       this.configCache = overrides;
     } catch (error: Error | any) {
-      this.logger.error(`Unable to load configuration file: ${filepath} due to ${error}`, error?.stack);
-      throw new Error('Invalid configuration file');
+      this.logger.error(`Unable to load configuration file: ${filepath}`);
+      throw error;
     }
   }

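The validation hunks above amount to a behavior change, not just cleanup: without forbidNonWhitelisted/forbidUnknownValues, unknown config-file keys no longer fail validation; they are split off and logged as a warning, while schema violations still throw and file-load errors are rethrown after a shorter log line. A standalone sketch of the new flow, assuming class-validator, class-transformer, and lodash (SystemConfigDto reduced to a single field; the pick/omit split is an illustration of the key handling, not the exact implementation):

    import 'reflect-metadata';
    import { plainToInstance } from 'class-transformer';
    import { IsBoolean, validate } from 'class-validator';
    import _ from 'lodash';

    class SystemConfigDto {
      @IsBoolean()
      trash!: boolean;
    }

    const KNOWN_KEYS = ['trash'];

    async function loadConfigFile(file: Record<string, unknown>) {
      // Known keys are merged over the defaults; whatever remains in `file`
      // is unknown and, after this change, only produces a warning.
      const config = { trash: true, ..._.pick(file, KNOWN_KEYS) };
      const unknown = _.omit(file, KNOWN_KEYS);

      const errors = await validate(plainToInstance(SystemConfigDto, config));
      if (errors.length > 0) {
        throw new Error(`Validation error: ${JSON.stringify(errors)}`); // still fatal
      }
      if (!_.isEmpty(unknown)) {
        console.warn(`Unknown keys found: ${JSON.stringify(unknown, null, 2)}`);
      }
      return config;
    }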

@@ -11,6 +11,7 @@ import {
   TranscodePolicy,
   VideoCodec,
 } from '@app/infra/entities';
+import { ImmichLogger } from '@app/infra/logger';
 import { BadRequestException } from '@nestjs/common';
 import { newCommunicationRepositoryMock, newSystemConfigRepositoryMock } from '@test';
 import { QueueName } from '../job';
@@ -29,7 +30,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
   [QueueName.BACKGROUND_TASK]: { concurrency: 5 },
   [QueueName.SMART_SEARCH]: { concurrency: 2 },
   [QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
-  [QueueName.OBJECT_TAGGING]: { concurrency: 2 },
   [QueueName.RECOGNIZE_FACES]: { concurrency: 2 },
   [QueueName.SEARCH]: { concurrency: 5 },
   [QueueName.SIDECAR]: { concurrency: 5 },
@@ -65,11 +65,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
   machineLearning: {
     enabled: true,
     url: 'http://immich-machine-learning:3003',
-    classification: {
-      enabled: false,
-      modelName: 'microsoft/resnet-50',
-      minScore: 0.9,
-    },
     clip: {
       enabled: true,
       modelName: 'ViT-B-32__openai',
@@ -169,6 +164,16 @@ describe(SystemConfigService.name, () => {
   });
 
   describe('getConfig', () => {
+    let warnLog: jest.SpyInstance;
+
+    beforeEach(() => {
+      warnLog = jest.spyOn(ImmichLogger.prototype, 'warn');
+    });
+
+    afterEach(() => {
+      warnLog.mockRestore();
+    });
+
     it('should return the default config', async () => {
       configMock.load.mockResolvedValue([]);
@@ -217,9 +222,9 @@ describe(SystemConfigService.name, () => {
       { should: 'validate numbers', config: { ffmpeg: { crf: 'not-a-number' } } },
       { should: 'validate booleans', config: { oauth: { enabled: 'invalid' } } },
       { should: 'validate enums', config: { ffmpeg: { transcode: 'unknown' } } },
-      { should: 'validate top level unknown options', config: { unknownOption: true } },
-      { should: 'validate nested unknown options', config: { ffmpeg: { unknownOption: true } } },
       { should: 'validate required oauth fields', config: { oauth: { enabled: true } } },
+      { should: 'warn for top level unknown options', warn: true, config: { unknownOption: true } },
+      { should: 'warn for nested unknown options', warn: true, config: { ffmpeg: { unknownOption: true } } },
     ];
 
     for (const test of tests) {
@@ -227,7 +232,12 @@ describe(SystemConfigService.name, () => {
         process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
         configMock.readFile.mockResolvedValue(JSON.stringify(test.config));
 
-        await expect(sut.getConfig()).rejects.toBeInstanceOf(Error);
+        if (test.warn) {
+          await sut.getConfig();
+          expect(warnLog).toHaveBeenCalled();
+        } else {
+          await expect(sut.getConfig()).rejects.toBeInstanceOf(Error);
+        }
       });
     }
   });

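The spec mirrors that split: cases tagged warn: true must now resolve and emit the warning, while every other case still rejects. Spying on ImmichLogger.prototype, rather than injecting a mock logger, catches warn() calls from whatever logger instance the service constructs internally; condensed from the updated test above:

    // Inside the describe('getConfig') block shown in the diff:
    const warnLog = jest.spyOn(ImmichLogger.prototype, 'warn');

    configMock.readFile.mockResolvedValue(JSON.stringify({ unknownOption: true }));
    await sut.getConfig();              // resolves instead of rejecting
    expect(warnLog).toHaveBeenCalled(); // the unknown key was only warned about

    warnLog.mockRestore();              // restore the real warn() for other tests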

@@ -35,7 +35,6 @@ export enum SystemConfigKey {
   JOB_THUMBNAIL_GENERATION_CONCURRENCY = 'job.thumbnailGeneration.concurrency',
   JOB_METADATA_EXTRACTION_CONCURRENCY = 'job.metadataExtraction.concurrency',
   JOB_VIDEO_CONVERSION_CONCURRENCY = 'job.videoConversion.concurrency',
-  JOB_OBJECT_TAGGING_CONCURRENCY = 'job.objectTagging.concurrency',
   JOB_RECOGNIZE_FACES_CONCURRENCY = 'job.recognizeFaces.concurrency',
   JOB_CLIP_ENCODING_CONCURRENCY = 'job.smartSearch.concurrency',
   JOB_BACKGROUND_TASK_CONCURRENCY = 'job.backgroundTask.concurrency',
@@ -54,10 +53,6 @@ export enum SystemConfigKey {
   MACHINE_LEARNING_ENABLED = 'machineLearning.enabled',
   MACHINE_LEARNING_URL = 'machineLearning.url',
 
-  MACHINE_LEARNING_CLASSIFICATION_ENABLED = 'machineLearning.classification.enabled',
-  MACHINE_LEARNING_CLASSIFICATION_MODEL_NAME = 'machineLearning.classification.modelName',
-  MACHINE_LEARNING_CLASSIFICATION_MIN_SCORE = 'machineLearning.classification.minScore',
-
   MACHINE_LEARNING_CLIP_ENABLED = 'machineLearning.clip.enabled',
   MACHINE_LEARNING_CLIP_MODEL_NAME = 'machineLearning.clip.modelName',
@@ -184,11 +179,6 @@ export interface SystemConfig {
   machineLearning: {
     enabled: boolean;
     url: string;
-    classification: {
-      enabled: boolean;
-      modelName: string;
-      minScore: number;
-    };
     clip: {
       enabled: boolean;
       modelName: string;


@@ -1,5 +1,4 @@
 import {
-  ClassificationConfig,
   CLIPConfig,
   CLIPMode,
   DetectFaceResult,
@@ -27,10 +26,6 @@ export class MachineLearningRepository implements IMachineLearningRepository {
     return res.json();
   }
 
-  classifyImage(url: string, input: VisionModelInput, config: ClassificationConfig): Promise<string[]> {
-    return this.post<string[]>(url, input, { ...config, modelType: ModelType.IMAGE_CLASSIFICATION });
-  }
-
   detectFaces(url: string, input: VisionModelInput, config: RecognitionConfig): Promise<DetectFaceResult[]> {
     return this.post<DetectFaceResult[]>(url, input, { ...config, modelType: ModelType.FACIAL_RECOGNITION });
   }


@@ -42,8 +42,6 @@ export class AppService {
       [JobName.CLEAN_OLD_AUDIT_LOGS]: () => this.auditService.handleCleanup(),
       [JobName.USER_DELETE_CHECK]: () => this.userService.handleUserDeleteCheck(),
       [JobName.USER_DELETION]: (data) => this.userService.handleUserDelete(data),
-      [JobName.QUEUE_OBJECT_TAGGING]: (data) => this.smartInfoService.handleQueueObjectTagging(data),
-      [JobName.CLASSIFY_IMAGE]: (data) => this.smartInfoService.handleClassifyImage(data),
       [JobName.QUEUE_ENCODE_CLIP]: (data) => this.smartInfoService.handleQueueEncodeClip(data),
       [JobName.ENCODE_CLIP]: (data) => this.smartInfoService.handleEncodeClip(data),
       [JobName.STORAGE_TEMPLATE_MIGRATION]: () => this.storageTemplateService.handleMigration(),