diff --git a/server/src/config.ts b/server/src/config.ts index a9770b92cc..deb869e366 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -233,7 +233,7 @@ export const defaults = Object.freeze({ [QueueName.ThumbnailGeneration]: { concurrency: 3 }, [QueueName.VideoConversion]: { concurrency: 1 }, [QueueName.Notification]: { concurrency: 5 }, - [QueueName.OCR]: { concurrency: 1 }, + [QueueName.Ocr]: { concurrency: 1 }, }, logging: { enabled: true, @@ -264,9 +264,9 @@ export const defaults = Object.freeze({ }, ocr: { enabled: true, - modelName: 'PP-OCRv5_server', + modelName: 'PP-OCRv5_mobile', minDetectionScore: 0.5, - minRecognitionScore: 0.9, + minRecognitionScore: 0.8, maxResolution: 736, }, }, diff --git a/server/src/dtos/job.dto.ts b/server/src/dtos/job.dto.ts index 381b6a6f8c..5daaeacdd3 100644 --- a/server/src/dtos/job.dto.ts +++ b/server/src/dtos/job.dto.ts @@ -95,5 +95,5 @@ export class AllJobStatusResponseDto implements Record [QueueName.BackupDatabase]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.OCR]!: JobStatusDto; + [QueueName.Ocr]!: JobStatusDto; } diff --git a/server/src/dtos/system-config.dto.ts b/server/src/dtos/system-config.dto.ts index d793861ec4..ff304d2865 100644 --- a/server/src/dtos/system-config.dto.ts +++ b/server/src/dtos/system-config.dto.ts @@ -205,7 +205,7 @@ class SystemConfigJobDto implements Record @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.OCR]!: JobSettingsDto; + [QueueName.Ocr]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() diff --git a/server/src/enum.ts b/server/src/enum.ts index cac69eecf0..28667235ed 100644 --- a/server/src/enum.ts +++ b/server/src/enum.ts @@ -511,7 +511,7 @@ export enum QueueName { Library = 'library', Notification = 'notifications', BackupDatabase = 'backupDatabase', - OCR = 'ocr', + Ocr = 'ocr', } export enum JobName { @@ -586,8 +586,8 @@ export enum JobName { VersionCheck = 'VersionCheck', // OCR - QUEUE_OCR = 'queue-ocr', - OCR = 'ocr', + OcrQueueAll = 'OcrQueueAll', + Ocr = 'Ocr', } export enum JobCommand { diff --git a/server/src/repositories/job.repository.ts b/server/src/repositories/job.repository.ts index 84b7b4b58d..5acd8d5746 100644 --- a/server/src/repositories/job.repository.ts +++ b/server/src/repositories/job.repository.ts @@ -220,9 +220,6 @@ export class JobRepository { case JobName.FacialRecognitionQueueAll: { return { jobId: JobName.FacialRecognitionQueueAll }; } - case JobName.QUEUE_OCR: { - return { jobId: JobName.QUEUE_OCR }; - } default: { return null; } diff --git a/server/src/repositories/machine-learning.repository.ts b/server/src/repositories/machine-learning.repository.ts index 75180cb1ff..60a2011653 100644 --- a/server/src/repositories/machine-learning.repository.ts +++ b/server/src/repositories/machine-learning.repository.ts @@ -218,6 +218,17 @@ export class MachineLearningRepository { return response[ModelTask.SEARCH]; } + async ocr(imagePath: string, { modelName, minDetectionScore, minRecognitionScore, maxResolution }: OcrOptions) { + const request = { + [ModelTask.OCR]: { + [ModelType.DETECTION]: { modelName, options: { minScore: minDetectionScore, maxResolution } }, + [ModelType.RECOGNITION]: { modelName, options: { minScore: minRecognitionScore } }, + }, + }; + const response = await this.predict({ imagePath }, request); + return response[ModelTask.OCR]; + } + private async getFormData(payload: ModelPayload, config: MachineLearningRequest): Promise { const formData = new FormData(); formData.append('entries', 
JSON.stringify(config)); @@ -233,19 +244,4 @@ export class MachineLearningRepository { return formData; } - - async ocr( - urls: string[], - imagePath: string, - { modelName, minDetectionScore, minRecognitionScore, maxResolution }: OcrOptions, - ) { - const request = { - [ModelTask.OCR]: { - [ModelType.DETECTION]: { modelName, options: { minScore: minDetectionScore, maxResolution } }, - [ModelType.RECOGNITION]: { modelName, options: { minScore: minRecognitionScore } }, - }, - }; - const response = await this.predict({ imagePath }, request); - return response[ModelTask.OCR]; - } } diff --git a/server/src/services/job.service.spec.ts b/server/src/services/job.service.spec.ts index 6b85cdff4d..432a9b6dbf 100644 --- a/server/src/services/job.service.spec.ts +++ b/server/src/services/job.service.spec.ts @@ -24,7 +24,7 @@ describe(JobService.name, () => { it('should update concurrency', () => { sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig }); - expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15); + expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(16); expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1); expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1); expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5); @@ -98,6 +98,7 @@ describe(JobService.name, () => { [QueueName.Library]: expectedJobStatus, [QueueName.Notification]: expectedJobStatus, [QueueName.BackupDatabase]: expectedJobStatus, + [QueueName.Ocr]: expectedJobStatus, }); }); }); @@ -270,12 +271,12 @@ describe(JobService.name, () => { }, { item: { name: JobName.AssetGenerateThumbnails, data: { id: 'asset-1', source: 'upload' } }, - jobs: [JobName.SmartSearch, JobName.AssetDetectFaces], + jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.Ocr], stub: [assetStub.livePhotoStillAsset], }, { item: { name: JobName.AssetGenerateThumbnails, data: { id: 'asset-1', source: 'upload' } }, - jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.AssetEncodeVideo], + jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.Ocr, JobName.AssetEncodeVideo], stub: [assetStub.video], }, { diff --git a/server/src/services/job.service.ts b/server/src/services/job.service.ts index 81a14f41a8..c3b6912d89 100644 --- a/server/src/services/job.service.ts +++ b/server/src/services/job.service.ts @@ -237,12 +237,8 @@ export class JobService extends BaseService { return this.jobRepository.queue({ name: JobName.DatabaseBackup, data: { force } }); } - case QueueName.OCR: { - return this.jobRepository.queue({ name: JobName.QUEUE_OCR, data: { force } }); - } - - case QueueName.OCR: { - return this.jobRepository.queue({ name: JobName.QUEUE_OCR, data: { force } }); + case QueueName.Ocr: { + return this.jobRepository.queue({ name: JobName.OcrQueueAll, data: { force } }); } default: { @@ -361,7 +357,7 @@ export class JobService extends BaseService { const jobs: JobItem[] = [ { name: JobName.SmartSearch, data: item.data }, { name: JobName.AssetDetectFaces, data: item.data }, - { name: JobName.OCR, data: item.data }, + { name: JobName.Ocr, data: item.data }, ]; if (asset.type === AssetType.Video) { diff --git a/server/src/services/ocr.service.spec.ts b/server/src/services/ocr.service.spec.ts index e69de29bb2..6eedba1a5f 100644 --- a/server/src/services/ocr.service.spec.ts +++ b/server/src/services/ocr.service.spec.ts @@ -0,0 +1,177 @@ +import { AssetVisibility, ImmichWorker, JobName, JobStatus } 
from 'src/enum'; +import { OcrService } from 'src/services/ocr.service'; +import { assetStub } from 'test/fixtures/asset.stub'; +import { systemConfigStub } from 'test/fixtures/system-config.stub'; +import { makeStream, newTestService, ServiceMocks } from 'test/utils'; + +describe(OcrService.name, () => { + let sut: OcrService; + let mocks: ServiceMocks; + + beforeEach(() => { + ({ sut, mocks } = newTestService(OcrService)); + + mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices); + }); + + it('should work', () => { + expect(sut).toBeDefined(); + }); + + describe('handleQueueOcr', () => { + it('should do nothing if machine learning is disabled', async () => { + mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); + + await sut.handleQueueOcr({ force: false }); + + expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); + }); + + it('should queue the assets without ocr', async () => { + mocks.assetJob.streamForOcrJob.mockReturnValue(makeStream([assetStub.image])); + + await sut.handleQueueOcr({ force: false }); + + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.Ocr, data: { id: assetStub.image.id } }]); + expect(mocks.assetJob.streamForOcrJob).toHaveBeenCalledWith(false); + }); + + it('should queue all the assets', async () => { + mocks.assetJob.streamForOcrJob.mockReturnValue(makeStream([assetStub.image])); + + await sut.handleQueueOcr({ force: true }); + + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.Ocr, data: { id: assetStub.image.id } }]); + expect(mocks.assetJob.streamForOcrJob).toHaveBeenCalledWith(true); + }); + }); + + describe('handleOcr', () => { + it('should do nothing if machine learning is disabled', async () => { + mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); + + expect(await sut.handleOcr({ id: '123' })).toEqual(JobStatus.Skipped); + + expect(mocks.asset.getByIds).not.toHaveBeenCalled(); + expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled(); + }); + + it('should skip assets without a resize path', async () => { + mocks.assetJob.getForOcr.mockResolvedValue({ visibility: AssetVisibility.Timeline, previewFile: null }); + + expect(await sut.handleOcr({ id: assetStub.noResizePath.id })).toEqual(JobStatus.Failed); + + expect(mocks.ocr.upsert).not.toHaveBeenCalled(); + expect(mocks.machineLearning.ocr).not.toHaveBeenCalled(); + }); + + it('should save the returned objects', async () => { + mocks.machineLearning.ocr.mockResolvedValue({ + box: [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160], + boxScore: [0.9, 0.8], + text: ['One Two Three', 'Four Five'], + textScore: [0.95, 0.85], + }); + mocks.assetJob.getForOcr.mockResolvedValue({ + visibility: AssetVisibility.Timeline, + previewFile: assetStub.image.files[1].path, + }); + + expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success); + + expect(mocks.machineLearning.ocr).toHaveBeenCalledWith( + '/uploads/user-id/thumbs/path.jpg', + expect.objectContaining({ + modelName: 'PP-OCRv5_mobile', + minDetectionScore: 0.5, + minRecognitionScore: 0.8, + maxResolution: 736, + }), + ); + expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, [ + { + assetId: assetStub.image.id, + boxScore: 0.9, + text: 'One Two Three', + textScore: 0.95, + x1: 10, + y1: 20, + x2: 30, + y2: 40, + x3: 50, + y3: 60, + x4: 70, + y4: 80, + }, + { + assetId: assetStub.image.id, + boxScore: 0.8, + text: 'Four Five', + textScore: 0.85, + x1: 90, + y1: 100, + x2: 
110, + y2: 120, + x3: 130, + y3: 140, + x4: 150, + y4: 160, + }, + ]); + }); + + it('should apply config settings', async () => { + mocks.systemMetadata.get.mockResolvedValue({ + machineLearning: { + enabled: true, + ocr: { + modelName: 'PP-OCRv5_server', + enabled: true, + minDetectionScore: 0.8, + minRecognitionScore: 0.9, + maxResolution: 1500, + }, + }, + }); + mocks.machineLearning.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] }); + mocks.assetJob.getForOcr.mockResolvedValue({ + visibility: AssetVisibility.Timeline, + previewFile: assetStub.image.files[1].path, + }); + + expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success); + + expect(mocks.machineLearning.ocr).toHaveBeenCalledWith( + '/uploads/user-id/thumbs/path.jpg', + expect.objectContaining({ + modelName: 'PP-OCRv5_server', + minDetectionScore: 0.8, + minRecognitionScore: 0.9, + maxResolution: 1500, + }), + ); + expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, []); + }); + + it('should skip invisible assets', async () => { + mocks.assetJob.getForOcr.mockResolvedValue({ + visibility: AssetVisibility.Hidden, + previewFile: assetStub.image.files[1].path, + }); + + expect(await sut.handleOcr({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.Skipped); + + expect(mocks.machineLearning.ocr).not.toHaveBeenCalled(); + expect(mocks.ocr.upsert).not.toHaveBeenCalled(); + }); + + it('should fail if asset could not be found', async () => { + mocks.assetJob.getForOcr.mockResolvedValue(void 0); + + expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Failed); + + expect(mocks.machineLearning.ocr).not.toHaveBeenCalled(); + expect(mocks.ocr.upsert).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/server/src/services/ocr.service.ts b/server/src/services/ocr.service.ts index 86a86fd88f..cba57e5bc7 100644 --- a/server/src/services/ocr.service.ts +++ b/server/src/services/ocr.service.ts @@ -9,8 +9,8 @@ import { isOcrEnabled } from 'src/utils/misc'; @Injectable() export class OcrService extends BaseService { - @OnJob({ name: JobName.QUEUE_OCR, queue: QueueName.OCR }) - async handleQueueOcr({ force, nightly }: JobOf): Promise { + @OnJob({ name: JobName.OcrQueueAll, queue: QueueName.Ocr }) + async handleQueueOcr({ force }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: false }); if (!isOcrEnabled(machineLearning)) { return JobStatus.Skipped; @@ -24,7 +24,7 @@ export class OcrService extends BaseService { const assets = this.assetJobRepository.streamForOcrJob(force); for await (const asset of assets) { - jobs.push({ name: JobName.OCR, data: { id: asset.id } }); + jobs.push({ name: JobName.Ocr, data: { id: asset.id } }); if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(jobs); @@ -36,8 +36,8 @@ export class OcrService extends BaseService { return JobStatus.Success; } - @OnJob({ name: JobName.OCR, queue: QueueName.OCR }) - async handleOcr({ id }: JobOf): Promise { + @OnJob({ name: JobName.Ocr, queue: QueueName.Ocr }) + async handleOcr({ id }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: true }); if (!isOcrEnabled(machineLearning)) { return JobStatus.Skipped; @@ -52,11 +52,7 @@ export class OcrService extends BaseService { return JobStatus.Skipped; } - const ocrResults = await this.machineLearningRepository.ocr( - machineLearning.urls, - asset.previewFile, - machineLearning.ocr, - ); + const ocrResults = await 
this.machineLearningRepository.ocr(asset.previewFile, machineLearning.ocr); await this.ocrRepository.upsert(id, this.parseOcrResults(id, ocrResults)); @@ -66,7 +62,7 @@ export class OcrService extends BaseService { return JobStatus.Success; } - parseOcrResults(id: string, { box, boxScore, text, textScore }: OCR) { + private parseOcrResults(id: string, { box, boxScore, text, textScore }: OCR) { const ocrDataList = []; for (let i = 0; i < text.length; i++) { const boxOffset = i * 8; diff --git a/server/src/services/server.service.spec.ts b/server/src/services/server.service.spec.ts index a96a9925db..8e39f09c62 100644 --- a/server/src/services/server.service.spec.ts +++ b/server/src/services/server.service.spec.ts @@ -141,6 +141,7 @@ describe(ServerService.name, () => { reverseGeocoding: true, oauth: false, oauthAutoLaunch: false, + ocr: true, passwordLogin: true, search: true, sidecar: true, diff --git a/server/src/services/system-config.service.spec.ts b/server/src/services/system-config.service.spec.ts index 5a9c7f4df3..a72c5be7e2 100644 --- a/server/src/services/system-config.service.spec.ts +++ b/server/src/services/system-config.service.spec.ts @@ -39,6 +39,7 @@ const updatedConfig = Object.freeze({ [QueueName.ThumbnailGeneration]: { concurrency: 3 }, [QueueName.VideoConversion]: { concurrency: 1 }, [QueueName.Notification]: { concurrency: 5 }, + [QueueName.Ocr]: { concurrency: 1 }, }, backup: { database: { @@ -102,6 +103,13 @@ const updatedConfig = Object.freeze({ maxDistance: 0.5, minFaces: 3, }, + ocr: { + enabled: true, + modelName: 'PP-OCRv5_mobile', + minDetectionScore: 0.5, + minRecognitionScore: 0.8, + maxResolution: 736, + }, }, map: { enabled: true, diff --git a/server/src/types.ts b/server/src/types.ts index 066b39ecbc..66045521d0 100644 --- a/server/src/types.ts +++ b/server/src/types.ts @@ -373,8 +373,8 @@ export type JobItem = | { name: JobName.VersionCheck; data: IBaseJob } // OCR - | { name: JobName.QUEUE_OCR; data: INightlyJob } - | { name: JobName.OCR; data: IEntityJob }; + | { name: JobName.OcrQueueAll; data: IBaseJob } + | { name: JobName.Ocr; data: IEntityJob }; export type VectorExtension = (typeof VECTOR_EXTENSIONS)[number]; diff --git a/server/test/medium/specs/services/ocr.service.spec.ts b/server/test/medium/specs/services/ocr.service.spec.ts new file mode 100644 index 0000000000..cf51d980ec --- /dev/null +++ b/server/test/medium/specs/services/ocr.service.spec.ts @@ -0,0 +1,174 @@ +import { Kysely } from 'kysely'; +import { AssetJobRepository } from 'src/repositories/asset-job.repository'; +import { AssetRepository } from 'src/repositories/asset.repository'; +import { JobRepository } from 'src/repositories/job.repository'; +import { LoggingRepository } from 'src/repositories/logging.repository'; +import { MachineLearningRepository } from 'src/repositories/machine-learning.repository'; +import { OcrRepository } from 'src/repositories/ocr.repository'; +import { DB } from 'src/schema'; +import { OcrService } from 'src/services/ocr.service'; +import { newMediumService } from 'test/medium.factory'; +import { getKyselyDB } from 'test/utils'; + +let defaultDatabase: Kysely; + +const setup = (db?: Kysely) => { + return newMediumService(OcrService, { + database: db || defaultDatabase, + real: [AssetRepository, AssetJobRepository, JobRepository, OcrRepository], + mock: [LoggingRepository, MachineLearningRepository], + }); +}; + +beforeAll(async () => { + defaultDatabase = await getKyselyDB(); +}); + +describe(OcrService.name, () => { + it('should work', () => { + const 
{ sut } = setup(); + expect(sut).toBeDefined(); + }); + + it('should parse asset', async () => { + const { sut, ctx } = setup(); + const { user } = await ctx.newUser(); + const { asset } = await ctx.newAsset({ ownerId: user.id }); + + const machineLearningMock = ctx.getMock(MachineLearningRepository); + machineLearningMock.ocr.mockResolvedValue({ + box: [10, 10, 50, 10, 50, 50, 10, 50], + boxScore: [0.99], + text: ['Test OCR'], + textScore: [0.95], + }); + + await expect(sut.handleOcr({ id: asset.id })).resolves.toBe('Success'); + + const ocrRepository = ctx.get(OcrRepository); + await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([ + { + assetId: asset.id, + boxScore: 0.99, + id: expect.any(String), + text: 'Test OCR', + textScore: 0.95, + x1: 10, + y1: 10, + x2: 50, + y2: 10, + x3: 50, + y3: 50, + x4: 10, + y4: 50, + }, + ]); + await expect( + ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(), + ).resolves.toEqual({ + assetId: asset.id, + text: 'Test OCR', + }); + }); + + it('should handle multiple boxes', async () => { + const { sut, ctx } = setup(); + const { user } = await ctx.newUser(); + const { asset } = await ctx.newAsset({ ownerId: user.id }); + + const machineLearningMock = ctx.getMock(MachineLearningRepository); + machineLearningMock.ocr.mockResolvedValue({ + box: Array.from({ length: 8 * 10 }, (_, i) => i), + boxScore: [0.7, 0.67, 0.65, 0.62, 0.6], + text: ['One', 'Two', 'Three', 'Four', 'Five'], + textScore: [0.9, 0.89, 0.88, 0.87, 0.86], + }); + + await expect(sut.handleOcr({ id: asset.id })).resolves.toBe('Success'); + + const ocrRepository = ctx.get(OcrRepository); + await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([ + { + assetId: asset.id, + boxScore: 0.7, + id: expect.any(String), + text: 'One', + textScore: 0.9, + x1: 0, + y1: 1, + x2: 2, + y2: 3, + x3: 4, + y3: 5, + x4: 6, + y4: 7, + }, + { + assetId: asset.id, + boxScore: 0.67, + id: expect.any(String), + text: 'Two', + textScore: 0.89, + x1: 8, + y1: 9, + x2: 10, + y2: 11, + x3: 12, + y3: 13, + x4: 14, + y4: 15, + }, + { + assetId: asset.id, + boxScore: 0.65, + id: expect.any(String), + text: 'Three', + textScore: 0.88, + x1: 16, + y1: 17, + x2: 18, + y2: 19, + x3: 20, + y3: 21, + x4: 22, + y4: 23, + }, + { + assetId: asset.id, + boxScore: 0.62, + id: expect.any(String), + text: 'Four', + textScore: 0.87, + x1: 24, + y1: 25, + x2: 26, + y2: 27, + x3: 28, + y3: 29, + x4: 30, + y4: 31, + }, + { + assetId: asset.id, + boxScore: 0.6, + id: expect.any(String), + text: 'Five', + textScore: 0.86, + x1: 32, + y1: 33, + x2: 34, + y2: 35, + x3: 36, + y3: 37, + x4: 38, + y4: 39, + }, + ]); + await expect( + ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(), + ).resolves.toEqual({ + assetId: asset.id, + text: 'One Two Three Four Five', + }); + }); +}); diff --git a/server/test/utils.ts b/server/test/utils.ts index c23341d64c..a7fa05c173 100644 --- a/server/test/utils.ts +++ b/server/test/utils.ts @@ -41,6 +41,7 @@ import { MetadataRepository } from 'src/repositories/metadata.repository'; import { MoveRepository } from 'src/repositories/move.repository'; import { NotificationRepository } from 'src/repositories/notification.repository'; import { OAuthRepository } from 'src/repositories/oauth.repository'; +import { OcrRepository } from 'src/repositories/ocr.repository'; import { PartnerRepository } from 'src/repositories/partner.repository'; import { PersonRepository } from 
'src/repositories/person.repository'; import { ProcessRepository } from 'src/repositories/process.repository'; @@ -228,6 +229,7 @@ export type ServiceOverrides = { metadata: MetadataRepository; move: MoveRepository; notification: NotificationRepository; + ocr: OcrRepository; oauth: OAuthRepository; partner: PartnerRepository; person: PersonRepository; @@ -298,6 +300,7 @@ export const newTestService = ( metadata: newMetadataRepositoryMock(), move: automock(MoveRepository, { strict: false }), notification: automock(NotificationRepository), + ocr: automock(OcrRepository, { strict: false }), oauth: automock(OAuthRepository, { args: [loggerMock] }), partner: automock(PartnerRepository, { strict: false }), person: automock(PersonRepository, { strict: false }), @@ -350,6 +353,7 @@ export const newTestService = ( overrides.move || (mocks.move as As), overrides.notification || (mocks.notification as As), overrides.oauth || (mocks.oauth as As), + overrides.ocr || (mocks.ocr as As), overrides.partner || (mocks.partner as As), overrides.person || (mocks.person as As), overrides.process || (mocks.process as As),
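// Illustrative sketch only, not part of the patch: the spec fixtures above assume the ML
// service returns OCR results as flat parallel arrays, where each detected region owns
// eight consecutive `box` entries in x1, y1 .. x4, y4 order. The mapping below mirrors the
// `boxOffset = i * 8` logic in OcrService.parseOcrResults and the columns asserted in the
// tests; the `OcrResponse` and `toOcrRows` names are hypothetical.
interface OcrResponse {
  box: number[]; // 8 values per region: x1, y1, x2, y2, x3, y3, x4, y4
  boxScore: number[]; // one detection score per region
  text: string[]; // one recognized string per region
  textScore: number[]; // one recognition score per region
}

const toOcrRows = (assetId: string, { box, boxScore, text, textScore }: OcrResponse) =>
  text.map((value, i) => {
    const o = i * 8; // each region consumes 8 consecutive box entries
    return {
      assetId,
      text: value,
      boxScore: boxScore[i],
      textScore: textScore[i],
      x1: box[o], y1: box[o + 1],
      x2: box[o + 2], y2: box[o + 3],
      x3: box[o + 4], y3: box[o + 5],
      x4: box[o + 6], y4: box[o + 7],
    };
  });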