import { Injectable } from '@nestjs/common';
import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
import { Exif } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
  AssetFileType,
  AssetPathType,
  AssetType,
  AssetVisibility,
  AudioCodec,
  Colorspace,
  ImageFormat,
  JobName,
  JobStatus,
  LogLevel,
  QueueName,
  RawExtractedFormat,
  StorageFolder,
  TranscodeHardwareAcceleration,
  TranscodePolicy,
  TranscodeTarget,
  VideoCodec,
  VideoContainer,
} from 'src/enum';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import {
  AudioStreamInfo,
  CropOptions,
  DecodeToBufferOptions,
  ImageDimensions,
  JobItem,
  JobOf,
  VideoFormat,
  VideoInterfaces,
  VideoStreamInfo,
} from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';

interface UpsertFileOptions {
  assetId: string;
  type: AssetFileType;
  path: string;
}

@Injectable()
export class MediaService extends BaseService {
  videoInterfaces: VideoInterfaces = { dri: [], mali: false };

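  /**
   * Probes the available hardware once at startup: DRI render nodes under /dev/dri and the
   * presence of a Mali OpenCL ICD. The result is cached in `videoInterfaces` and reused when
   * building ffmpeg commands for hardware-accelerated transcoding.
   */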
  @OnEvent({ name: 'AppBootstrap' })
  async onBootstrap() {
    const [dri, mali] = await Promise.all([this.getDevices(), this.hasMaliOpenCL()]);
    this.videoInterfaces = { dri, mali };
  }

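  /**
   * Queues thumbnail generation for assets and people. Without `force`, only assets that are
   * missing a preview, thumbnail, or thumbhash are queued; jobs are flushed in batches of
   * JOBS_ASSET_PAGINATION_SIZE to avoid building one large in-memory list.
   */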
  @OnJob({ name: JobName.AssetGenerateThumbnailsQueueAll, queue: QueueName.ThumbnailGeneration })
  async handleQueueGenerateThumbnails({ force }: JobOf<JobName.AssetGenerateThumbnailsQueueAll>): Promise<JobStatus> {
    let jobs: JobItem[] = [];

    const queueAll = async () => {
      await this.jobRepository.queueAll(jobs);
      jobs = [];
    };

    for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
      const { previewFile, thumbnailFile } = getAssetFiles(asset.files);

      if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
        jobs.push({ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } });
      }

      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await queueAll();
      }
    }

    await queueAll();

    const people = this.personRepository.getAll(force ? undefined : { thumbnailPath: '' });

    for await (const person of people) {
      if (!person.faceAssetId) {
        const face = await this.personRepository.getRandomFace(person.id);
        if (!face) {
          continue;
        }

        await this.personRepository.update({ id: person.id, faceAssetId: face.id });
      }

      jobs.push({ name: JobName.PersonGenerateThumbnail, data: { id: person.id } });
      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await queueAll();
      }
    }

    await queueAll();

    return JobStatus.Success;
  }

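  /**
   * Queues storage-migration jobs for all assets and people. When this is the only active job in
   * the migration queue and nothing is waiting, it first prunes empty directories left behind in
   * the thumbnail and encoded-video folders.
   */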
  @OnJob({ name: JobName.FileMigrationQueueAll, queue: QueueName.Migration })
  async handleQueueMigration(): Promise<JobStatus> {
    const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.Migration);
    if (active === 1 && waiting === 0) {
      await this.storageCore.removeEmptyDirs(StorageFolder.Thumbnails);
      await this.storageCore.removeEmptyDirs(StorageFolder.EncodedVideo);
    }

    let jobs: JobItem[] = [];
    const assets = this.assetJobRepository.streamForMigrationJob();
    for await (const asset of assets) {
      jobs.push({ name: JobName.AssetFileMigration, data: { id: asset.id } });
      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(jobs);
        jobs = [];
      }
    }

    await this.jobRepository.queueAll(jobs);
    jobs = [];

    for await (const person of this.personRepository.getAll()) {
      jobs.push({ name: JobName.PersonFileMigration, data: { id: person.id } });

      if (jobs.length === JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(jobs);
        jobs = [];
      }
    }

    await this.jobRepository.queueAll(jobs);

    return JobStatus.Success;
  }

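  /**
   * Moves a single asset's generated files (full-size, preview, thumbnail, and encoded video) to
   * their current storage locations and target formats, as defined by the image configuration.
   */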
  @OnJob({ name: JobName.AssetFileMigration, queue: QueueName.Migration })
  async handleAssetMigration({ id }: JobOf<JobName.AssetFileMigration>): Promise<JobStatus> {
    const { image } = await this.getConfig({ withCache: true });
    const asset = await this.assetJobRepository.getForMigrationJob(id);
    if (!asset) {
      return JobStatus.Failed;
    }

    await this.storageCore.moveAssetImage(asset, AssetPathType.FullSize, image.fullsize.format);
    await this.storageCore.moveAssetImage(asset, AssetPathType.Preview, image.preview.format);
    await this.storageCore.moveAssetImage(asset, AssetPathType.Thumbnail, image.thumbnail.format);
    await this.storageCore.moveAssetVideo(asset);

    return JobStatus.Success;
  }

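  /**
   * Regenerates the preview, thumbnail, thumbhash, and (optionally) full-size image for one
   * asset, upserts the resulting file records, and deletes any previously generated files whose
   * paths have changed. Videos and GIFs go through the ffmpeg path; still images go through the
   * image pipeline.
   */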
  @OnJob({ name: JobName.AssetGenerateThumbnails, queue: QueueName.ThumbnailGeneration })
  async handleGenerateThumbnails({ id }: JobOf<JobName.AssetGenerateThumbnails>): Promise<JobStatus> {
    const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);
    if (!asset) {
      this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`);
      return JobStatus.Failed;
    }

    if (asset.visibility === AssetVisibility.Hidden) {
      this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`);
      return JobStatus.Skipped;
    }

    let generated: {
      previewPath: string;
      thumbnailPath: string;
      fullsizePath?: string;
      thumbhash: Buffer;
    };
    if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
      generated = await this.generateVideoThumbnails(asset);
    } else if (asset.type === AssetType.Image) {
      generated = await this.generateImageThumbnails(asset);
    } else {
      this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
      return JobStatus.Skipped;
    }

    const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files);
    const toUpsert: UpsertFileOptions[] = [];
    if (previewFile?.path !== generated.previewPath) {
      toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.Preview });
    }

    if (thumbnailFile?.path !== generated.thumbnailPath) {
      toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.Thumbnail });
    }

    if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) {
      toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FullSize });
    }

    if (toUpsert.length > 0) {
      await this.assetRepository.upsertFiles(toUpsert);
    }

    const pathsToDelete: string[] = [];
    if (previewFile && previewFile.path !== generated.previewPath) {
      this.logger.debug(`Deleting old preview for asset ${asset.id}`);
      pathsToDelete.push(previewFile.path);
    }

    if (thumbnailFile && thumbnailFile.path !== generated.thumbnailPath) {
      this.logger.debug(`Deleting old thumbnail for asset ${asset.id}`);
      pathsToDelete.push(thumbnailFile.path);
    }

    if (fullsizeFile && fullsizeFile.path !== generated.fullsizePath) {
      this.logger.debug(`Deleting old fullsize preview image for asset ${asset.id}`);
      pathsToDelete.push(fullsizeFile.path);
      if (!generated.fullsizePath) {
        // did not generate a new fullsize image, delete the existing record
        await this.assetRepository.deleteFiles([fullsizeFile]);
      }
    }

    if (pathsToDelete.length > 0) {
      await Promise.all(pathsToDelete.map((path) => this.storageRepository.unlink(path)));
    }

    if (!asset.thumbhash || Buffer.compare(asset.thumbhash, generated.thumbhash) !== 0) {
      await this.assetRepository.update({ id: asset.id, thumbhash: generated.thumbhash });
    }

    await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() });

    return JobStatus.Success;
  }

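  /**
   * Attempts to use the preview embedded in a RAW file. The extracted image is only kept when it
   * is at least as large as the requested minimum size; otherwise the caller falls back to
   * decoding the original file.
   */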
  private async extractImage(originalPath: string, minSize: number) {
    let extracted = await this.mediaRepository.extract(originalPath);
    if (extracted && !(await this.shouldUseExtractedImage(extracted.buffer, minSize))) {
      extracted = null;
    }

    return extracted;
  }

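  /**
   * Decodes an image file or buffer into raw pixel data, picking sRGB as the working colorspace
   * for sRGB sources and the configured colorspace otherwise, and honoring EXIF orientation when
   * present. The returned `info` describes the raw buffer so later resize steps can reuse it
   * without re-decoding.
   */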
  private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number) {
    const { image } = await this.getConfig({ withCache: true });
    const colorspace = this.isSRGB(exifInfo) ? Colorspace.Srgb : image.colorspace;
    const decodeOptions: DecodeToBufferOptions = {
      colorspace,
      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
      size: targetSize,
      orientation: exifInfo.orientation ? Number(exifInfo.orientation) : undefined,
    };

    const { info, data } = await this.mediaRepository.decodeImage(thumbSource, decodeOptions);
    return { info, data, colorspace };
  }

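  /**
   * Generates the preview, thumbnail, and thumbhash for a still image. For RAW files the embedded
   * preview may be used as the decode source, and a separate full-size web image is produced when
   * the original is not web-friendly or is an equirectangular (360°) panorama.
   */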
  private async generateImageThumbnails(asset: {
    id: string;
    ownerId: string;
    originalFileName: string;
    originalPath: string;
    exifInfo: Exif;
  }) {
    const { image } = await this.getConfig({ withCache: true });
    const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
    const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
    this.storageCore.ensureFolders(previewPath);

    // Handle embedded preview extraction for RAW files
    const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
    const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
    const generateFullsize =
      (image.fullsize.enabled || asset.exifInfo.projectionType == 'EQUIRECTANGULAR') &&
      !mimeTypes.isWebSupportedImage(asset.originalPath);
    const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));

    const { info, data, colorspace } = await this.decodeImage(
      extracted ? extracted.buffer : asset.originalPath,
      // only specify orientation to extracted images which don't have EXIF orientation data
      // or it can double rotate the image
      extracted ? asset.exifInfo : { ...asset.exifInfo, orientation: null },
      convertFullsize ? undefined : image.preview.size,
    );

    // generate final images
    const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info };
    const promises = [
      this.mediaRepository.generateThumbhash(data, thumbnailOptions),
      this.mediaRepository.generateThumbnail(data, { ...image.thumbnail, ...thumbnailOptions }, thumbnailPath),
      this.mediaRepository.generateThumbnail(data, { ...image.preview, ...thumbnailOptions }, previewPath),
    ];

    let fullsizePath: string | undefined;

    if (convertFullsize) {
      // convert a new fullsize image from the same source as the thumbnail
      fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, image.fullsize.format);
      const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions };
      promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
    } else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.Jpeg) {
      fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, extracted.format);
      this.storageCore.ensureFolders(fullsizePath);

      // Write the buffer to disk with essential EXIF data
      await this.storageRepository.createOrOverwriteFile(fullsizePath, extracted.buffer);
      await this.mediaRepository.writeExif(
        {
          orientation: asset.exifInfo.orientation,
          colorspace: asset.exifInfo.colorspace,
        },
        fullsizePath,
      );
    }

    const outputs = await Promise.all(promises);

    return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
  }

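  /**
   * Crops a face thumbnail for a person from the most suitable source: the preview image for
   * videos, the embedded RAW preview when available, or the original file otherwise. The face
   * bounding box is scaled from the original dimensions to the decoded dimensions before cropping.
   */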
  @OnJob({ name: JobName.PersonGenerateThumbnail, queue: QueueName.ThumbnailGeneration })
  async handleGeneratePersonThumbnail({ id }: JobOf<JobName.PersonGenerateThumbnail>): Promise<JobStatus> {
    const { machineLearning, metadata, image } = await this.getConfig({ withCache: true });
    if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) {
      return JobStatus.Skipped;
    }

    const data = await this.personRepository.getDataForThumbnailGenerationJob(id);
    if (!data) {
      this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
      return JobStatus.Failed;
    }

    const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight, exifOrientation, previewPath, originalPath } = data;
    let inputImage: string | Buffer;
    if (data.type === AssetType.Video) {
      if (!previewPath) {
        this.logger.error(`Could not generate person thumbnail for video ${id}: missing preview path`);
        return JobStatus.Failed;
      }
      inputImage = previewPath;
    } else if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) {
      const extracted = await this.extractImage(originalPath, image.preview.size);
      inputImage = extracted ? extracted.buffer : originalPath;
    } else {
      inputImage = originalPath;
    }

    const { data: decodedImage, info } = await this.mediaRepository.decodeImage(inputImage, {
      colorspace: image.colorspace,
      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
      // if this is an extracted image, it may not have orientation metadata
      orientation: Buffer.isBuffer(inputImage) && exifOrientation ? Number(exifOrientation) : undefined,
    });

    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
    this.storageCore.ensureFolders(thumbnailPath);

    const thumbnailOptions = {
      colorspace: image.colorspace,
      format: ImageFormat.Jpeg,
      raw: info,
      quality: image.thumbnail.quality,
      crop: this.getCrop(
        { old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
        { x1, y1, x2, y2 },
      ),
      processInvalidImages: false,
      size: FACE_THUMBNAIL_SIZE,
    };

    await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath);
    await this.personRepository.update({ id, thumbnailPath });

    return JobStatus.Success;
  }

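  /**
   * Converts a face bounding box (in original-image coordinates) into a square crop in
   * decoded-image coordinates, zoomed out by 10% and clamped so the square never overflows the
   * decoded image. Illustrative example: for a 1000x1000 original decoded at 500x500 with a face
   * box of (100, 100)-(200, 200), the scale is 0.5, the face center maps to (75, 75), the zoomed
   * half-size is 27, and the resulting crop is { left: 48, top: 48, width: 54, height: 54 }.
   */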
  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
    // face bounding boxes can spill outside the image dimensions
    const clampedX1 = clamp(x1, 0, dims.old.width);
    const clampedY1 = clamp(y1, 0, dims.old.height);
    const clampedX2 = clamp(x2, 0, dims.old.width);
    const clampedY2 = clamp(y2, 0, dims.old.height);

    const widthScale = dims.new.width / dims.old.width;
    const heightScale = dims.new.height / dims.old.height;

    const halfWidth = (widthScale * (clampedX2 - clampedX1)) / 2;
    const halfHeight = (heightScale * (clampedY2 - clampedY1)) / 2;

    const middleX = Math.round(widthScale * clampedX1 + halfWidth);
    const middleY = Math.round(heightScale * clampedY1 + halfHeight);

    // zoom out 10%
    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);

    // get the longest distance from the center of the image without overflowing
    const newHalfSize = Math.min(
      middleX - Math.max(0, middleX - targetHalfSize),
      middleY - Math.max(0, middleY - targetHalfSize),
      Math.min(dims.new.width - 1, middleX + targetHalfSize) - middleX,
      Math.min(dims.new.height - 1, middleY + targetHalfSize) - middleY,
    );

    return {
      left: middleX - newHalfSize,
      top: middleY - newHalfSize,
      width: newHalfSize * 2,
      height: newHalfSize * 2,
    };
  }

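  /**
   * Generates preview and thumbnail images for a video by probing its streams, picking the main
   * video/audio stream, and running two ffmpeg thumbnail commands at the configured preview and
   * thumbnail resolutions. The thumbhash is then computed from the generated preview image.
   */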
  private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) {
    const { image, ffmpeg } = await this.getConfig({ withCache: true });
    const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
    const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
    this.storageCore.ensureFolders(previewPath);

    const { format, audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath);
    const mainVideoStream = this.getMainStream(videoStreams);
    if (!mainVideoStream) {
      throw new Error(`No video streams found for asset ${asset.id}`);
    }
    const mainAudioStream = this.getMainStream(audioStreams);

    const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() });
    const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
    const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format);
    const thumbnailOptions = thumbnailConfig.getCommand(
      TranscodeTarget.Video,
      mainVideoStream,
      mainAudioStream,
      format,
    );

    await this.mediaRepository.transcode(asset.originalPath, previewPath, previewOptions);
    await this.mediaRepository.transcode(asset.originalPath, thumbnailPath, thumbnailOptions);

    const thumbhash = await this.mediaRepository.generateThumbhash(previewPath, {
      colorspace: image.colorspace,
      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
    });

    return { previewPath, thumbnailPath, thumbhash };
  }

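  /**
   * Queues video encoding jobs for candidate videos, honoring the `force` flag and batching queue
   * submissions at JOBS_ASSET_PAGINATION_SIZE.
   */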
  @OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })
  async handleQueueVideoConversion(job: JobOf<JobName.AssetEncodeVideoQueueAll>): Promise<JobStatus> {
    const { force } = job;

    let queue: { name: JobName.AssetEncodeVideo; data: { id: string } }[] = [];
    for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
      queue.push({ name: JobName.AssetEncodeVideo, data: { id: asset.id } });

      if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(queue);
        queue = [];
      }
    }

    await this.jobRepository.queueAll(queue);

    return JobStatus.Success;
  }

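  /**
   * Transcodes or remuxes a single video according to the current ffmpeg policy. When no work is
   * required, any previously encoded copy is queued for deletion. If a hardware-accelerated encode
   * fails, it falls back to software decoding first (when accelerated decoding was enabled) and
   * then to fully software transcoding.
   */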
  @OnJob({ name: JobName.AssetEncodeVideo, queue: QueueName.VideoConversion })
  async handleVideoConversion({ id }: JobOf<JobName.AssetEncodeVideo>): Promise<JobStatus> {
    const asset = await this.assetJobRepository.getForVideoConversion(id);
    if (!asset) {
      return JobStatus.Failed;
    }

    const input = asset.originalPath;
    const output = StorageCore.getEncodedVideoPath(asset);
    this.storageCore.ensureFolders(output);

    const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
      countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
    });
    const videoStream = this.getMainStream(videoStreams);
    const audioStream = this.getMainStream(audioStreams);
    if (!videoStream || !format.formatName) {
      return JobStatus.Failed;
    }

    if (!videoStream.height || !videoStream.width) {
      this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
      return JobStatus.Failed;
    }

    let { ffmpeg } = await this.getConfig({ withCache: true });
    const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
    if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) {
      if (asset.encodedVideoPath) {
        this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
        await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: [asset.encodedVideoPath] } });
        await this.assetRepository.update({ id: asset.id, encodedVideoPath: null });
      } else {
        this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
      }

      return JobStatus.Skipped;
    }

    const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
    if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
      this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
    } else {
      this.logger.log(
        `Transcoding video ${asset.id} with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and${ffmpeg.accelDecode ? '' : ' software'} decoding`,
      );
    }

    try {
      await this.mediaRepository.transcode(input, output, command);
    } catch (error: any) {
      this.logger.error(`Error occurred during transcoding: ${error.message}`);
      if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
        return JobStatus.Failed;
      }

      let partialFallbackSuccess = false;
      if (ffmpeg.accelDecode) {
        try {
          this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
          ffmpeg = { ...ffmpeg, accelDecode: false };
          const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
          await this.mediaRepository.transcode(input, output, command);
          partialFallbackSuccess = true;
        } catch (error: any) {
          this.logger.error(`Error occurred during transcoding: ${error.message}`);
        }
      }

      if (!partialFallbackSuccess) {
        this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
        ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
        const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
        await this.mediaRepository.transcode(input, output, command);
      }
    }

    this.logger.log(`Successfully encoded ${asset.id}`);

    await this.assetRepository.update({ id: asset.id, encodedVideoPath: output });

    return JobStatus.Success;
  }

  private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {
    return streams
      .filter((stream) => stream.codecName !== 'unknown')
      .sort((stream1, stream2) => stream2.bitrate - stream1.bitrate)[0];
  }

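  /**
   * Decides which streams need transcoding. Audio and video are evaluated independently against
   * the transcode policy, accepted codecs, target resolution, and max bitrate; the combined result
   * selects All, Audio, Video, or None as the transcode target.
   */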
  private getTranscodeTarget(
    config: SystemConfigFFmpegDto,
    videoStream: VideoStreamInfo,
    audioStream?: AudioStreamInfo,
  ): TranscodeTarget {
    const isAudioTranscodeRequired = this.isAudioTranscodeRequired(config, audioStream);
    const isVideoTranscodeRequired = this.isVideoTranscodeRequired(config, videoStream);

    if (isAudioTranscodeRequired && isVideoTranscodeRequired) {
      return TranscodeTarget.All;
    }

    if (isAudioTranscodeRequired) {
      return TranscodeTarget.Audio;
    }

    if (isVideoTranscodeRequired) {
      return TranscodeTarget.Video;
    }

    return TranscodeTarget.None;
  }

  private isAudioTranscodeRequired(ffmpegConfig: SystemConfigFFmpegDto, stream?: AudioStreamInfo): boolean {
    if (!stream) {
      return false;
    }

    switch (ffmpegConfig.transcode) {
      case TranscodePolicy.Disabled: {
        return false;
      }
      case TranscodePolicy.All: {
        return true;
      }
      case TranscodePolicy.Required:
      case TranscodePolicy.Optimal:
      case TranscodePolicy.Bitrate: {
        return !ffmpegConfig.acceptedAudioCodecs.includes(stream.codecName as AudioCodec);
      }
      default: {
        throw new Error(`Unsupported transcode policy: ${ffmpegConfig.transcode}`);
      }
    }
  }

  private isVideoTranscodeRequired(ffmpegConfig: SystemConfigFFmpegDto, stream: VideoStreamInfo): boolean {
    const scalingEnabled = ffmpegConfig.targetResolution !== 'original';
    const targetRes = Number.parseInt(ffmpegConfig.targetResolution);
    const isLargerThanTargetRes = scalingEnabled && Math.min(stream.height, stream.width) > targetRes;
    const isLargerThanTargetBitrate = stream.bitrate > this.parseBitrateToBps(ffmpegConfig.maxBitrate);

    const isTargetVideoCodec = ffmpegConfig.acceptedVideoCodecs.includes(stream.codecName as VideoCodec);
    const isRequired = !isTargetVideoCodec || !stream.pixelFormat.endsWith('420p');

    switch (ffmpegConfig.transcode) {
      case TranscodePolicy.Disabled: {
        return false;
      }
      case TranscodePolicy.All: {
        return true;
      }
      case TranscodePolicy.Required: {
        return isRequired;
      }
      case TranscodePolicy.Optimal: {
        return isRequired || isLargerThanTargetRes;
      }
      case TranscodePolicy.Bitrate: {
        return isRequired || isLargerThanTargetBitrate;
      }
      default: {
        throw new Error(`Unsupported transcode policy: ${ffmpegConfig.transcode}`);
      }
    }
  }

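  /**
   * A container-only check: even when no stream needs re-encoding, the file is remuxed if its
   * container is neither MP4 nor one of the accepted containers (QuickTime/MOV is treated as MOV).
   */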
  private isRemuxRequired(ffmpegConfig: SystemConfigFFmpegDto, { formatName, formatLongName }: VideoFormat): boolean {
    if (ffmpegConfig.transcode === TranscodePolicy.Disabled) {
      return false;
    }

    const name = formatLongName === 'QuickTime / MOV' ? VideoContainer.Mov : (formatName as VideoContainer);
    return name !== VideoContainer.Mp4 && !ffmpegConfig.acceptedContainers.includes(name);
  }

  isSRGB({ colorspace, profileDescription, bitsPerSample }: Exif): boolean {
    if (colorspace || profileDescription) {
      return [colorspace, profileDescription].some((s) => s?.toLowerCase().includes('srgb'));
    } else if (bitsPerSample) {
      // assume sRGB for 8-bit images with no color profile or colorspace metadata
      return bitsPerSample === 8;
    } else {
      // assume sRGB for images with no relevant metadata
      return true;
    }
  }

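  /**
   * Parses a bitrate string into bits per second: a 'k' suffix means kilobits and an 'm' suffix
   * means megabits, so '4500k' becomes 4_500_000 and '2m' becomes 2_000_000; non-numeric strings
   * yield 0.
   */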
  private parseBitrateToBps(bitrateString: string) {
    const bitrateValue = Number.parseInt(bitrateString);

    if (Number.isNaN(bitrateValue)) {
      return 0;
    }

    if (bitrateString.toLowerCase().endsWith('k')) {
      return bitrateValue * 1000; // Kilobits per second to bits per second
    } else if (bitrateString.toLowerCase().endsWith('m')) {
      return bitrateValue * 1_000_000; // Megabits per second to bits per second
    } else {
      return bitrateValue;
    }
  }

  private async shouldUseExtractedImage(extractedPathOrBuffer: string | Buffer, targetSize: number) {
    const { width, height } = await this.mediaRepository.getImageDimensions(extractedPathOrBuffer);
    const extractedSize = Math.min(width, height);
    return extractedSize >= targetSize;
  }

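  /**
   * Hardware probes used by onBootstrap: getDevices lists the DRI render nodes in /dev/dri, and
   * hasMaliOpenCL checks for a Mali OpenCL ICD plus the /dev/mali0 device so RKMPP-accelerated
   * transcoding can use GPU tonemapping instead of falling back to the CPU.
   */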
  private async getDevices() {
    try {
      return await this.storageRepository.readdir('/dev/dri');
    } catch {
      this.logger.debug('No devices found in /dev/dri.');
      return [];
    }
  }

  private async hasMaliOpenCL() {
    try {
      const [maliIcdStat, maliDeviceStat] = await Promise.all([
        this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd'),
        this.storageRepository.stat('/dev/mali0'),
      ]);
      return maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
    } catch {
      this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
      return false;
    }
  }
}