mirror of
https://github.com/immich-app/immich
synced 2025-11-14 17:36:12 +00:00
feat: built-in automatic database backups (#13773)
This commit is contained in:
parent
30d42e571c
commit
7d933ec97a
41 changed files with 994 additions and 17 deletions
|
|
@ -15,6 +15,13 @@ import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
|
|||
import { ImageOptions } from 'src/interfaces/media.interface';
|
||||
|
||||
export interface SystemConfig {
|
||||
backup: {
|
||||
database: {
|
||||
enabled: boolean;
|
||||
cronExpression: string;
|
||||
keepLastAmount: number;
|
||||
};
|
||||
};
|
||||
ffmpeg: {
|
||||
crf: number;
|
||||
threads: number;
|
||||
|
|
@ -150,6 +157,13 @@ export interface SystemConfig {
|
|||
}
|
||||
|
||||
export const defaults = Object.freeze<SystemConfig>({
|
||||
backup: {
|
||||
database: {
|
||||
enabled: true,
|
||||
cronExpression: CronExpression.EVERY_DAY_AT_2AM,
|
||||
keepLastAmount: 14,
|
||||
},
|
||||
},
|
||||
ffmpeg: {
|
||||
crf: 23,
|
||||
threads: 0,
|
||||
|
|
|
|||
|
|
@ -97,4 +97,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
|
|||
|
||||
@ApiProperty({ type: JobStatusDto })
|
||||
[QueueName.NOTIFICATION]!: JobStatusDto;
|
||||
|
||||
@ApiProperty({ type: JobStatusDto })
|
||||
[QueueName.BACKUP_DATABASE]!: JobStatusDto;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -46,6 +46,30 @@ const isLibraryScanEnabled = (config: SystemConfigLibraryScanDto) => config.enab
|
|||
const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
|
||||
const isOAuthOverrideEnabled = (config: SystemConfigOAuthDto) => config.mobileOverrideEnabled;
|
||||
const isEmailNotificationEnabled = (config: SystemConfigSmtpDto) => config.enabled;
|
||||
const isDatabaseBackupEnabled = (config: DatabaseBackupConfig) => config.enabled;
|
||||
|
||||
/** Validated configuration for the automatic database-backup job. */
export class DatabaseBackupConfig {
  // Master switch for scheduled database backups.
  @ValidateBoolean()
  enabled!: boolean;

  // Cron schedule for the backup job; only validated when backups are enabled.
  @ValidateIf(isDatabaseBackupEnabled)
  @IsNotEmpty()
  @Validate(CronValidator, { message: 'Invalid cron expression' })
  @IsString()
  cronExpression!: string;

  // How many most-recent successful backups to retain; older ones are deleted.
  // NOTE(review): validated even when `enabled` is false, unlike cronExpression — confirm intended.
  @IsInt()
  @IsPositive()
  @IsNotEmpty()
  keepLastAmount!: number;
}
|
||||
|
||||
/** Container for all backup-related system configuration sections. */
export class SystemConfigBackupsDto {
  // Nested database-backup settings; @Type enables class-transformer to
  // instantiate the nested class so its validators run.
  @Type(() => DatabaseBackupConfig)
  @ValidateNested()
  @IsObject()
  database!: DatabaseBackupConfig;
}
|
||||
|
||||
export class SystemConfigFFmpegDto {
|
||||
@IsInt()
|
||||
|
|
@ -531,6 +555,11 @@ class SystemConfigUserDto {
|
|||
}
|
||||
|
||||
export class SystemConfigDto implements SystemConfig {
|
||||
@Type(() => SystemConfigBackupsDto)
|
||||
@ValidateNested()
|
||||
@IsObject()
|
||||
backup!: SystemConfigBackupsDto;
|
||||
|
||||
@Type(() => SystemConfigFFmpegDto)
|
||||
@ValidateNested()
|
||||
@IsObject()
|
||||
|
|
|
|||
|
|
@ -181,6 +181,7 @@ export enum StorageFolder {
|
|||
UPLOAD = 'upload',
|
||||
PROFILE = 'profile',
|
||||
THUMBNAILS = 'thumbs',
|
||||
BACKUPS = 'backups',
|
||||
}
|
||||
|
||||
export enum SystemMetadataKey {
|
||||
|
|
|
|||
|
|
@ -37,6 +37,7 @@ export enum DatabaseLock {
|
|||
CLIPDimSize = 512,
|
||||
Library = 1337,
|
||||
GetSystemConfig = 69,
|
||||
BackupDatabase = 42,
|
||||
}
|
||||
|
||||
export const EXTENSION_NAMES: Record<DatabaseExtension, string> = {
|
||||
|
|
|
|||
|
|
@ -15,11 +15,15 @@ export enum QueueName {
|
|||
SIDECAR = 'sidecar',
|
||||
LIBRARY = 'library',
|
||||
NOTIFICATION = 'notifications',
|
||||
BACKUP_DATABASE = 'backupDatabase',
|
||||
}
|
||||
|
||||
export type ConcurrentQueueName = Exclude<
|
||||
QueueName,
|
||||
QueueName.STORAGE_TEMPLATE_MIGRATION | QueueName.FACIAL_RECOGNITION | QueueName.DUPLICATE_DETECTION
|
||||
| QueueName.STORAGE_TEMPLATE_MIGRATION
|
||||
| QueueName.FACIAL_RECOGNITION
|
||||
| QueueName.DUPLICATE_DETECTION
|
||||
| QueueName.BACKUP_DATABASE
|
||||
>;
|
||||
|
||||
export enum JobCommand {
|
||||
|
|
@ -31,6 +35,9 @@ export enum JobCommand {
|
|||
}
|
||||
|
||||
export enum JobName {
|
||||
//backups
|
||||
BACKUP_DATABASE = 'database-backup',
|
||||
|
||||
// conversion
|
||||
QUEUE_VIDEO_CONVERSION = 'queue-video-conversion',
|
||||
VIDEO_CONVERSION = 'video-conversion',
|
||||
|
|
@ -209,6 +216,9 @@ export enum QueueCleanType {
|
|||
}
|
||||
|
||||
export type JobItem =
|
||||
// Backups
|
||||
| { name: JobName.BACKUP_DATABASE; data?: IBaseJob }
|
||||
|
||||
// Transcoding
|
||||
| { name: JobName.QUEUE_VIDEO_CONVERSION; data: IBaseJob }
|
||||
| { name: JobName.VIDEO_CONVERSION; data: IEntityJob }
|
||||
|
|
|
|||
25
server/src/interfaces/process.interface.ts
Normal file
25
server/src/interfaces/process.interface.ts
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import { ChildProcessWithoutNullStreams, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||
import { Readable } from 'node:stream';
|
||||
|
||||
/**
 * A readable stream plus optional HTTP-response metadata.
 * NOTE(review): appears to duplicate the same-named interface in
 * storage.interface.ts — consider importing instead of redefining.
 */
export interface ImmichReadStream {
  stream: Readable;
  // MIME type of the streamed content, if known.
  type?: string;
  // Content length in bytes, if known.
  length?: number;
}

/** A zip archive being built incrementally on top of a read stream. */
export interface ImmichZipStream extends ImmichReadStream {
  // Queue a file on disk for inclusion in the archive under `filename`.
  addFile: (inputPath: string, filename: string) => void;
  // Finish the archive; resolves when all queued entries are flushed.
  finalize: () => Promise<void>;
}

/** Disk capacity numbers, all in bytes. */
export interface DiskUsage {
  available: number;
  free: number;
  total: number;
}

// Injection token for the process repository (NestJS string-token pattern).
export const IProcessRepository = 'IProcessRepository';

/** Abstraction over child-process creation so services can be tested with mocks. */
export interface IProcessRepository {
  spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
import { WatchOptions } from 'chokidar';
|
||||
import { Stats } from 'node:fs';
|
||||
import { FileReadOptions } from 'node:fs/promises';
|
||||
import { Readable } from 'node:stream';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
||||
|
||||
export interface ImmichReadStream {
|
||||
|
|
@ -36,6 +36,7 @@ export interface IStorageRepository {
|
|||
createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream>;
|
||||
readFile(filepath: string, options?: FileReadOptions<Buffer>): Promise<Buffer>;
|
||||
createFile(filepath: string, buffer: Buffer): Promise<void>;
|
||||
createWriteStream(filepath: string): Writable;
|
||||
createOrOverwriteFile(filepath: string, buffer: Buffer): Promise<void>;
|
||||
overwriteFile(filepath: string, buffer: Buffer): Promise<void>;
|
||||
realpath(filepath: string): Promise<string>;
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
|
|||
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
|
||||
import { IPartnerRepository } from 'src/interfaces/partner.interface';
|
||||
import { IPersonRepository } from 'src/interfaces/person.interface';
|
||||
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||
import { ISearchRepository } from 'src/interfaces/search.interface';
|
||||
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
|
||||
import { ISessionRepository } from 'src/interfaces/session.interface';
|
||||
|
|
@ -59,6 +60,7 @@ import { NotificationRepository } from 'src/repositories/notification.repository
|
|||
import { OAuthRepository } from 'src/repositories/oauth.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { SearchRepository } from 'src/repositories/search.repository';
|
||||
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
|
||||
import { SessionRepository } from 'src/repositories/session.repository';
|
||||
|
|
@ -98,6 +100,7 @@ export const repositories = [
|
|||
{ provide: IOAuthRepository, useClass: OAuthRepository },
|
||||
{ provide: IPartnerRepository, useClass: PartnerRepository },
|
||||
{ provide: IPersonRepository, useClass: PersonRepository },
|
||||
{ provide: IProcessRepository, useClass: ProcessRepository },
|
||||
{ provide: ISearchRepository, useClass: SearchRepository },
|
||||
{ provide: IServerInfoRepository, useClass: ServerInfoRepository },
|
||||
{ provide: ISessionRepository, useClass: SessionRepository },
|
||||
|
|
|
|||
|
|
@ -30,6 +30,9 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
|
|||
[JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
|
||||
[JobName.USER_SYNC_USAGE]: QueueName.BACKGROUND_TASK,
|
||||
|
||||
// backups
|
||||
[JobName.BACKUP_DATABASE]: QueueName.BACKUP_DATABASE,
|
||||
|
||||
// conversion
|
||||
[JobName.QUEUE_VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
|
||||
[JobName.VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
|
||||
|
|
|
|||
16
server/src/repositories/process.repository.ts
Normal file
16
server/src/repositories/process.repository.ts
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import { Inject, Injectable } from '@nestjs/common';
|
||||
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||
import { ILoggerRepository } from 'src/interfaces/logger.interface';
|
||||
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
|
||||
@Injectable()
|
||||
export class ProcessRepository implements IProcessRepository {
|
||||
constructor(@Inject(ILoggerRepository) private logger: ILoggerRepository) {
|
||||
this.logger.setContext(StorageRepository.name);
|
||||
}
|
||||
|
||||
spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
|
||||
return spawn(command, args, options);
|
||||
}
|
||||
}
|
||||
|
|
@ -2,9 +2,10 @@ import { Inject, Injectable } from '@nestjs/common';
|
|||
import archiver from 'archiver';
|
||||
import chokidar, { WatchOptions } from 'chokidar';
|
||||
import { escapePath, glob, globStream } from 'fast-glob';
|
||||
import { constants, createReadStream, existsSync, mkdirSync } from 'node:fs';
|
||||
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs';
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import { Writable } from 'node:stream';
|
||||
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
||||
import { ILoggerRepository } from 'src/interfaces/logger.interface';
|
||||
import {
|
||||
|
|
@ -42,6 +43,10 @@ export class StorageRepository implements IStorageRepository {
|
|||
return fs.writeFile(filepath, buffer, { flag: 'wx' });
|
||||
}
|
||||
|
||||
  // Open a writable stream to `filepath`, creating the file or truncating any
  // existing content (flag 'w'); used e.g. to stream database backups to disk.
  createWriteStream(filepath: string): Writable {
    return createWriteStream(filepath, { flags: 'w' });
  }
|
||||
|
||||
createOrOverwriteFile(filepath: string, buffer: Buffer) {
|
||||
return fs.writeFile(filepath, buffer, { flag: 'w' });
|
||||
}
|
||||
|
|
|
|||
217
server/src/services/backup.service.spec.ts
Normal file
217
server/src/services/backup.service.spec.ts
Normal file
|
|
@ -0,0 +1,217 @@
|
|||
import { PassThrough } from 'node:stream';
|
||||
import { defaults, SystemConfig } from 'src/config';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { ImmichWorker, StorageFolder } from 'src/enum';
|
||||
import { IDatabaseRepository } from 'src/interfaces/database.interface';
|
||||
import { IJobRepository, JobStatus } from 'src/interfaces/job.interface';
|
||||
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||
import { IStorageRepository } from 'src/interfaces/storage.interface';
|
||||
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { systemConfigStub } from 'test/fixtures/system-config.stub';
|
||||
import { mockSpawn, newTestService } from 'test/utils';
|
||||
import { describe, Mocked } from 'vitest';
|
||||
|
||||
// Unit tests for BackupService: cron-job lifecycle (bootstrap/config events),
// backup-file retention cleanup, and the pg_dumpall | gzip backup pipeline.
describe(BackupService.name, () => {
  let sut: BackupService;

  let databaseMock: Mocked<IDatabaseRepository>;
  let jobMock: Mocked<IJobRepository>;
  let processMock: Mocked<IProcessRepository>;
  let storageMock: Mocked<IStorageRepository>;
  let systemMock: Mocked<ISystemMetadataRepository>;

  beforeEach(() => {
    ({ sut, databaseMock, jobMock, processMock, storageMock, systemMock } = newTestService(BackupService));
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  // The cron job is only registered on the API worker AND when this instance
  // wins the cross-instance BackupDatabase lock.
  describe('onBootstrapEvent', () => {
    it('should init cron job and handle config changes', async () => {
      databaseMock.tryLock.mockResolvedValue(true);
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);

      await sut.onBootstrap(ImmichWorker.API);

      expect(jobMock.addCronJob).toHaveBeenCalled();
      expect(systemMock.get).toHaveBeenCalled();
    });

    it('should not initialize backup database cron job when lock is taken', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      databaseMock.tryLock.mockResolvedValue(false);

      await sut.onBootstrap(ImmichWorker.API);

      expect(jobMock.addCronJob).not.toHaveBeenCalled();
    });

    it('should not initialise backup database job when running on microservices', async () => {
      await sut.onBootstrap(ImmichWorker.MICROSERVICES);

      expect(jobMock.addCronJob).not.toHaveBeenCalled();
    });
  });

  // Config updates reschedule the cron job, but only when an oldConfig is
  // present (i.e. a real update) and this instance holds the backup lock.
  describe('onConfigUpdateEvent', () => {
    beforeEach(async () => {
      systemMock.get.mockResolvedValue(defaults);
      databaseMock.tryLock.mockResolvedValue(true);
      await sut.onBootstrap(ImmichWorker.API);
    });

    it('should update cron job if backup is enabled', () => {
      sut.onConfigUpdate({
        oldConfig: defaults,
        newConfig: {
          backup: {
            database: {
              enabled: true,
              cronExpression: '0 1 * * *',
            },
          },
        } as SystemConfig,
      });

      expect(jobMock.updateCronJob).toHaveBeenCalledWith('backupDatabase', '0 1 * * *', true);
      expect(jobMock.updateCronJob).toHaveBeenCalled();
    });

    it('should do nothing if oldConfig is not provided', () => {
      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
      expect(jobMock.updateCronJob).not.toHaveBeenCalled();
    });

    it('should do nothing if instance does not have the backup database lock', async () => {
      databaseMock.tryLock.mockResolvedValue(false);
      await sut.onBootstrap(ImmichWorker.API);
      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
      expect(jobMock.updateCronJob).not.toHaveBeenCalled();
    });
  });

  describe('onConfigValidateEvent', () => {
    it('should allow a valid cron expression', () => {
      expect(() =>
        sut.onConfigValidate({
          newConfig: { backup: { database: { cronExpression: '0 0 * * *' } } } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).not.toThrow(expect.stringContaining('Invalid cron expression'));
    });

    it('should fail for an invalid cron expression', () => {
      expect(() =>
        sut.onConfigValidate({
          newConfig: { backup: { database: { cronExpression: 'foo' } } } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).toThrow(/Invalid cron expression.*/);
    });
  });

  // Cleanup deletes leftover *.tmp files (failed backups) plus any completed
  // backups beyond keepLastAmount, oldest first.
  describe('cleanupDatabaseBackups', () => {
    it('should do nothing if not reached keepLastAmount', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).not.toHaveBeenCalled();
    });

    it('should remove failed backup files', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue([
        'immich-db-backup-123.sql.gz.tmp',
        'immich-db-backup-234.sql.gz',
        'immich-db-backup-345.sql.gz.tmp',
      ]);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-123.sql.gz.tmp`,
      );
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-345.sql.gz.tmp`,
      );
    });

    it('should remove old backup files over keepLastAmount', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(1);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz`,
      );
    });

    it('should remove old backup files over keepLastAmount and failed backups', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue([
        'immich-db-backup-1.sql.gz.tmp',
        'immich-db-backup-2.sql.gz',
        'immich-db-backup-3.sql.gz',
      ]);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz.tmp`,
      );
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-2.sql.gz`,
      );
    });
  });

  // Pipeline tests: the first spawn is pg_dumpall, the second is gzip; the
  // gzip output is piped into the write stream returned by the storage mock.
  describe('handleBackupDatabase', () => {
    beforeEach(() => {
      storageMock.readdir.mockResolvedValue([]);
      processMock.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      storageMock.rename.mockResolvedValue();
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.createWriteStream.mockReturnValue(new PassThrough());
    });
    it('should run a database backup successfully', async () => {
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.SUCCESS);
      expect(storageMock.createWriteStream).toHaveBeenCalled();
    });
    it('should rename file on success', async () => {
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.SUCCESS);
      expect(storageMock.rename).toHaveBeenCalled();
    });
    it('should fail if pg_dumpall fails', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
    it('should not rename file if pgdump fails and gzip succeeds', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
      expect(storageMock.rename).not.toHaveBeenCalled();
    });
    it('should fail if gzip fails', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
    it('should fail if write stream fails', async () => {
      storageMock.createWriteStream.mockImplementation(() => {
        throw new Error('error');
      });
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
    it('should fail if rename fails', async () => {
      storageMock.rename.mockRejectedValue(new Error('error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
  });
});
|
||||
157
server/src/services/backup.service.ts
Normal file
157
server/src/services/backup.service.ts
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
import { Injectable } from '@nestjs/common';
|
||||
import { default as path } from 'node:path';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { OnEvent } from 'src/decorators';
|
||||
import { ImmichWorker, StorageFolder } from 'src/enum';
|
||||
import { DatabaseLock } from 'src/interfaces/database.interface';
|
||||
import { ArgOf } from 'src/interfaces/event.interface';
|
||||
import { JobName, JobStatus } from 'src/interfaces/job.interface';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { handlePromiseError } from 'src/utils/misc';
|
||||
import { validateCronExpression } from 'src/validation';
|
||||
|
||||
@Injectable()
|
||||
export class BackupService extends BaseService {
|
||||
private backupLock = false;
|
||||
|
||||
@OnEvent({ name: 'app.bootstrap' })
|
||||
async onBootstrap(workerType: ImmichWorker) {
|
||||
if (workerType !== ImmichWorker.API) {
|
||||
return;
|
||||
}
|
||||
const {
|
||||
backup: { database },
|
||||
} = await this.getConfig({ withCache: true });
|
||||
|
||||
this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);
|
||||
|
||||
if (this.backupLock) {
|
||||
this.jobRepository.addCronJob(
|
||||
'backupDatabase',
|
||||
database.cronExpression,
|
||||
() => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
|
||||
database.enabled,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@OnEvent({ name: 'config.update', server: true })
|
||||
onConfigUpdate({ newConfig: { backup }, oldConfig }: ArgOf<'config.update'>) {
|
||||
if (!oldConfig || !this.backupLock) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.jobRepository.updateCronJob('backupDatabase', backup.database.cronExpression, backup.database.enabled);
|
||||
}
|
||||
|
||||
@OnEvent({ name: 'config.validate' })
|
||||
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
|
||||
const { database } = newConfig.backup;
|
||||
if (!validateCronExpression(database.cronExpression)) {
|
||||
throw new Error(`Invalid cron expression ${database.cronExpression}`);
|
||||
}
|
||||
}
|
||||
|
||||
async cleanupDatabaseBackups() {
|
||||
this.logger.debug(`Database Backup Cleanup Started`);
|
||||
const {
|
||||
backup: { database: config },
|
||||
} = await this.getConfig({ withCache: false });
|
||||
|
||||
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.BACKUPS);
|
||||
const files = await this.storageRepository.readdir(backupsFolder);
|
||||
const failedBackups = files.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz\.tmp$/));
|
||||
const backups = files
|
||||
.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz$/))
|
||||
.sort()
|
||||
.reverse();
|
||||
|
||||
const toDelete = backups.slice(config.keepLastAmount);
|
||||
toDelete.push(...failedBackups);
|
||||
|
||||
for (const file of toDelete) {
|
||||
await this.storageRepository.unlink(path.join(backupsFolder, file));
|
||||
}
|
||||
this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
|
||||
}
|
||||
|
||||
async handleBackupDatabase(): Promise<JobStatus> {
|
||||
this.logger.debug(`Database Backup Started`);
|
||||
|
||||
const {
|
||||
database: { config },
|
||||
} = this.configRepository.getEnv();
|
||||
|
||||
const isUrlConnection = config.connectionType === 'url';
|
||||
const databaseParams = isUrlConnection ? [config.url] : ['-U', config.username, '-h', config.host];
|
||||
const backupFilePath = path.join(
|
||||
StorageCore.getBaseFolder(StorageFolder.BACKUPS),
|
||||
`immich-db-backup-${Date.now()}.sql.gz.tmp`,
|
||||
);
|
||||
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const pgdump = this.processRepository.spawn(`pg_dumpall`, [...databaseParams, '--clean', '--if-exists'], {
|
||||
env: { PATH: process.env.PATH, PGPASSWORD: isUrlConnection ? undefined : config.password },
|
||||
});
|
||||
|
||||
const gzip = this.processRepository.spawn(`gzip`, []);
|
||||
pgdump.stdout.pipe(gzip.stdin);
|
||||
|
||||
const fileStream = this.storageRepository.createWriteStream(backupFilePath);
|
||||
|
||||
gzip.stdout.pipe(fileStream);
|
||||
|
||||
pgdump.on('error', (err) => {
|
||||
this.logger.error('Backup failed with error', err);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
gzip.on('error', (err) => {
|
||||
this.logger.error('Gzip failed with error', err);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
let pgdumpLogs = '';
|
||||
let gzipLogs = '';
|
||||
|
||||
pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
|
||||
gzip.stderr.on('data', (data) => (gzipLogs += data));
|
||||
|
||||
pgdump.on('exit', (code) => {
|
||||
if (code !== 0) {
|
||||
this.logger.error(`Backup failed with code ${code}`);
|
||||
reject(`Backup failed with code ${code}`);
|
||||
this.logger.error(pgdumpLogs);
|
||||
return;
|
||||
}
|
||||
if (pgdumpLogs) {
|
||||
this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
|
||||
}
|
||||
});
|
||||
|
||||
gzip.on('exit', (code) => {
|
||||
if (code !== 0) {
|
||||
this.logger.error(`Gzip failed with code ${code}`);
|
||||
reject(`Gzip failed with code ${code}`);
|
||||
this.logger.error(gzipLogs);
|
||||
return;
|
||||
}
|
||||
if (pgdump.exitCode !== 0) {
|
||||
this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
|
||||
} catch (error) {
|
||||
this.logger.error('Database Backup Failure', error);
|
||||
return JobStatus.FAILED;
|
||||
}
|
||||
|
||||
this.logger.debug(`Database Backup Success`);
|
||||
await this.cleanupDatabaseBackups();
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
}
|
||||
|
|
@ -28,6 +28,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
|
|||
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
|
||||
import { IPartnerRepository } from 'src/interfaces/partner.interface';
|
||||
import { IPersonRepository } from 'src/interfaces/person.interface';
|
||||
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||
import { ISearchRepository } from 'src/interfaces/search.interface';
|
||||
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
|
||||
import { ISessionRepository } from 'src/interfaces/session.interface';
|
||||
|
|
@ -72,6 +73,7 @@ export class BaseService {
|
|||
@Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
|
||||
@Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
|
||||
@Inject(IPersonRepository) protected personRepository: IPersonRepository,
|
||||
@Inject(IProcessRepository) protected processRepository: IProcessRepository,
|
||||
@Inject(ISearchRepository) protected searchRepository: ISearchRepository,
|
||||
@Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
|
||||
@Inject(ISessionRepository) protected sessionRepository: ISessionRepository,
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import { AssetMediaService } from 'src/services/asset-media.service';
|
|||
import { AssetService } from 'src/services/asset.service';
|
||||
import { AuditService } from 'src/services/audit.service';
|
||||
import { AuthService } from 'src/services/auth.service';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { CliService } from 'src/services/cli.service';
|
||||
import { DatabaseService } from 'src/services/database.service';
|
||||
import { DownloadService } from 'src/services/download.service';
|
||||
|
|
@ -48,6 +49,7 @@ export const services = [
|
|||
AssetService,
|
||||
AuditService,
|
||||
AuthService,
|
||||
BackupService,
|
||||
CliService,
|
||||
DatabaseService,
|
||||
DownloadService,
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@ describe(JobService.name, () => {
|
|||
sut.onBootstrap(ImmichWorker.MICROSERVICES);
|
||||
sut.onConfigUpdate({ oldConfig: defaults, newConfig: defaults });
|
||||
|
||||
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(14);
|
||||
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(15);
|
||||
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
|
||||
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
|
||||
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
|
||||
|
|
@ -114,6 +114,7 @@ describe(JobService.name, () => {
|
|||
[QueueName.SIDECAR]: expectedJobStatus,
|
||||
[QueueName.LIBRARY]: expectedJobStatus,
|
||||
[QueueName.NOTIFICATION]: expectedJobStatus,
|
||||
[QueueName.BACKUP_DATABASE]: expectedJobStatus,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -220,6 +220,7 @@ export class JobService extends BaseService {
|
|||
QueueName.FACIAL_RECOGNITION,
|
||||
QueueName.STORAGE_TEMPLATE_MIGRATION,
|
||||
QueueName.DUPLICATE_DETECTION,
|
||||
QueueName.BACKUP_DATABASE,
|
||||
].includes(name);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import { ArgOf } from 'src/interfaces/event.interface';
|
|||
import { IDeleteFilesJob, JobName } from 'src/interfaces/job.interface';
|
||||
import { AssetService } from 'src/services/asset.service';
|
||||
import { AuditService } from 'src/services/audit.service';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { DuplicateService } from 'src/services/duplicate.service';
|
||||
import { JobService } from 'src/services/job.service';
|
||||
import { LibraryService } from 'src/services/library.service';
|
||||
|
|
@ -26,6 +27,7 @@ export class MicroservicesService {
|
|||
constructor(
|
||||
private auditService: AuditService,
|
||||
private assetService: AssetService,
|
||||
private backupService: BackupService,
|
||||
private jobService: JobService,
|
||||
private libraryService: LibraryService,
|
||||
private mediaService: MediaService,
|
||||
|
|
@ -52,6 +54,7 @@ export class MicroservicesService {
|
|||
await this.jobService.init({
|
||||
[JobName.ASSET_DELETION]: (data) => this.assetService.handleAssetDeletion(data),
|
||||
[JobName.ASSET_DELETION_CHECK]: () => this.assetService.handleAssetDeletionCheck(),
|
||||
[JobName.BACKUP_DATABASE]: () => this.backupService.handleBackupDatabase(),
|
||||
[JobName.DELETE_FILES]: (data: IDeleteFilesJob) => this.storageService.handleDeleteFiles(data),
|
||||
[JobName.CLEAN_OLD_AUDIT_LOGS]: () => this.auditService.handleCleanup(),
|
||||
[JobName.CLEAN_OLD_SESSION_TOKENS]: () => this.sessionService.handleCleanup(),
|
||||
|
|
|
|||
|
|
@ -32,6 +32,7 @@ describe(StorageService.name, () => {
|
|||
|
||||
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
||||
mountChecks: {
|
||||
backups: true,
|
||||
'encoded-video': true,
|
||||
library: true,
|
||||
profile: true,
|
||||
|
|
@ -44,16 +45,19 @@ describe(StorageService.name, () => {
|
|||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/profile');
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs');
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/upload');
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/encoded-video/.immich', expect.any(Buffer));
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/profile/.immich', expect.any(Buffer));
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/thumbs/.immich', expect.any(Buffer));
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/upload/.immich', expect.any(Buffer));
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
|
||||
});
|
||||
|
||||
it('should enable mount folder checking for a new folder type', async () => {
|
||||
systemMock.get.mockResolvedValue({
|
||||
mountChecks: {
|
||||
backups: false,
|
||||
'encoded-video': true,
|
||||
library: false,
|
||||
profile: true,
|
||||
|
|
@ -66,6 +70,7 @@ describe(StorageService.name, () => {
|
|||
|
||||
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
||||
mountChecks: {
|
||||
backups: true,
|
||||
'encoded-video': true,
|
||||
library: true,
|
||||
profile: true,
|
||||
|
|
@ -73,10 +78,12 @@ describe(StorageService.name, () => {
|
|||
upload: true,
|
||||
},
|
||||
});
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledTimes(1);
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledTimes(2);
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/library');
|
||||
expect(storageMock.createFile).toHaveBeenCalledTimes(1);
|
||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
|
||||
expect(storageMock.createFile).toHaveBeenCalledTimes(2);
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
|
||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
|
||||
});
|
||||
|
||||
it('should throw an error if .immich is missing', async () => {
|
||||
|
|
|
|||
|
|
@ -44,6 +44,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
|
|||
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
|
||||
[QueueName.NOTIFICATION]: { concurrency: 5 },
|
||||
},
|
||||
backup: {
|
||||
database: {
|
||||
enabled: true,
|
||||
cronExpression: '0 02 * * *',
|
||||
keepLastAmount: 14,
|
||||
},
|
||||
},
|
||||
ffmpeg: {
|
||||
crf: 30,
|
||||
threads: 0,
|
||||
|
|
|
|||
9
server/test/fixtures/system-config.stub.ts
vendored
9
server/test/fixtures/system-config.stub.ts
vendored
|
|
@ -74,6 +74,15 @@ export const systemConfigStub = {
|
|||
},
|
||||
},
|
||||
},
|
||||
backupEnabled: {
|
||||
backup: {
|
||||
database: {
|
||||
enabled: true,
|
||||
cronExpression: '0 0 * * *',
|
||||
keepLastAmount: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
machineLearningDisabled: {
|
||||
machineLearning: {
|
||||
enabled: false,
|
||||
|
|
|
|||
8
server/test/repositories/process.repository.mock.ts
Normal file
8
server/test/repositories/process.repository.mock.ts
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newProcessRepositoryMock = (): Mocked<IProcessRepository> => {
|
||||
return {
|
||||
spawn: vitest.fn(),
|
||||
};
|
||||
};
|
||||
|
|
@ -49,6 +49,7 @@ export const newStorageRepositoryMock = (reset = true): Mocked<IStorageRepositor
|
|||
createReadStream: vitest.fn(),
|
||||
readFile: vitest.fn(),
|
||||
createFile: vitest.fn(),
|
||||
createWriteStream: vitest.fn(),
|
||||
createOrOverwriteFile: vitest.fn(),
|
||||
overwriteFile: vitest.fn(),
|
||||
unlink: vitest.fn(),
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||
import { Writable } from 'node:stream';
|
||||
import { PNG } from 'pngjs';
|
||||
import { IMetadataRepository } from 'src/interfaces/metadata.interface';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
|
|
@ -25,6 +27,7 @@ import { newNotificationRepositoryMock } from 'test/repositories/notification.re
|
|||
import { newOAuthRepositoryMock } from 'test/repositories/oauth.repository.mock';
|
||||
import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';
|
||||
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
|
||||
import { newProcessRepositoryMock } from 'test/repositories/process.repository.mock';
|
||||
import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
|
||||
import { newServerInfoRepositoryMock } from 'test/repositories/server-info.repository.mock';
|
||||
import { newSessionRepositoryMock } from 'test/repositories/session.repository.mock';
|
||||
|
|
@ -38,7 +41,8 @@ import { newTrashRepositoryMock } from 'test/repositories/trash.repository.mock'
|
|||
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';
|
||||
import { newVersionHistoryRepositoryMock } from 'test/repositories/version-history.repository.mock';
|
||||
import { newViewRepositoryMock } from 'test/repositories/view.repository.mock';
|
||||
import { Mocked } from 'vitest';
|
||||
import { Readable } from 'typeorm/platform/PlatformTools';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
type RepositoryOverrides = {
|
||||
metadataRepository: IMetadataRepository;
|
||||
|
|
@ -78,6 +82,7 @@ export const newTestService = <T extends BaseService>(
|
|||
const oauthMock = newOAuthRepositoryMock();
|
||||
const partnerMock = newPartnerRepositoryMock();
|
||||
const personMock = newPersonRepositoryMock();
|
||||
const processMock = newProcessRepositoryMock();
|
||||
const searchMock = newSearchRepositoryMock();
|
||||
const serverInfoMock = newServerInfoRepositoryMock();
|
||||
const sessionMock = newSessionRepositoryMock();
|
||||
|
|
@ -117,6 +122,7 @@ export const newTestService = <T extends BaseService>(
|
|||
oauthMock,
|
||||
partnerMock,
|
||||
personMock,
|
||||
processMock,
|
||||
searchMock,
|
||||
serverInfoMock,
|
||||
sessionMock,
|
||||
|
|
@ -158,6 +164,7 @@ export const newTestService = <T extends BaseService>(
|
|||
oauthMock,
|
||||
partnerMock,
|
||||
personMock,
|
||||
processMock,
|
||||
searchMock,
|
||||
serverInfoMock,
|
||||
sessionMock,
|
||||
|
|
@ -203,3 +210,37 @@ export const newRandomImage = () => {
|
|||
|
||||
return value;
|
||||
};
|
||||
|
||||
export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: string, error?: unknown) => {
|
||||
return {
|
||||
stdout: new Readable({
|
||||
read() {
|
||||
this.push(stdout); // write mock data to stdout
|
||||
this.push(null); // end stream
|
||||
},
|
||||
}),
|
||||
stderr: new Readable({
|
||||
read() {
|
||||
this.push(stderr); // write mock data to stderr
|
||||
this.push(null); // end stream
|
||||
},
|
||||
}),
|
||||
stdin: new Writable({
|
||||
write(chunk, encoding, callback) {
|
||||
callback();
|
||||
},
|
||||
}),
|
||||
exitCode,
|
||||
on: vitest.fn((event, callback: any) => {
|
||||
if (event === 'close') {
|
||||
callback(0);
|
||||
}
|
||||
if (event === 'error' && error) {
|
||||
callback(error);
|
||||
}
|
||||
if (event === 'exit') {
|
||||
callback(exitCode);
|
||||
}
|
||||
}),
|
||||
} as unknown as ChildProcessWithoutNullStreams;
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue