feat: built-in automatic database backups (#13773)

This commit is contained in:
Zack Pollard
2024-10-31 11:29:42 +00:00
committed by GitHub
parent 30d42e571c
commit 7d933ec97a
41 changed files with 994 additions and 17 deletions

View File

@@ -0,0 +1,217 @@
import { PassThrough } from 'node:stream';
import { defaults, SystemConfig } from 'src/config';
import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, StorageFolder } from 'src/enum';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IJobRepository, JobStatus } from 'src/interfaces/job.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockSpawn, newTestService } from 'test/utils';
import { describe, Mocked } from 'vitest';
describe(BackupService.name, () => {
  let sut: BackupService;
  let databaseMock: Mocked<IDatabaseRepository>;
  let jobMock: Mocked<IJobRepository>;
  let processMock: Mocked<IProcessRepository>;
  let storageMock: Mocked<IStorageRepository>;
  let systemMock: Mocked<ISystemMetadataRepository>;

  beforeEach(() => {
    ({ sut, databaseMock, jobMock, processMock, storageMock, systemMock } = newTestService(BackupService));
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('onBootstrapEvent', () => {
    it('should init cron job and handle config changes', async () => {
      // Lock acquired + backups enabled => the cron job must be registered.
      databaseMock.tryLock.mockResolvedValue(true);
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);

      await sut.onBootstrap(ImmichWorker.API);

      expect(jobMock.addCronJob).toHaveBeenCalled();
      expect(systemMock.get).toHaveBeenCalled();
    });

    it('should not initialize backup database cron job when lock is taken', async () => {
      // Another instance already holds the backup lock; this one must stay idle.
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      databaseMock.tryLock.mockResolvedValue(false);

      await sut.onBootstrap(ImmichWorker.API);

      expect(jobMock.addCronJob).not.toHaveBeenCalled();
    });

    it('should not initialise backup database job when running on microservices', async () => {
      // Only the API worker schedules backups.
      await sut.onBootstrap(ImmichWorker.MICROSERVICES);

      expect(jobMock.addCronJob).not.toHaveBeenCalled();
    });
  });

  describe('onConfigUpdateEvent', () => {
    beforeEach(async () => {
      // Bootstrap with the lock held so config updates are acted upon.
      systemMock.get.mockResolvedValue(defaults);
      databaseMock.tryLock.mockResolvedValue(true);
      await sut.onBootstrap(ImmichWorker.API);
    });

    it('should update cron job if backup is enabled', () => {
      sut.onConfigUpdate({
        oldConfig: defaults,
        newConfig: {
          backup: {
            database: {
              enabled: true,
              cronExpression: '0 1 * * *',
            },
          },
        } as SystemConfig,
      });

      expect(jobMock.updateCronJob).toHaveBeenCalledWith('backupDatabase', '0 1 * * *', true);
    });

    it('should do nothing if oldConfig is not provided', () => {
      // A missing oldConfig indicates the initial config load, not an update.
      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
      expect(jobMock.updateCronJob).not.toHaveBeenCalled();
    });

    it('should do nothing if instance does not have the backup database lock', async () => {
      // Re-bootstrap without the lock; updates must then be ignored.
      databaseMock.tryLock.mockResolvedValue(false);
      await sut.onBootstrap(ImmichWorker.API);
      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
      expect(jobMock.updateCronJob).not.toHaveBeenCalled();
    });
  });

  describe('onConfigValidateEvent', () => {
    it('should allow a valid cron expression', () => {
      expect(() =>
        sut.onConfigValidate({
          newConfig: { backup: { database: { cronExpression: '0 0 * * *' } } } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).not.toThrow(expect.stringContaining('Invalid cron expression'));
    });

    it('should fail for an invalid cron expression', () => {
      expect(() =>
        sut.onConfigValidate({
          newConfig: { backup: { database: { cronExpression: 'foo' } } } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).toThrow(/Invalid cron expression.*/);
    });
  });

  describe('cleanupDatabaseBackups', () => {
    it('should do nothing if not reached keepLastAmount', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).not.toHaveBeenCalled();
    });

    it('should remove failed backup files', async () => {
      // `.tmp` suffixed files are leftovers from interrupted backups.
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue([
        'immich-db-backup-123.sql.gz.tmp',
        'immich-db-backup-234.sql.gz',
        'immich-db-backup-345.sql.gz.tmp',
      ]);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-123.sql.gz.tmp`,
      );
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-345.sql.gz.tmp`,
      );
    });

    it('should remove old backup files over keepLastAmount', async () => {
      // backupEnabled stub presumably has keepLastAmount = 1 here — the oldest file is deleted.
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(1);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz`,
      );
    });

    it('should remove old backup files over keepLastAmount and failed backups', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue([
        'immich-db-backup-1.sql.gz.tmp',
        'immich-db-backup-2.sql.gz',
        'immich-db-backup-3.sql.gz',
      ]);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz.tmp`,
      );
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-2.sql.gz`,
      );
    });
  });

  describe('handleBackupDatabase', () => {
    beforeEach(() => {
      // Happy-path defaults: empty backups dir, successful pg_dumpall + gzip,
      // working rename and write stream.
      storageMock.readdir.mockResolvedValue([]);
      processMock.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      storageMock.rename.mockResolvedValue();
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.createWriteStream.mockReturnValue(new PassThrough());
    });

    it('should run a database backup successfully', async () => {
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.SUCCESS);
      expect(storageMock.createWriteStream).toHaveBeenCalled();
    });

    it('should rename file on success', async () => {
      // The `.tmp` suffix is only dropped once the backup completes.
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.SUCCESS);
      expect(storageMock.rename).toHaveBeenCalled();
    });

    it('should fail if pg_dumpall fails', async () => {
      // First spawn call is pg_dumpall.
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });

    it('should not rename file if pgdump fails and gzip succeeds', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
      expect(storageMock.rename).not.toHaveBeenCalled();
    });

    it('should fail if gzip fails', async () => {
      // First spawn (pg_dumpall) succeeds, second spawn (gzip) fails.
      processMock.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });

    it('should fail if write stream fails', async () => {
      storageMock.createWriteStream.mockImplementation(() => {
        throw new Error('error');
      });
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });

    it('should fail if rename fails', async () => {
      storageMock.rename.mockRejectedValue(new Error('error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
  });
});

View File

@@ -0,0 +1,157 @@
import { Injectable } from '@nestjs/common';
import { default as path } from 'node:path';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent } from 'src/decorators';
import { ImmichWorker, StorageFolder } from 'src/enum';
import { DatabaseLock } from 'src/interfaces/database.interface';
import { ArgOf } from 'src/interfaces/event.interface';
import { JobName, JobStatus } from 'src/interfaces/job.interface';
import { BaseService } from 'src/services/base.service';
import { handlePromiseError } from 'src/utils/misc';
import { validateCronExpression } from 'src/validation';
@Injectable()
export class BackupService extends BaseService {
  // True when this instance holds the cross-instance BackupDatabase lock,
  // i.e. it is the one responsible for scheduling/running database backups.
  private backupLock = false;

  /**
   * Registers the backup cron job on application bootstrap.
   * Only the API worker participates, and only if it wins the database lock,
   * so exactly one instance schedules backups.
   */
  @OnEvent({ name: 'app.bootstrap' })
  async onBootstrap(workerType: ImmichWorker) {
    if (workerType !== ImmichWorker.API) {
      return;
    }

    const {
      backup: { database },
    } = await this.getConfig({ withCache: true });

    this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);

    if (this.backupLock) {
      this.jobRepository.addCronJob(
        'backupDatabase',
        database.cronExpression,
        // Queue the backup job; handlePromiseError logs instead of crashing the cron tick.
        () => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
        database.enabled,
      );
    }
  }

  /**
   * Keeps the cron schedule in sync with config changes.
   * Ignored when there is no previous config (initial load) or when this
   * instance does not hold the backup lock.
   */
  @OnEvent({ name: 'config.update', server: true })
  onConfigUpdate({ newConfig: { backup }, oldConfig }: ArgOf<'config.update'>) {
    if (!oldConfig || !this.backupLock) {
      return;
    }

    this.jobRepository.updateCronJob('backupDatabase', backup.database.cronExpression, backup.database.enabled);
  }

  /**
   * Rejects config updates containing an invalid backup cron expression.
   * @throws Error when the expression does not parse.
   */
  @OnEvent({ name: 'config.validate' })
  onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
    const { database } = newConfig.backup;
    if (!validateCronExpression(database.cronExpression)) {
      throw new Error(`Invalid cron expression ${database.cronExpression}`);
    }
  }

  /**
   * Deletes stale files from the backups folder:
   * - all `*.sql.gz.tmp` leftovers from interrupted backups, and
   * - completed `*.sql.gz` backups beyond the configured keepLastAmount
   *   (oldest first — filenames embed a timestamp, so lexicographic sort works).
   */
  async cleanupDatabaseBackups() {
    this.logger.debug(`Database Backup Cleanup Started`);
    const {
      backup: { database: config },
    } = await this.getConfig({ withCache: false });

    const backupsFolder = StorageCore.getBaseFolder(StorageFolder.BACKUPS);
    const files = await this.storageRepository.readdir(backupsFolder);
    const failedBackups = files.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz\.tmp$/));
    const backups = files
      .filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz$/))
      .sort()
      .reverse();

    const toDelete = backups.slice(config.keepLastAmount);
    toDelete.push(...failedBackups);

    for (const file of toDelete) {
      await this.storageRepository.unlink(path.join(backupsFolder, file));
    }
    this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
  }

  /**
   * Runs a full database backup: pipes `pg_dumpall` through `gzip` into a
   * `.sql.gz.tmp` file, renames it on success (so partial files are never
   * mistaken for valid backups), then prunes old backups.
   * @returns JobStatus.SUCCESS or JobStatus.FAILED (errors are logged, not thrown).
   */
  async handleBackupDatabase(): Promise<JobStatus> {
    this.logger.debug(`Database Backup Started`);

    const {
      database: { config },
    } = this.configRepository.getEnv();

    const isUrlConnection = config.connectionType === 'url';
    const databaseParams = isUrlConnection ? [config.url] : ['-U', config.username, '-h', config.host];
    const backupFilePath = path.join(
      StorageCore.getBaseFolder(StorageFolder.BACKUPS),
      `immich-db-backup-${Date.now()}.sql.gz.tmp`,
    );

    try {
      await new Promise<void>((resolve, reject) => {
        const pgdump = this.processRepository.spawn(`pg_dumpall`, [...databaseParams, '--clean', '--if-exists'], {
          // Pass the password via env rather than argv so it is not visible in the process list.
          env: { PATH: process.env.PATH, PGPASSWORD: isUrlConnection ? undefined : config.password },
        });

        const gzip = this.processRepository.spawn(`gzip`, []);
        pgdump.stdout.pipe(gzip.stdin);

        const fileStream = this.storageRepository.createWriteStream(backupFilePath);

        gzip.stdout.pipe(fileStream);

        pgdump.on('error', (err) => {
          this.logger.error('Backup failed with error', err);
          reject(err);
        });

        gzip.on('error', (err) => {
          this.logger.error('Gzip failed with error', err);
          reject(err);
        });

        let pgdumpLogs = '';
        let gzipLogs = '';

        pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
        gzip.stderr.on('data', (data) => (gzipLogs += data));

        pgdump.on('exit', (code) => {
          if (code !== 0) {
            this.logger.error(`Backup failed with code ${code}`);
            // Reject with an Error (not a bare string) so the catch below logs a stack trace.
            reject(new Error(`Backup failed with code ${code}`));
            this.logger.error(pgdumpLogs);
            return;
          }
          if (pgdumpLogs) {
            this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
          }
        });

        gzip.on('exit', (code) => {
          if (code !== 0) {
            this.logger.error(`Gzip failed with code ${code}`);
            reject(new Error(`Gzip failed with code ${code}`));
            this.logger.error(gzipLogs);
            return;
          }
          // gzip may exit 0 even though pg_dumpall failed (its stdin just closed);
          // the pgdump exit handler has already rejected in that case.
          if (pgdump.exitCode !== 0) {
            this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
            return;
          }
          resolve();
        });
      });

      // Anchor the replacement: only strip a trailing `.tmp`, never an
      // occurrence of '.tmp' that might appear earlier in the folder path.
      await this.storageRepository.rename(backupFilePath, backupFilePath.replace(/\.tmp$/, ''));
    } catch (error) {
      this.logger.error('Database Backup Failure', error);
      return JobStatus.FAILED;
    }

    this.logger.debug(`Database Backup Success`);
    await this.cleanupDatabaseBackups();
    return JobStatus.SUCCESS;
  }
}

View File

@@ -28,6 +28,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
import { IPartnerRepository } from 'src/interfaces/partner.interface';
import { IPersonRepository } from 'src/interfaces/person.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { ISearchRepository } from 'src/interfaces/search.interface';
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
import { ISessionRepository } from 'src/interfaces/session.interface';
@@ -72,6 +73,7 @@ export class BaseService {
@Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
@Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
@Inject(IPersonRepository) protected personRepository: IPersonRepository,
@Inject(IProcessRepository) protected processRepository: IProcessRepository,
@Inject(ISearchRepository) protected searchRepository: ISearchRepository,
@Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
@Inject(ISessionRepository) protected sessionRepository: ISessionRepository,

View File

@@ -6,6 +6,7 @@ import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { AuthService } from 'src/services/auth.service';
import { BackupService } from 'src/services/backup.service';
import { CliService } from 'src/services/cli.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
@@ -48,6 +49,7 @@ export const services = [
AssetService,
AuditService,
AuthService,
BackupService,
CliService,
DatabaseService,
DownloadService,

View File

@@ -44,7 +44,7 @@ describe(JobService.name, () => {
sut.onBootstrap(ImmichWorker.MICROSERVICES);
sut.onConfigUpdate({ oldConfig: defaults, newConfig: defaults });
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(14);
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(15);
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
@@ -114,6 +114,7 @@ describe(JobService.name, () => {
[QueueName.SIDECAR]: expectedJobStatus,
[QueueName.LIBRARY]: expectedJobStatus,
[QueueName.NOTIFICATION]: expectedJobStatus,
[QueueName.BACKUP_DATABASE]: expectedJobStatus,
});
});
});

View File

@@ -220,6 +220,7 @@ export class JobService extends BaseService {
QueueName.FACIAL_RECOGNITION,
QueueName.STORAGE_TEMPLATE_MIGRATION,
QueueName.DUPLICATE_DETECTION,
QueueName.BACKUP_DATABASE,
].includes(name);
}

View File

@@ -5,6 +5,7 @@ import { ArgOf } from 'src/interfaces/event.interface';
import { IDeleteFilesJob, JobName } from 'src/interfaces/job.interface';
import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { BackupService } from 'src/services/backup.service';
import { DuplicateService } from 'src/services/duplicate.service';
import { JobService } from 'src/services/job.service';
import { LibraryService } from 'src/services/library.service';
@@ -26,6 +27,7 @@ export class MicroservicesService {
constructor(
private auditService: AuditService,
private assetService: AssetService,
private backupService: BackupService,
private jobService: JobService,
private libraryService: LibraryService,
private mediaService: MediaService,
@@ -52,6 +54,7 @@ export class MicroservicesService {
await this.jobService.init({
[JobName.ASSET_DELETION]: (data) => this.assetService.handleAssetDeletion(data),
[JobName.ASSET_DELETION_CHECK]: () => this.assetService.handleAssetDeletionCheck(),
[JobName.BACKUP_DATABASE]: () => this.backupService.handleBackupDatabase(),
[JobName.DELETE_FILES]: (data: IDeleteFilesJob) => this.storageService.handleDeleteFiles(data),
[JobName.CLEAN_OLD_AUDIT_LOGS]: () => this.auditService.handleCleanup(),
[JobName.CLEAN_OLD_SESSION_TOKENS]: () => this.sessionService.handleCleanup(),

View File

@@ -32,6 +32,7 @@ describe(StorageService.name, () => {
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
mountChecks: {
backups: true,
'encoded-video': true,
library: true,
profile: true,
@@ -44,16 +45,19 @@ describe(StorageService.name, () => {
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/profile');
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs');
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/upload');
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
expect(storageMock.createFile).toHaveBeenCalledWith('upload/encoded-video/.immich', expect.any(Buffer));
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
expect(storageMock.createFile).toHaveBeenCalledWith('upload/profile/.immich', expect.any(Buffer));
expect(storageMock.createFile).toHaveBeenCalledWith('upload/thumbs/.immich', expect.any(Buffer));
expect(storageMock.createFile).toHaveBeenCalledWith('upload/upload/.immich', expect.any(Buffer));
expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
});
it('should enable mount folder checking for a new folder type', async () => {
systemMock.get.mockResolvedValue({
mountChecks: {
backups: false,
'encoded-video': true,
library: false,
profile: true,
@@ -66,6 +70,7 @@ describe(StorageService.name, () => {
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
mountChecks: {
backups: true,
'encoded-video': true,
library: true,
profile: true,
@@ -73,10 +78,12 @@ describe(StorageService.name, () => {
upload: true,
},
});
expect(storageMock.mkdirSync).toHaveBeenCalledTimes(1);
expect(storageMock.mkdirSync).toHaveBeenCalledTimes(2);
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/library');
expect(storageMock.createFile).toHaveBeenCalledTimes(1);
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
expect(storageMock.createFile).toHaveBeenCalledTimes(2);
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
});
it('should throw an error if .immich is missing', async () => {

View File

@@ -44,6 +44,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
[QueueName.NOTIFICATION]: { concurrency: 5 },
},
backup: {
database: {
enabled: true,
cronExpression: '0 02 * * *',
keepLastAmount: 14,
},
},
ffmpeg: {
crf: 30,
threads: 0,