chore: use backup util in backup.service.ts
test: update backup.service.ts tests with new util
@@ -5,7 +5,7 @@ import { StorageCore } from 'src/cores/storage.core';
 import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
 import { BackupService } from 'src/services/backup.service';
 import { systemConfigStub } from 'test/fixtures/system-config.stub';
-import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
+import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
 import { describe } from 'vitest';

 describe(BackupService.name, () => {
@@ -147,6 +147,7 @@ describe(BackupService.name, () => {
     beforeEach(() => {
       mocks.storage.readdir.mockResolvedValue([]);
       mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
+      mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
       mocks.storage.rename.mockResolvedValue();
       mocks.storage.unlink.mockResolvedValue();
       mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -165,21 +166,21 @@ describe(BackupService.name, () => {
       expect(mocks.storage.rename).toHaveBeenCalled();
     });

-    it('should fail if pg_dumpall fails', async () => {
-      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
+    it('should fail if pg_dump fails', async () => {
+      mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
     });

     it('should not rename file if pgdump fails and gzip succeeds', async () => {
-      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
+      mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
       expect(mocks.storage.rename).not.toHaveBeenCalled();
     });

     it('should fail if gzip fails', async () => {
-      mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
-      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
+      mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
+      mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
+      await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
     });

     it('should fail if write stream fails', async () => {
@@ -195,9 +196,9 @@ describe(BackupService.name, () => {
     });

     it('should ignore unlink failing and still return failed job status', async () => {
-      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
+      mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
       mocks.storage.unlink.mockRejectedValue(new Error('error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
+      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
       expect(mocks.storage.unlink).toHaveBeenCalled();
     });

@@ -211,12 +212,12 @@ describe(BackupService.name, () => {
       ${'17.15.1'} | ${17}
       ${'18.0.0'} | ${18}
     `(
-      `should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
+      `should use pg_dump $expectedVersion with postgres version $postgresVersion`,
       async ({ postgresVersion, expectedVersion }) => {
         mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
         await sut.handleBackupDatabase();
-        expect(mocks.process.spawn).toHaveBeenCalledWith(
-          `/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
+        expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalledWith(
+          `/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
           expect.any(Array),
           expect.any(Object),
         );
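Note: the real createSpawnDuplexStream never appears in this diff; the specs only stub it. A rough sketch of what such a helper might look like, assuming it wraps Node's child_process.spawn in a single Duplex (writes go to stdin, reads come from stdout) and mirrors the error format the spec asserts; the signature is inferred from the toHaveBeenCalledWith expectations, not taken from the repository:

// Sketch only, not part of this diff: a plausible createSpawnDuplexStream for
// the process repository, inferred from the tests above.
import { spawn } from 'node:child_process';
import { basename } from 'node:path';
import { Duplex } from 'node:stream';

export function createSpawnDuplexStream(command: string, args: string[], options: { env?: NodeJS.ProcessEnv } = {}): Duplex {
  const child = spawn(command, args, { env: options.env });

  // One stream for the whole process: writes feed the child's stdin,
  // reads drain its stdout.
  const duplex = Duplex.from({ writable: child.stdin, readable: child.stdout });

  let stderr = '';
  child.stderr.on('data', (data) => (stderr += data));

  child.on('exit', (code) => {
    if (code !== 0) {
      // The spec asserts on the bare binary name, e.g. "pg_dump non-zero exit code (1)".
      duplex.destroy(new Error(`${basename(command)} non-zero exit code (${code})\n${stderr}`));
    }
  });

  return duplex;
}

Modelling each child process as one Duplex is presumably what lets the backup util compose pg_dump, gzip, and the output file with stream.pipeline instead of the hand-wired pipe() calls and exit handlers removed below.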
@@ -1,13 +1,11 @@
 import { Injectable } from '@nestjs/common';
-import { DateTime } from 'luxon';
 import path from 'node:path';
-import semver from 'semver';
-import { serverVersion } from 'src/constants';
 import { StorageCore } from 'src/cores/storage.core';
 import { OnEvent, OnJob } from 'src/decorators';
 import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
 import { ArgOf } from 'src/repositories/event.repository';
 import { BaseService } from 'src/services/base.service';
+import { createBackup, listBackups, UnsupportedPostgresError } from 'src/utils/backups';
 import { handlePromiseError } from 'src/utils/misc';

 @Injectable()
@@ -52,17 +50,7 @@ export class BackupService extends BaseService {
    } = await this.getConfig({ withCache: false });

    const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
-    const files = await this.storageRepository.readdir(backupsFolder);
-    const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));
-    const backups = files
-      .filter((file) => {
-        const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
-        //immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
-        const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
-        return oldBackupStyle || newBackupStyle;
-      })
-      .sort()
-      .toReversed();
+    const { backups, failedBackups } = await listBackups(this.backupRepos);

    const toDelete = backups.slice(config.keepLastAmount);
    toDelete.push(...failedBackups);
@@ -75,115 +63,27 @@ export class BackupService extends BaseService {

  @OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
  async handleBackupDatabase(): Promise<JobStatus> {
-    this.logger.debug(`Database Backup Started`);
-    const { database } = this.configRepository.getEnv();
-    const config = database.config;
-
-    const isUrlConnection = config.connectionType === 'url';
-
-    const databaseParams = isUrlConnection
-      ? ['--dbname', config.url]
-      : [
-          '--username',
-          config.username,
-          '--host',
-          config.host,
-          '--port',
-          `${config.port}`,
-          '--database',
-          config.database,
-        ];
-
-    databaseParams.push('--clean', '--if-exists');
-    const databaseVersion = await this.databaseRepository.getPostgresVersion();
-    const backupFilePath = path.join(
-      StorageCore.getBaseFolder(StorageFolder.Backups),
-      `immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
-    );
-    const databaseSemver = semver.coerce(databaseVersion);
-    const databaseMajorVersion = databaseSemver?.major;
-
-    if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
-      this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
-      return JobStatus.Failed;
-    }
-
-    this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
-
    try {
-      await new Promise<void>((resolve, reject) => {
-        const pgdump = this.processRepository.spawn(
-          `/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
-          databaseParams,
-          {
-            env: {
-              PATH: process.env.PATH,
-              PGPASSWORD: isUrlConnection ? new URL(config.url).password : config.password,
-            },
-          },
-        );
-
-        // NOTE: `--rsyncable` is only supported in GNU gzip
-        const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
-        pgdump.stdout.pipe(gzip.stdin);
-
-        const fileStream = this.storageRepository.createWriteStream(backupFilePath);
-
-        gzip.stdout.pipe(fileStream);
-
-        pgdump.on('error', (err) => {
-          this.logger.error(`Backup failed with error: ${err}`);
-          reject(err);
-        });
-
-        gzip.on('error', (err) => {
-          this.logger.error(`Gzip failed with error: ${err}`);
-          reject(err);
-        });
-
-        let pgdumpLogs = '';
-        let gzipLogs = '';
-
-        pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
-        gzip.stderr.on('data', (data) => (gzipLogs += data));
-
-        pgdump.on('exit', (code) => {
-          if (code !== 0) {
-            this.logger.error(`Backup failed with code ${code}`);
-            reject(`Backup failed with code ${code}`);
-            this.logger.error(pgdumpLogs);
-            return;
-          }
-          if (pgdumpLogs) {
-            this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
-          }
-        });
-
-        gzip.on('exit', (code) => {
-          if (code !== 0) {
-            this.logger.error(`Gzip failed with code ${code}`);
-            reject(`Gzip failed with code ${code}`);
-            this.logger.error(gzipLogs);
-            return;
-          }
-          if (pgdump.exitCode !== 0) {
-            this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
-            return;
-          }
-          resolve();
-        });
-      });
-      await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
+      await createBackup(this.backupRepos);
    } catch (error) {
-      this.logger.error(`Database Backup Failure: ${error}`);
-      await this.storageRepository
-        .unlink(backupFilePath)
-        .catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
+      if (error instanceof UnsupportedPostgresError) {
+        return JobStatus.Failed;
+      }
      throw error;
    }

-    this.logger.log(`Database Backup Success`);
    await this.cleanupDatabaseBackups();
    return JobStatus.Success;
  }
+
+  private get backupRepos() {
+    return {
+      logger: this.logger,
+      storage: this.storageRepository,
+      config: this.configRepository,
+      process: this.processRepository,
+      database: this.databaseRepository,
+    };
+  }
 }
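The new src/utils/backups.ts itself is not included in this diff. Its listBackups half is directly recoverable from the inline code removed above; a rough sketch, with the repository bag typed only as far as listBackups needs (everything here is reconstruction, not the repository's actual file):

// Sketch reconstructed from the removed inline code; the actual util may differ.
import { StorageCore } from 'src/cores/storage.core';
import { StorageFolder } from 'src/enum';

export class UnsupportedPostgresError extends Error {}

// Shape matches the backupRepos getter added to BackupService; only the
// pieces listBackups needs are typed here.
type BackupRepos = {
  storage: { readdir(folder: string): Promise<string[]> };
};

export const listBackups = async ({ storage }: BackupRepos) => {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const files = await storage.readdir(backupsFolder);

  // Aborted backups keep their .tmp suffix and are treated as failed.
  const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));

  const backups = files
    .filter((file) => {
      const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
      // e.g. immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
      const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
      return oldBackupStyle || newBackupStyle;
    })
    .sort()
    .toReversed(); // newest first, so slice(keepLastAmount) drops the oldest

  return { backups, failedBackups };
};

createBackup would carry the rest of the removed logic: resolve the Postgres major version with semver, run /usr/lib/postgresql/<major>/bin/pg_dump (per the updated tests, pg_dump rather than pg_dumpall) through gzip --rsyncable into a .tmp file, and rename it on success. The service's new catch block shows it signals unsupported versions by throwing UnsupportedPostgresError instead of returning a job status directly.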
@@ -7,7 +7,7 @@ import { NextFunction } from 'express';
 import { Kysely } from 'kysely';
 import multer from 'multer';
 import { ChildProcessWithoutNullStreams } from 'node:child_process';
-import { Readable, Writable } from 'node:stream';
+import { Duplex, Readable, Writable } from 'node:stream';
 import { PNG } from 'pngjs';
 import postgres from 'postgres';
 import { UploadFieldName } from 'src/dtos/asset-media.dto';
@@ -492,6 +492,35 @@ export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: st
   } as unknown as ChildProcessWithoutNullStreams;
 });

+export const mockDuplex = vitest.fn(
+  (command: string, exitCode: number, stdout: string, stderr: string, error?: unknown) => {
+    const duplex = new Duplex({
+      write(_chunk, _encoding, callback) {
+        callback();
+      },
+
+      read() {},
+
+      final(callback) {
+        callback();
+      },
+    });
+
+    setImmediate(() => {
+      if (error) {
+        duplex.destroy(error as Error);
+      } else if (exitCode !== 0) {
+        duplex.destroy(new Error(`${command} non-zero exit code (${exitCode})\n${stderr}`));
+      } else {
+        duplex.push(stdout);
+        duplex.push(null);
+      }
+    });
+
+    return duplex;
+  },
+);
+
 export async function* makeStream<T>(items: T[] = []): AsyncIterableIterator<T> {
   for (const item of items) {
     await Promise.resolve();
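mockDuplex stands in for the stream returned by createSpawnDuplexStream: it swallows writes, and on the next tick either pushes the stdout data or destroys itself with the formatted exit-code error. Destroying the stream is what makes a simulated non-zero exit surface as a rejection wherever the backup util awaits the pipeline, which is why the specs can assert on the message text. A small illustration:

// Illustration only: destroy() emits 'error' asynchronously on the mock,
// matching the messages asserted in the spec above.
const failing = mockDuplex('pg_dump', 1, '', 'error');
failing.on('error', (err) => {
  console.log(err.message); // "pg_dump non-zero exit code (1)" followed by the stderr text
});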