feat(server): Automatic watching of library folders (#6192)
* feat: initial watch support
* allow offline files
* chore: ignore query errors when resetting e2e db
* revert db query
* add savepoint
* guard the user query
* chore: openapi and db migration
* wip
* support multiple libraries
* fix tests
* wip
* can now cleanup chokidar watchers
* fix unit tests
* add library watch queue
* add missing init from merge
* wip
* can now filter file extensions
* remove watch api from non job client
* Fix e2e test
* watch library with updated import path and exclusion pattern
* add library watch frontend ui
* case sensitive watching extensions
* can auto watch libraries
* move watcher e2e tests to separate file
* don't watch libraries from a queue
* use event emitters
* shorten e2e test timeout
* refactor chokidar code to filesystem provider
* expose chokidar parameters to config file
* fix storage mock
* set default config for library watching
* add fs provider mocks
* cleanup
* add more unit tests for watcher
* chore: fix format + sql
* add more tests
* move unwatch feature back to library service
* add file event unit tests
* chore: formatting
* add documentation
* fix e2e tests
* chore: fix e2e tests
* fix library updating
* test cleanup
* fix typo
* cleanup
* fixing as per pr comments
* reduce library watch config file
* update storage config and mocks
* move negative event tests to unit tests
* fix library watcher e2e
* make watch configuration global
* remove the feature flag
* refactor watcher teardown
* fix microservices init
* centralize asset scan job queue
* improve docs
* add more tests
* chore: open api
* initialize app service
* fix docs
* fix library watch feature flag
* Update docs/docs/features/libraries.md

Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>

* fix: import right app service
* don't be truthy
* fix test speling
* stricter library update tests
* move fs watcher mock to external file
* subscribe to config changes
* docker does not need polling
* make library watch() private
* feat: add configuration ui

---------

Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
Committed by: GitHub
Parent: 4079e92bbf
Commit: 068e703e88
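The squashed message above describes the mechanism only in fragments (chokidar watchers behind the filesystem provider, picomatch extension filtering, add/change/unlink handling, optional polling). For orientation before the diff, here is a minimal standalone sketch of that approach; the paths, extension list, and the queueScan/markOffline helpers are illustrative stand-ins, not Immich APIs.

```typescript
import chokidar from 'chokidar';
import picomatch from 'picomatch';

// Hypothetical stand-ins for the job queue and asset repository used by the service.
const queueScan = async (assetPath: string) => console.log(`queue scan for ${assetPath}`);
const markOffline = async (assetPath: string) => console.log(`mark offline: ${assetPath}`);

const importPaths = ['/data/photos'];          // illustrative
const exclusionPatterns = ['**/Raw/**'];       // illustrative
const extensions = ['.jpg', '.jpeg', '.png'];  // subset, for illustration only

// Case-insensitive extension filter plus exclusion patterns, as in the service below.
const matcher = picomatch(`**/*{${extensions.join(',')}}`, {
  nocase: true,
  ignore: exclusionPatterns,
});

const watcher = chokidar.watch(importPaths, {
  usePolling: false,   // polling is only needed on filesystems without native events
  interval: 10000,
  ignoreInitial: true, // existing files are handled by the periodic scan, not the watcher
});

watcher
  .on('add', (path) => { if (matcher(path)) void queueScan(path); })
  .on('change', (path) => { if (matcher(path)) void queueScan(path); })
  .on('unlink', (path) => { if (matcher(path)) void markOffline(path); })
  .on('error', (error) => console.error(`watcher error: ${error}`));

// Later, e.g. on shutdown: await watcher.close();
```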
@@ -26,6 +26,10 @@ export class CreateLibraryDto {
  @IsString({ each: true })
  @IsNotEmpty({ each: true })
  exclusionPatterns?: string[];

  @IsOptional()
  @IsBoolean()
  isWatched?: boolean;
}

export class UpdateLibraryDto {
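For reference, the shape of a create request that opts into watching; the concrete values here are illustrative, only the field names come from the DTO above and the tests below.

```typescript
import { LibraryType } from '@app/infra/entities';

// Illustrative CreateLibraryDto payload for a watched external library.
const newWatchedLibrary = {
  type: LibraryType.EXTERNAL,
  name: 'External photos',
  importPaths: ['/mnt/photos'],
  exclusionPatterns: ['**/Raw/**'],
  isWatched: true,
};
```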
@@ -2,9 +2,9 @@ import { AssetType, LibraryType, SystemConfig, SystemConfigKey, UserEntity } fro
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
|
||||
import {
|
||||
IAccessRepositoryMock,
|
||||
assetStub,
|
||||
authStub,
|
||||
IAccessRepositoryMock,
|
||||
libraryStub,
|
||||
newAccessRepositoryMock,
|
||||
newAssetRepositoryMock,
|
||||
@@ -14,8 +14,11 @@ import {
|
||||
newStorageRepositoryMock,
|
||||
newSystemConfigRepositoryMock,
|
||||
newUserRepositoryMock,
|
||||
systemConfigStub,
|
||||
userStub,
|
||||
} from '@test';
|
||||
|
||||
import { newFSWatcherMock } from '@test/mocks';
|
||||
import { Stats } from 'fs';
|
||||
import { ILibraryFileJob, ILibraryRefreshJob, JobName } from '../job';
|
||||
import {
|
||||
@@ -28,6 +31,7 @@ import {
|
||||
IUserRepository,
|
||||
} from '../repositories';
|
||||
import { SystemConfigCore } from '../system-config/system-config.core';
|
||||
import { mapLibrary } from './library.dto';
|
||||
import { LibraryService } from './library.service';
|
||||
|
||||
describe(LibraryService.name, () => {
|
||||
@@ -94,11 +98,60 @@ describe(LibraryService.name, () => {
|
||||
enabled: true,
|
||||
cronExpression: '0 1 * * *',
|
||||
},
|
||||
watch: { enabled: false },
|
||||
},
|
||||
} as SystemConfig);
|
||||
|
||||
expect(jobMock.updateCronJob).toHaveBeenCalledWith('libraryScan', '0 1 * * *', true);
|
||||
});
|
||||
|
||||
it('should initialize watcher for all external libraries', async () => {
|
||||
libraryMock.getAll.mockResolvedValue([
|
||||
libraryStub.externalLibraryWithImportPaths1,
|
||||
libraryStub.externalLibraryWithImportPaths2,
|
||||
]);
|
||||
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
|
||||
libraryMock.get.mockImplementation(async (id) => {
|
||||
switch (id) {
|
||||
case libraryStub.externalLibraryWithImportPaths1.id:
|
||||
return libraryStub.externalLibraryWithImportPaths1;
|
||||
case libraryStub.externalLibraryWithImportPaths2.id:
|
||||
return libraryStub.externalLibraryWithImportPaths2;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await sut.init();
|
||||
|
||||
expect(storageMock.watch.mock.calls).toEqual(
|
||||
expect.arrayContaining([
|
||||
(libraryStub.externalLibrary1.importPaths, expect.anything()),
|
||||
(libraryStub.externalLibrary2.importPaths, expect.anything()),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not initialize when watching is disabled', async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchDisabled);
|
||||
|
||||
await sut.init();
|
||||
|
||||
expect(storageMock.watch).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleQueueAssetRefresh', () => {
|
||||
@@ -148,6 +201,34 @@ describe(LibraryService.name, () => {
|
||||
]);
|
||||
});
|
||||
|
||||
it('should force queue new assets', async () => {
|
||||
const mockLibraryJob: ILibraryRefreshJob = {
|
||||
id: libraryStub.externalLibrary1.id,
|
||||
refreshModifiedFiles: false,
|
||||
refreshAllFiles: true,
|
||||
};
|
||||
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
storageMock.crawl.mockResolvedValue(['/data/user1/photo.jpg']);
|
||||
assetMock.getByLibraryId.mockResolvedValue([]);
|
||||
libraryMock.getOnlineAssetPaths.mockResolvedValue([]);
|
||||
userMock.get.mockResolvedValue(userStub.externalPath1);
|
||||
|
||||
await sut.handleQueueAssetRefresh(mockLibraryJob);
|
||||
|
||||
expect(jobMock.queueAll).toHaveBeenCalledWith([
|
||||
{
|
||||
name: JobName.LIBRARY_SCAN_ASSET,
|
||||
data: {
|
||||
id: libraryStub.externalLibrary1.id,
|
||||
ownerId: libraryStub.externalLibrary1.owner.id,
|
||||
assetPath: '/data/user1/photo.jpg',
|
||||
force: true,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should mark assets outside of the user's external path as offline", async () => {
|
||||
const mockLibraryJob: ILibraryRefreshJob = {
|
||||
id: libraryStub.externalLibrary1.id,
|
||||
@@ -564,7 +645,7 @@ describe(LibraryService.name, () => {
|
||||
expect(createdAsset.fileModifiedAt).toEqual(filemtime);
|
||||
});
|
||||
|
||||
it('should error when asset does not exist', async () => {
|
||||
it('should throw error when asset does not exist', async () => {
|
||||
storageMock.stat.mockRejectedValue(new Error("ENOENT, no such file or directory '/data/user1/photo.jpg'"));
|
||||
|
||||
const mockLibraryJob: ILibraryFileJob = {
|
||||
@@ -625,6 +706,31 @@ describe(LibraryService.name, () => {
|
||||
|
||||
expect(libraryMock.softDelete).toHaveBeenCalledWith(libraryStub.externalLibrary1.id);
|
||||
});
|
||||
|
||||
it('should unwatch an external library when deleted', async () => {
|
||||
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
|
||||
libraryMock.getUploadLibraryCount.mockResolvedValue(1);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await sut.init();
|
||||
|
||||
await sut.delete(authStub.admin, libraryStub.externalLibraryWithImportPaths1.id);
|
||||
|
||||
expect(mockWatcher.close).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCount', () => {
|
||||
@@ -638,7 +744,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('get', () => {
|
||||
it('can return a library', async () => {
|
||||
it('should return a library', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
await expect(sut.get(authStub.admin, libraryStub.uploadLibrary1.id)).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
@@ -659,7 +765,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('getAllForUser', () => {
|
||||
it('can return all libraries for user', async () => {
|
||||
it('should return all libraries for user', async () => {
|
||||
libraryMock.getAllByUserId.mockResolvedValue([libraryStub.uploadLibrary1, libraryStub.externalLibrary1]);
|
||||
await expect(sut.getAllForUser(authStub.admin)).resolves.toEqual([
|
||||
expect.objectContaining({
|
||||
@@ -679,7 +785,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('getStatistics', () => {
|
||||
it('can return library statistics', async () => {
|
||||
it('should return library statistics', async () => {
|
||||
libraryMock.getStatistics.mockResolvedValue({ photos: 10, videos: 0, total: 10, usage: 1337 });
|
||||
await expect(sut.getStatistics(authStub.admin, libraryStub.uploadLibrary1.id)).resolves.toEqual({
|
||||
photos: 10,
|
||||
@@ -694,7 +800,7 @@ describe(LibraryService.name, () => {
|
||||
|
||||
describe('create', () => {
|
||||
describe('external library', () => {
|
||||
it('can create with default settings', async () => {
|
||||
it('should create with default settings', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -717,7 +823,7 @@ describe(LibraryService.name, () => {
|
||||
|
||||
expect(libraryMock.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
name: 'New External Library',
|
||||
name: expect.any(String),
|
||||
type: LibraryType.EXTERNAL,
|
||||
importPaths: [],
|
||||
exclusionPatterns: [],
|
||||
@@ -726,7 +832,7 @@ describe(LibraryService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('can create with name', async () => {
|
||||
it('should create with name', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -759,7 +865,7 @@ describe(LibraryService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('can create invisible', async () => {
|
||||
it('should create invisible', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -783,7 +889,7 @@ describe(LibraryService.name, () => {
|
||||
|
||||
expect(libraryMock.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
name: 'New External Library',
|
||||
name: expect.any(String),
|
||||
type: LibraryType.EXTERNAL,
|
||||
importPaths: [],
|
||||
exclusionPatterns: [],
|
||||
@@ -792,7 +898,7 @@ describe(LibraryService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('can create with import paths', async () => {
|
||||
it('should create with import paths', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -816,7 +922,7 @@ describe(LibraryService.name, () => {
|
||||
|
||||
expect(libraryMock.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
name: 'New External Library',
|
||||
name: expect.any(String),
|
||||
type: LibraryType.EXTERNAL,
|
||||
importPaths: ['/data/images', '/data/videos'],
|
||||
exclusionPatterns: [],
|
||||
@@ -825,7 +931,35 @@ describe(LibraryService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('can create with exclusion patterns', async () => {
|
||||
it('should create watched with import paths', async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
libraryMock.create.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([]);
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await sut.init();
|
||||
await sut.create(authStub.admin, {
|
||||
type: LibraryType.EXTERNAL,
|
||||
importPaths: libraryStub.externalLibraryWithImportPaths1.importPaths,
|
||||
});
|
||||
|
||||
expect(storageMock.watch).toHaveBeenCalledWith(
|
||||
libraryStub.externalLibraryWithImportPaths1.importPaths,
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should create with exclusion patterns', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -849,7 +983,7 @@ describe(LibraryService.name, () => {
|
||||
|
||||
expect(libraryMock.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
name: 'New External Library',
|
||||
name: expect.any(String),
|
||||
type: LibraryType.EXTERNAL,
|
||||
importPaths: [],
|
||||
exclusionPatterns: ['*.tmp', '*.bak'],
|
||||
@@ -860,7 +994,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('upload library', () => {
|
||||
it('can create with default settings', async () => {
|
||||
it('should create with default settings', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -892,7 +1026,7 @@ describe(LibraryService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('can create with name', async () => {
|
||||
it('should create with name', async () => {
|
||||
libraryMock.create.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
@@ -925,7 +1059,7 @@ describe(LibraryService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('can not create with import paths', async () => {
|
||||
it('should not create with import paths', async () => {
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
type: LibraryType.UPLOAD,
|
||||
@@ -936,7 +1070,7 @@ describe(LibraryService.name, () => {
|
||||
expect(libraryMock.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('can not create with exclusion patterns', async () => {
|
||||
it('should not create with exclusion patterns', async () => {
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
type: LibraryType.UPLOAD,
|
||||
@@ -946,11 +1080,22 @@ describe(LibraryService.name, () => {
|
||||
|
||||
expect(libraryMock.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not create watched', async () => {
|
||||
await expect(
|
||||
sut.create(authStub.admin, {
|
||||
type: LibraryType.UPLOAD,
|
||||
isWatched: true,
|
||||
}),
|
||||
).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
expect(storageMock.watch).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleQueueCleanup', () => {
|
||||
it('can queue cleanup jobs', async () => {
|
||||
it('should queue cleanup jobs', async () => {
|
||||
libraryMock.getAllDeleted.mockResolvedValue([libraryStub.uploadLibrary1, libraryStub.externalLibrary1]);
|
||||
await expect(sut.handleQueueCleanup()).resolves.toBe(true);
|
||||
|
||||
@@ -962,19 +1107,357 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
it('can update library ', async () => {
|
||||
beforeEach(async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
libraryMock.getAll.mockResolvedValue([]);
|
||||
|
||||
await sut.init();
|
||||
});
|
||||
|
||||
it('should update library', async () => {
|
||||
libraryMock.update.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
await expect(sut.update(authStub.admin, authStub.admin.user.id, {})).resolves.toBeTruthy();
|
||||
await expect(sut.update(authStub.admin, authStub.admin.user.id, {})).resolves.toEqual(
|
||||
mapLibrary(libraryStub.uploadLibrary1),
|
||||
);
|
||||
expect(libraryMock.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
id: authStub.admin.user.id,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should re-watch library when updating import paths', async () => {
|
||||
libraryMock.update.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await expect(sut.update(authStub.admin, authStub.admin.user.id, { importPaths: ['/foo'] })).resolves.toEqual(
|
||||
mapLibrary(libraryStub.externalLibraryWithImportPaths1),
|
||||
);
|
||||
|
||||
expect(libraryMock.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
id: authStub.admin.user.id,
|
||||
}),
|
||||
);
|
||||
expect(storageMock.watch).toHaveBeenCalledWith(
|
||||
libraryStub.externalLibraryWithImportPaths1.importPaths,
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should re-watch library when updating exclusion patterns', async () => {
|
||||
libraryMock.update.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await expect(sut.update(authStub.admin, authStub.admin.user.id, { exclusionPatterns: ['bar'] })).resolves.toEqual(
|
||||
mapLibrary(libraryStub.externalLibraryWithImportPaths1),
|
||||
);
|
||||
|
||||
expect(libraryMock.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
id: authStub.admin.user.id,
|
||||
}),
|
||||
);
|
||||
expect(storageMock.watch).toHaveBeenCalledWith(expect.arrayContaining([expect.any(String)]), expect.anything());
|
||||
});
|
||||
});
|
||||
|
||||
describe('watchAll new', () => {
|
||||
describe('watching disabled', () => {
|
||||
beforeEach(async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchDisabled);
|
||||
|
||||
await sut.init();
|
||||
});
|
||||
|
||||
it('should not watch library', async () => {
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(storageMock.watch).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('watching enabled', () => {
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
beforeEach(async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
libraryMock.getAll.mockResolvedValue([]);
|
||||
await sut.init();
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
});
|
||||
|
||||
it('should watch library', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
let isReady = false;
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
isReady = true;
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(storageMock.watch).toHaveBeenCalledWith(
|
||||
libraryStub.externalLibraryWithImportPaths1.importPaths,
|
||||
expect.anything(),
|
||||
);
|
||||
|
||||
expect(isReady).toBe(true);
|
||||
});
|
||||
|
||||
it('should watch and unwatch library', async () => {
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
await sut.unwatch(libraryStub.externalLibraryWithImportPaths1.id);
|
||||
|
||||
expect(mockWatcher.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not watch library without import paths', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(storageMock.watch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should throw error when watching upload library', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.uploadLibrary1]);
|
||||
|
||||
await expect(sut.watchAll()).rejects.toThrow('Can only watch external libraries');
|
||||
|
||||
expect(storageMock.watch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle a new file event', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'add') {
|
||||
callback('/foo/photo.jpg');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(jobMock.queueAll).toHaveBeenCalledWith([
|
||||
{
|
||||
name: JobName.LIBRARY_SCAN_ASSET,
|
||||
data: {
|
||||
id: libraryStub.externalLibraryWithImportPaths1.id,
|
||||
assetPath: '/foo/photo.jpg',
|
||||
ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
|
||||
force: false,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle a file change event', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'change') {
|
||||
callback('/foo/photo.jpg');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(jobMock.queueAll).toHaveBeenCalledWith([
|
||||
{
|
||||
name: JobName.LIBRARY_SCAN_ASSET,
|
||||
data: {
|
||||
id: libraryStub.externalLibraryWithImportPaths1.id,
|
||||
assetPath: '/foo/photo.jpg',
|
||||
ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
|
||||
force: false,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle a file unlink event', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.external);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'unlink') {
|
||||
callback('/foo/photo.jpg');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(assetMock.save).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
|
||||
});
|
||||
|
||||
it('should handle an error event', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.external);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
let didError = false;
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'error') {
|
||||
didError = true;
|
||||
callback('Error!');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(didError).toBe(true);
|
||||
});
|
||||
|
||||
it('should ignore unknown extensions', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'add') {
|
||||
callback('/foo/photo.txt');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should ignore excluded paths', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.patternPath);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.patternPath]);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'add') {
|
||||
callback('/dir1/photo.txt');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should ignore excluded paths without case sensitivity', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.patternPath);
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.patternPath]);
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
} else if (event === 'add') {
|
||||
callback('/DIR1/photo.txt');
|
||||
}
|
||||
});
|
||||
|
||||
await sut.watchAll();
|
||||
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('tearDown', () => {
|
||||
it('should tear down all watchers', async () => {
|
||||
libraryMock.getAll.mockResolvedValue([
|
||||
libraryStub.externalLibraryWithImportPaths1,
|
||||
libraryStub.externalLibraryWithImportPaths2,
|
||||
]);
|
||||
|
||||
configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
|
||||
libraryMock.get.mockImplementation(async (id) => {
|
||||
switch (id) {
|
||||
case libraryStub.externalLibraryWithImportPaths1.id:
|
||||
return libraryStub.externalLibraryWithImportPaths1;
|
||||
case libraryStub.externalLibraryWithImportPaths2.id:
|
||||
return libraryStub.externalLibraryWithImportPaths2;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
const mockWatcher = newFSWatcherMock();
|
||||
|
||||
mockWatcher.on.mockImplementation((event, callback) => {
|
||||
if (event === 'ready') {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
|
||||
storageMock.watch.mockReturnValue(mockWatcher);
|
||||
|
||||
await sut.init();
|
||||
await sut.unwatchAll();
|
||||
|
||||
expect(mockWatcher.close).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleDeleteLibrary', () => {
|
||||
it('can not delete a nonexistent library', async () => {
|
||||
it('should not delete a nonexistent library', async () => {
|
||||
libraryMock.get.mockImplementation(async () => {
|
||||
return null;
|
||||
});
|
||||
@@ -984,7 +1467,7 @@ describe(LibraryService.name, () => {
|
||||
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(false);
|
||||
});
|
||||
|
||||
it('can delete an empty library', async () => {
|
||||
it('should delete an empty library', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
libraryMock.getAssetIds.mockResolvedValue([]);
|
||||
libraryMock.delete.mockImplementation(async () => {});
|
||||
@@ -992,7 +1475,7 @@ describe(LibraryService.name, () => {
|
||||
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('can delete a library with assets', async () => {
|
||||
it('should delete a library with assets', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
libraryMock.getAssetIds.mockResolvedValue([assetStub.image1.id]);
|
||||
libraryMock.delete.mockImplementation(async () => {});
|
||||
@@ -1004,7 +1487,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('queueScan', () => {
|
||||
it('can queue a library scan of external library', async () => {
|
||||
it('should queue a library scan of external library', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
|
||||
await sut.queueScan(authStub.admin, libraryStub.externalLibrary1.id, {});
|
||||
@@ -1023,7 +1506,7 @@ describe(LibraryService.name, () => {
|
||||
]);
|
||||
});
|
||||
|
||||
it('can not queue a library scan of upload library', async () => {
|
||||
it('should not queue a library scan of upload library', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
|
||||
|
||||
await expect(sut.queueScan(authStub.admin, libraryStub.uploadLibrary1.id, {})).rejects.toBeInstanceOf(
|
||||
@@ -1033,7 +1516,7 @@ describe(LibraryService.name, () => {
|
||||
expect(jobMock.queue).not.toBeCalled();
|
||||
});
|
||||
|
||||
it('can queue a library scan of all modified assets', async () => {
|
||||
it('should queue a library scan of all modified assets', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
|
||||
await sut.queueScan(authStub.admin, libraryStub.externalLibrary1.id, { refreshModifiedFiles: true });
|
||||
@@ -1052,7 +1535,7 @@ describe(LibraryService.name, () => {
|
||||
]);
|
||||
});
|
||||
|
||||
it('can queue a forced library scan', async () => {
|
||||
it('should queue a forced library scan', async () => {
|
||||
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
|
||||
|
||||
await sut.queueScan(authStub.admin, libraryStub.externalLibrary1.id, { refreshAllFiles: true });
|
||||
@@ -1073,7 +1556,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('queueEmptyTrash', () => {
|
||||
it('can queue the trash job', async () => {
|
||||
it('should queue the trash job', async () => {
|
||||
await sut.queueRemoveOffline(authStub.admin, libraryStub.externalLibrary1.id);
|
||||
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
@@ -1090,7 +1573,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('handleQueueAllScan', () => {
|
||||
it('can queue the refresh job', async () => {
|
||||
it('should queue the refresh job', async () => {
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
|
||||
|
||||
await expect(sut.handleQueueAllScan({})).resolves.toBe(true);
|
||||
@@ -1115,19 +1598,16 @@ describe(LibraryService.name, () => {
|
||||
]);
|
||||
});
|
||||
|
||||
it('can queue the force refresh job', async () => {
|
||||
it('should queue the force refresh job', async () => {
|
||||
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
|
||||
|
||||
await expect(sut.handleQueueAllScan({ force: true })).resolves.toBe(true);
|
||||
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[
|
||||
{
|
||||
name: JobName.LIBRARY_QUEUE_CLEANUP,
|
||||
data: {},
|
||||
},
|
||||
],
|
||||
]);
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({
|
||||
name: JobName.LIBRARY_QUEUE_CLEANUP,
|
||||
data: {},
|
||||
});
|
||||
|
||||
expect(jobMock.queueAll).toHaveBeenCalledWith([
|
||||
{
|
||||
name: JobName.LIBRARY_SCAN,
|
||||
@@ -1142,7 +1622,7 @@ describe(LibraryService.name, () => {
|
||||
});
|
||||
|
||||
describe('handleRemoveOfflineFiles', () => {
|
||||
it('can queue trash deletion jobs', async () => {
|
||||
it('should queue trash deletion jobs', async () => {
|
||||
assetMock.getWith.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
|
||||
assetMock.getById.mockResolvedValue(assetStub.image1);
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
import { AssetType, LibraryType } from '@app/infra/entities';
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import picomatch from 'picomatch';

import { R_OK } from 'node:constants';
import { Stats } from 'node:fs';
import path from 'node:path';
@@ -11,6 +13,7 @@ import { usePagination, validateCronExpression } from '../domain.util';
import { IBaseJob, IEntityJob, ILibraryFileJob, ILibraryRefreshJob, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';

import { ImmichLogger } from '@app/infra/logger';
import { EventEmitter } from 'events';
import {
  IAccessRepository,
  IAssetRepository,
@@ -33,11 +36,15 @@ import {
} from './library.dto';

@Injectable()
export class LibraryService {
export class LibraryService extends EventEmitter {
  readonly logger = new ImmichLogger(LibraryService.name);
  private access: AccessCore;
  private configCore: SystemConfigCore;

  private watchLibraries = false;

  private watchers: Record<string, () => Promise<void>> = {};

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
@@ -48,6 +55,7 @@ export class LibraryService {
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
  ) {
    super();
    this.access = AccessCore.create(accessRepository);
    this.configCore = SystemConfigCore.create(configRepository);
    this.configCore.addValidator((config) => {
@@ -59,6 +67,7 @@ export class LibraryService {

  async init() {
    const config = await this.configCore.getConfig();
    this.watchLibraries = config.library.watch.enabled;
    this.jobRepository.addCronJob(
      'libraryScan',
      config.library.scan.cronExpression,
@@ -66,11 +75,128 @@ export class LibraryService {
      config.library.scan.enabled,
    );

    this.configCore.config$.subscribe((config) => {
    if (this.watchLibraries) {
      await this.watchAll();
    }

    this.configCore.config$.subscribe(async (config) => {
      this.jobRepository.updateCronJob('libraryScan', config.library.scan.cronExpression, config.library.scan.enabled);

      if (config.library.watch.enabled !== this.watchLibraries) {
        this.watchLibraries = config.library.watch.enabled;
        if (this.watchLibraries) {
          await this.watchAll();
        } else {
          await this.unwatchAll();
        }
      }
    });
  }
  private async watch(id: string): Promise<boolean> {
    if (!this.watchLibraries) {
      return false;
    }

    const library = await this.findOrFail(id);

    if (library.type !== LibraryType.EXTERNAL) {
      throw new BadRequestException('Can only watch external libraries');
    } else if (library.importPaths.length === 0) {
      return false;
    }

    await this.unwatch(id);

    this.logger.log(`Starting to watch library ${library.id} with import path(s) ${library.importPaths}`);

    const matcher = picomatch(`**/*{${mimeTypes.getSupportedFileExtensions().join(',')}}`, {
      nocase: true,
      ignore: library.exclusionPatterns,
    });

    const config = await this.configCore.getConfig();

    this.logger.debug(
      `Settings for watcher: usePolling: ${config.library.watch.usePolling}, interval: ${config.library.watch.interval}`,
    );

    const watcher = this.storageRepository.watch(library.importPaths, {
      usePolling: config.library.watch.usePolling,
      interval: config.library.watch.interval,
      binaryInterval: config.library.watch.interval,
      ignoreInitial: true,
    });

    this.watchers[id] = async () => {
      await watcher.close();
    };

    watcher.on('add', async (path) => {
      this.logger.debug(`File add event received for ${path} in library ${library.id}}`);
      if (matcher(path)) {
        await this.scanAssets(library.id, [path], library.ownerId, false);
      }
      this.emit('add', path);
    });

    watcher.on('change', async (path) => {
      this.logger.debug(`Detected file change for ${path} in library ${library.id}`);

      if (matcher(path)) {
        // Note: if the changed file was not previously imported, it will be imported now.
        await this.scanAssets(library.id, [path], library.ownerId, false);
      }
      this.emit('change', path);
    });

    watcher.on('unlink', async (path) => {
      this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`);
      const existingAssetEntity = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);

      if (existingAssetEntity && matcher(path)) {
        await this.assetRepository.save({ id: existingAssetEntity.id, isOffline: true });
      }

      this.emit('unlink', path);
    });

    watcher.on('error', async (error) => {
      // TODO: should we log, or throw an exception?
      this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`);
    });

    // Wait for the watcher to initialize before returning
    await new Promise<void>((resolve) => {
      watcher.on('ready', async () => {
        resolve();
      });
    });

    return true;
  }
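The matcher above combines a case-insensitive extension glob with the library's exclusion patterns. A small standalone illustration of how such a picomatch matcher behaves; the extension list and patterns are made up for the example, the real list comes from mimeTypes.getSupportedFileExtensions().

```typescript
import picomatch from 'picomatch';

const extensions = ['.jpg', '.png'];          // illustrative subset
const exclusionPatterns = ['**/Raw/**'];      // illustrative

const matcher = picomatch(`**/*{${extensions.join(',')}}`, {
  nocase: true,
  ignore: exclusionPatterns,
});

console.log(matcher('photos/2024/IMG_0001.JPG')); // expected: true  (nocase matches .JPG)
console.log(matcher('photos/Raw/IMG_0002.jpg'));  // expected: false (hit by exclusion pattern)
console.log(matcher('photos/notes.txt'));         // expected: false (extension not supported)
```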
  async unwatch(id: string) {
    if (this.watchers.hasOwnProperty(id)) {
      await this.watchers[id]();
      delete this.watchers[id];
    }
  }

  async unwatchAll() {
    for (const id in this.watchers) {
      await this.unwatch(id);
    }
  }

  async watchAll() {
    const libraries = await this.repository.getAll(false, LibraryType.EXTERNAL);

    for (const library of libraries) {
      await this.watch(library.id);
    }
  }

  async getStatistics(auth: AuthDto, id: string): Promise<LibraryStatsResponseDto> {
    await this.access.requirePermission(auth, Permission.LIBRARY_READ, id);
    return this.repository.getStatistics(id);
@@ -117,6 +243,9 @@ export class LibraryService {
      if (dto.exclusionPatterns && dto.exclusionPatterns.length > 0) {
        throw new BadRequestException('Upload libraries cannot have exclusion patterns');
      }
      if (dto.isWatched) {
        throw new BadRequestException('Upload libraries cannot be watched');
      }
      break;
    }

@@ -129,12 +258,38 @@ export class LibraryService {
      isVisible: dto.isVisible ?? true,
    });

    this.logger.log(`Creating ${dto.type} library for user ${auth.user.name}`);

    if (dto.type === LibraryType.EXTERNAL && this.watchLibraries) {
      await this.watch(library.id);
    }

    return mapLibrary(library);
  }

  private async scanAssets(libraryId: string, assetPaths: string[], ownerId: string, force = false) {
    await this.jobRepository.queueAll(
      assetPaths.map((assetPath) => ({
        name: JobName.LIBRARY_SCAN_ASSET,
        data: {
          id: libraryId,
          assetPath: path.normalize(assetPath),
          ownerId,
          force,
        },
      })),
    );
  }

  async update(auth: AuthDto, id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
    await this.access.requirePermission(auth, Permission.LIBRARY_UPDATE, id);
    const library = await this.repository.update({ id, ...dto });

    if (dto.importPaths || dto.exclusionPatterns) {
      // Re-watch library to use new paths and/or exclusion patterns
      await this.watch(id);
    }

    return mapLibrary(library);
  }

@@ -147,6 +302,10 @@ export class LibraryService {
      throw new BadRequestException('Cannot delete the last upload library');
    }

    if (this.watchLibraries) {
      await this.unwatch(id);
    }

    await this.repository.softDelete(id);
    await this.jobRepository.queue({ name: JobName.LIBRARY_DELETE, data: { id } });
  }
@@ -245,8 +404,6 @@ export class LibraryService {

    const deviceAssetId = `${basename(assetPath)}`.replace(/\s+/g, '');

    const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);

    let assetId;
    if (doImport) {
      const library = await this.repository.get(job.id, true);
@@ -255,6 +412,8 @@ export class LibraryService {
        return false;
      }

      const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);

      // TODO: In wait of refactoring the domain asset service, this function is just manually written like this
      const addedAsset = await this.assetRepository.create({
        ownerId: job.ownerId,
@@ -387,7 +546,7 @@ export class LibraryService {
      assetPath.match(new RegExp(`^${user.externalPath}`)),
    );

    this.logger.debug(`Found ${crawledAssetPaths.length} assets when crawling import paths ${library.importPaths}`);
    this.logger.debug(`Found ${crawledAssetPaths.length} asset(s) when crawling import paths ${library.importPaths}`);
    const assetsInLibrary = await this.assetRepository.getByLibraryId([job.id]);
    const onlineFiles = new Set(crawledAssetPaths);
    const offlineAssetIds = assetsInLibrary
@@ -411,17 +570,7 @@ export class LibraryService {
      this.logger.debug(`Will import ${filteredPaths.length} new asset(s)`);
    }

    await this.jobRepository.queueAll(
      filteredPaths.map((assetPath) => ({
        name: JobName.LIBRARY_SCAN_ASSET,
        data: {
          id: job.id,
          assetPath: path.normalize(assetPath),
          ownerId: library.ownerId,
          force: job.refreshAllFiles ?? false,
        },
      })),
    );
    await this.scanAssets(job.id, filteredPaths, library.ownerId, job.refreshAllFiles ?? false);
  }

    await this.repository.update({ id: job.id, refreshedAt: new Date() });
@@ -78,7 +78,7 @@ export type JobItem =
  // Filesystem
  | { name: JobName.DELETE_FILES; data: IDeleteFilesJob }

  // Audit log cleanup
  // Audit Log Cleanup
  | { name: JobName.CLEAN_OLD_AUDIT_LOGS; data?: IBaseJob }

  // Asset Deletion
@@ -1,3 +1,4 @@
import { FSWatcher, WatchOptions } from 'chokidar';
import { Stats } from 'fs';
import { FileReadOptions } from 'fs/promises';
import { Readable } from 'stream';
@@ -22,6 +23,8 @@ export interface DiskUsage {

export const IStorageRepository = 'IStorageRepository';

export interface ImmichWatcher extends FSWatcher {}

export interface IStorageRepository {
  createZipStream(): ImmichZipStream;
  createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream>;
@@ -38,4 +41,5 @@ export interface IStorageRepository {
  crawl(crawlOptions: CrawlOptionsDto): Promise<string[]>;
  copyFile(source: string, target: string): Promise<void>;
  rename(source: string, target: string): Promise<void>;
  watch(paths: string[], options: WatchOptions): ImmichWatcher;
}
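The unit tests earlier in this commit reference a newFSWatcherMock helper that is not shown in the diff. A minimal sketch of what such a Jest mock for the ImmichWatcher interface might look like; the name and shape are assumptions based on how the tests use it (driving the 'ready'/'add'/'unlink' callbacks via on.mockImplementation and asserting close() on teardown).

```typescript
import { FSWatcher } from 'chokidar';

// Hypothetical test helper; assumes a Jest environment with jest globals available.
export const newFSWatcherMock = () =>
  ({
    on: jest.fn(),
    close: jest.fn().mockResolvedValue(undefined),
  }) as unknown as jest.Mocked<FSWatcher>;
```

A spec then returns this object from storageMock.watch and wires event callbacks with mockWatcher.on.mockImplementation, exactly as in the library.service specs above.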
@@ -1,9 +1,12 @@
import { validateCronExpression } from '@app/domain';
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import {
  IsBoolean,
  IsInt,
  IsNotEmpty,
  IsObject,
  IsPositive,
  IsString,
  Validate,
  ValidateIf,
@@ -32,9 +35,27 @@ export class SystemConfigLibraryScanDto {
  cronExpression!: string;
}

export class SystemConfigLibraryWatchDto {
  @IsBoolean()
  enabled!: boolean;

  @IsBoolean()
  usePolling!: boolean;

  @IsInt()
  @IsPositive()
  @ApiProperty({ type: 'integer' })
  interval!: number;
}

export class SystemConfigLibraryDto {
  @Type(() => SystemConfigLibraryScanDto)
  @ValidateNested()
  @IsObject()
  scan!: SystemConfigLibraryScanDto;

  @Type(() => SystemConfigLibraryWatchDto)
  @ValidateNested()
  @IsObject()
  watch!: SystemConfigLibraryWatchDto;
}
@@ -129,6 +129,11 @@ export const defaults = Object.freeze<SystemConfig>({
      enabled: true,
      cronExpression: CronExpression.EVERY_DAY_AT_MIDNIGHT,
    },
    watch: {
      enabled: false,
      usePolling: false,
      interval: 10000,
    },
  },
  server: {
    externalDomain: '',

@@ -133,6 +133,11 @@ const updatedConfig = Object.freeze<SystemConfig>({
      enabled: true,
      cronExpression: '0 0 * * *',
    },
    watch: {
      enabled: false,
      usePolling: false,
      interval: 10000,
    },
  },
});
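Watching ships disabled by default, with polling off and a 10-second interval, as the defaults above show. A hedged sketch of the overrides an admin might apply for import paths on a network mount, where native filesystem events are often unreliable; the values and the import path are illustrative, only the key names come from the SystemConfigKey additions further down.

```typescript
import { SystemConfigKey } from '@app/infra/entities'; // import path assumed

// Illustrative overrides; in practice these are set through the admin configuration UI,
// which stores each setting as an individual key/value entry.
const watchOverrides = [
  { key: SystemConfigKey.LIBRARY_WATCH_ENABLED, value: true },
  { key: SystemConfigKey.LIBRARY_WATCH_USE_POLLING, value: true }, // needed for network shares
  { key: SystemConfigKey.LIBRARY_WATCH_INTERVAL, value: 30000 },   // poll every 30 s to limit I/O
];
```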
@@ -71,6 +71,10 @@ export class AppService {
    this.logger.log(`Feature Flags: ${JSON.stringify(await this.serverService.getFeatures(), null, 2)}`);
  }

  async teardown() {
    await this.libraryService.unwatchAll();
  }

  ssr(excludePaths: string[]) {
    let index = '';
    try {
@@ -50,6 +50,13 @@ export enum SystemConfigKey {
  LIBRARY_SCAN_ENABLED = 'library.scan.enabled',
  LIBRARY_SCAN_CRON_EXPRESSION = 'library.scan.cronExpression',

  LIBRARY_WATCH_ENABLED = 'library.watch.enabled',
  LIBRARY_WATCH_USE_POLLING = 'library.watch.usePolling',
  LIBRARY_WATCH_INTERVAL = 'library.watch.interval',
  LIBRARY_WATCH_BINARY_INTERVAL = 'library.watch.binaryInterval',
  LIBRARY_WATCH_WRITE_STABILITY_THRESHOLD = 'library.watch.awaitWriteFinish.stabilityThreshold',
  LIBRARY_WATCH_WRITE_POLL_INTERVAL = 'library.watch.awaitWriteFinish.pollInterval',

  LOGGING_ENABLED = 'logging.enabled',
  LOGGING_LEVEL = 'logging.level',

@@ -253,6 +260,11 @@
      enabled: boolean;
      cronExpression: string;
    };
    watch: {
      enabled: boolean;
      usePolling: boolean;
      interval: number;
    };
  };
  server: {
    externalDomain: string;
@@ -180,7 +180,11 @@ const tests: Test[] = [
];

describe(FilesystemProvider.name, () => {
  const sut = new FilesystemProvider();
  let sut: FilesystemProvider;

  beforeEach(() => {
    sut = new FilesystemProvider();
  });

  afterEach(() => {
    mockfs.restore();
@@ -2,12 +2,14 @@ import {
  CrawlOptionsDto,
  DiskUsage,
  ImmichReadStream,
  ImmichWatcher,
  ImmichZipStream,
  IStorageRepository,
  mimeTypes,
} from '@app/domain';
import { ImmichLogger } from '@app/infra/logger';
import archiver from 'archiver';
import chokidar, { WatchOptions } from 'chokidar';
import { constants, createReadStream, existsSync, mkdirSync } from 'fs';
import fs, { copyFile, readdir, rename, writeFile } from 'fs/promises';
import { glob } from 'glob';
@@ -132,5 +134,9 @@ export class FilesystemProvider implements IStorageRepository {
    });
  }

  watch(paths: string[], options: WatchOptions): ImmichWatcher {
    return chokidar.watch(paths, options);
  }

  readdir = readdir;
}
@@ -3,6 +3,7 @@ import {
  AuditService,
  DatabaseService,
  IDeleteFilesJob,
  IStorageRepository,
  JobName,
  JobService,
  LibraryService,
@@ -15,7 +16,7 @@ import {
  SystemConfigService,
  UserService,
} from '@app/domain';
import { Injectable } from '@nestjs/common';
import { Inject, Injectable } from '@nestjs/common';

@Injectable()
export class AppService {
@@ -33,6 +34,7 @@ export class AppService {
    private storageService: StorageService,
    private userService: UserService,
    private databaseService: DatabaseService,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
  ) {}

  async init() {
@@ -8,8 +8,10 @@ import { DateTime } from 'luxon';
import * as fs from 'node:fs';
import path from 'node:path';
import { Server } from 'node:tls';
import { EventEmitter } from 'stream';
import { EntityTarget, ObjectLiteral } from 'typeorm';
import { AppService } from '../microservices/app.service';
import { AppService } from '../immich/app.service';
import { AppService as MicroAppService } from '../microservices/app.service';

export const IMMICH_TEST_ASSET_PATH = process.env.IMMICH_TEST_ASSET_PATH as string;
export const IMMICH_TEST_ASSET_TEMP_PATH = path.normalize(`${IMMICH_TEST_ASSET_PATH}/temp/`);
@@ -95,7 +97,10 @@ let app: INestApplication;

export const testApp = {
  create: async (): Promise<INestApplication> => {
    const moduleFixture = await Test.createTestingModule({ imports: [AppModule], providers: [AppService] })
    const moduleFixture = await Test.createTestingModule({
      imports: [AppModule],
      providers: [AppService, MicroAppService],
    })
      .overrideModule(InfraModule)
      .useModule(InfraTestModule)
      .overrideProvider(IJobRepository)
@@ -106,7 +111,9 @@ export const testApp = {

    app = await moduleFixture.createNestApplication().init();
    await app.listen(0);
    await db.reset();
    await app.get(AppService).init();
    await app.get(MicroAppService).init();

    const port = app.getHttpServer().address().port;
    const protocol = app instanceof Server ? 'https' : 'http';
@@ -115,11 +122,15 @@ export const testApp = {
    return app;
  },
  reset: async (options?: ResetOptions) => {
    await app.get(AppService).init();
    await db.reset(options);
    await app.get(AppService).init();

    await app.get(MicroAppService).init();
  },
  get: (member: any) => app.get(member),
  teardown: async () => {
    if (app) {
      await app.get(MicroAppService).teardown();
      await app.get(AppService).teardown();
      await app.close();
    }
@@ -127,6 +138,21 @@
  },
};

export function waitForEvent<T>(emitter: EventEmitter, event: string): Promise<T> {
  return new Promise((resolve, reject) => {
    const success = (val: T) => {
      emitter.off('error', fail);
      resolve(val);
    };
    const fail = (err: Error) => {
      emitter.off(event, success);
      reject(err);
    };
    emitter.once(event, success);
    emitter.once('error', fail);
  });
}
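Because LibraryService now extends EventEmitter and emits 'add', 'change', and 'unlink', an e2e test can use this helper to wait for the watcher to pick up a file. A hedged sketch of such a usage; the watched directory, the copied asset, and the surrounding test setup are illustrative rather than taken from the actual e2e spec.

```typescript
// Inside an e2e test, after a watched external library has been created:
const libraryService = testApp.get(LibraryService);

// Start waiting before triggering the filesystem change to avoid a race.
const addEvent = waitForEvent<string>(libraryService, 'add');
await fs.promises.copyFile(`${IMMICH_TEST_ASSET_PATH}/photo.jpg`, '/tmp/watched/photo.jpg');

expect(await addEvent).toEqual('/tmp/watched/photo.jpg');
```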
const directoryExists = async (dirPath: string) =>
  await fs.promises
    .access(dirPath)