Merge branch 'main' of https://github.com/immich-app/immich into feat/sidecar-asset-files

Jonathan Jogenfors
2025-10-30 23:19:18 +01:00
739 changed files with 22252 additions and 13205 deletions

View File

@@ -859,4 +859,43 @@ export const assetStub = {
stackId: null,
visibility: AssetVisibility.Timeline,
}),
panoramaTif: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.tif',
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'asset-id.tif',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
projectionType: 'EQUIRECTANGULAR',
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
}),
};

View File

@@ -261,4 +261,15 @@ export const probeStub = {
bitrate: 0,
},
}),
videoStreamReserved: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
...probeStubDefaultVideoStream[0],
colorPrimaries: 'reserved',
colorSpace: 'reserved',
colorTransfer: 'reserved',
},
],
}),
};

View File

@@ -0,0 +1,14 @@
import { NotificationLevel, NotificationType } from 'src/enum';
export const notificationStub = {
albumEvent: {
id: 'notification-album-event',
type: NotificationType.AlbumInvite,
description: 'You have been invited to a shared album',
title: 'Album Invitation',
createdAt: new Date('2024-01-01'),
data: { albumId: 'album-id' },
level: NotificationLevel.Success,
readAt: null,
},
};

View File

@@ -27,23 +27,29 @@ import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SyncCheckpointRepository } from 'src/repositories/sync-checkpoint.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { TagRepository } from 'src/repositories/tag.repository';
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { DB } from 'src/schema';
import { AlbumTable } from 'src/schema/tables/album.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
@@ -51,9 +57,12 @@ import { MemoryTable } from 'src/schema/tables/memory.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { SessionTable } from 'src/schema/tables/session.table';
import { StackTable } from 'src/schema/tables/stack.table';
import { TagAssetTable } from 'src/schema/tables/tag-asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { UserTable } from 'src/schema/tables/user.table';
import { BASE_SERVICE_DEPENDENCIES, BaseService } from 'src/services/base.service';
import { SyncService } from 'src/services/sync.service';
import { newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { factory, newDate, newEmbedding, newUuid } from 'test/small.factory';
import { automock, wait } from 'test/utils';
import { Mocked } from 'vitest';
@@ -163,6 +172,11 @@ export class MediumTestContext<S extends BaseService = BaseService> {
return { asset, result };
}
async newAssetFile(dto: Insertable<AssetFileTable>) {
const result = await this.get(AssetRepository).upsertFile(dto);
return { result };
}
async newAssetFace(dto: Partial<Insertable<AssetFace>> & { assetId: string }) {
const assetFace = mediumFactory.assetFaceInsert(dto);
const result = await this.get(PersonRepository).createAssetFace(assetFace);
@@ -238,6 +252,18 @@ export class MediumTestContext<S extends BaseService = BaseService> {
user,
};
}
async newTagAsset(tagBulkAssets: { tagIds: string[]; assetIds: string[] }) {
const tagsAssets: Insertable<TagAssetTable>[] = [];
for (const tagsId of tagBulkAssets.tagIds) {
for (const assetsId of tagBulkAssets.assetIds) {
tagsAssets.push({ tagsId, assetsId });
}
}
const result = await this.get(TagRepository).upsertAssetIds(tagsAssets);
return { tagsAssets, result };
}
}
export class SyncTestContext extends MediumTestContext<SyncService> {
@@ -289,11 +315,13 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
case AssetJobRepository:
case MemoryRepository:
case NotificationRepository:
case OcrRepository:
case PartnerRepository:
case PersonRepository:
case SearchRepository:
case SessionRepository:
case SharedLinkRepository:
case SharedLinkAssetRepository:
case StackRepository:
case SyncRepository:
case SyncCheckpointRepository:
@@ -316,6 +344,10 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
return new key(LoggingRepository.create());
}
case TagRepository: {
return new key(db, LoggingRepository.create());
}
case LoggingRepository as unknown as ClassConstructor<LoggingRepository>: {
return new key() as unknown as T;
}
@@ -336,6 +368,7 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
case CryptoRepository:
case MemoryRepository:
case NotificationRepository:
case OcrRepository:
case PartnerRepository:
case PersonRepository:
case SessionRepository:
@@ -343,10 +376,15 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
case SyncCheckpointRepository:
case SystemMetadataRepository:
case UserRepository:
- case VersionHistoryRepository: {
+ case VersionHistoryRepository:
+ case TagRepository: {
return automock(key);
}
case TelemetryRepository: {
return newTelemetryRepositoryMock();
}
case DatabaseRepository: {
return automock(DatabaseRepository, {
args: [undefined, { setContext: () => {} }, { getEnv: () => ({ database: { vectorExtension: '' } }) }],
@@ -379,6 +417,10 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
return automock(LoggingRepository, { args: [undefined, configMock], strict: false });
}
case MachineLearningRepository: {
return automock(MachineLearningRepository, { args: [{ setContext: () => {} }] });
}
case StorageRepository: {
return automock(StorageRepository, { args: [{ setContext: () => {} }] });
}
@@ -561,6 +603,23 @@ const memoryInsert = (memory: Partial<Insertable<MemoryTable>> = {}) => {
return { ...defaults, ...memory, id };
};
const tagInsert = (tag: Partial<Insertable<TagTable>>) => {
const id = tag.id || newUuid();
const defaults: Insertable<TagTable> = {
id,
userId: '',
value: '',
createdAt: newDate(),
updatedAt: newDate(),
color: '',
parentId: null,
updateId: newUuid(),
};
return { ...defaults, ...tag, id };
};
class CustomWritable extends Writable {
private data = '';
@@ -583,7 +642,7 @@ const syncStream = () => {
};
const loginDetails = () => {
- return { isSecure: false, clientIp: '', deviceType: '', deviceOS: '' };
+ return { isSecure: false, clientIp: '', deviceType: '', deviceOS: '', appVersion: null };
};
const loginResponse = (): LoginResponseDto => {
@@ -613,4 +672,5 @@ export const mediumFactory = {
memoryInsert,
loginDetails,
loginResponse,
tagInsert,
};
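
For context, the new tagInsert factory and newTagAsset context helper are meant to be used together from medium specs. A minimal sketch (assuming the TagService setup() and the upsertTags util from src/utils/tag that appear later in this change, not the diff itself):

it('links a tag to an asset via newTagAsset', async () => {
  const { ctx } = setup();
  const { user } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user.id });
  const tagRepo = ctx.get(TagRepository);
  const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });
  // newTagAsset cross-joins the given tag and asset ids and upserts the resulting pairs
  await ctx.newTagAsset({ tagIds: [tag.id], assetIds: [asset.id] });
  await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
});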

View File

@@ -1,6 +1,14 @@
import { Kysely } from 'kysely';
import { JobName, SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { DB } from 'src/schema';
import { AssetService } from 'src/services/asset.service';
import { newMediumService } from 'test/medium.factory';
@@ -12,8 +20,8 @@ let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(AssetService, {
database: db || defaultDatabase,
- real: [AssetRepository],
- mock: [LoggingRepository],
+ real: [AssetRepository, AlbumRepository, AccessRepository, SharedLinkAssetRepository, StackRepository],
+ mock: [LoggingRepository, JobRepository, StorageRepository],
});
};
@@ -32,4 +40,166 @@ describe(AssetService.name, () => {
await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
});
});
describe('copy', () => {
it('should copy albums', async () => {
const { sut, ctx } = setup();
const albumRepo = ctx.get(AlbumRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
const { album } = await ctx.newAlbum({ ownerId: user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: oldAsset.id });
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
await expect(albumRepo.getAssetIds(album.id, [oldAsset.id, newAsset.id])).resolves.toEqual(
new Set([oldAsset.id, newAsset.id]),
);
});
it('should copy shared links', async () => {
const { sut, ctx } = setup();
const sharedLinkRepo = ctx.get(SharedLinkRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const { id: sharedLinkId } = await sharedLinkRepo.create({
allowUpload: false,
key: Buffer.from('123'),
type: SharedLinkType.Individual,
userId: user.id,
assetIds: [oldAsset.id],
});
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
await expect(sharedLinkRepo.get(user.id, sharedLinkId)).resolves.toEqual(
expect.objectContaining({
assets: [expect.objectContaining({ id: oldAsset.id }), expect.objectContaining({ id: newAsset.id })],
}),
);
});
it('should merge stacks', async () => {
const { sut, ctx } = setup();
const stackRepo = ctx.get(StackRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
await ctx.newExif({ assetId: asset2.id, description: 'foo' });
await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
const {
stack: { id: newStackId },
} = await ctx.newStack({ ownerId: user.id }, [newAsset.id, asset2.id]);
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
await expect(stackRepo.getById(oldAsset.id)).resolves.toEqual(undefined);
const newStack = await stackRepo.getById(newStackId);
expect(newStack).toEqual(
expect.objectContaining({
primaryAssetId: newAsset.id,
assets: expect.arrayContaining([expect.objectContaining({ id: asset2.id })]),
}),
);
expect(newStack!.assets.length).toEqual(4);
});
it('should copy stack', async () => {
const { sut, ctx } = setup();
const stackRepo = ctx.get(StackRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const {
stack: { id: stackId },
} = await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
const stack = await stackRepo.getById(stackId);
expect(stack).toEqual(
expect.objectContaining({
primaryAssetId: oldAsset.id,
assets: expect.arrayContaining([expect.objectContaining({ id: newAsset.id })]),
}),
);
expect(stack!.assets.length).toEqual(3);
});
it('should copy favorite status', async () => {
const { sut, ctx } = setup();
const assetRepo = ctx.get(AssetRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, isFavorite: true });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
await expect(assetRepo.getById(newAsset.id)).resolves.toEqual(expect.objectContaining({ isFavorite: true }));
});
it('should copy sidecar file', async () => {
const { sut, ctx } = setup();
const storageRepo = ctx.getMock(StorageRepository);
const jobRepo = ctx.getMock(JobRepository);
storageRepo.copyFile.mockResolvedValue();
jobRepo.queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, sidecarPath: '/path/to/my/sidecar.xmp' });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
expect(storageRepo.copyFile).toHaveBeenCalledWith('/path/to/my/sidecar.xmp', `${newAsset.originalPath}.xmp`);
expect(jobRepo.queue).toHaveBeenCalledWith({
name: JobName.AssetExtractMetadata,
data: { id: newAsset.id },
});
});
});
});

View File

@@ -11,6 +11,7 @@ import { LoggingRepository } from 'src/repositories/logging.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { AuthService } from 'src/services/auth.service';
@@ -32,7 +33,7 @@ const setup = (db?: Kysely<DB>) => {
SystemMetadataRepository,
UserRepository,
],
- mock: [LoggingRepository, StorageRepository, EventRepository],
+ mock: [LoggingRepository, StorageRepository, EventRepository, TelemetryRepository],
});
};
@@ -43,7 +44,8 @@ beforeAll(async () => {
describe(AuthService.name, () => {
describe('adminSignUp', () => {
it(`should sign up the admin`, async () => {
- const { sut } = setup();
+ const { sut, ctx } = setup();
+ ctx.getMock(EventRepository).emit.mockResolvedValue();
const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };
await expect(sut.adminSignUp(dto)).resolves.toEqual(
@@ -129,6 +131,7 @@ describe(AuthService.name, () => {
describe('changePassword', () => {
it('should change the password and login with it', async () => {
const { sut, ctx } = setup();
ctx.getMock(EventRepository).emit.mockResolvedValue();
const dto = { password: 'password', newPassword: 'new-password' };
const passwordHashed = await hash(dto.password, 10);
const { user } = await ctx.newUser({ password: passwordHashed });

View File

@@ -0,0 +1,243 @@
import { Kysely } from 'kysely';
import { AssetFileType, JobStatus } from 'src/enum';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { DB } from 'src/schema';
import { OcrService } from 'src/services/ocr.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(OcrService, {
database: db || defaultDatabase,
real: [AssetRepository, AssetJobRepository, ConfigRepository, OcrRepository, SystemMetadataRepository],
mock: [JobRepository, LoggingRepository, MachineLearningRepository],
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(OcrService.name, () => {
it('should work', () => {
const { sut } = setup();
expect(sut).toBeDefined();
});
it('should parse asset', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
const machineLearningMock = ctx.getMock(MachineLearningRepository);
machineLearningMock.ocr.mockResolvedValue({
box: [10, 10, 50, 10, 50, 50, 10, 50],
boxScore: [0.99],
text: ['Test OCR'],
textScore: [0.95],
});
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
const ocrRepository = ctx.get(OcrRepository);
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
{
assetId: asset.id,
boxScore: 0.99,
id: expect.any(String),
text: 'Test OCR',
textScore: 0.95,
x1: 10,
y1: 10,
x2: 50,
y2: 10,
x3: 50,
y3: 50,
x4: 10,
y4: 50,
},
]);
await expect(
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
).resolves.toEqual({
assetId: asset.id,
text: 'Test OCR',
});
await expect(
ctx.database
.selectFrom('asset_job_status')
.select('asset_job_status.ocrAt')
.where('assetId', '=', asset.id)
.executeTakeFirst(),
).resolves.toEqual({ ocrAt: expect.any(Date) });
});
it('should handle multiple boxes', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
const machineLearningMock = ctx.getMock(MachineLearningRepository);
machineLearningMock.ocr.mockResolvedValue({
box: Array.from({ length: 8 * 5 }, (_, i) => i),
boxScore: [0.7, 0.67, 0.65, 0.62, 0.6],
text: ['One', 'Two', 'Three', 'Four', 'Five'],
textScore: [0.9, 0.89, 0.88, 0.87, 0.86],
});
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
const ocrRepository = ctx.get(OcrRepository);
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
{
assetId: asset.id,
boxScore: 0.7,
id: expect.any(String),
text: 'One',
textScore: 0.9,
x1: 0,
y1: 1,
x2: 2,
y2: 3,
x3: 4,
y3: 5,
x4: 6,
y4: 7,
},
{
assetId: asset.id,
boxScore: 0.67,
id: expect.any(String),
text: 'Two',
textScore: 0.89,
x1: 8,
y1: 9,
x2: 10,
y2: 11,
x3: 12,
y3: 13,
x4: 14,
y4: 15,
},
{
assetId: asset.id,
boxScore: 0.65,
id: expect.any(String),
text: 'Three',
textScore: 0.88,
x1: 16,
y1: 17,
x2: 18,
y2: 19,
x3: 20,
y3: 21,
x4: 22,
y4: 23,
},
{
assetId: asset.id,
boxScore: 0.62,
id: expect.any(String),
text: 'Four',
textScore: 0.87,
x1: 24,
y1: 25,
x2: 26,
y2: 27,
x3: 28,
y3: 29,
x4: 30,
y4: 31,
},
{
assetId: asset.id,
boxScore: 0.6,
id: expect.any(String),
text: 'Five',
textScore: 0.86,
x1: 32,
y1: 33,
x2: 34,
y2: 35,
x3: 36,
y3: 37,
x4: 38,
y4: 39,
},
]);
await expect(
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
).resolves.toEqual({
assetId: asset.id,
text: 'One Two Three Four Five',
});
await expect(
ctx.database
.selectFrom('asset_job_status')
.select('asset_job_status.ocrAt')
.where('assetId', '=', asset.id)
.executeTakeFirst(),
).resolves.toEqual({ ocrAt: expect.any(Date) });
});
it('should handle no boxes', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
const machineLearningMock = ctx.getMock(MachineLearningRepository);
machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
const ocrRepository = ctx.get(OcrRepository);
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
await expect(
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
).resolves.toBeUndefined();
await expect(
ctx.database
.selectFrom('asset_job_status')
.select('asset_job_status.ocrAt')
.where('assetId', '=', asset.id)
.executeTakeFirst(),
).resolves.toEqual({ ocrAt: expect.any(Date) });
});
it('should update existing results', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
const machineLearningMock = ctx.getMock(MachineLearningRepository);
machineLearningMock.ocr.mockResolvedValue({
box: [10, 10, 50, 10, 50, 50, 10, 50],
boxScore: [0.99],
text: ['Test OCR'],
textScore: [0.95],
});
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
const ocrRepository = ctx.get(OcrRepository);
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
await expect(
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
).resolves.toBeUndefined();
});
});
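
A note on the coordinate assertions above: the mocked machine-learning ocr response packs each detected text box as a flat run of eight numbers (x1, y1, x2, y2, x3, y3, x4, y4), which is why the five-box mock built from Array.from({ length: 8 * 5 }) yields corners 0-7, 8-15, and so on. A minimal sketch of that unpacking, using a hypothetical helper name rather than the service's actual implementation:

// Hypothetical helper, not from the diff: splits the flat `box` array from the
// mocked ML response into one { x1..y4 } quadrilateral per detected text box.
type OcrBox = { x1: number; y1: number; x2: number; y2: number; x3: number; y3: number; x4: number; y4: number };

const unpackOcrBoxes = (box: number[]): OcrBox[] => {
  const boxes: OcrBox[] = [];
  for (let i = 0; i + 8 <= box.length; i += 8) {
    const [x1, y1, x2, y2, x3, y3, x4, y4] = box.slice(i, i + 8);
    boxes.push({ x1, y1, x2, y2, x3, y3, x4, y4 });
  }
  return boxes;
};

// With the five-box mock above (box = [0..39]), the second entry is
// { x1: 8, y1: 9, x2: 10, y2: 11, x3: 12, y3: 13, x4: 14, y4: 15 }, matching the expectation for 'Two'.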

View File

@@ -4,6 +4,7 @@ import { SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { DB } from 'src/schema';
@@ -17,7 +18,7 @@ let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(SharedLinkService, {
database: db || defaultDatabase,
- real: [AccessRepository, DatabaseRepository, SharedLinkRepository],
+ real: [AccessRepository, DatabaseRepository, SharedLinkRepository, SharedLinkAssetRepository],
mock: [LoggingRepository, StorageRepository],
});
};
@@ -62,4 +63,65 @@ describe(SharedLinkService.name, () => {
});
});
});
it('should share individual assets', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const assets = await Promise.all([
ctx.newAsset({ ownerId: user.id }),
ctx.newAsset({ ownerId: user.id }),
ctx.newAsset({ ownerId: user.id }),
]);
for (const { asset } of assets) {
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
}
const sharedLinkRepo = ctx.get(SharedLinkRepository);
const sharedLink = await sharedLinkRepo.create({
key: randomBytes(16),
id: factory.uuid(),
userId: user.id,
allowUpload: false,
type: SharedLinkType.Individual,
assetIds: assets.map(({ asset }) => asset.id),
});
await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
assets: assets.map(({ asset }) => expect.objectContaining({ id: asset.id })),
});
});
it('should remove individually shared asset', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const sharedLinkRepo = ctx.get(SharedLinkRepository);
const sharedLink = await sharedLinkRepo.create({
key: randomBytes(16),
id: factory.uuid(),
userId: user.id,
allowUpload: false,
type: SharedLinkType.Individual,
assetIds: [asset.id],
});
await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
assets: [expect.objectContaining({ id: asset.id })],
});
await sut.removeAssets(auth, sharedLink.id, {
assetIds: [asset.id],
});
await expect(sut.getMine({ user, sharedLink }, {})).resolves.toHaveProperty('assets', []);
});
});

View File

@@ -0,0 +1,116 @@
import { Kysely } from 'kysely';
import { JobStatus } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { TagRepository } from 'src/repositories/tag.repository';
import { DB } from 'src/schema';
import { TagService } from 'src/services/tag.service';
import { upsertTags } from 'src/utils/tag';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(TagService, {
database: db || defaultDatabase,
real: [TagRepository, AccessRepository],
mock: [LoggingRepository],
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(TagService.name, () => {
describe('deleteEmptyTags', () => {
it('single tag exists, not connected to any assets, and is deleted', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const tagRepo = ctx.get(TagRepository);
const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });
await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toBeUndefined();
});
it('single tag exists, connected to one asset, and is not deleted', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
const tagRepo = ctx.get(TagRepository);
const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });
await ctx.newTagAsset({ tagIds: [tag.id], assetIds: [asset.id] });
await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
});
it('hierarchical tag exists, and the parent is connected to an asset, and the child is deleted', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
const tagRepo = ctx.get(TagRepository);
const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });
await ctx.newTagAsset({ tagIds: [parentTag.id], assetIds: [asset.id] });
await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
expect.objectContaining({ id: parentTag.id }),
);
await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
expect.objectContaining({ id: childTag.id }),
);
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
expect.objectContaining({ id: parentTag.id }),
);
await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
});
it('hierarchical tag exists, and only the child is connected to an asset, and nothing is deleted', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
const tagRepo = ctx.get(TagRepository);
const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });
await ctx.newTagAsset({ tagIds: [childTag.id], assetIds: [asset.id] });
await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
expect.objectContaining({ id: parentTag.id }),
);
await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
expect.objectContaining({ id: childTag.id }),
);
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
expect.objectContaining({ id: parentTag.id }),
);
await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
expect.objectContaining({ id: childTag.id }),
);
});
it('hierarchical tag exists, and neither parent nor child is connected to an asset, and both are deleted', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const tagRepo = ctx.get(TagRepository);
const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });
await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
expect.objectContaining({ id: parentTag.id }),
);
await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
expect.objectContaining({ id: childTag.id }),
);
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toBeUndefined();
});
});
});

View File

@@ -4,6 +4,7 @@ import { AssetVisibility } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { DB } from 'src/schema';
import { TimelineService } from 'src/services/timeline.service';
import { newMediumService } from 'test/medium.factory';
@@ -15,7 +16,7 @@ let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(TimelineService, {
database: db || defaultDatabase,
- real: [AssetRepository, AccessRepository],
+ real: [AssetRepository, AccessRepository, PartnerRepository],
mock: [LoggingRepository],
});
};
@@ -155,5 +156,54 @@ describe(TimelineService.name, () => {
const response = JSON.parse(rawResponse);
expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
});
it('should return false for favorite status unless asset owner', async () => {
const { sut, ctx } = setup();
const [{ asset: asset1 }, { asset: asset2 }] = await Promise.all([
ctx.newUser().then(async ({ user }) => {
const result = await ctx.newAsset({
ownerId: user.id,
fileCreatedAt: new Date('1970-02-12'),
localDateTime: new Date('1970-02-12'),
isFavorite: true,
});
await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
return result;
}),
ctx.newUser().then(async ({ user }) => {
const result = await ctx.newAsset({
ownerId: user.id,
fileCreatedAt: new Date('1970-02-13'),
localDateTime: new Date('1970-02-13'),
isFavorite: true,
});
await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
return result;
}),
]);
await Promise.all([
ctx.newPartner({ sharedById: asset1.ownerId, sharedWithId: asset2.ownerId }),
ctx.newPartner({ sharedById: asset2.ownerId, sharedWithId: asset1.ownerId }),
]);
const auth1 = factory.auth({ user: { id: asset1.ownerId } });
const rawResponse1 = await sut.getTimeBucket(auth1, {
timeBucket: '1970-02-01',
withPartners: true,
visibility: AssetVisibility.Timeline,
});
const response1 = JSON.parse(rawResponse1);
expect(response1).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [false, true] }));
const auth2 = factory.auth({ user: { id: asset2.ownerId } });
const rawResponse2 = await sut.getTimeBucket(auth2, {
timeBucket: '1970-02-01',
withPartners: true,
visibility: AssetVisibility.Timeline,
});
const response2 = JSON.parse(rawResponse2);
expect(response2).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [true, false] }));
});
});
});

View File

@@ -3,6 +3,7 @@ import { DateTime } from 'luxon';
import { ImmichEnvironment, JobName, JobStatus } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
@@ -21,7 +22,7 @@ const setup = (db?: Kysely<DB>) => {
return newMediumService(UserService, {
database: db || defaultDatabase,
real: [CryptoRepository, ConfigRepository, SystemMetadataRepository, UserRepository],
- mock: [LoggingRepository, JobRepository],
+ mock: [LoggingRepository, JobRepository, EventRepository],
});
};
@@ -34,7 +35,8 @@ beforeAll(async () => {
describe(UserService.name, () => {
describe('create', () => {
it('should create a user', async () => {
- const { sut } = setup();
+ const { sut, ctx } = setup();
+ ctx.getMock(EventRepository).emit.mockResolvedValue();
const user = mediumFactory.userInsert();
await expect(sut.createUser({ name: user.name, email: user.email })).resolves.toEqual(
expect.objectContaining({ name: user.name, email: user.email }),
@@ -42,14 +44,16 @@ describe(UserService.name, () => {
});
it('should reject user with duplicate email', async () => {
- const { sut } = setup();
+ const { sut, ctx } = setup();
+ ctx.getMock(EventRepository).emit.mockResolvedValue();
const user = mediumFactory.userInsert();
await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
});
it('should not return password', async () => {
- const { sut } = setup();
+ const { sut, ctx } = setup();
+ ctx.getMock(EventRepository).emit.mockResolvedValue();
const dto = mediumFactory.userInsert({ password: 'password' });
const user = await sut.createUser({ email: dto.email, password: 'password' });
expect((user as any).password).toBeUndefined();

View File

@@ -144,6 +144,7 @@ const sessionFactory = (session: Partial<Session> = {}) => ({
userId: newUuid(),
pinExpiresAt: newDate(),
isPendingSyncReset: false,
appVersion: session.appVersion ?? null,
...session,
});
@@ -324,10 +325,44 @@ const assetSidecarWriteFactory = (asset: Partial<SidecarWriteAsset> = {}) => ({
...asset,
});
const assetOcrFactory = (
ocr: {
id?: string;
assetId?: string;
x1?: number;
y1?: number;
x2?: number;
y2?: number;
x3?: number;
y3?: number;
x4?: number;
y4?: number;
boxScore?: number;
textScore?: number;
text?: string;
} = {},
) => ({
id: newUuid(),
assetId: newUuid(),
x1: 0.1,
y1: 0.2,
x2: 0.3,
y2: 0.2,
x3: 0.3,
y3: 0.4,
x4: 0.1,
y4: 0.4,
boxScore: 0.95,
textScore: 0.92,
text: 'Sample Text',
...ocr,
});
export const factory = {
activity: activityFactory,
apiKey: apiKeyFactory,
asset: assetFactory,
assetOcr: assetOcrFactory,
auth: authFactory,
authApiKey: authApiKeyFactory,
authUser: authUserFactory,

View File

@@ -41,12 +41,14 @@ import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
@@ -59,6 +61,7 @@ import { TrashRepository } from 'src/repositories/trash.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { ViewRepository } from 'src/repositories/view-repository';
import { WebsocketRepository } from 'src/repositories/websocket.repository';
import { DB } from 'src/schema';
import { AuthService } from 'src/services/auth.service';
import { BaseService } from 'src/services/base.service';
@@ -228,6 +231,7 @@ export type ServiceOverrides = {
metadata: MetadataRepository;
move: MoveRepository;
notification: NotificationRepository;
ocr: OcrRepository;
oauth: OAuthRepository;
partner: PartnerRepository;
person: PersonRepository;
@@ -236,6 +240,7 @@ export type ServiceOverrides = {
serverInfo: ServerInfoRepository;
session: SessionRepository;
sharedLink: SharedLinkRepository;
sharedLinkAsset: SharedLinkAssetRepository;
stack: StackRepository;
storage: StorageRepository;
sync: SyncRepository;
@@ -247,6 +252,7 @@ export type ServiceOverrides = {
user: UserRepository;
versionHistory: VersionHistoryRepository;
view: ViewRepository;
websocket: WebsocketRepository;
};
type As<T> = T extends RepositoryInterface<infer U> ? U : never;
@@ -298,6 +304,7 @@ export const newTestService = <T extends BaseService>(
metadata: newMetadataRepositoryMock(),
move: automock(MoveRepository, { strict: false }),
notification: automock(NotificationRepository),
ocr: automock(OcrRepository, { strict: false }),
oauth: automock(OAuthRepository, { args: [loggerMock] }),
partner: automock(PartnerRepository, { strict: false }),
person: automock(PersonRepository, { strict: false }),
@@ -307,6 +314,7 @@ export const newTestService = <T extends BaseService>(
serverInfo: automock(ServerInfoRepository, { args: [, loggerMock], strict: false }),
session: automock(SessionRepository),
sharedLink: automock(SharedLinkRepository),
sharedLinkAsset: automock(SharedLinkAssetRepository),
stack: automock(StackRepository),
storage: newStorageRepositoryMock(),
sync: automock(SyncRepository),
@@ -320,6 +328,8 @@ export const newTestService = <T extends BaseService>(
user: automock(UserRepository, { strict: false }),
versionHistory: automock(VersionHistoryRepository),
view: automock(ViewRepository),
// eslint-disable-next-line no-sparse-arrays
websocket: automock(WebsocketRepository, { args: [, loggerMock], strict: false }),
};
const sut = new Service(
@@ -350,6 +360,7 @@ export const newTestService = <T extends BaseService>(
overrides.move || (mocks.move as As<MoveRepository>),
overrides.notification || (mocks.notification as As<NotificationRepository>),
overrides.oauth || (mocks.oauth as As<OAuthRepository>),
overrides.ocr || (mocks.ocr as As<OcrRepository>),
overrides.partner || (mocks.partner as As<PartnerRepository>),
overrides.person || (mocks.person as As<PersonRepository>),
overrides.process || (mocks.process as As<ProcessRepository>),
@@ -357,6 +368,7 @@ export const newTestService = <T extends BaseService>(
overrides.serverInfo || (mocks.serverInfo as As<ServerInfoRepository>),
overrides.session || (mocks.session as As<SessionRepository>),
overrides.sharedLink || (mocks.sharedLink as As<SharedLinkRepository>),
overrides.sharedLinkAsset || (mocks.sharedLinkAsset as As<SharedLinkAssetRepository>),
overrides.stack || (mocks.stack as As<StackRepository>),
overrides.storage || (mocks.storage as As<StorageRepository>),
overrides.sync || (mocks.sync as As<SyncRepository>),
@@ -368,6 +380,7 @@ export const newTestService = <T extends BaseService>(
overrides.user || (mocks.user as As<UserRepository>),
overrides.versionHistory || (mocks.versionHistory as As<VersionHistoryRepository>),
overrides.view || (mocks.view as As<ViewRepository>),
overrides.websocket || (mocks.websocket as As<WebsocketRepository>),
);
return {