Mirror of https://github.com/immich-app/immich.git (synced 2025-12-25 09:14:58 +03:00)

Commit: merge main
@@ -85,19 +85,6 @@ describe(AssetMediaController.name, () => {
      expect(body).toEqual(factory.responses.badRequest(['metadata must be valid JSON']));
    });

    it('should validate iCloudId is a string', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/assets')
        .attach('assetData', assetData, filename)
        .field({
          ...makeUploadDto(),
          metadata: JSON.stringify([{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 123 } }]),
        });

      expect(status).toBe(400);
      expect(body).toEqual(factory.responses.badRequest(['metadata.0.value.iCloudId must be a string']));
    });

    it('should require `deviceAssetId`', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/assets')

@@ -305,7 +305,7 @@ export class StorageCore {
        return this.assetRepository.update({ id, encodedVideoPath: newPath });
      }
      case AssetPathType.Sidecar: {
        return this.assetRepository.update({ id, sidecarPath: newPath });
        return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: newPath });
      }
      case PersonPathType.Face: {
        return this.personRepository.update({ id, thumbnailPath: newPath });

@@ -122,7 +122,6 @@ export type Asset = {
  originalFileName: string;
  originalPath: string;
  ownerId: string;
  sidecarPath: string | null;
  type: AssetType;
};

@@ -156,13 +155,6 @@ export type StorageAsset = {
  encodedVideoPath: string | null;
};

export type SidecarWriteAsset = {
  id: string;
  sidecarPath: string | null;
  originalPath: string;
  tags: Array<{ value: string }>;
};

export type Stack = {
  id: string;
  primaryAssetId: string;

@@ -309,14 +301,14 @@ export type Workflow = Selectable<WorkflowTable> & {

export type WorkflowFilter = Selectable<WorkflowFilterTable> & {
  workflowId: string;
  filterId: string;
  pluginFilterId: string;
  filterConfig: FilterConfig | null;
  order: number;
};

export type WorkflowAction = Selectable<WorkflowActionTable> & {
  workflowId: string;
  actionId: string;
  pluginActionId: string;
  actionConfig: ActionConfig | null;
  order: number;
};

@@ -347,7 +339,6 @@ export const columns = {
    'asset.originalFileName',
    'asset.originalPath',
    'asset.ownerId',
    'asset.sidecarPath',
    'asset.type',
  ],
  assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],

@@ -124,7 +124,6 @@ export type MapAsset = {
  originalPath: string;
  owner?: User | null;
  ownerId: string;
  sidecarPath: string | null;
  stack?: Stack | null;
  stackId: string | null;
  tags?: Tag[];

@@ -19,7 +19,6 @@ import {
import { BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { AssetMetadataKey, AssetType, AssetVisibility } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
import { AssetMetadata, AssetMetadataItem } from 'src/types';
import { IsNotSiblingOf, Optional, ValidateBoolean, ValidateEnum, ValidateUUID } from 'src/validation';

export class DeviceIdDto {

@@ -154,23 +153,12 @@ export class AssetMetadataUpsertDto {
  items!: AssetMetadataUpsertItemDto[];
}

export class AssetMetadataUpsertItemDto implements AssetMetadataItem {
export class AssetMetadataUpsertItemDto {
  @ValidateEnum({ enum: AssetMetadataKey, name: 'AssetMetadataKey' })
  key!: AssetMetadataKey;

  @IsObject()
  @ValidateNested()
  @Type((options) => {
    switch (options?.object.key) {
      case AssetMetadataKey.MobileApp: {
        return AssetMetadataMobileAppDto;
      }
      default: {
        return Object;
      }
    }
  })
  value!: AssetMetadata[AssetMetadataKey];
  value!: object;
}
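
Note: the `@Type` callback above resolves the nested value DTO from the sibling `key` field at transform time, which is what turns `iCloudId: 123` into the per-key validation error asserted in the e2e test. A minimal standalone sketch of the same pattern, assuming class-transformer and class-validator as used in this repo (the enum value and the `check` helper are illustrative only):

import 'reflect-metadata';
import { plainToInstance, Type } from 'class-transformer';
import { IsObject, IsString, ValidateNested, validate } from 'class-validator';

enum AssetMetadataKey {
  MobileApp = 'mobile-app', // assumed value, for illustration
}

class AssetMetadataMobileAppDto {
  @IsString()
  iCloudId!: string;
}

class UpsertItemDto {
  key!: AssetMetadataKey;

  @IsObject()
  @ValidateNested()
  // resolve the value class from the sibling `key`; unknown keys fall back to a plain object
  @Type((options) => (options?.object.key === AssetMetadataKey.MobileApp ? AssetMetadataMobileAppDto : Object))
  value!: object;
}

// hypothetical usage: a numeric iCloudId produces a nested "must be a string" error
async function check() {
  const item = plainToInstance(UpsertItemDto, { key: AssetMetadataKey.MobileApp, value: { iCloudId: 123 } });
  const errors = await validate(item);
  console.log(errors.length > 0); // true
}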

export class AssetMetadataMobileAppDto {

@@ -50,6 +50,7 @@ export class AssetDeltaSyncResponseDto {
export const extraSyncModels: Function[] = [];

export const ExtraModel = (): ClassDecorator => {
  // eslint-disable-next-line unicorn/consistent-function-scoping
  return (object: Function) => {
    extraSyncModels.push(object);
  };

@@ -7,7 +7,7 @@ import { Optional, ValidateBoolean, ValidateEnum } from 'src/validation';

export class WorkflowFilterItemDto {
  @IsUUID()
  filterId!: string;
  pluginFilterId!: string;

  @IsObject()
  @Optional()

@@ -16,7 +16,7 @@ export class WorkflowFilterItemDto {

export class WorkflowActionItemDto {
  @IsUUID()
  actionId!: string;
  pluginActionId!: string;

  @IsObject()
  @Optional()

@@ -90,7 +90,7 @@ export class WorkflowResponseDto {
export class WorkflowFilterResponseDto {
  id!: string;
  workflowId!: string;
  filterId!: string;
  pluginFilterId!: string;
  filterConfig!: FilterConfig | null;
  order!: number;
}

@@ -98,7 +98,7 @@ export class WorkflowFilterResponseDto {
export class WorkflowActionResponseDto {
  id!: string;
  workflowId!: string;
  actionId!: string;
  pluginActionId!: string;
  actionConfig!: ActionConfig | null;
  order!: number;
}

@@ -107,7 +107,7 @@ export function mapWorkflowFilter(filter: WorkflowFilter): WorkflowFilterRespons
  return {
    id: filter.id,
    workflowId: filter.workflowId,
    filterId: filter.filterId,
    pluginFilterId: filter.pluginFilterId,
    filterConfig: filter.filterConfig,
    order: filter.order,
  };

@@ -117,7 +117,7 @@ export function mapWorkflowAction(action: WorkflowAction): WorkflowActionRespons
  return {
    id: action.id,
    workflowId: action.workflowId,
    actionId: action.actionId,
    pluginActionId: action.pluginActionId,
    actionConfig: action.actionConfig,
    order: action.order,
  };

@@ -44,6 +44,7 @@ export enum AssetFileType {
  FullSize = 'fullsize',
  Preview = 'preview',
  Thumbnail = 'thumbnail',
  Sidecar = 'sidecar',
}

export enum AlbumUserRole {
@@ -20,8 +20,23 @@ limit

-- AssetJobRepository.getForSidecarWriteJob
select
  "id",
  "sidecarPath",
  "originalPath",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files",
  (
    select
      coalesce(json_agg(agg), '[]')

@@ -39,21 +54,36 @@ select
from
  "asset"
where
  "asset"."id" = $1::uuid
  "asset"."id" = $2::uuid
limit
  $2
  $3

-- AssetJobRepository.getForSidecarCheckJob
select
  "id",
  "sidecarPath",
  "originalPath"
  "originalPath",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
where
  "asset"."id" = $1::uuid
  "asset"."id" = $2::uuid
limit
  $2
  $3
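
Note: the correlated `files` subquery in the statements above is generated by the `withFiles` helper that the repository code below selects with. A rough sketch of how such a helper produces the `coalesce(json_agg(agg), '[]')` shape with Kysely (the real signature lives in the repo's database utils and may differ; `DB` and `AssetFileType` stand in for the project's generated schema types):

import { ExpressionBuilder } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';

function withFiles(eb: ExpressionBuilder<DB, 'asset'>, type?: AssetFileType) {
  // correlated subquery: all (or one type of) asset_file rows for the current asset, as a JSON array
  return jsonArrayFrom(
    eb
      .selectFrom('asset_file')
      .select(['asset_file.id', 'asset_file.path', 'asset_file.type'])
      .whereRef('asset_file.assetId', '=', 'asset.id')
      .$if(type !== undefined, (qb) => qb.where('asset_file.type', '=', type!)),
  ).as('files');
}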

-- AssetJobRepository.streamForThumbnailJob
select

@@ -158,7 +188,6 @@ select
  "asset"."originalFileName",
  "asset"."originalPath",
  "asset"."ownerId",
  "asset"."sidecarPath",
  "asset"."type",
  (
    select

@@ -173,11 +202,27 @@ select
          "asset_face"."assetId" = "asset"."id"
          and "asset_face"."deletedAt" is null
      ) as agg
  ) as "faces"
  ) as "faces",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
where
  "asset"."id" = $1
  "asset"."id" = $2

-- AssetJobRepository.getAlbumThumbnailFiles
select

@@ -322,7 +367,6 @@ select
  "asset"."libraryId",
  "asset"."ownerId",
  "asset"."livePhotoVideoId",
  "asset"."sidecarPath",
  "asset"."encodedVideoPath",
  "asset"."originalPath",
  to_json("asset_exif") as "exifInfo",

@@ -433,18 +477,33 @@ select
  "asset"."checksum",
  "asset"."originalPath",
  "asset"."isExternal",
  "asset"."sidecarPath",
  "asset"."originalFileName",
  "asset"."livePhotoVideoId",
  "asset"."fileCreatedAt",
  "asset_exif"."timeZone",
  "asset_exif"."fileSizeInByte"
  "asset_exif"."fileSizeInByte",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
  inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
  "asset"."deletedAt" is null
  and "asset"."id" = $1
  and "asset"."id" = $2

-- AssetJobRepository.streamForStorageTemplateJob
select

@@ -454,12 +513,27 @@ select
  "asset"."checksum",
  "asset"."originalPath",
  "asset"."isExternal",
  "asset"."sidecarPath",
  "asset"."originalFileName",
  "asset"."livePhotoVideoId",
  "asset"."fileCreatedAt",
  "asset_exif"."timeZone",
  "asset_exif"."fileSizeInByte"
  "asset_exif"."fileSizeInByte",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
          and "asset_file"."type" = $1
      ) as agg
  ) as "files"
from
  "asset"
  inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"

@@ -481,11 +555,15 @@ select
from
  "asset"
where
  (
    "asset"."sidecarPath" = $1
    or "asset"."sidecarPath" is null
  not exists (
    select
      "asset_file"."id"
    from
      "asset_file"
    where
      "asset_file"."assetId" = "asset"."id"
      and "asset_file"."type" = $1
  )
  and "asset"."visibility" != $2

-- AssetJobRepository.streamForDetectFacesJob
select

@@ -216,6 +216,34 @@ from
limit
  3

-- AssetRepository.getForCopy
select
  "id",
  "stackId",
  "originalPath",
  "isFavorite",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "asset_file"."id",
          "asset_file"."path",
          "asset_file"."type"
        from
          "asset_file"
        where
          "asset_file"."assetId" = "asset"."id"
      ) as agg
  ) as "files"
from
  "asset"
where
  "id" = $1::uuid
limit
  $2

-- AssetRepository.getById
select
  "asset".*

@@ -6,7 +6,6 @@ import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { StorageAsset } from 'src/types';
import {
  anyUuid,
  asUuid,

@@ -40,7 +39,8 @@ export class AssetJobRepository {
    return this.db
      .selectFrom('asset')
      .where('asset.id', '=', asUuid(id))
      .select(['id', 'sidecarPath', 'originalPath'])
      .select(['id', 'originalPath'])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .select((eb) =>
        jsonArrayFrom(
          eb

@@ -59,7 +59,8 @@ export class AssetJobRepository {
    return this.db
      .selectFrom('asset')
      .where('asset.id', '=', asUuid(id))
      .select(['id', 'sidecarPath', 'originalPath'])
      .select(['id', 'originalPath'])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .limit(1)
      .executeTakeFirst();
  }

@@ -122,6 +123,7 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(columns.asset)
      .select(withFaces)
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.id', '=', id)
      .executeTakeFirst();
  }

@@ -228,7 +230,6 @@ export class AssetJobRepository {
        'asset.libraryId',
        'asset.ownerId',
        'asset.livePhotoVideoId',
        'asset.sidecarPath',
        'asset.encodedVideoPath',
        'asset.originalPath',
      ])

@@ -306,26 +307,24 @@ export class AssetJobRepository {
        'asset.checksum',
        'asset.originalPath',
        'asset.isExternal',
        'asset.sidecarPath',
        'asset.originalFileName',
        'asset.livePhotoVideoId',
        'asset.fileCreatedAt',
        'asset_exif.timeZone',
        'asset_exif.fileSizeInByte',
      ])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.deletedAt', 'is', null);
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> {
    return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst() as Promise<
      StorageAsset | undefined
    >;
  getForStorageTemplateJob(id: string) {
    return this.storageTemplateAssetQuery().where('asset.id', '=', id).executeTakeFirst();
  }

  @GenerateSql({ params: [], stream: true })
  streamForStorageTemplateJob() {
    return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>;
    return this.storageTemplateAssetQuery().stream();
  }

  @GenerateSql({ params: [DummyValue.DATE], stream: true })

@@ -343,9 +342,18 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(['asset.id'])
      .$if(!force, (qb) =>
        qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
        qb.where((eb) =>
          eb.not(
            eb.exists(
              eb
                .selectFrom('asset_file')
                .select('asset_file.id')
                .whereRef('asset_file.assetId', '=', 'asset.id')
                .where('asset_file.type', '=', AssetFileType.Sidecar),
            ),
          ),
        ),
      )
      .where('asset.visibility', '!=', AssetVisibility.Hidden)
      .stream();
  }

@@ -11,7 +11,6 @@ import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { AssetMetadataItem } from 'src/types';
import {
  anyUuid,
  asUuid,

@@ -224,7 +223,7 @@ export class AssetRepository {
      .execute();
  }

  upsertMetadata(id: string, items: AssetMetadataItem[]) {
  upsertMetadata(id: string, items: Array<{ key: AssetMetadataKey; value: object }>) {
    return this.db
      .insertInto('asset_metadata')
      .values(items.map((item) => ({ assetId: id, ...item })))

@@ -397,6 +396,17 @@ export class AssetRepository {
    return this.db.selectFrom('asset_file').select(['assetId', 'path']).limit(sql.lit(3)).execute();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getForCopy(id: string) {
    return this.db
      .selectFrom('asset')
      .select(['id', 'stackId', 'originalPath', 'isFavorite'])
      .select(withFiles)
      .where('id', '=', asUuid(id))
      .limit(1)
      .executeTakeFirst();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
    return this.db

@@ -843,6 +853,10 @@ export class AssetRepository {
      .execute();
  }

  async deleteFile({ assetId, type }: { assetId: string; type: AssetFileType }): Promise<void> {
    await this.db.deleteFrom('asset_file').where('assetId', '=', asUuid(assetId)).where('type', '=', type).execute();
  }

  async deleteFiles(files: Pick<Selectable<AssetFileTable>, 'id'>[]): Promise<void> {
    if (files.length === 0) {
      return;

@@ -257,7 +257,7 @@ describe('getEnv', () => {
    expect(telemetry).toEqual({
      apiPort: 8081,
      microservicesPort: 8082,
      metrics: new Set([]),
      metrics: new Set(),
    });
  });

@@ -403,7 +403,6 @@ export class DatabaseRepository {
      .set((eb) => ({
        originalPath: eb.fn('REGEXP_REPLACE', ['originalPath', source, target]),
        encodedVideoPath: eb.fn('REGEXP_REPLACE', ['encodedVideoPath', source, target]),
        sidecarPath: eb.fn('REGEXP_REPLACE', ['sidecarPath', source, target]),
      }))
      .execute();

@@ -45,12 +45,12 @@ export class OcrRepository {
          textScore: DummyValue.NUMBER,
        },
      ],
      DummyValue.STRING,
    ],
  })
  upsert(assetId: string, ocrDataList: Insertable<AssetOcrTable>[]) {
  upsert(assetId: string, ocrDataList: Insertable<AssetOcrTable>[], searchText: string) {
    let query = this.db.with('deleted_ocr', (db) => db.deleteFrom('asset_ocr').where('assetId', '=', assetId));
    if (ocrDataList.length > 0) {
      const searchText = ocrDataList.map((item) => item.text.trim()).join(' ');
      (query as any) = query
        .with('inserted_ocr', (db) => db.insertInto('asset_ocr').values(ocrDataList))
        .with('inserted_search', (db) =>

@@ -0,0 +1,31 @@
import { Kysely, sql } from 'kysely';
import { tokenizeForSearch } from 'src/utils/database';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`truncate ${sql.table('ocr_search')}`.execute(db);

  let lastAssetId: string | undefined;
  while (true) {
    const rows = await db
      .selectFrom('asset_ocr')
      .select(['assetId', sql<string>`string_agg(text, ' ')`.as('text')])
      .$if(lastAssetId !== undefined, (qb) => qb.where('assetId', '>', lastAssetId))
      .groupBy('assetId')
      .orderBy('assetId')
      .limit(5000)
      .execute();

    if (rows.length === 0) {
      break;
    }

    await db
      .insertInto('ocr_search')
      .values(rows.map(({ assetId, text }) => ({ assetId, text: tokenizeForSearch(text).join(' ') })))
      .execute();

    lastAssetId = rows.at(-1)!.assetId;
  }
}

export async function down(): Promise<void> {}

@@ -0,0 +1,24 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`INSERT INTO "asset_file" ("assetId", "path", "type")
    SELECT
      id, "sidecarPath", 'sidecar'
    FROM "asset"
    WHERE "sidecarPath" IS NOT NULL AND "sidecarPath" != '';`.execute(db);

  await sql`ALTER TABLE "asset" DROP COLUMN "sidecarPath";`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset" ADD "sidecarPath" character varying;`.execute(db);

  await sql`
    UPDATE "asset"
    SET "sidecarPath" = "asset_file"."path"
    FROM "asset_file"
    WHERE "asset"."id" = "asset_file"."assetId" AND "asset_file"."type" = 'sidecar';
  `.execute(db);

  await sql`DELETE FROM "asset_file" WHERE "type" = 'sidecar';`.execute(db);
}
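
Note: the migration above moves every non-empty `sidecarPath` into `asset_file` and only then drops the column, so the data move and the schema change ship together. A hypothetical sanity check one could run between the INSERT and the DROP COLUMN (not part of this commit):

// count assets whose sidecarPath was not carried over; the expected result is 0
const result = await sql<{ missing: string }>`
  SELECT count(*) AS missing
  FROM "asset"
  WHERE "sidecarPath" IS NOT NULL AND "sidecarPath" != ''
    AND NOT EXISTS (
      SELECT 1 FROM "asset_file"
      WHERE "asset_file"."assetId" = "asset"."id" AND "asset_file"."type" = 'sidecar'
    )
`.execute(db);
console.log(result.rows[0].missing); // '0' if the backfill is complete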

@@ -0,0 +1,27 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`DROP INDEX "workflow_filter_filterId_idx";`.execute(db);
  await sql`DROP INDEX "workflow_action_actionId_idx";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" DROP CONSTRAINT "workflow_filter_filterId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_action" DROP CONSTRAINT "workflow_action_actionId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" RENAME COLUMN "filterId" TO "pluginFilterId";`.execute(db);
  await sql`ALTER TABLE "workflow_action" RENAME COLUMN "actionId" TO "pluginActionId";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" ADD CONSTRAINT "workflow_filter_pluginFilterId_fkey" FOREIGN KEY ("pluginFilterId") REFERENCES "plugin_filter" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`ALTER TABLE "workflow_action" ADD CONSTRAINT "workflow_action_pluginActionId_fkey" FOREIGN KEY ("pluginActionId") REFERENCES "plugin_action" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`CREATE INDEX "workflow_filter_pluginFilterId_idx" ON "workflow_filter" ("pluginFilterId");`.execute(db);
  await sql`CREATE INDEX "workflow_action_pluginActionId_idx" ON "workflow_action" ("pluginActionId");`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP INDEX "workflow_filter_pluginFilterId_idx";`.execute(db);
  await sql`DROP INDEX "workflow_action_pluginActionId_idx";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" DROP CONSTRAINT "workflow_filter_pluginFilterId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_action" DROP CONSTRAINT "workflow_action_pluginActionId_fkey";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" RENAME COLUMN "pluginFilterId" TO "filterId";`.execute(db);
  await sql`ALTER TABLE "workflow_action" RENAME COLUMN "pluginActionId" TO "actionId";`.execute(db);
  await sql`ALTER TABLE "workflow_filter" ADD CONSTRAINT "workflow_filter_filterId_fkey" FOREIGN KEY ("filterId") REFERENCES "plugin_filter" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`ALTER TABLE "workflow_action" ADD CONSTRAINT "workflow_action_actionId_fkey" FOREIGN KEY ("actionId") REFERENCES "plugin_action" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
  await sql`CREATE INDEX "workflow_filter_filterId_idx" ON "workflow_filter" ("filterId");`.execute(db);
  await sql`CREATE INDEX "workflow_action_actionId_idx" ON "workflow_action" ("actionId");`.execute(db);
}

@@ -12,7 +12,6 @@ import {
  Timestamp,
  UpdateDateColumn,
} from 'src/sql-tools';
import { AssetMetadata, AssetMetadataItem } from 'src/types';

@UpdatedAtTrigger('asset_metadata_updated_at')
@Table('asset_metadata')

@@ -22,7 +21,7 @@ import { AssetMetadata, AssetMetadataItem } from 'src/types';
  referencingOldTableAs: 'old',
  when: 'pg_trigger_depth() = 0',
})
export class AssetMetadataTable<T extends keyof AssetMetadata = AssetMetadataKey> implements AssetMetadataItem<T> {
export class AssetMetadataTable {
  @ForeignKeyColumn(() => AssetTable, {
    onUpdate: 'CASCADE',
    onDelete: 'CASCADE',

@@ -33,10 +32,10 @@ export class AssetMetadataTable<T extends keyof AssetMetadata = AssetMetadataKey
  assetId!: string;

  @PrimaryColumn({ type: 'character varying' })
  key!: T;
  key!: AssetMetadataKey;

  @Column({ type: 'jsonb' })
  value!: AssetMetadata[T];
  value!: object;

  @UpdateIdColumn({ index: true })
  updateId!: Generated<string>;

@@ -105,9 +105,6 @@ export class AssetTable {
  @Column({ index: true })
  originalFileName!: string;

  @Column({ nullable: true })
  sidecarPath!: string | null;

  @Column({ type: 'bytea', nullable: true })
  thumbhash!: Buffer | null;

@@ -38,7 +38,7 @@ export class WorkflowTable {
}

@Index({ columns: ['workflowId', 'order'] })
@Index({ columns: ['filterId'] })
@Index({ columns: ['pluginFilterId'] })
@Table('workflow_filter')
export class WorkflowFilterTable {
  @PrimaryGeneratedColumn('uuid')

@@ -48,7 +48,7 @@ export class WorkflowFilterTable {
  workflowId!: Generated<string>;

  @ForeignKeyColumn(() => PluginFilterTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
  filterId!: string;
  pluginFilterId!: string;

  @Column({ type: 'jsonb', nullable: true })
  filterConfig!: FilterConfig | null;

@@ -58,7 +58,7 @@ export class WorkflowFilterTable {
}

@Index({ columns: ['workflowId', 'order'] })
@Index({ columns: ['actionId'] })
@Index({ columns: ['pluginActionId'] })
@Table('workflow_action')
export class WorkflowActionTable {
  @PrimaryGeneratedColumn('uuid')

@@ -68,7 +68,7 @@ export class WorkflowActionTable {
  workflowId!: Generated<string>;

  @ForeignKeyColumn(() => PluginActionTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
  actionId!: string;
  pluginActionId!: string;

  @Column({ type: 'jsonb', nullable: true })
  actionConfig!: ActionConfig | null;

@@ -669,7 +669,7 @@ describe(AlbumService.name, () => {
    });

    it('should not allow a shared user with viewer access to add assets', async () => {
      mocks.access.album.checkSharedAlbumAccess.mockResolvedValue(new Set([]));
      mocks.access.album.checkSharedAlbumAccess.mockResolvedValue(new Set());
      mocks.album.getById.mockResolvedValue(_.cloneDeep(albumStub.sharedWithUser));

      await expect(

@@ -174,7 +174,6 @@ const assetEntity = Object.freeze({
    longitude: 10.703_075,
  },
  livePhotoVideoId: null,
  sidecarPath: null,
} as MapAsset);

const existingAsset = Object.freeze({

@@ -188,7 +187,6 @@ const existingAsset = Object.freeze({

const sidecarAsset = Object.freeze({
  ...existingAsset,
  sidecarPath: 'sidecar-path',
  checksum: Buffer.from('_getExistingAssetWithSideCar', 'utf8'),
}) as MapAsset;

@@ -721,18 +719,22 @@ describe(AssetMediaService.name, () => {
      expect(mocks.asset.update).toHaveBeenCalledWith(
        expect.objectContaining({
          id: existingAsset.id,
          sidecarPath: null,
          originalFileName: 'photo1.jpeg',
          originalPath: 'fake_path/photo1.jpeg',
        }),
      );
      expect(mocks.asset.create).toHaveBeenCalledWith(
        expect.objectContaining({
          sidecarPath: null,
          originalFileName: 'existing-filename.jpeg',
          originalPath: 'fake_path/asset_1.jpeg',
        }),
      );
      expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
        expect.objectContaining({
          assetId: existingAsset.id,
          type: AssetFileType.Sidecar,
        }),
      );

      expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
        deletedAt: expect.any(Date),

@@ -769,6 +771,13 @@ describe(AssetMediaService.name, () => {
        deletedAt: expect.any(Date),
        status: AssetStatus.Trashed,
      });
      expect(mocks.asset.upsertFile).toHaveBeenCalledWith(
        expect.objectContaining({
          assetId: existingAsset.id,
          path: sidecarFile.originalPath,
          type: AssetFileType.Sidecar,
        }),
      );
      expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
      expect(mocks.storage.utimes).toHaveBeenCalledWith(
        updatedFile.originalPath,

@@ -798,6 +807,12 @@ describe(AssetMediaService.name, () => {
        deletedAt: expect.any(Date),
        status: AssetStatus.Trashed,
      });
      expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
        expect.objectContaining({
          assetId: existingAsset.id,
          type: AssetFileType.Sidecar,
        }),
      );
      expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
      expect(mocks.storage.utimes).toHaveBeenCalledWith(
        updatedFile.originalPath,

@@ -827,6 +842,9 @@ describe(AssetMediaService.name, () => {

      expect(mocks.asset.create).not.toHaveBeenCalled();
      expect(mocks.asset.updateAll).not.toHaveBeenCalled();
      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
      expect(mocks.asset.deleteFile).not.toHaveBeenCalled();

      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.FileDelete,
        data: { files: [updatedFile.originalPath, undefined] },

@@ -21,7 +21,16 @@ import {
  UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import {
  AssetFileType,
  AssetStatus,
  AssetType,
  AssetVisibility,
  CacheControl,
  JobName,
  Permission,
  StorageFolder,
} from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile, UploadRequest } from 'src/types';

@@ -354,9 +363,12 @@ export class AssetMediaService extends BaseService {
      duration: dto.duration || null,

      livePhotoVideoId: null,
      sidecarPath: sidecarPath || null,
    });

    await (sidecarPath
      ? this.assetRepository.upsertFile({ assetId, type: AssetFileType.Sidecar, path: sidecarPath })
      : this.assetRepository.deleteFile({ assetId, type: AssetFileType.Sidecar }));

    await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
    await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
    await this.jobRepository.queue({

@@ -384,7 +396,6 @@ export class AssetMediaService extends BaseService {
      localDateTime: asset.localDateTime,
      fileModifiedAt: asset.fileModifiedAt,
      livePhotoVideoId: asset.livePhotoVideoId,
      sidecarPath: asset.sidecarPath,
    });

    const { size } = await this.storageRepository.stat(created.originalPath);

@@ -414,7 +425,6 @@ export class AssetMediaService extends BaseService {
      visibility: dto.visibility ?? AssetVisibility.Timeline,
      livePhotoVideoId: dto.livePhotoVideoId,
      originalFileName: dto.filename || file.originalName,
      sidecarPath: sidecarFile?.originalPath,
    });

    if (dto.metadata) {

@@ -422,6 +432,11 @@ export class AssetMediaService extends BaseService {
    }

    if (sidecarFile) {
      await this.assetRepository.upsertFile({
        assetId: asset.id,
        path: sidecarFile.originalPath,
        type: AssetFileType.Sidecar,
      });
      await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
    }
    await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));

@@ -585,8 +585,8 @@ describe(AssetService.name, () => {
        '/uploads/user-id/webp/path.ext',
        '/uploads/user-id/thumbs/path.jpg',
        '/uploads/user-id/fullsize/path.webp',
        assetWithFace.encodedVideoPath,
        assetWithFace.sidecarPath,
        assetWithFace.encodedVideoPath, // this value is null
        undefined, // no sidecar path
        assetWithFace.originalPath,
      ],
    },

@@ -2,6 +2,7 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {

@@ -18,7 +19,16 @@ import {
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { AssetMetadataKey, AssetStatus, AssetVisibility, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import {
  AssetFileType,
  AssetMetadataKey,
  AssetStatus,
  AssetVisibility,
  JobName,
  JobStatus,
  Permission,
  QueueName,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';

@@ -197,8 +207,8 @@ export class AssetService extends BaseService {
    }: AssetCopyDto,
  ) {
    await this.requireAccess({ auth, permission: Permission.AssetCopy, ids: [sourceId, targetId] });
    const sourceAsset = await this.assetRepository.getById(sourceId);
    const targetAsset = await this.assetRepository.getById(targetId);
    const sourceAsset = await this.assetRepository.getForCopy(sourceId);
    const targetAsset = await this.assetRepository.getForCopy(targetId);

    if (!sourceAsset || !targetAsset) {
      throw new BadRequestException('Both assets must exist');

@@ -252,19 +262,25 @@ export class AssetService extends BaseService {
    sourceAsset,
    targetAsset,
  }: {
    sourceAsset: { sidecarPath: string | null };
    targetAsset: { id: string; sidecarPath: string | null; originalPath: string };
    sourceAsset: { files: AssetFile[] };
    targetAsset: { id: string; files: AssetFile[]; originalPath: string };
  }) {
    if (!sourceAsset.sidecarPath) {
    const { sidecarFile: sourceFile } = getAssetFiles(sourceAsset.files);
    if (!sourceFile?.path) {
      return;
    }

    if (targetAsset.sidecarPath) {
      await this.storageRepository.unlink(targetAsset.sidecarPath);
    const { sidecarFile: targetFile } = getAssetFiles(targetAsset.files ?? []);
    if (targetFile?.path) {
      await this.storageRepository.unlink(targetFile.path);
    }

    await this.storageRepository.copyFile(sourceAsset.sidecarPath, `${targetAsset.originalPath}.xmp`);
    await this.assetRepository.update({ id: targetAsset.id, sidecarPath: `${targetAsset.originalPath}.xmp` });
    await this.storageRepository.copyFile(sourceFile.path, `${targetAsset.originalPath}.xmp`);
    await this.assetRepository.upsertFile({
      assetId: targetAsset.id,
      path: `${targetAsset.originalPath}.xmp`,
      type: AssetFileType.Sidecar,
    });
    await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: targetAsset.id } });
  }

@@ -344,11 +360,11 @@ export class AssetService extends BaseService {
      }
    }

    const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
    const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
    const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];

    if (deleteOnDisk) {
      files.push(asset.sidecarPath, asset.originalPath);
      files.push(sidecarFile?.path, asset.originalPath);
    }

    await this.jobRepository.queue({ name: JobName.FileDelete, data: { files } });

@@ -61,7 +61,7 @@ export class BackupService extends BaseService {
        const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
        return oldBackupStyle || newBackupStyle;
      })
      .sort()
      .toSorted()
      .toReversed();

    const toDelete = backups.slice(config.keepLastAmount);
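
Note: `.toSorted()` (ES2023) is the non-mutating counterpart of `.sort()`: it returns a new sorted array and leaves the receiver untouched, which keeps this chain consistent with the `.toReversed()` call that follows. A small illustration:

const names = ['b.sql.gz', 'a.sql.gz', 'c.sql.gz'];
const newestFirst = names.toSorted().toReversed(); // ['c.sql.gz', 'b.sql.gz', 'a.sql.gz']
console.log(names); // original order untouched: ['b.sql.gz', 'a.sql.gz', 'c.sql.gz']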

@@ -278,7 +278,7 @@ describe(LibraryService.name, () => {
      mocks.library.get.mockResolvedValue(library);
      mocks.storage.walk.mockImplementation(async function* generator() {});
      mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: 1n });

      const response = await sut.handleQueueSyncAssets({ id: library.id });

@@ -296,7 +296,7 @@ describe(LibraryService.name, () => {
      mocks.library.get.mockResolvedValue(library);
      mocks.storage.walk.mockImplementation(async function* generator() {});
      mocks.asset.getLibraryAssetCount.mockResolvedValue(0);
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: 1n });

      const response = await sut.handleQueueSyncAssets({ id: library.id });

@@ -311,7 +311,7 @@ describe(LibraryService.name, () => {
      mocks.storage.walk.mockImplementation(async function* generator() {});
      mocks.library.streamAssetIds.mockReturnValue(makeStream([assetStub.external]));
      mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(0) });
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: 0n });
      mocks.library.streamAssetIds.mockReturnValue(makeStream([assetStub.external]));

      const response = await sut.handleQueueSyncAssets({ id: library.id });

@@ -223,7 +223,14 @@ export class LibraryService extends BaseService {
      ownerId: dto.ownerId,
      name: dto.name ?? 'New External Library',
      importPaths: dto.importPaths ?? [],
      exclusionPatterns: dto.exclusionPatterns ?? ['**/@eaDir/**', '**/._*', '**/#recycle/**', '**/#snapshot/**'],
      exclusionPatterns: dto.exclusionPatterns ?? [
        '**/@eaDir/**',
        '**/._*',
        '**/#recycle/**',
        '**/#snapshot/**',
        '**/.stversions/**',
        '**/.stfolder/**',
      ],
    });
    return mapLibrary(library);
  }
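
Note: the two new default exclusions skip Syncthing's `.stversions` and `.stfolder` metadata directories during external-library scans. A quick way to check such globs, assuming a picomatch-style matcher (whichever glob library the crawler actually uses may behave slightly differently):

import picomatch from 'picomatch';

const isExcluded = picomatch(['**/.stversions/**', '**/.stfolder/**'], { dot: true });
console.log(isExcluded('photos/.stversions/IMG_0001.jpg')); // true
console.log(isExcluded('photos/album/IMG_0001.jpg')); // false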
|
||||
|
||||
@@ -174,8 +174,10 @@ export class MediaService extends BaseService {
|
||||
thumbhash: Buffer;
|
||||
};
|
||||
if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
|
||||
this.logger.verbose(`Thumbnail generation for video ${id} ${asset.originalPath}`);
|
||||
generated = await this.generateVideoThumbnails(asset);
|
||||
} else if (asset.type === AssetType.Image) {
|
||||
this.logger.verbose(`Thumbnail generation for image ${id} ${asset.originalPath}`);
|
||||
generated = await this.generateImageThumbnails(asset);
|
||||
} else {
|
||||
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
|
||||
@@ -561,7 +563,7 @@ export class MediaService extends BaseService {
|
||||
private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {
|
||||
return streams
|
||||
.filter((stream) => stream.codecName !== 'unknown')
|
||||
.sort((stream1, stream2) => stream2.bitrate - stream1.bitrate)[0];
|
||||
.toSorted((stream1, stream2) => stream2.bitrate - stream1.bitrate)[0];
|
||||
}
|
||||
|
||||
private getTranscodeTarget(
|
||||
|
||||
@@ -6,7 +6,7 @@ import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { MemoryCreateDto, MemoryResponseDto, MemorySearchDto, MemoryUpdateDto, mapMemory } from 'src/dtos/memory.dto';
|
||||
import { DatabaseLock, JobName, MemoryType, Permission, QueueName, SystemMetadataKey } from 'src/enum';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { addAssets, getMyPartnerIds, removeAssets } from 'src/utils/asset.util';
|
||||
import { addAssets, removeAssets } from 'src/utils/asset.util';
|
||||
|
||||
const DAYS = 3;
|
||||
|
||||
@@ -15,15 +15,6 @@ export class MemoryService extends BaseService {
|
||||
@OnJob({ name: JobName.MemoryGenerate, queue: QueueName.BackgroundTask })
|
||||
async onMemoriesCreate() {
|
||||
const users = await this.userRepository.getList({ withDeleted: false });
|
||||
const usersIds = await Promise.all(
|
||||
users.map((user) =>
|
||||
getMyPartnerIds({
|
||||
userId: user.id,
|
||||
repository: this.partnerRepository,
|
||||
timelineEnabled: true,
|
||||
}),
|
||||
),
|
||||
);
|
||||
|
||||
await this.databaseRepository.withLock(DatabaseLock.MemoryCreation, async () => {
|
||||
const state = await this.systemMetadataRepository.get(SystemMetadataKey.MemoriesState);
|
||||
@@ -38,7 +29,7 @@ export class MemoryService extends BaseService {
|
||||
}
|
||||
|
||||
try {
|
||||
await Promise.all(users.map((owner, i) => this.createOnThisDayMemories(owner.id, usersIds[i], target)));
|
||||
await Promise.all(users.map((owner) => this.createOnThisDayMemories(owner.id, target)));
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to create memories for ${target.toISO()}: ${error}`);
|
||||
}
|
||||
@@ -51,10 +42,10 @@ export class MemoryService extends BaseService {
|
||||
});
|
||||
}
|
||||
|
||||
private async createOnThisDayMemories(ownerId: string, userIds: string[], target: DateTime) {
|
||||
private async createOnThisDayMemories(ownerId: string, target: DateTime) {
|
||||
const showAt = target.startOf('day').toISO();
|
||||
const hideAt = target.endOf('day').toISO();
|
||||
const memories = await this.assetRepository.getByDayOfYear([ownerId, ...userIds], target);
|
||||
const memories = await this.assetRepository.getByDayOfYear([ownerId], target);
|
||||
await Promise.all(
|
||||
memories.map(({ year, assets }) =>
|
||||
this.memoryRepository.create(
|
||||
|
||||
@@ -4,7 +4,16 @@ import { randomBytes } from 'node:crypto';
|
||||
import { Stats } from 'node:fs';
|
||||
import { defaults } from 'src/config';
|
||||
import { MapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
|
||||
import {
|
||||
AssetFileType,
|
||||
AssetType,
|
||||
AssetVisibility,
|
||||
ExifOrientation,
|
||||
ImmichWorker,
|
||||
JobName,
|
||||
JobStatus,
|
||||
SourceType,
|
||||
} from 'src/enum';
|
||||
import { ImmichTags } from 'src/repositories/metadata.repository';
|
||||
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
|
||||
import { assetStub } from 'test/fixtures/asset.stub';
|
||||
@@ -15,17 +24,24 @@ import { tagStub } from 'test/fixtures/tag.stub';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
const removeNonSidecarFiles = (asset: any) => {
|
||||
return {
|
||||
...asset,
|
||||
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
|
||||
};
|
||||
};
|
||||
|
||||
const forSidecarJob = (
|
||||
asset: {
|
||||
id?: string;
|
||||
originalPath?: string;
|
||||
sidecarPath?: string | null;
|
||||
files?: { id: string; type: AssetFileType; path: string }[];
|
||||
} = {},
|
||||
) => {
|
||||
return {
|
||||
id: factory.uuid(),
|
||||
originalPath: '/path/to/IMG_123.jpg',
|
||||
sidecarPath: null,
|
||||
files: [],
|
||||
...asset,
|
||||
};
|
||||
};
|
||||
@@ -166,7 +182,7 @@ describe(MetadataService.name, () => {
|
||||
it('should handle a date in a sidecar file', async () => {
|
||||
const originalDate = new Date('2023-11-21T16:13:17.517Z');
|
||||
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
|
||||
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
@@ -185,7 +201,7 @@ describe(MetadataService.name, () => {
|
||||
it('should take the file modification date when missing exif and earlier than creation date', async () => {
|
||||
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
|
||||
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: fileModifiedAt,
|
||||
@@ -211,7 +227,7 @@ describe(MetadataService.name, () => {
|
||||
it('should take the file creation date when missing exif and earlier than modification date', async () => {
|
||||
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
|
||||
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: fileModifiedAt,
|
||||
@@ -234,7 +250,7 @@ describe(MetadataService.name, () => {
|
||||
|
||||
it('should determine dateTimeOriginal regardless of the server time zone', async () => {
|
||||
process.env.TZ = 'America/Los_Angeles';
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
|
||||
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
@@ -252,7 +268,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should handle lists of numbers', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: assetStub.image.fileModifiedAt,
|
||||
@@ -305,7 +321,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should apply reverse geocoding', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
|
||||
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
|
||||
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
@@ -334,7 +350,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should discard latitude and longitude on null island', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
|
||||
mockReadTags({
|
||||
GPSLatitude: 0,
|
||||
GPSLongitude: 0,
|
||||
@@ -346,7 +362,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from TagsList', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ TagsList: ['Parent'] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -356,7 +372,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract hierarchy from TagsList', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ TagsList: ['Parent/Child'] });
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
|
||||
@@ -376,7 +392,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from Keywords as a string', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: 'Parent' });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -386,7 +402,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from Keywords as a list', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: ['Parent'] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -396,7 +412,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from Keywords as a list with a number', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: ['Parent', 2024] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -407,7 +423,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract hierarchal tags from Keywords', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: 'Parent/Child' });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -426,7 +442,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should ignore Keywords when TagsList is present', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -445,7 +461,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract hierarchy from HierarchicalSubject', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
|
||||
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
|
||||
@@ -466,7 +482,7 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
|
||||
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
|
||||
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
|
||||
|
||||
@@ -1030,8 +1046,15 @@ describe(MetadataService.name, () => {
|
||||
it('should prefer Duration from exif over sidecar', async () => {
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
|
||||
...assetStub.image,
|
||||
sidecarPath: '/path/to/something',
|
||||
files: [
|
||||
{
|
||||
id: 'some-id',
|
||||
type: AssetFileType.Sidecar,
|
||||
path: '/path/to/something',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
mockReadTags({ Duration: 123 }, { Duration: 456 });
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
@@ -1536,18 +1559,25 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should detect a new sidecar at .jpg.xmp', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', files: [] });
|
||||
|
||||
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
|
||||
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
|
||||
|
||||
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
|
||||
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Sidecar,
|
||||
path: '/path/to/IMG_123.jpg.xmp',
|
||||
});
|
||||
});
|
||||
|
||||
it('should detect a new sidecar at .xmp', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
|
||||
const asset = forSidecarJob({
|
||||
originalPath: '/path/to/IMG_123.jpg',
|
||||
files: [],
|
||||
});
|
||||
|
||||
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
|
||||
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
|
||||
@@ -1555,33 +1585,44 @@ describe(MetadataService.name, () => {
|
||||
|
||||
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
|
||||
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Sidecar,
|
||||
path: '/path/to/IMG_123.xmp',
|
||||
});
|
||||
});
|
||||
|
||||
it('should unset sidecar path if file does not exist anymore', async () => {
|
||||
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
|
||||
it('should unset sidecar path if file no longer exist', async () => {
|
||||
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValue(false);

await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);

expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
expect(mocks.asset.deleteFile).toHaveBeenCalledWith({ assetId: asset.id, type: AssetFileType.Sidecar });
});

it('should do nothing if the sidecar file still exists', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});

mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);

await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);

expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
});
});

describe('handleSidecarWrite', () => {
it('should skip assets that do not exist anymore', async () => {
it('should skip assets that no longer exist', async () => {
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
@@ -1610,7 +1651,7 @@ describe(MetadataService.name, () => {
dateTimeOriginal: date,
}),
).resolves.toBe(JobStatus.Success);
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.sidecarPath, {
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, {
Description: description,
ImageDescription: description,
DateTimeOriginal: date,

@@ -8,9 +8,10 @@ import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AssetFace } from 'src/database';
import { Asset, AssetFace, AssetFile } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import {
AssetFileType,
AssetType,
AssetVisibility,
DatabaseLock,
@@ -29,6 +30,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { isFaceImportEnabled } from 'src/utils/misc';
import { upsertTags } from 'src/utils/tag';
@@ -37,7 +39,6 @@ import { upsertTags } from 'src/utils/tag';
const EXIF_DATE_TAGS: Array<keyof ImmichTags> = [
'SubSecDateTimeOriginal',
'SubSecCreateDate',
'SubSecMediaCreateDate',
'DateTimeOriginal',
'CreationDate',
'CreateDate',
@@ -360,17 +361,21 @@ export class MetadataService extends BaseService {
break;
}

const isChanged = sidecarPath !== asset.sidecarPath;
const { sidecarFile } = getAssetFiles(asset.files);

const isChanged = sidecarPath !== sidecarFile?.path;

this.logger.debug(
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
`Sidecar check found old=${sidecarFile?.path}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
);

if (!isChanged) {
return JobStatus.Skipped;
}

await this.assetRepository.update({ id: asset.id, sidecarPath });
await (sidecarPath === null
? this.assetRepository.deleteFile({ assetId: asset.id, type: AssetFileType.Sidecar })
: this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }));

return JobStatus.Success;
}
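For orientation, here is a minimal sketch of the lookup order these specs and the updated handleSidecarCheck imply. The helper name candidatesFor is hypothetical (the real logic lives in getSidecarCandidates, shown further down); it only restates the candidate order under that assumption.

import { join, parse } from 'node:path';

// Hypothetical restatement: probe the currently tracked sidecar file first,
// then <name>.<ext>.xmp, then <name>.xmp.
const candidatesFor = (originalPath: string, currentSidecar?: string): string[] => {
  const { dir, base, name } = parse(originalPath);
  const candidates = currentSidecar ? [currentSidecar] : [];
  candidates.push(join(dir, `${base}.xmp`), join(dir, `${name}.xmp`));
  return candidates;
};

// candidatesFor('/path/to/IMG_123.jpg')
// -> ['/path/to/IMG_123.jpg.xmp', '/path/to/IMG_123.xmp']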
@@ -395,7 +400,9 @@ export class MetadataService extends BaseService {

const tagsList = (asset.tags || []).map((tag) => tag.value);

const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
const { sidecarFile } = getAssetFiles(asset.files);
const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;

const exif = _.omitBy(
<Tags>{
Description: description,
@@ -415,18 +422,19 @@

await this.metadataRepository.writeTags(sidecarPath, exif);

if (!asset.sidecarPath) {
await this.assetRepository.update({ id, sidecarPath });
if (asset.files.length === 0) {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: sidecarPath });
}

return JobStatus.Success;
}

private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
private getSidecarCandidates({ files, originalPath }: { files: AssetFile[]; originalPath: string }) {
const candidates: string[] = [];

if (sidecarPath) {
candidates.push(sidecarPath);
const { sidecarFile } = getAssetFiles(files);
if (sidecarFile?.path) {
candidates.push(sidecarFile.path);
}

const assetPath = parse(originalPath);
@@ -457,14 +465,12 @@ export class MetadataService extends BaseService {
return { width, height };
}

private async getExifTags(asset: {
originalPath: string;
sidecarPath: string | null;
type: AssetType;
}): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
const { sidecarFile } = getAssetFiles(asset.files);

const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null,
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);

@@ -12,8 +12,21 @@ describe(OcrService.name, () => {
({ sut, mocks } = newTestService(OcrService));

mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
mocks.assetJob.getForOcr.mockResolvedValue({
visibility: AssetVisibility.Timeline,
previewFile: assetStub.image.files[1].path,
});
});

const mockOcrResult = (...texts: string[]) => {
mocks.machineLearning.ocr.mockResolvedValue({
box: texts.flatMap((_, i) => Array.from({ length: 8 }, (_, j) => i * 10 + j)),
boxScore: texts.map(() => 0.9),
text: texts,
textScore: texts.map(() => 0.95),
});
};

it('should work', () => {
expect(sut).toBeDefined();
});
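As a quick sanity check on the new helper (illustrative only, not part of the spec file): the mock packs eight corner coordinates per result into one flat array, offset by i * 10.

const boxFor = (texts: string[]) =>
  texts.flatMap((_, i) => Array.from({ length: 8 }, (_, j) => i * 10 + j));

boxFor(['a', 'b']);
// -> [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17]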
@@ -72,10 +85,6 @@ describe(OcrService.name, () => {
text: ['One Two Three', 'Four Five'],
textScore: [0.95, 0.85],
});
mocks.assetJob.getForOcr.mockResolvedValue({
visibility: AssetVisibility.Timeline,
previewFile: assetStub.image.files[1].path,
});

expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success);

@@ -88,36 +97,40 @@ describe(OcrService.name, () => {
maxResolution: 736,
}),
);
expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, [
{
assetId: assetStub.image.id,
boxScore: 0.9,
text: 'One Two Three',
textScore: 0.95,
x1: 10,
y1: 20,
x2: 30,
y2: 40,
x3: 50,
y3: 60,
x4: 70,
y4: 80,
},
{
assetId: assetStub.image.id,
boxScore: 0.8,
text: 'Four Five',
textScore: 0.85,
x1: 90,
y1: 100,
x2: 110,
y2: 120,
x3: 130,
y3: 140,
x4: 150,
y4: 160,
},
]);
expect(mocks.ocr.upsert).toHaveBeenCalledWith(
assetStub.image.id,
[
{
assetId: assetStub.image.id,
boxScore: 0.9,
text: 'One Two Three',
textScore: 0.95,
x1: 10,
y1: 20,
x2: 30,
y2: 40,
x3: 50,
y3: 60,
x4: 70,
y4: 80,
},
{
assetId: assetStub.image.id,
boxScore: 0.8,
text: 'Four Five',
textScore: 0.85,
x1: 90,
y1: 100,
x2: 110,
y2: 120,
x3: 130,
y3: 140,
x4: 150,
y4: 160,
},
],
'One Two Three Four Five',
);
});

it('should apply config settings', async () => {
@@ -133,11 +146,7 @@ describe(OcrService.name, () => {
},
},
});
mocks.machineLearning.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
mocks.assetJob.getForOcr.mockResolvedValue({
visibility: AssetVisibility.Timeline,
previewFile: assetStub.image.files[1].path,
});
mockOcrResult();

expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success);

@@ -150,7 +159,7 @@ describe(OcrService.name, () => {
maxResolution: 1500,
}),
);
expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, []);
expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, [], '');
});

it('should skip invisible assets', async () => {
@@ -173,5 +182,83 @@ describe(OcrService.name, () => {
expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
expect(mocks.ocr.upsert).not.toHaveBeenCalled();
});

describe('search tokenization', () => {
it('should generate bigrams for Chinese text', async () => {
mockOcrResult('機器學習');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), '機器 器學 學習');
});

it('should generate bigrams for Japanese text', async () => {
mockOcrResult('テスト');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), 'テス スト');
});

it('should generate bigrams for Korean text', async () => {
mockOcrResult('한국어');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), '한국 국어');
});

it('should pass through Latin text unchanged', async () => {
mockOcrResult('Hello World');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), 'Hello World');
});

it('should handle mixed CJK and Latin text', async () => {
mockOcrResult('機器學習Model');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), '機器 器學 學習 Model');
});

it('should handle year followed by CJK', async () => {
mockOcrResult('2024年レポート');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(
assetStub.image.id,
expect.any(Array),
'2024 年レ レポ ポー ート',
);
});

it('should join multiple OCR boxes', async () => {
mockOcrResult('機器', 'Learning');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), '機器 Learning');
});

it('should normalize whitespace', async () => {
mockOcrResult(' Hello World ');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), 'Hello World');
});

it('should keep single CJK characters', async () => {
mockOcrResult('A', '中', 'B');

await sut.handleOcr({ id: assetStub.image.id });

expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, expect.any(Array), 'A 中 B');
});
});
});
});

@@ -5,6 +5,7 @@ import { AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum';
import { OCR } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { tokenizeForSearch } from 'src/utils/database';
import { isOcrEnabled } from 'src/utils/misc';

@Injectable()
@@ -53,8 +54,8 @@ export class OcrService extends BaseService {
}

const ocrResults = await this.machineLearningRepository.ocr(asset.previewFile, machineLearning.ocr);

await this.ocrRepository.upsert(id, this.parseOcrResults(id, ocrResults));
const { ocrDataList, searchText } = this.parseOcrResults(id, ocrResults);
await this.ocrRepository.upsert(id, ocrDataList, searchText);

await this.assetRepository.upsertJobStatus({ assetId: id, ocrAt: new Date() });

@@ -64,7 +65,9 @@ export class OcrService extends BaseService {

private parseOcrResults(id: string, { box, boxScore, text, textScore }: OCR) {
const ocrDataList = [];
const searchTokens = [];
for (let i = 0; i < text.length; i++) {
const rawText = text[i];
const boxOffset = i * 8;
ocrDataList.push({
assetId: id,
@@ -78,9 +81,11 @@ export class OcrService extends BaseService {
y4: box[boxOffset + 7],
boxScore: boxScore[i],
textScore: textScore[i],
text: text[i],
text: rawText,
});
searchTokens.push(...tokenizeForSearch(rawText));
}
return ocrDataList;

return { ocrDataList, searchText: searchTokens.join(' ') };
}
}
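A worked example of the new return shape, reusing values from the specs above (a sketch; parseOcrResults is private to the service, so this only traces the mapping):

// One Latin result: box holds x1,y1 ... x4,y4 for box 0, read from offset 0 * 8.
const ocrResults = {
  box: [10, 20, 30, 40, 50, 60, 70, 80],
  boxScore: [0.9],
  text: ['One Two Three'],
  textScore: [0.95],
};
// parseOcrResults(id, ocrResults) yields:
// ocrDataList[0] -> { assetId: id, x1: 10, y1: 20, ..., y4: 80, boxScore: 0.9, textScore: 0.95, text: 'One Two Three' }
// searchText -> 'One Two Three' (Latin tokens pass through; CJK runs become bigrams)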

@@ -252,9 +252,9 @@ export class PluginService extends BaseService {

private async executeFilters(workflowFilters: WorkflowFilter[], context: WorkflowContext): Promise<boolean> {
for (const workflowFilter of workflowFilters) {
const filter = await this.pluginRepository.getFilter(workflowFilter.filterId);
const filter = await this.pluginRepository.getFilter(workflowFilter.pluginFilterId);
if (!filter) {
this.logger.error(`Filter ${workflowFilter.filterId} not found`);
this.logger.error(`Filter ${workflowFilter.pluginFilterId} not found`);
return false;
}

@@ -296,9 +296,9 @@ export class PluginService extends BaseService {

private async executeActions(workflowActions: WorkflowAction[], context: WorkflowContext): Promise<void> {
for (const workflowAction of workflowActions) {
const action = await this.pluginRepository.getAction(workflowAction.actionId);
const action = await this.pluginRepository.getAction(workflowAction.pluginActionId);
if (!action) {
throw new Error(`Action ${workflowAction.actionId} not found`);
throw new Error(`Action ${workflowAction.pluginActionId} not found`);
}

const pluginInstance = this.loadedPlugins.get(action.pluginId);

@@ -6,10 +6,20 @@ import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config.dto';
import { AssetPathType, AssetType, DatabaseLock, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import {
AssetFileType,
AssetPathType,
AssetType,
DatabaseLock,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf, StorageAsset } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { getLivePhotoMotionFilename } from 'src/utils/file';

const storageTokens = {
@@ -196,7 +206,7 @@ export class StorageTemplateService extends BaseService {
}

return this.databaseRepository.withLock(DatabaseLock.StorageTemplateMigration, async () => {
const { id, sidecarPath, originalPath, checksum, fileSizeInByte } = asset;
const { id, originalPath, checksum, fileSizeInByte } = asset;
const oldPath = originalPath;
const newPath = await this.getTemplatePath(asset, metadata);

@@ -213,6 +223,8 @@ export class StorageTemplateService extends BaseService {
newPath,
assetInfo: { sizeInBytes: fileSizeInByte, checksum },
});

const sidecarPath = getAssetFile(asset.files, AssetFileType.Sidecar)?.path;
if (sidecarPath) {
await this.storageCore.moveFile({
entityId: id,

@@ -217,7 +217,7 @@ describe(TagService.name, () => {

describe('addAssets', () => {
it('should handle invalid ids', async () => {
mocks.tag.getAssetIds.mockResolvedValue(new Set([]));
mocks.tag.getAssetIds.mockResolvedValue(new Set());
await expect(sut.addAssets(authStub.admin, 'tag-1', { ids: ['asset-1'] })).resolves.toEqual([
{ id: 'asset-1', success: false, error: 'no_permission' },
]);

@@ -78,13 +78,13 @@ export class WorkflowService extends BaseService {
}

private async validateAndMapFilters(
filters: Array<{ filterId: string; filterConfig?: any }>,
filters: Array<{ pluginFilterId: string; filterConfig?: any }>,
requiredContext: PluginContext,
) {
for (const dto of filters) {
const filter = await this.pluginRepository.getFilter(dto.filterId);
const filter = await this.pluginRepository.getFilter(dto.pluginFilterId);
if (!filter) {
throw new BadRequestException(`Invalid filter ID: ${dto.filterId}`);
throw new BadRequestException(`Invalid filter ID: ${dto.pluginFilterId}`);
}

if (!filter.supportedContexts.includes(requiredContext)) {
@@ -95,20 +95,20 @@ export class WorkflowService extends BaseService {
}

return filters.map((dto, index) => ({
filterId: dto.filterId,
pluginFilterId: dto.pluginFilterId,
filterConfig: dto.filterConfig || null,
order: index,
}));
}

private async validateAndMapActions(
actions: Array<{ actionId: string; actionConfig?: any }>,
actions: Array<{ pluginActionId: string; actionConfig?: any }>,
requiredContext: PluginContext,
) {
for (const dto of actions) {
const action = await this.pluginRepository.getAction(dto.actionId);
const action = await this.pluginRepository.getAction(dto.pluginActionId);
if (!action) {
throw new BadRequestException(`Invalid action ID: ${dto.actionId}`);
throw new BadRequestException(`Invalid action ID: ${dto.pluginActionId}`);
}
if (!action.supportedContexts.includes(requiredContext)) {
throw new BadRequestException(
@@ -118,7 +118,7 @@ export class WorkflowService extends BaseService {
}

return actions.map((dto, index) => ({
actionId: dto.actionId,
pluginActionId: dto.pluginActionId,
actionConfig: dto.actionConfig || null,
order: index,
}));

@@ -46,7 +46,7 @@ export const setIsEqual = (source: Set<unknown>, target: Set<unknown>) =>
source.size === target.size && [...source].every((x) => target.has(x));

export const haveEqualColumns = (sourceColumns?: string[], targetColumns?: string[]) => {
return setIsEqual(new Set(sourceColumns ?? []), new Set(targetColumns ?? []));
return setIsEqual(new Set(sourceColumns), new Set(targetColumns));
};

export const haveEqualOverrides = <T extends { override?: DatabaseOverride }>(source: T, target: T) => {

@@ -1,10 +1,9 @@
import { SystemConfig } from 'src/config';
import { VECTOR_EXTENSIONS } from 'src/constants';
import { Asset } from 'src/database';
import { Asset, AssetFile } from 'src/database';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetMetadataKey,
AssetOrder,
AssetType,
DatabaseSslMode,
@@ -476,8 +475,8 @@ export type StorageAsset = {
fileCreatedAt: Date;
originalPath: string;
originalFileName: string;
sidecarPath: string | null;
fileSizeInByte: number | null;
files: AssetFile[];
};

export type OnThisDayData = { year: number };
@@ -563,12 +562,3 @@ export interface UserMetadata extends Record<UserMetadataKey, Record<string, any
[UserMetadataKey.License]: { licenseKey: string; activationKey: string; activatedAt: string };
[UserMetadataKey.Onboarding]: { isOnboarded: boolean };
}

export type AssetMetadataItem<T extends keyof AssetMetadata = AssetMetadataKey> = {
key: T;
value: AssetMetadata[T];
};

export interface AssetMetadata extends Record<AssetMetadataKey, Record<string, any>> {
[AssetMetadataKey.MobileApp]: { iCloudId: string };
}

@@ -21,6 +21,7 @@ export const getAssetFiles = (files: AssetFile[]) => ({
fullsizeFile: getAssetFile(files, AssetFileType.FullSize),
previewFile: getAssetFile(files, AssetFileType.Preview),
thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
});

export const addAssets = async (

@@ -306,6 +306,46 @@ export function withTagId<O>(qb: SelectQueryBuilder<DB, 'asset', O>, tagId: stri
);
}

const isCJK = (c: number): boolean =>
(c >= 0x4e_00 && c <= 0x9f_ff) || // CJK Unified Ideographs
(c >= 0xac_00 && c <= 0xd7_af) || // Hangul Syllables
(c >= 0x30_40 && c <= 0x30_9f) || // Hiragana
(c >= 0x30_a0 && c <= 0x30_ff) || // Katakana
(c >= 0x34_00 && c <= 0x4d_bf); // CJK Unified Ideographs Extension A

export const tokenizeForSearch = (text: string): string[] => {
/* eslint-disable unicorn/prefer-code-point */
const tokens: string[] = [];
let i = 0;
while (i < text.length) {
const c = text.charCodeAt(i);
// skip whitespace and control characters
if (c <= 32) {
i++;
continue;
}

const start = i;
if (isCJK(c)) {
// consume a run of CJK characters
while (i < text.length && isCJK(text.charCodeAt(i))) {
i++;
}
if (i - start === 1) {
// a lone CJK character is kept as-is
tokens.push(text[start]);
} else {
// emit overlapping bigrams for the run
for (let k = start; k < i - 1; k++) {
tokens.push(text[k] + text[k + 1]);
}
}
} else {
// consume a non-CJK word up to whitespace or a CJK boundary
while (i < text.length && text.charCodeAt(i) > 32 && !isCJK(text.charCodeAt(i))) {
i++;
}
tokens.push(text.slice(start, i));
}
}
return tokens;
};
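Illustrative outputs, mirroring the OcrService specs earlier in this commit:

tokenizeForSearch('機器學習'); // ['機器', '器學', '學習'], overlapping CJK bigrams
tokenizeForSearch('2024年レポート'); // ['2024', '年レ', 'レポ', 'ポー', 'ート']
tokenizeForSearch('Hello World'); // ['Hello', 'World'], Latin splits on whitespace
tokenizeForSearch('中'); // ['中'], a lone CJK character is kept whole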

const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
/** TODO: This should only be used for search-related queries, not as a general purpose query builder */

@@ -391,7 +431,7 @@ export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuild
.$if(!!options.ocr, (qb) =>
qb
.innerJoin('ocr_search', 'asset.id', 'ocr_search.assetId')
.where(() => sql`f_unaccent(ocr_search.text) %>> f_unaccent(${options.ocr!})`),
.where(() => sql`f_unaccent(ocr_search.text) %>> f_unaccent(${tokenizeForSearch(options.ocr!).join(' ')})`),
)
.$if(!!options.type, (qb) => qb.where('asset.type', '=', options.type!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('asset.isFavorite', '=', options.isFavorite!))
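The query side of the trigram match is now encoded the same way the stored text was written, so pg_trgm compares like with like. A sketch (the sample query value is hypothetical):

import { tokenizeForSearch } from 'src/utils/database';

const query = '機器學習'; // hypothetical user input
// ocr_search.text was stored as space-joined tokens, so the query is encoded
// identically before the strict word-similarity operator (%>>) sees it.
const encoded = tokenizeForSearch(query).join(' '); // '機器 器學 學習'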

@@ -704,8 +704,7 @@ export class QsvSwDecodeConfig extends BaseHWConfig {
}

getBitrateOptions() {
const options = [];
options.push(`-${this.useCQP() ? 'q:v' : 'global_quality:v'} ${this.config.crf}`);
const options = [`-${this.useCQP() ? 'q:v' : 'global_quality:v'} ${this.config.crf}`];
const bitrates = this.getBitrateDistribution();
if (bitrates.max > 0) {
options.push(`-maxrate ${bitrates.max}${bitrates.unit}`, `-bufsize ${bitrates.max * 2}${bitrates.unit}`);

@@ -139,7 +139,7 @@ function sortKeys<T>(target: T): T {
}

const result: Partial<T> = {};
const keys = Object.keys(target).sort() as Array<keyof T>;
const keys = Object.keys(target).toSorted() as Array<keyof T>;
for (const key of keys) {
result[key] = sortKeys(target[key]);
}
@@ -178,10 +178,7 @@ const patchOpenAPI = (document: OpenAPIObject) => {
throw new Error(`Invalid number format: ${schemaName}.${key}=float (use double instead). `);
}
}

if (schema.required) {
schema.required = schema.required.sort();
}
schema.required?.sort();
}
}
}