Compare commits

...

13 Commits

Author SHA1 Message Date
Daniel Dietzler
05b6d90a36 fix: updating lockable properties 2025-12-15 20:03:56 +01:00
Daniel Dietzler
faf9964af0 chore: simplify update all assets 2025-12-15 19:49:01 +01:00
Daniel Dietzler
c63353ece1 fix: tests 2025-12-15 19:49:01 +01:00
mertalev
ac5a092390 fix none handling 2025-12-15 19:49:01 +01:00
mertalev
7f08f36561 missed one 2025-12-15 19:49:01 +01:00
mertalev
c7793fa8f1 update sql 2025-12-15 19:49:01 +01:00
mertalev
722f473302 single statement 2025-12-15 19:49:01 +01:00
Daniel Dietzler
8e6c4acc9b fix: asset update race condition 2025-12-15 19:49:01 +01:00
Daniel Dietzler
40b07ec42e fix: asset update race condition 2025-12-15 19:49:01 +01:00
Mees Frensel
77926383db fix(server): only extract image's duration if format supports animation (#24587) 2025-12-15 12:36:46 -05:00
Yaros
35eda735c8 fix(web): recent search doesn't use search type (#24578)
Co-authored-by: Daniel Dietzler <mail@ddietzler.dev>
2025-12-15 12:44:00 +01:00
Diogo Correia
8f7a71d1cf fix(web): download panel being hidden by admin sidebar (#24583) 2025-12-15 12:29:18 +01:00
Yaros
33cdea88aa fix(mobile): birthday off by one day on remote (#24527) 2025-12-11 21:23:11 -06:00
24 changed files with 485 additions and 223 deletions

View File

@@ -17,8 +17,10 @@ class PersonApiRepository extends ApiRepository {
   }
 
   Future<PersonDto> update(String id, {String? name, DateTime? birthday}) async {
-    final dto = await checkNull(_api.updatePerson(id, PersonUpdateDto(name: name, birthDate: birthday)));
-    return _toPerson(dto);
+    final birthdayUtc = birthday == null ? null : DateTime.utc(birthday.year, birthday.month, birthday.day);
+    final dto = PersonUpdateDto(name: name, birthDate: birthdayUtc);
+    final response = await checkNull(_api.updatePerson(id, dto));
+    return _toPerson(response);
   }
 
   static PersonDto _toPerson(PersonResponseDto dto) => PersonDto(
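Note on the birthday fix above: the mobile app now drops the time and time-zone component before sending the birth date, so a date picked late in the evening can no longer shift by a day once it round-trips through the server. A minimal TypeScript sketch of the same idea (the helper name is made up for illustration):

  // Keep only the calendar date, pinned to UTC, so serializing and
  // re-parsing the value in another time zone cannot move the day.
  const toUtcBirthday = (local: Date): Date =>
    new Date(Date.UTC(local.getFullYear(), local.getMonth(), local.getDate()));

  // A birthday picked at 23:30 local time still serializes as the same calendar day.
  console.log(toUtcBirthday(new Date(1990, 4, 1, 23, 30)).toISOString()); // 1990-05-01T00:00:00.000Z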

View File

@@ -240,7 +240,7 @@ export type Session = {
   isPendingSyncReset: boolean;
 };
 
-export type Exif = Omit<Selectable<AssetExifTable>, 'updatedAt' | 'updateId'>;
+export type Exif = Omit<Selectable<AssetExifTable>, 'updatedAt' | 'updateId' | 'lockedProperties'>;
 
 export type Person = {
   createdAt: Date;

View File

@@ -50,9 +50,11 @@ select
       where
         "asset"."id" = "tag_asset"."assetId"
     ) as agg
-  ) as "tags"
+  ) as "tags",
+  to_json("asset_exif") as "exifInfo"
 from
   "asset"
+  inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
 where
   "asset"."id" = $2::uuid
 limit
@@ -224,6 +226,14 @@ from
 where
   "asset"."id" = $2
 
+-- AssetJobRepository.getLockedPropertiesForMetadataExtraction
+select
+  "asset_exif"."lockedProperties"
+from
+  "asset_exif"
+where
+  "asset_exif"."assetId" = $1
+
 -- AssetJobRepository.getAlbumThumbnailFiles
 select
   "asset_file"."id",

View File

@@ -1,19 +1,49 @@
 -- NOTE: This file is auto generated by ./sql-generator
+-- AssetRepository.upsertExif
+insert into
+  "asset_exif" ("dateTimeOriginal", "lockedProperties")
+values
+  ($1, $2)
+on conflict ("assetId") do update
+set
+  "dateTimeOriginal" = "excluded"."dateTimeOriginal",
+  "lockedProperties" = nullif(
+    array(
+      select distinct
+        unnest("asset_exif"."lockedProperties" || $3)
+    ),
+    '{}'
+  )
 -- AssetRepository.updateAllExif
 update "asset_exif"
 set
-  "model" = $1
+  "model" = $1,
+  "lockedProperties" = nullif(
+    array(
+      select distinct
+        unnest("asset_exif"."lockedProperties" || $2)
+    ),
+    '{}'
+  )
 where
-  "assetId" in ($2)
+  "assetId" in ($3)
 -- AssetRepository.updateDateTimeOriginal
 update "asset_exif"
 set
   "dateTimeOriginal" = "dateTimeOriginal" + $1::interval,
-  "timeZone" = $2
+  "timeZone" = $2,
+  "lockedProperties" = nullif(
+    array(
+      select distinct
+        unnest("asset_exif"."lockedProperties" || $3)
+    ),
+    '{}'
+  )
 where
-  "assetId" in ($3)
+  "assetId" in ($4)
 returning
   "assetId",
   "dateTimeOriginal",

View File

@@ -50,6 +50,7 @@ export class AssetJobRepository {
             .whereRef('asset.id', '=', 'tag_asset.assetId'),
         ).as('tags'),
       )
+      .$call(withExifInner)
       .limit(1)
       .executeTakeFirst();
   }
@@ -128,6 +129,16 @@
       .executeTakeFirst();
   }
 
+  @GenerateSql({ params: [DummyValue.UUID] })
+  async getLockedPropertiesForMetadataExtraction(assetId: string) {
+    return this.db
+      .selectFrom('asset_exif')
+      .select('asset_exif.lockedProperties')
+      .where('asset_exif.assetId', '=', assetId)
+      .executeTakeFirst()
+      .then((row) => row?.lockedProperties ?? []);
+  }
+
   @GenerateSql({ params: [DummyValue.UUID, AssetFileType.Thumbnail] })
   getAlbumThumbnailFiles(id: string, fileType?: AssetFileType) {
     return this.db

View File

@@ -1,5 +1,5 @@
 import { Injectable } from '@nestjs/common';
-import { Insertable, Kysely, NotNull, Selectable, sql, Updateable, UpdateResult } from 'kysely';
+import { ExpressionBuilder, Insertable, Kysely, NotNull, Selectable, sql, Updateable, UpdateResult } from 'kysely';
 import { isEmpty, isUndefined, omitBy } from 'lodash';
 import { InjectKysely } from 'nestjs-kysely';
 import { Stack } from 'src/database';
@@ -7,7 +7,7 @@ import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { AssetFileType, AssetMetadataKey, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
 import { DB } from 'src/schema';
-import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
+import { AssetExifTable, LockableProperty } from 'src/schema/tables/asset-exif.table';
 import { AssetFileTable } from 'src/schema/tables/asset-file.table';
 import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
 import { AssetTable } from 'src/schema/tables/asset.table';
@@ -113,51 +113,77 @@ interface GetByIdsRelations {
   tags?: boolean;
 }
 
+const distinctLocked = <T extends LockableProperty[] | null>(eb: ExpressionBuilder<DB, 'asset_exif'>, columns: T) =>
+  sql<T>`nullif(array(select distinct unnest(${eb.ref('asset_exif.lockedProperties')} || ${columns})), '{}')`;
+
 @Injectable()
 export class AssetRepository {
   constructor(@InjectKysely() private db: Kysely<DB>) {}
 
-  async upsertExif(exif: Insertable<AssetExifTable>): Promise<void> {
-    const value = { ...exif, assetId: asUuid(exif.assetId) };
+  @GenerateSql({
+    params: [
+      { dateTimeOriginal: DummyValue.DATE, lockedProperties: ['dateTimeOriginal'] },
+      { lockedPropertiesBehavior: 'append' },
+    ],
+  })
+  async upsertExif(
+    exif: Insertable<AssetExifTable>,
+    { lockedPropertiesBehavior }: { lockedPropertiesBehavior: 'override' | 'append' | 'skip' },
+  ): Promise<void> {
     await this.db
       .insertInto('asset_exif')
-      .values(value)
+      .values(exif)
       .onConflict((oc) =>
-        oc.column('assetId').doUpdateSet((eb) =>
-          removeUndefinedKeys(
+        oc.column('assetId').doUpdateSet((eb) => {
+          const updateLocked = <T extends keyof AssetExifTable>(col: T) => eb.ref(`excluded.${col}`);
+          const skipLocked = <T extends keyof AssetExifTable>(col: T) =>
+            eb
+              .case()
+              .when(sql`${col}`, '=', eb.fn.any('asset_exif.lockedProperties'))
+              .then(eb.ref(`asset_exif.${col}`))
+              .else(eb.ref(`excluded.${col}`))
+              .end();
+          const ref = lockedPropertiesBehavior === 'skip' ? skipLocked : updateLocked;
+          return {
+            ...removeUndefinedKeys(
               {
-                description: eb.ref('excluded.description'),
-                exifImageWidth: eb.ref('excluded.exifImageWidth'),
-                exifImageHeight: eb.ref('excluded.exifImageHeight'),
-                fileSizeInByte: eb.ref('excluded.fileSizeInByte'),
-                orientation: eb.ref('excluded.orientation'),
-                dateTimeOriginal: eb.ref('excluded.dateTimeOriginal'),
-                modifyDate: eb.ref('excluded.modifyDate'),
-                timeZone: eb.ref('excluded.timeZone'),
-                latitude: eb.ref('excluded.latitude'),
-                longitude: eb.ref('excluded.longitude'),
-                projectionType: eb.ref('excluded.projectionType'),
-                city: eb.ref('excluded.city'),
-                livePhotoCID: eb.ref('excluded.livePhotoCID'),
-                autoStackId: eb.ref('excluded.autoStackId'),
-                state: eb.ref('excluded.state'),
-                country: eb.ref('excluded.country'),
-                make: eb.ref('excluded.make'),
-                model: eb.ref('excluded.model'),
-                lensModel: eb.ref('excluded.lensModel'),
-                fNumber: eb.ref('excluded.fNumber'),
-                focalLength: eb.ref('excluded.focalLength'),
-                iso: eb.ref('excluded.iso'),
-                exposureTime: eb.ref('excluded.exposureTime'),
-                profileDescription: eb.ref('excluded.profileDescription'),
-                colorspace: eb.ref('excluded.colorspace'),
-                bitsPerSample: eb.ref('excluded.bitsPerSample'),
-                rating: eb.ref('excluded.rating'),
-                fps: eb.ref('excluded.fps'),
+                description: ref('description'),
+                exifImageWidth: ref('exifImageWidth'),
+                exifImageHeight: ref('exifImageHeight'),
+                fileSizeInByte: ref('fileSizeInByte'),
+                orientation: ref('orientation'),
+                dateTimeOriginal: ref('dateTimeOriginal'),
+                modifyDate: ref('modifyDate'),
+                timeZone: ref('timeZone'),
+                latitude: ref('latitude'),
+                longitude: ref('longitude'),
+                projectionType: ref('projectionType'),
+                city: ref('city'),
+                livePhotoCID: ref('livePhotoCID'),
+                autoStackId: ref('autoStackId'),
+                state: ref('state'),
+                country: ref('country'),
+                make: ref('make'),
+                model: ref('model'),
+                lensModel: ref('lensModel'),
+                fNumber: ref('fNumber'),
+                focalLength: ref('focalLength'),
+                iso: ref('iso'),
+                exposureTime: ref('exposureTime'),
+                profileDescription: ref('profileDescription'),
+                colorspace: ref('colorspace'),
+                bitsPerSample: ref('bitsPerSample'),
+                rating: ref('rating'),
+                fps: ref('fps'),
+                lockedProperties:
+                  lockedPropertiesBehavior === 'append'
+                    ? distinctLocked(eb, exif.lockedProperties ?? null)
+                    : ref('lockedProperties'),
               },
-              value,
-            ),
+              exif,
             ),
+          };
+        }),
       )
       .execute();
   }
@@ -169,19 +195,26 @@ export class AssetRepository {
       return;
     }
 
-    await this.db.updateTable('asset_exif').set(options).where('assetId', 'in', ids).execute();
+    await this.db
+      .updateTable('asset_exif')
+      .set((eb) => ({
+        ...options,
+        lockedProperties: distinctLocked(eb, Object.keys(options) as LockableProperty[]),
+      }))
+      .where('assetId', 'in', ids)
+      .execute();
   }
 
   @GenerateSql({ params: [[DummyValue.UUID], DummyValue.NUMBER, DummyValue.STRING] })
   @Chunked()
-  async updateDateTimeOriginal(
-    ids: string[],
-    delta?: number,
-    timeZone?: string,
-  ): Promise<{ assetId: string; dateTimeOriginal: Date | null; timeZone: string | null }[]> {
-    return await this.db
+  updateDateTimeOriginal(ids: string[], delta?: number, timeZone?: string) {
+    return this.db
       .updateTable('asset_exif')
-      .set({ dateTimeOriginal: sql`"dateTimeOriginal" + ${(delta ?? 0) + ' minute'}::interval`, timeZone })
+      .set((eb) => ({
+        dateTimeOriginal: sql`"dateTimeOriginal" + ${(delta ?? 0) + ' minute'}::interval`,
+        timeZone,
+        lockedProperties: distinctLocked(eb, ['dateTimeOriginal', 'timeZone']),
+      }))
      .where('assetId', 'in', ids)
      .returning(['assetId', 'dateTimeOriginal', 'timeZone'])
      .execute();
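The second argument added to upsertExif selects how locked columns are treated: 'override' writes whatever is passed (used by the upload paths), 'append' merges the touched columns into lockedProperties (user edits), and 'skip' keeps the stored value for any column the user has locked (metadata extraction). A hedged sketch of the per-column rule behind skipLocked/updateLocked above, stated as plain TypeScript rather than a Kysely expression:

  type Behavior = 'override' | 'append' | 'skip';

  // With 'skip', a column listed in lockedProperties keeps its stored value;
  // otherwise the incoming value wins.
  const resolveColumn = (
    column: string,
    stored: unknown,
    incoming: unknown,
    locked: string[] | null,
    behavior: Behavior,
  ): unknown => (behavior === 'skip' && (locked ?? []).includes(column) ? stored : incoming);

  resolveColumn('rating', 5, 3, ['rating'], 'skip');     // 5 (the user-locked value survives extraction)
  resolveColumn('rating', 5, 3, ['rating'], 'override'); // 3 (the caller wins)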

View File

@@ -0,0 +1,9 @@
+import { Kysely, sql } from 'kysely';
+
+export async function up(db: Kysely<any>): Promise<void> {
+  await sql`ALTER TABLE "asset_exif" ADD "lockedProperties" character varying[];`.execute(db);
+}
+
+export async function down(db: Kysely<any>): Promise<void> {
+  await sql`ALTER TABLE "asset_exif" DROP COLUMN "lockedProperties";`.execute(db);
+}

View File

@@ -2,6 +2,16 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
 import { AssetTable } from 'src/schema/tables/asset.table';
 import { Column, ForeignKeyColumn, Generated, Int8, Table, Timestamp, UpdateDateColumn } from 'src/sql-tools';
 
+export type LockableProperty = (typeof lockableProperties)[number];
+export const lockableProperties = [
+  'description',
+  'dateTimeOriginal',
+  'latitude',
+  'longitude',
+  'rating',
+  'timeZone',
+] as const;
+
 @Table('asset_exif')
 @UpdatedAtTrigger('asset_exif_updatedAt')
 export class AssetExifTable {
@@ -97,4 +107,7 @@ export class AssetExifTable {
   @UpdateIdColumn({ index: true })
   updateId!: Generated<string>;
 
+  @Column({ type: 'character varying', array: true, nullable: true })
+  lockedProperties!: Array<LockableProperty> | null;
 }

View File

@@ -370,7 +370,10 @@ export class AssetMediaService extends BaseService {
         : this.assetRepository.deleteFile({ assetId, type: AssetFileType.Sidecar }));
 
       await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
-      await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
+      await this.assetRepository.upsertExif(
+        { assetId, fileSizeInByte: file.size },
+        { lockedPropertiesBehavior: 'override' },
+      );
       await this.jobRepository.queue({
         name: JobName.AssetExtractMetadata,
         data: { id: assetId, source: 'upload' },
@@ -399,7 +402,10 @@ export class AssetMediaService extends BaseService {
     });
 
     const { size } = await this.storageRepository.stat(created.originalPath);
-    await this.assetRepository.upsertExif({ assetId: created.id, fileSizeInByte: size });
+    await this.assetRepository.upsertExif(
+      { assetId: created.id, fileSizeInByte: size },
+      { lockedPropertiesBehavior: 'override' },
+    );
     await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: created.id, source: 'copy' } });
 
     return created;
   }
@@ -440,7 +446,10 @@ export class AssetMediaService extends BaseService {
       await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
     }
     await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
-    await this.assetRepository.upsertExif({ assetId: asset.id, fileSizeInByte: file.size });
+    await this.assetRepository.upsertExif(
+      { assetId: asset.id, fileSizeInByte: file.size },
+      { lockedPropertiesBehavior: 'override' },
+    );
 
     await this.eventRepository.emit('AssetCreate', { asset });

View File

@@ -225,7 +225,10 @@ describe(AssetService.name, () => {
       await sut.update(authStub.admin, 'asset-1', { description: 'Test description' });
 
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith({ assetId: 'asset-1', description: 'Test description' });
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
+        { assetId: 'asset-1', description: 'Test description', lockedProperties: ['description'] },
+        { lockedPropertiesBehavior: 'append' },
+      );
     });
 
     it('should update the exif rating', async () => {
@@ -235,7 +238,14 @@ describe(AssetService.name, () => {
       await sut.update(authStub.admin, 'asset-1', { rating: 3 });
 
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith({ assetId: 'asset-1', rating: 3 });
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
+        {
+          assetId: 'asset-1',
+          rating: 3,
+          lockedProperties: ['rating'],
+        },
+        { lockedPropertiesBehavior: 'append' },
+      );
     });
 
     it('should fail linking a live video if the motion part could not be found', async () => {
@@ -427,9 +437,7 @@ describe(AssetService.name, () => {
       });
 
       expect(mocks.asset.updateAll).toHaveBeenCalled();
       expect(mocks.asset.updateAllExif).toHaveBeenCalledWith(['asset-1'], { latitude: 0, longitude: 0 });
-      expect(mocks.job.queueAll).toHaveBeenCalledWith([
-        { name: JobName.SidecarWrite, data: { id: 'asset-1', latitude: 0, longitude: 0 } },
-      ]);
+      expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarWrite, data: { id: 'asset-1' } }]);
     });
 
     it('should update exif table if latitude field is provided', async () => {
@@ -450,9 +458,7 @@ describe(AssetService.name, () => {
         latitude: 30,
         longitude: 50,
       });
-      expect(mocks.job.queueAll).toHaveBeenCalledWith([
-        { name: JobName.SidecarWrite, data: { id: 'asset-1', dateTimeOriginal, latitude: 30, longitude: 50 } },
-      ]);
+      expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarWrite, data: { id: 'asset-1' } }]);
     });
 
     it('should update Assets table if duplicateId is provided as null', async () => {
@@ -482,18 +488,7 @@ describe(AssetService.name, () => {
         timeZone,
       });
       expect(mocks.asset.updateDateTimeOriginal).toHaveBeenCalledWith(['asset-1'], dateTimeRelative, timeZone);
-      expect(mocks.job.queueAll).toHaveBeenCalledWith([
-        {
-          name: JobName.SidecarWrite,
-          data: {
-            id: 'asset-1',
-            dateTimeOriginal: '2020-02-25T06:41:00.000+02:00',
-            description: undefined,
-            latitude: undefined,
-            longitude: undefined,
-          },
-        },
-      ]);
+      expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarWrite, data: { id: 'asset-1' } }]);
     });
   });

View File

@@ -30,9 +30,10 @@ import {
   QueueName,
 } from 'src/enum';
 import { BaseService } from 'src/services/base.service';
-import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
+import { JobItem, JobOf } from 'src/types';
 import { requireElevatedPermission } from 'src/utils/access';
 import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUnlink } from 'src/utils/asset.util';
+import { updateLockedColumns } from 'src/utils/database';
 
 @Injectable()
 export class AssetService extends BaseService {
@@ -142,56 +143,26 @@ export class AssetService extends BaseService {
     } = dto;
 
     await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids });
 
-    const assetDto = { isFavorite, visibility, duplicateId };
-    const exifDto = { latitude, longitude, rating, description, dateTimeOriginal };
-
-    const isExifChanged = Object.values(exifDto).some((v) => v !== undefined);
-    if (isExifChanged) {
-      await this.assetRepository.updateAllExif(ids, exifDto);
-    }
-
-    const assets =
-      (dateTimeRelative !== undefined && dateTimeRelative !== 0) || timeZone !== undefined
-        ? await this.assetRepository.updateDateTimeOriginal(ids, dateTimeRelative, timeZone)
-        : undefined;
-
-    const dateTimesWithTimezone = assets
-      ? assets.map((asset) => {
-          const isoString = asset.dateTimeOriginal?.toISOString();
-          let dateTime = isoString ? DateTime.fromISO(isoString) : null;
-          if (dateTime && asset.timeZone) {
-            dateTime = dateTime.setZone(asset.timeZone);
-          }
-          return {
-            assetId: asset.assetId,
-            dateTimeOriginal: dateTime?.toISO() ?? null,
-          };
-        })
-      : ids.map((id) => ({ assetId: id, dateTimeOriginal }));
-
-    if (dateTimesWithTimezone.length > 0) {
-      await this.jobRepository.queueAll(
-        dateTimesWithTimezone.map(({ assetId: id, dateTimeOriginal }) => ({
-          name: JobName.SidecarWrite,
-          data: {
-            ...exifDto,
-            id,
-            dateTimeOriginal: dateTimeOriginal ?? undefined,
-          },
-        })),
-      );
-    }
-
-    const isAssetChanged = Object.values(assetDto).some((v) => v !== undefined);
-    if (isAssetChanged) {
-      await this.assetRepository.updateAll(ids, assetDto);
-    }
-
-    if (visibility === AssetVisibility.Locked) {
-      await this.albumRepository.removeAssetsFromAll(ids);
-    }
+    const assetDto = _.omitBy({ isFavorite, visibility, duplicateId }, _.isUndefined);
+    const exifDto = _.omitBy({ latitude, longitude, rating, description, dateTimeOriginal }, _.isUndefined);
+
+    if (Object.keys(exifDto).length > 0) {
+      await this.assetRepository.updateAllExif(ids, exifDto);
+    }
+
+    if ((dateTimeRelative !== undefined && dateTimeRelative !== 0) || timeZone !== undefined) {
+      await this.assetRepository.updateDateTimeOriginal(ids, dateTimeRelative, timeZone);
+    }
+
+    if (Object.keys(assetDto).length > 0) {
+      await this.assetRepository.updateAll(ids, assetDto);
+
+      if (visibility === AssetVisibility.Locked) {
+        await this.albumRepository.removeAssetsFromAll(ids);
+      }
+    }
+
+    await this.jobRepository.queueAll(ids.map((id) => ({ name: JobName.SidecarWrite, data: { id } })));
   }
 
   async copy(
@@ -456,12 +427,25 @@ export class AssetService extends BaseService {
     return asset;
   }
 
-  private async updateExif(dto: ISidecarWriteJob) {
+  private async updateExif(dto: {
+    id: string;
+    description?: string;
+    dateTimeOriginal?: string;
+    latitude?: number;
+    longitude?: number;
+    rating?: number;
+  }) {
     const { id, description, dateTimeOriginal, latitude, longitude, rating } = dto;
     const writes = _.omitBy({ description, dateTimeOriginal, latitude, longitude, rating }, _.isUndefined);
     if (Object.keys(writes).length > 0) {
-      await this.assetRepository.upsertExif({ assetId: id, ...writes });
-      await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id, ...writes } });
+      await this.assetRepository.upsertExif(
+        updateLockedColumns({
+          assetId: id,
+          ...writes,
+        }),
+        { lockedPropertiesBehavior: 'append' },
+      );
+      await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id } });
     }
   }
 }

View File

@@ -187,7 +187,9 @@ describe(MetadataService.name, () => {
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
 
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.sidecar.id);
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }));
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }), {
+        lockedPropertiesBehavior: 'skip',
+      });
       expect(mocks.asset.update).toHaveBeenCalledWith(
         expect.objectContaining({
           id: assetStub.image.id,
@@ -214,6 +216,7 @@ describe(MetadataService.name, () => {
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
       expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
         expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
+        { lockedPropertiesBehavior: 'skip' },
       );
       expect(mocks.asset.update).toHaveBeenCalledWith({
         id: assetStub.image.id,
@@ -238,7 +241,10 @@ describe(MetadataService.name, () => {
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
 
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: fileCreatedAt }));
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
+        expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
+        { lockedPropertiesBehavior: 'skip' },
+      );
       expect(mocks.asset.update).toHaveBeenCalledWith({
         id: assetStub.image.id,
         duration: null,
@@ -258,6 +264,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
           dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
         }),
+        { lockedPropertiesBehavior: 'skip' },
      );
 
      expect(mocks.asset.update).toHaveBeenCalledWith(
@@ -281,7 +288,9 @@ describe(MetadataService.name, () => {
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
 
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 }));
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 }), {
+        lockedPropertiesBehavior: 'skip',
+      });
       expect(mocks.asset.update).toHaveBeenCalledWith({
         id: assetStub.image.id,
         duration: null,
@@ -310,6 +319,7 @@ describe(MetadataService.name, () => {
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
       expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
         expect.objectContaining({ city: null, state: null, country: null }),
+        { lockedPropertiesBehavior: 'skip' },
       );
       expect(mocks.asset.update).toHaveBeenCalledWith({
         id: assetStub.withLocation.id,
@@ -339,6 +349,7 @@ describe(MetadataService.name, () => {
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
       expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
         expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
+        { lockedPropertiesBehavior: 'skip' },
       );
       expect(mocks.asset.update).toHaveBeenCalledWith({
         id: assetStub.withLocation.id,
@@ -358,7 +369,10 @@ describe(MetadataService.name, () => {
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
 
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ latitude: null, longitude: null }));
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
+        expect.objectContaining({ latitude: null, longitude: null }),
+        { lockedPropertiesBehavior: 'skip' },
+      );
     });
 
     it('should extract tags from TagsList', async () => {
@@ -571,6 +585,7 @@ describe(MetadataService.name, () => {
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.video.id);
       expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
         expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
+        { lockedPropertiesBehavior: 'skip' },
       );
     });
 
@@ -879,7 +894,8 @@ describe(MetadataService.name, () => {
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
 
       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith({
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
+        {
          assetId: assetStub.image.id,
          bitsPerSample: expect.any(Number),
          autoStackId: null,
@@ -909,7 +925,9 @@ describe(MetadataService.name, () => {
          country: null,
          state: null,
          city: null,
-      });
+        },
+        { lockedPropertiesBehavior: 'skip' },
+      );
       expect(mocks.asset.update).toHaveBeenCalledWith(
         expect.objectContaining({
           id: assetStub.image.id,
@@ -943,6 +961,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
           timeZone: 'UTC+0',
         }),
+        { lockedPropertiesBehavior: 'skip' },
       );
     });
 
@@ -1034,7 +1053,10 @@ describe(MetadataService.name, () => {
     });
 
     it('should use Duration from exif', async () => {
-      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
+      mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
+        ...assetStub.image,
+        originalPath: '/original/path.webp',
+      });
       mockReadTags({ Duration: 123 }, {});
 
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -1046,6 +1068,7 @@ describe(MetadataService.name, () => {
     it('should prefer Duration from exif over sidecar', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
         ...assetStub.image,
+        originalPath: '/original/path.webp',
         files: [
           {
             id: 'some-id',
@@ -1063,6 +1086,16 @@ describe(MetadataService.name, () => {
       expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ duration: '00:02:03.000' }));
     });
 
+    it('should ignore all Duration tags for definitely static images', async () => {
+      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.imageDng);
+      mockReadTags({ Duration: 123 }, { Duration: 456 });
+
+      await sut.handleMetadataExtraction({ id: assetStub.imageDng.id });
+
+      expect(mocks.metadata.readTags).toHaveBeenCalledTimes(1);
+      expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ duration: null }));
+    });
+
     it('should ignore Duration from exif for videos', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.video);
       mockReadTags({ Duration: 123 }, {});
@@ -1089,6 +1122,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
           description: '',
         }),
+        { lockedPropertiesBehavior: 'skip' },
       );
 
       mockReadTags({ ImageDescription: ' my\n description' });
@@ -1097,6 +1131,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
           description: 'my\n description',
         }),
+        { lockedPropertiesBehavior: 'skip' },
       );
     });
@@ -1109,6 +1144,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
           description: '1000',
         }),
+        { lockedPropertiesBehavior: 'skip' },
       );
     });
@@ -1332,6 +1368,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
           modifyDate: expect.any(Date),
         }),
+        { lockedPropertiesBehavior: 'skip' },
       );
     });
@@ -1344,6 +1381,7 @@ describe(MetadataService.name, () => {
         expect.objectContaining({
          rating: null,
        }),
+        { lockedPropertiesBehavior: 'skip' },
      );
    });
@@ -1356,6 +1394,7 @@ describe(MetadataService.name, () => {
        expect.objectContaining({
          rating: 5,
        }),
+        { lockedPropertiesBehavior: 'skip' },
      );
    });
@@ -1368,6 +1407,7 @@ describe(MetadataService.name, () => {
        expect.objectContaining({
          rating: -1,
        }),
+        { lockedPropertiesBehavior: 'skip' },
      );
    });
@@ -1489,7 +1529,9 @@ describe(MetadataService.name, () => {
       mockReadTags(exif);
 
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
-      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining(expected));
+      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining(expected), {
+        lockedPropertiesBehavior: 'skip',
+      });
     });
 
     it.each([
@@ -1515,6 +1557,7 @@ describe(MetadataService.name, () => {
        expect.objectContaining({
          lensModel: expected,
        }),
+        { lockedPropertiesBehavior: 'skip' },
      );
    });
  });
@@ -1623,12 +1666,14 @@ describe(MetadataService.name, () => {
   describe('handleSidecarWrite', () => {
     it('should skip assets that no longer exist', async () => {
+      mocks.assetJob.getLockedPropertiesForMetadataExtraction.mockResolvedValue([]);
       mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
       await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
       expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
     });
 
     it('should skip jobs with no metadata', async () => {
+      mocks.assetJob.getLockedPropertiesForMetadataExtraction.mockResolvedValue([]);
       const asset = factory.jobAssets.sidecarWrite();
       mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(asset);
       await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
@@ -1641,20 +1686,22 @@ describe(MetadataService.name, () => {
       const gps = 12;
       const date = '2023-11-22T04:56:12.196Z';
 
+      mocks.assetJob.getLockedPropertiesForMetadataExtraction.mockResolvedValue([
+        'description',
+        'latitude',
+        'longitude',
+        'dateTimeOriginal',
+      ]);
       mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(asset);
       await expect(
         sut.handleSidecarWrite({
           id: asset.id,
-          description,
-          latitude: gps,
-          longitude: gps,
-          dateTimeOriginal: date,
         }),
       ).resolves.toBe(JobStatus.Success);
       expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.files[0].path, {
+        DateTimeOriginal: date,
         Description: description,
         ImageDescription: description,
-        DateTimeOriginal: date,
         GPSLatitude: gps,
         GPSLongitude: gps,
       });

View File

@@ -32,6 +32,7 @@ import { BaseService } from 'src/services/base.service';
 import { JobItem, JobOf } from 'src/types';
 import { getAssetFiles } from 'src/utils/asset.util';
 import { isAssetChecksumConstraint } from 'src/utils/database';
+import { mimeTypes } from 'src/utils/mime-types';
 import { isFaceImportEnabled } from 'src/utils/misc';
 import { upsertTags } from 'src/utils/tag';
 
@@ -289,7 +290,7 @@ export class MetadataService extends BaseService {
     };
 
     const promises: Promise<unknown>[] = [
-      this.assetRepository.upsertExif(exifData),
+      this.assetRepository.upsertExif(exifData, { lockedPropertiesBehavior: 'skip' }),
       this.assetRepository.update({
         id: asset.id,
         duration: this.getDuration(exifTags),
@@ -392,22 +393,34 @@ export class MetadataService extends BaseService {
   @OnJob({ name: JobName.SidecarWrite, queue: QueueName.Sidecar })
   async handleSidecarWrite(job: JobOf<JobName.SidecarWrite>): Promise<JobStatus> {
-    const { id, description, dateTimeOriginal, latitude, longitude, rating, tags } = job;
+    const { id, tags } = job;
     const asset = await this.assetJobRepository.getForSidecarWriteJob(id);
     if (!asset) {
       return JobStatus.Failed;
     }
 
+    const lockedProperties = await this.assetJobRepository.getLockedPropertiesForMetadataExtraction(id);
     const tagsList = (asset.tags || []).map((tag) => tag.value);
     const { sidecarFile } = getAssetFiles(asset.files);
     const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;
+
+    const { description, dateTimeOriginal, latitude, longitude, rating } = _.pick(
+      {
+        description: asset.exifInfo.description,
+        dateTimeOriginal: asset.exifInfo.dateTimeOriginal,
+        latitude: asset.exifInfo.latitude,
+        longitude: asset.exifInfo.longitude,
+        rating: asset.exifInfo.rating,
+      },
+      lockedProperties,
+    );
+
     const exif = _.omitBy(
       <Tags>{
         Description: description,
         ImageDescription: description,
-        DateTimeOriginal: dateTimeOriginal,
+        DateTimeOriginal: dateTimeOriginal == null ? undefined : String(dateTimeOriginal),
         GPSLatitude: latitude,
         GPSLongitude: longitude,
         Rating: rating,
@@ -486,7 +499,8 @@ export class MetadataService extends BaseService {
     }
 
     // prefer duration from video tags
-    if (videoTags) {
+    // don't save duration if asset is definitely not an animated image (see e.g. CR3 with Duration: 1s)
+    if (videoTags || !mimeTypes.isPossiblyAnimatedImage(asset.originalPath)) {
       delete mediaTags.Duration;
     }
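handleSidecarWrite no longer receives values in the job payload; it reads them from asset.exifInfo and writes only the properties the user has locked, selected via _.pick. A small runnable sketch of that selection step, assuming lodash (values are made up):

  import _ from 'lodash';

  // Only user-locked properties make it into the sidecar tags.
  const exifInfo = { description: 'hello', rating: 4, latitude: 1.23, longitude: 4.56 };
  const lockedProperties = ['description', 'rating'];

  const toWrite = _.pick(exifInfo, lockedProperties);
  // => { description: 'hello', rating: 4 }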

View File

@@ -222,11 +222,6 @@ export interface IDeleteFilesJob extends IBaseJob {
 }
 
 export interface ISidecarWriteJob extends IEntityJob {
-  description?: string;
-  dateTimeOriginal?: string;
-  latitude?: number;
-  longitude?: number;
-  rating?: number;
   tags?: true;
 }

View File

@@ -19,6 +19,7 @@ import { columns, Exif, Person } from 'src/database';
 import { AssetFileType, AssetVisibility, DatabaseExtension, DatabaseSslMode } from 'src/enum';
 import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
 import { DB } from 'src/schema';
+import { lockableProperties } from 'src/schema/tables/asset-exif.table';
 import { DatabaseConnectionParams, VectorExtension } from 'src/types';
 
 type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
@@ -488,3 +489,8 @@ export function vectorIndexQuery({ vectorExtension, table, indexName, lists }: V
     }
   }
 }
+
+export const updateLockedColumns = <T extends Record<string, unknown>>(exif: T) => ({
+  ...exif,
+  lockedProperties: lockableProperties.filter((property) => property in exif),
+});
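updateLockedColumns simply tags an exif patch with whichever lockable keys it contains, so callers do not have to enumerate them by hand. An illustrative call, using the export defined just above (values are made up):

  import { updateLockedColumns } from 'src/utils/database';

  // 'description' and 'rating' are in lockableProperties; 'assetId' is not.
  updateLockedColumns({ assetId: 'asset-1', description: 'hi', rating: 3 });
  // => { assetId: 'asset-1', description: 'hi', rating: 3, lockedProperties: ['description', 'rating'] }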

View File

@@ -152,6 +152,26 @@ describe('mimeTypes', () => {
     }
   });
 
+  describe('animated image', () => {
+    for (const img of ['a.avif', 'a.gif', 'a.webp']) {
+      it('should identify animated image mime types as such', () => {
+        expect(mimeTypes.isPossiblyAnimatedImage(img)).toBeTruthy();
+      });
+    }
+
+    for (const img of ['a.cr3', 'a.jpg', 'a.tiff']) {
+      it('should identify static image mime types as such', () => {
+        expect(mimeTypes.isPossiblyAnimatedImage(img)).toBeFalsy();
+      });
+    }
+
+    for (const extension of Object.keys(mimeTypes.video)) {
+      it('should not identify video mime types as animated', () => {
+        expect(mimeTypes.isPossiblyAnimatedImage(extension)).toBeFalsy();
+      });
+    }
+  });
+
   describe('video', () => {
     it('should contain only lowercase mime types', () => {
       const keys = Object.keys(mimeTypes.video);

View File

@@ -64,6 +64,11 @@ const image: Record<string, string[]> = {
   '.tiff': ['image/tiff'],
 };
 
+const possiblyAnimatedImageExtensions = new Set(['.avif', '.gif', '.heic', '.heif', '.jxl', '.png', '.webp']);
+const possiblyAnimatedImage: Record<string, string[]> = Object.fromEntries(
+  Object.entries(image).filter(([key]) => possiblyAnimatedImageExtensions.has(key)),
+);
+
 const extensionOverrides: Record<string, string> = {
   'image/jpeg': '.jpg',
 };
@@ -119,6 +124,7 @@ export const mimeTypes = {
   isAsset: (filename: string) => isType(filename, image) || isType(filename, video),
   isImage: (filename: string) => isType(filename, image),
   isWebSupportedImage: (filename: string) => isType(filename, webSupportedImage),
+  isPossiblyAnimatedImage: (filename: string) => isType(filename, possiblyAnimatedImage),
   isProfile: (filename: string) => isType(filename, profile),
   isSidecar: (filename: string) => isType(filename, sidecar),
   isVideo: (filename: string) => isType(filename, video),
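The new isPossiblyAnimatedImage check is what lets metadata extraction discard Duration tags for formats that cannot be animated (the CR3 case behind #24587). Illustrative calls, matching the extension set above:

  import { mimeTypes } from 'src/utils/mime-types';

  mimeTypes.isPossiblyAnimatedImage('photo.gif');  // true  (GIF can be animated)
  mimeTypes.isPossiblyAnimatedImage('photo.webp'); // true
  mimeTypes.isPossiblyAnimatedImage('photo.cr3');  // false (RAW images are always static)
  mimeTypes.isPossiblyAnimatedImage('clip.mp4');   // false (videos are not images)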

View File

@@ -202,7 +202,7 @@ export class MediumTestContext<S extends BaseService = BaseService> {
   }
 
   async newExif(dto: Insertable<AssetExifTable>) {
-    const result = await this.get(AssetRepository).upsertExif(dto);
+    const result = await this.get(AssetRepository).upsertExif(dto, { lockedPropertiesBehavior: 'override' });
     return { result };
   }

View File

@@ -268,4 +268,65 @@ describe(AssetService.name, () => {
       });
     });
   });
+
+  describe('update', () => {
+    it('should update dateTimeOriginal', async () => {
+      const { sut, ctx } = setup();
+      ctx.getMock(JobRepository).queue.mockResolvedValue();
+      const { user } = await ctx.newUser();
+      const auth = factory.auth({ user });
+      const { asset } = await ctx.newAsset({ ownerId: user.id });
+      await ctx.newExif({ assetId: asset.id, description: 'test' });
+
+      await expect(
+        ctx.database
+          .selectFrom('asset_exif')
+          .select('lockedProperties')
+          .where('assetId', '=', asset.id)
+          .executeTakeFirstOrThrow(),
+      ).resolves.toEqual({ lockedProperties: null });
+
+      await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
+
+      await expect(
+        ctx.database
+          .selectFrom('asset_exif')
+          .select('lockedProperties')
+          .where('assetId', '=', asset.id)
+          .executeTakeFirstOrThrow(),
+      ).resolves.toEqual({ lockedProperties: ['dateTimeOriginal'] });
+
+      await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
+        expect.objectContaining({
+          exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00' }),
+        }),
+      );
+    });
+  });
+
+  describe('updateAll', () => {
+    it('should relatively update assets', async () => {
+      const { sut, ctx } = setup();
+      ctx.getMock(JobRepository).queueAll.mockResolvedValue();
+      const { user } = await ctx.newUser();
+      const auth = factory.auth({ user });
+      const { asset } = await ctx.newAsset({ ownerId: user.id });
+      await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
+
+      await sut.updateAll(auth, { ids: [asset.id], dateTimeRelative: -11 });
+
+      await expect(
+        ctx.database
+          .selectFrom('asset_exif')
+          .select('lockedProperties')
+          .where('assetId', '=', asset.id)
+          .executeTakeFirstOrThrow(),
+      ).resolves.toEqual({ lockedProperties: ['timeZone', 'dateTimeOriginal'] });
+
+      await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
+        expect.objectContaining({
+          exifInfo: expect.objectContaining({
+            dateTimeOriginal: '2023-11-19T18:00:00+00:00',
+          }),
+        }),
+      );
+    });
+  });
 });

View File

@@ -95,6 +95,7 @@ describe(MetadataService.name, () => {
           dateTimeOriginal: new Date(expected.dateTimeOriginal),
           timeZone: expected.timeZone,
         }),
+        { lockedPropertiesBehavior: 'skip' },
       );
       expect(mocks.asset.update).toHaveBeenCalledWith(

View File

@@ -2,6 +2,7 @@ import { Kysely } from 'kysely';
 import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
 import { AssetRepository } from 'src/repositories/asset.repository';
 import { DB } from 'src/schema';
+import { updateLockedColumns } from 'src/utils/database';
 import { SyncTestContext } from 'test/medium.factory';
 import { factory } from 'test/small.factory';
 import { getKyselyDB, wait } from 'test/utils';
@@ -288,10 +289,13 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
     // update the asset
     const assetRepository = ctx.get(AssetRepository);
-    await assetRepository.upsertExif({
-      assetId: asset.id,
-      city: 'New City',
-    });
+    await assetRepository.upsertExif(
+      updateLockedColumns({
+        assetId: asset.id,
+        city: 'New City',
+      }),
+      { lockedPropertiesBehavior: 'append' },
+    );
 
     await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
       {
@@ -346,10 +350,13 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
     // update the asset
     const assetRepository = ctx.get(AssetRepository);
-    await assetRepository.upsertExif({
-      assetId: assetDelayedExif.id,
-      city: 'Delayed Exif',
-    });
+    await assetRepository.upsertExif(
+      updateLockedColumns({
+        assetId: assetDelayedExif.id,
+        city: 'Delayed Exif',
+      }),
+      { lockedPropertiesBehavior: 'append' },
+    );
 
     await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
       {

View File

@@ -4,6 +4,7 @@ import {
   AuthApiKey,
   AuthSharedLink,
   AuthUser,
+  Exif,
   Library,
   Memory,
   Partner,
@@ -319,8 +320,10 @@ const versionHistoryFactory = () => ({
   version: '1.123.45',
 });
 
-const assetSidecarWriteFactory = () => ({
-  id: newUuid(),
+const assetSidecarWriteFactory = () => {
+  const id = newUuid();
+  return {
+    id,
     originalPath: '/path/to/original-path.jpg.xmp',
     tags: [],
     files: [
@@ -330,7 +333,15 @@ const assetSidecarWriteFactory = () => ({
         type: AssetFileType.Sidecar,
       },
     ],
-});
+    exifInfo: {
+      assetId: id,
+      description: 'this is a description',
+      latitude: 12,
+      longitude: 12,
+      dateTimeOriginal: '2023-11-22T04:56:12.196Z',
+    } as unknown as Exif,
+  };
+};
 
 const assetOcrFactory = (
   ocr: {

View File

@@ -16,7 +16,7 @@
   {#if downloadManager.isDownloading}
     <div
       transition:fly={{ x: -100, duration: 350 }}
-      class="fixed bottom-10 start-2 max-h-67.5 w-79 rounded-2xl border dark:border-white/10 p-4 shadow-lg bg-subtle"
+      class="fixed bottom-10 start-2 max-h-67.5 w-79 z-60 rounded-2xl border dark:border-white/10 p-4 shadow-lg bg-subtle"
     >
       <Heading size="tiny">{$t('downloading')}</Heading>
       <div class="my-2 mb-2 flex max-h-50 flex-col overflow-y-auto text-sm">

View File

@@ -79,10 +79,30 @@
     searchStore.isSearchEnabled = false;
   };
 
+  const buildSearchPayload = (term: string): SmartSearchDto | MetadataSearchDto => {
+    const searchType = getSearchType();
+    switch (searchType) {
+      case 'smart': {
+        return { query: term };
+      }
+      case 'metadata': {
+        return { originalFileName: term };
+      }
+      case 'description': {
+        return { description: term };
+      }
+      case 'ocr': {
+        return { ocr: term };
+      }
+      default: {
+        return { query: term };
+      }
+    }
+  };
+
   const onHistoryTermClick = async (searchTerm: string) => {
     value = searchTerm;
-    const searchPayload = { query: searchTerm };
-    await handleSearch(searchPayload);
+    await handleSearch(buildSearchPayload(searchTerm));
   };
 
   const onFilterClick = async () => {
@@ -112,29 +132,7 @@
   };
 
   const onSubmit = () => {
-    const searchType = getSearchType();
-
-    let payload = {} as SmartSearchDto | MetadataSearchDto;
-    switch (searchType) {
-      case 'smart': {
-        payload = { query: value } as SmartSearchDto;
-        break;
-      }
-      case 'metadata': {
-        payload = { originalFileName: value } as MetadataSearchDto;
-        break;
-      }
-      case 'description': {
-        payload = { description: value } as MetadataSearchDto;
-        break;
-      }
-      case 'ocr': {
-        payload = { ocr: value } as MetadataSearchDto;
-        break;
-      }
-    }
-
-    handlePromiseError(handleSearch(payload));
+    handlePromiseError(handleSearch(buildSearchPayload(value)));
     saveSearchTerm(value);
   };