feat: Edit metadata (#5066)

* chore: rebase and clean-up

* feat: sync description, add e2e tests

* feat: simplify web code

* chore: unit tests

* fix: linting

* Bug fix with the arrow keys

* timezone typeahead filter

* small styling

* format fix

* Bug fix in the map selection

* Websocket basic

* Update metadata visualisation through the websocket

* Update timeline

* fix merge

* fix web

* fix web

* maplibre system

* format fix

* format fix

* refactor: clean up

* Fix small bug in the hour/timezone

* Don't display the modify action for readOnly assets

* Add log in case of failure

* Formatter + try/catch error handling

* Remove everything related to websocket

* Revert "Remove everything related to websocket"

This reverts commit 14bcb9e1e4.

* remove notification

* fix test

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
Author: YFrendo
Date: 2023-11-30 04:52:28 +01:00
Committer: GitHub
Parent: b396e0eee3
Commit: 644e52b153

42 changed files with 1045 additions and 81 deletions

View File

@@ -8,7 +8,7 @@ import { AccessCore, Permission } from '../access';
import { AuthUserDto } from '../auth';
import { mimeTypes } from '../domain.constant';
import { HumanReadableSize, usePagination } from '../domain.util';
-import { IAssetDeletionJob, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
+import { IAssetDeletionJob, ISidecarWriteJob, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import {
CommunicationEvent,
IAccessRepository,
@@ -393,10 +393,8 @@ export class AssetService {
async update(authUser: AuthUserDto, id: string, dto: UpdateAssetDto): Promise<AssetResponseDto> {
await this.access.requirePermission(authUser, Permission.ASSET_UPDATE, id);
-const { description, ...rest } = dto;
-if (description !== undefined) {
-await this.assetRepository.upsertExif({ assetId: id, description });
-}
+const { description, dateTimeOriginal, latitude, longitude, ...rest } = dto;
+await this.updateMetadata({ id, description, dateTimeOriginal, latitude, longitude });
const asset = await this.assetRepository.save({ id, ...rest });
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids: [id] } });
@@ -404,7 +402,7 @@ export class AssetService {
}
async updateAll(authUser: AuthUserDto, dto: AssetBulkUpdateDto): Promise<void> {
-const { ids, removeParent, ...options } = dto;
+const { ids, removeParent, dateTimeOriginal, latitude, longitude, ...options } = dto;
await this.access.requirePermission(authUser, Permission.ASSET_UPDATE, ids);
if (removeParent) {
@@ -424,6 +422,10 @@ export class AssetService {
await this.assetRepository.updateAll([options.stackParentId], { stackParentId: null });
}
for (const id of ids) {
await this.updateMetadata({ id, dateTimeOriginal, latitude, longitude });
}
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids } });
await this.assetRepository.updateAll(ids, options);
this.communicationRepository.send(CommunicationEvent.ASSET_UPDATE, authUser.id, ids);
@@ -587,4 +589,13 @@ export class AssetService {
}
}
}
private async updateMetadata(dto: ISidecarWriteJob) {
const { id, description, dateTimeOriginal, latitude, longitude } = dto;
const writes = _.omitBy({ description, dateTimeOriginal, latitude, longitude }, _.isUndefined);
if (Object.keys(writes).length > 0) {
await this.assetRepository.upsertExif({ assetId: id, ...writes });
await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id, ...writes } });
}
}
}
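The new private updateMetadata helper only acts on fields the caller actually supplied: lodash's omitBy strips the undefined keys, and the EXIF upsert plus the SIDECAR_WRITE job only run when something is left. A minimal standalone sketch of that filtering (values are illustrative):

```ts
import _ from 'lodash';

// Only description was provided; the other metadata fields stay undefined.
const writes = _.omitBy(
  { description: 'hello', dateTimeOriginal: undefined, latitude: undefined, longitude: undefined },
  _.isUndefined,
);

console.log(writes);                         // { description: 'hello' }
console.log(Object.keys(writes).length > 0); // true -> upsertExif + SIDECAR_WRITE job are issued
```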

View File

@@ -1,7 +1,19 @@
import { AssetType } from '@app/infra/entities';
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
-import { IsBoolean, IsEnum, IsInt, IsPositive, IsString, Min } from 'class-validator';
+import {
+  IsBoolean,
+  IsDateString,
+  IsEnum,
+  IsInt,
+  IsLatitude,
+  IsLongitude,
+  IsNotEmpty,
+  IsPositive,
+  IsString,
+  Min,
+  ValidateIf,
+} from 'class-validator';
import { Optional, QueryBoolean, QueryDate, ValidateUUID } from '../../domain.util';
import { BulkIdsDto } from '../response-dto';
@@ -10,6 +22,10 @@ export enum AssetOrder {
DESC = 'desc',
}
const hasGPS = (o: { latitude: undefined; longitude: undefined }) =>
o.latitude !== undefined || o.longitude !== undefined;
const ValidateGPS = () => ValidateIf(hasGPS);
export class AssetSearchDto {
@ValidateUUID({ optional: true })
id?: string;
@@ -172,6 +188,20 @@ export class AssetBulkUpdateDto extends BulkIdsDto {
@Optional()
@IsBoolean()
removeParent?: boolean;
@Optional()
@IsDateString()
dateTimeOriginal?: string;
@ValidateGPS()
@IsLatitude()
@IsNotEmpty()
latitude?: number;
@ValidateGPS()
@IsLongitude()
@IsNotEmpty()
longitude?: number;
}
export class UpdateAssetDto {
@@ -186,6 +216,20 @@ export class UpdateAssetDto {
@Optional()
@IsString()
description?: string;
@Optional()
@IsDateString()
dateTimeOriginal?: string;
@ValidateGPS()
@IsLatitude()
@IsNotEmpty()
latitude?: number;
@ValidateGPS()
@IsLongitude()
@IsNotEmpty()
longitude?: number;
}
export class RandomAssetsDto {
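The ValidateGPS helper wraps class-validator's ValidateIf: latitude and longitude are skipped entirely when neither is sent, but become required (and range-checked) as soon as one of them appears, so a request carrying a single coordinate is rejected rather than half-updating the position. A minimal sketch of the same pattern in isolation (the LocationDto class is hypothetical):

```ts
import { IsLatitude, IsLongitude, IsNotEmpty, ValidateIf } from 'class-validator';

const hasGPS = (o: { latitude?: number; longitude?: number }) =>
  o.latitude !== undefined || o.longitude !== undefined;

class LocationDto {
  // Not validated at all when neither coordinate is present.
  @ValidateIf(hasGPS)
  @IsLatitude()
  @IsNotEmpty()
  latitude?: number;

  @ValidateIf(hasGPS)
  @IsLongitude()
  @IsNotEmpty()
  longitude?: number;
}
```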

View File

@@ -96,6 +96,7 @@ export enum JobName {
QUEUE_SIDECAR = 'queue-sidecar',
SIDECAR_DISCOVERY = 'sidecar-discovery',
SIDECAR_SYNC = 'sidecar-sync',
SIDECAR_WRITE = 'sidecar-write',
}
export const JOBS_ASSET_PAGINATION_SIZE = 1000;
@@ -168,6 +169,7 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.QUEUE_SIDECAR]: QueueName.SIDECAR,
[JobName.SIDECAR_DISCOVERY]: QueueName.SIDECAR,
[JobName.SIDECAR_SYNC]: QueueName.SIDECAR,
[JobName.SIDECAR_WRITE]: QueueName.SIDECAR,
// Library management
[JobName.LIBRARY_SCAN_ASSET]: QueueName.LIBRARY,

View File

@@ -9,7 +9,7 @@ export interface IAssetFaceJob extends IBaseJob {
export interface IEntityJob extends IBaseJob {
id: string;
-source?: 'upload';
+source?: 'upload' | 'sidecar-write';
}
export interface IAssetDeletionJob extends IEntityJob {
@@ -33,3 +33,10 @@ export interface IBulkEntityJob extends IBaseJob {
export interface IDeleteFilesJob extends IBaseJob {
files: Array<string | null | undefined>;
}
export interface ISidecarWriteJob extends IEntityJob {
description?: string;
dateTimeOriginal?: string;
latitude?: number;
longitude?: number;
}
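Because ISidecarWriteJob extends IEntityJob, everything apart from the asset id is optional and a job can carry any subset of the editable fields. A hypothetical payload (values are illustrative):

```ts
import { ISidecarWriteJob } from '../job'; // same import path the asset service uses above

const job: ISidecarWriteJob = {
  id: 'asset-123', // asset id, inherited from IEntityJob
  description: 'Sunset at the lake',
  dateTimeOriginal: '2023-11-22T04:56:12.196Z',
  latitude: 48.8566, // illustrative coordinates
  longitude: 2.3522,
};
```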

View File

@@ -165,7 +165,19 @@ export class JobService {
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: item.data });
break;
case JobName.SIDECAR_WRITE:
await this.jobRepository.queue({
name: JobName.METADATA_EXTRACTION,
data: { id: item.data.id, source: 'sidecar-write' },
});
case JobName.METADATA_EXTRACTION:
if (item.data.source === 'sidecar-write') {
const [asset] = await this.assetRepository.getByIds([item.data.id]);
if (asset) {
this.communicationRepository.send(CommunicationEvent.ASSET_UPDATE, asset.ownerId, mapAsset(asset));
}
}
await this.jobRepository.queue({ name: JobName.LINK_LIVE_PHOTOS, data: item.data });
break;

View File

@@ -218,11 +218,11 @@ describe(MetadataService.name, () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
-when(metadataMock.getExifTags)
+when(metadataMock.readTags)
.calledWith(assetStub.sidecar.originalPath)
// higher priority tag
.mockResolvedValue({ CreationDate: originalDate.toISOString() });
-when(metadataMock.getExifTags)
+when(metadataMock.readTags)
.calledWith(assetStub.sidecar.sidecarPath as string)
// lower priority tag, but in sidecar
.mockResolvedValue({ CreateDate: sidecarDate.toISOString() });
@@ -240,7 +240,7 @@ describe(MetadataService.name, () => {
it('should handle lists of numbers', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
-metadataMock.getExifTags.mockResolvedValue({ ISO: [160] as any });
+metadataMock.readTags.mockResolvedValue({ ISO: [160] as any });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
@@ -257,7 +257,7 @@ describe(MetadataService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.withLocation]);
configMock.load.mockResolvedValue([{ key: SystemConfigKey.REVERSE_GEOCODING_ENABLED, value: true }]);
metadataMock.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
-metadataMock.getExifTags.mockResolvedValue({
+metadataMock.readTags.mockResolvedValue({
GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
});
@@ -289,7 +289,7 @@ describe(MetadataService.name, () => {
it('should apply motion photos', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]);
-metadataMock.getExifTags.mockResolvedValue({
+metadataMock.readTags.mockResolvedValue({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@@ -310,7 +310,7 @@ describe(MetadataService.name, () => {
it('should create new motion asset if not found and link it with the photo', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]);
-metadataMock.getExifTags.mockResolvedValue({
+metadataMock.readTags.mockResolvedValue({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@@ -367,7 +367,7 @@ describe(MetadataService.name, () => {
tz: '+02:00',
};
assetMock.getByIds.mockResolvedValue([assetStub.image]);
-metadataMock.getExifTags.mockResolvedValue(tags);
+metadataMock.readTags.mockResolvedValue(tags);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
@@ -406,7 +406,7 @@ describe(MetadataService.name, () => {
it('should handle duration', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
-metadataMock.getExifTags.mockResolvedValue({ Duration: 6.21 });
+metadataMock.readTags.mockResolvedValue({ Duration: 6.21 });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -422,7 +422,7 @@ describe(MetadataService.name, () => {
it('should handle duration as an object without Scale', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
-metadataMock.getExifTags.mockResolvedValue({ Duration: { Value: 6.2 } });
+metadataMock.readTags.mockResolvedValue({ Duration: { Value: 6.2 } });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -438,7 +438,7 @@ describe(MetadataService.name, () => {
it('should handle duration with scale', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
-metadataMock.getExifTags.mockResolvedValue({ Duration: { Scale: 1.11111111111111e-5, Value: 558720 } });
+metadataMock.readTags.mockResolvedValue({ Duration: { Scale: 1.11111111111111e-5, Value: 558720 } });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -531,4 +531,41 @@ describe(MetadataService.name, () => {
});
});
});
describe('handleSidecarWrite', () => {
it('should skip assets that do not exist anymore', async () => {
assetMock.getByIds.mockResolvedValue([]);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(false);
expect(metadataMock.writeTags).not.toHaveBeenCalled();
});
it('should skip jobs with no metadata', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
await expect(sut.handleSidecarWrite({ id: assetStub.sidecar.id })).resolves.toBe(true);
expect(metadataMock.writeTags).not.toHaveBeenCalled();
});
it('should write tags', async () => {
const description = 'this is a description';
const gps = 12;
const date = '2023-11-22T04:56:12.196Z';
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
await expect(
sut.handleSidecarWrite({
id: assetStub.sidecar.id,
description,
latitude: gps,
longitude: gps,
dateTimeOriginal: date,
}),
).resolves.toBe(true);
expect(metadataMock.writeTags).toHaveBeenCalledWith(assetStub.sidecar.sidecarPath, {
ImageDescription: description,
CreationDate: date,
GPSLatitude: gps,
GPSLongitude: gps,
});
});
});
});

View File

@@ -3,10 +3,11 @@ import { Inject, Injectable, Logger } from '@nestjs/common';
import { ExifDateTime, Tags } from 'exiftool-vendored';
import { firstDateTime } from 'exiftool-vendored/dist/FirstDateTime';
import { constants } from 'fs/promises';
import _ from 'lodash';
import { Duration } from 'luxon';
import { Subscription } from 'rxjs';
import { usePagination } from '../domain.util';
-import { IBaseJob, IEntityJob, JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from '../job';
+import { IBaseJob, IEntityJob, ISidecarWriteJob, JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from '../job';
import {
ExifDuration,
IAlbumRepository,
@@ -79,7 +80,6 @@ export class MetadataService {
private logger = new Logger(MetadataService.name);
private storageCore: StorageCore;
private configCore: SystemConfigCore;
private oldCities?: string;
private subscription: Subscription | null = null;
constructor(
@@ -244,6 +244,37 @@ export class MetadataService {
return true;
}
async handleSidecarWrite(job: ISidecarWriteJob) {
const { id, description, dateTimeOriginal, latitude, longitude } = job;
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
}
const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
const exif = _.omitBy<Tags>(
{
ImageDescription: description,
CreationDate: dateTimeOriginal,
GPSLatitude: latitude,
GPSLongitude: longitude,
},
_.isUndefined,
);
if (Object.keys(exif).length === 0) {
return true;
}
await this.repository.writeTags(sidecarPath, exif);
if (!asset.sidecarPath) {
await this.assetRepository.save({ id, sidecarPath });
}
return true;
}
private async applyReverseGeocoding(asset: AssetEntity, exifData: ExifEntityWithoutGeocodeAndTypeOrm) {
const { latitude, longitude } = exifData;
if (!(await this.configCore.hasFeature(FeatureFlag.REVERSE_GEOCODING)) || !longitude || !latitude) {
@@ -346,8 +377,8 @@ export class MetadataService {
asset: AssetEntity,
): Promise<{ exifData: ExifEntityWithoutGeocodeAndTypeOrm; tags: ImmichTags }> {
const stats = await this.storageRepository.stat(asset.originalPath);
-const mediaTags = await this.repository.getExifTags(asset.originalPath);
-const sidecarTags = asset.sidecarPath ? await this.repository.getExifTags(asset.sidecarPath) : null;
+const mediaTags = await this.repository.readTags(asset.originalPath);
+const sidecarTags = asset.sidecarPath ? await this.repository.readTags(asset.sidecarPath) : null;
// ensure date from sidecar is used if present
const hasDateOverride = !!this.getDateTimeOriginal(sidecarTags);
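handleSidecarWrite returns true without touching the filesystem when the job carries no metadata, and when the asset has never had a sidecar it derives one next to the original and persists that path after writing the tags. A small sketch of the path fallback (the asset object and paths are illustrative):

```ts
// Hypothetical asset that has no sidecar yet.
const asset = { id: 'asset-123', originalPath: '/photos/IMG_0001.jpg', sidecarPath: null };

const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
// -> '/photos/IMG_0001.jpg.xmp', saved back onto the asset once the tags are written
```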

View File

@@ -9,6 +9,7 @@ import {
IEntityJob,
ILibraryFileJob,
ILibraryRefreshJob,
ISidecarWriteJob,
} from '../job/job.interface';
export interface JobCounts {
@@ -54,11 +55,11 @@ export type JobItem =
| { name: JobName.QUEUE_METADATA_EXTRACTION; data: IBaseJob }
| { name: JobName.METADATA_EXTRACTION; data: IEntityJob }
| { name: JobName.LINK_LIVE_PHOTOS; data: IEntityJob }
// Sidecar Scanning
| { name: JobName.QUEUE_SIDECAR; data: IBaseJob }
| { name: JobName.SIDECAR_DISCOVERY; data: IEntityJob }
| { name: JobName.SIDECAR_SYNC; data: IEntityJob }
| { name: JobName.SIDECAR_WRITE; data: ISidecarWriteJob }
// Object Tagging
| { name: JobName.QUEUE_OBJECT_TAGGING; data: IBaseJob }

View File

@@ -33,5 +33,6 @@ export interface IMetadataRepository {
init(): Promise<void>;
teardown(): Promise<void>;
reverseGeocode(point: GeoPoint): Promise<ReverseGeocodeResult | null>;
-getExifTags(path: string): Promise<ImmichTags | null>;
+readTags(path: string): Promise<ImmichTags | null>;
+writeTags(path: string, tags: Partial<Tags>): Promise<void>;
}

View File

@@ -9,7 +9,7 @@ import { GeodataAdmin1Entity, GeodataAdmin2Entity, GeodataPlacesEntity, SystemMe
import { DatabaseLock } from '@app/infra/utils/database-locks';
import { Inject, Logger } from '@nestjs/common';
import { InjectDataSource, InjectRepository } from '@nestjs/typeorm';
-import { DefaultReadTaskOptions, exiftool } from 'exiftool-vendored';
+import { DefaultReadTaskOptions, exiftool, Tags } from 'exiftool-vendored';
import { createReadStream, existsSync } from 'fs';
import { readFile } from 'fs/promises';
import * as geotz from 'geo-tz';
@@ -181,7 +181,7 @@ export class MetadataRepository implements IMetadataRepository {
return { country, state, city };
}
-getExifTags(path: string): Promise<ImmichTags | null> {
+readTags(path: string): Promise<ImmichTags | null> {
return exiftool
.read(path, undefined, {
...DefaultReadTaskOptions,
@@ -198,4 +198,12 @@ export class MetadataRepository implements IMetadataRepository {
return null;
}) as Promise<ImmichTags | null>;
}
async writeTags(path: string, tags: Partial<Tags>): Promise<void> {
try {
await exiftool.write(path, tags, ['-overwrite_original']);
} catch (error) {
this.logger.warn(`Error writing exif data (${path}): ${error}`);
}
}
}
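exiftool-vendored's write() takes the target path, a partial tag object, and extra ExifTool arguments; -overwrite_original keeps ExifTool from leaving a *_original backup next to the sidecar. A minimal standalone sketch (path and tag values are illustrative, and the target file is assumed to exist already):

```ts
import { exiftool } from 'exiftool-vendored';

async function writeSidecarExample() {
  try {
    await exiftool.write(
      '/photos/IMG_0001.jpg.xmp',
      { ImageDescription: 'Sunset at the lake', GPSLatitude: 48.8566, GPSLongitude: 2.3522 },
      ['-overwrite_original'], // otherwise ExifTool writes IMG_0001.jpg.xmp_original as a backup
    );
  } finally {
    await exiftool.end(); // shut down the shared exiftool child process when completely done
  }
}
```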

View File

@@ -84,6 +84,7 @@ export class AppService {
[JobName.QUEUE_SIDECAR]: (data) => this.metadataService.handleQueueSidecar(data),
[JobName.SIDECAR_DISCOVERY]: (data) => this.metadataService.handleSidecarDiscovery(data),
[JobName.SIDECAR_SYNC]: () => this.metadataService.handleSidecarSync(),
[JobName.SIDECAR_WRITE]: (data) => this.metadataService.handleSidecarWrite(data),
[JobName.LIBRARY_SCAN_ASSET]: (data) => this.libraryService.handleAssetRefresh(data),
[JobName.LIBRARY_SCAN]: (data) => this.libraryService.handleQueueAssetRefresh(data),
[JobName.LIBRARY_DELETE]: (data) => this.libraryService.handleDeleteLibrary(data),