Mirror of https://github.com/immich-app/immich.git (synced 2025-12-21 01:11:16 +03:00)
feat(server, web): search location (#7139)
* feat: search location
* fix: tests
* feat: outclick
* location search index
* update query
* fixed query
* updated sql
* update query
* Update search.dto.ts

  Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>

* coalesce
* fix: tests
* feat: add alternate names
* fix: generate sql files
* single table, add alternate names to query, cleanup
* merge main
* update sql
* pr feedback
* pr feedback
* chore: fix merge

---------

Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>
@@ -91,7 +91,7 @@ export const citiesFile = 'cities500.txt';
export const geodataDatePath = join(GEODATA_ROOT_PATH, 'geodata-date.txt');
export const geodataAdmin1Path = join(GEODATA_ROOT_PATH, 'admin1CodesASCII.txt');
export const geodataAdmin2Path = join(GEODATA_ROOT_PATH, 'admin2Codes.txt');
export const geodataCitites500Path = join(GEODATA_ROOT_PATH, citiesFile);
export const geodataCities500Path = join(GEODATA_ROOT_PATH, citiesFile);

const image: Record<string, string[]> = {
  '.3fr': ['image/3fr', 'image/x-hasselblad-3fr'],

@@ -1,4 +1,4 @@
import { AssetEntity, AssetFaceEntity, AssetType, SmartInfoEntity } from '@app/infra/entities';
import { AssetEntity, AssetFaceEntity, AssetType, GeodataPlacesEntity, SmartInfoEntity } from '@app/infra/entities';
import { Paginated } from '../domain.util';

export const ISearchRepository = 'ISearchRepository';
@@ -186,4 +186,5 @@ export interface ISearchRepository {
  searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions): Paginated<AssetEntity>;
  searchFaces(search: FaceEmbeddingSearch): Promise<FaceSearchResult[]>;
  upsert(smartInfo: Partial<SmartInfoEntity>, embedding?: Embedding): Promise<void>;
  searchPlaces(placeName: string): Promise<GeodataPlacesEntity[]>;
}

@@ -1,5 +1,5 @@
import { AssetOrder } from '@app/domain/asset/dto/asset.dto';
import { AssetType } from '@app/infra/entities';
import { AssetType, GeodataPlacesEntity } from '@app/infra/entities';
import { ApiProperty } from '@nestjs/swagger';
import { Transform, Type } from 'class-transformer';
import { IsBoolean, IsEnum, IsInt, IsNotEmpty, IsString, Max, Min } from 'class-validator';
@@ -241,6 +241,12 @@ export class SearchDto {
  size?: number;
}

export class SearchPlacesDto {
  @IsString()
  @IsNotEmpty()
  name!: string;
}

export class SearchPeopleDto {
  @IsString()
  @IsNotEmpty()
@@ -251,3 +257,21 @@ export class SearchPeopleDto {
  @Optional()
  withHidden?: boolean;
}

export class PlacesResponseDto {
  name!: string;
  latitude!: number;
  longitude!: number;
  admin1name?: string;
  admin2name?: string;
}

export function mapPlaces(place: GeodataPlacesEntity): PlacesResponseDto {
  return {
    name: place.name,
    latitude: place.latitude,
    longitude: place.longitude,
    admin1name: place.admin1Name,
    admin2name: place.admin2Name,
  };
}

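For illustration, a single entry produced by mapPlaces has the following shape (the values here are made up, not taken from the dataset):

  // hypothetical PlacesResponseDto value
  const example: PlacesResponseDto = {
    name: 'Paris',
    latitude: 48.85341,
    longitude: 2.3488,
    admin1name: 'Île-de-France',
    admin2name: 'Paris',
  };
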
@@ -16,7 +16,15 @@ import {
  SearchStrategy,
} from '../repositories';
import { FeatureFlag, SystemConfigCore } from '../system-config';
import { MetadataSearchDto, SearchDto, SearchPeopleDto, SmartSearchDto } from './dto';
import {
  MetadataSearchDto,
  PlacesResponseDto,
  SearchDto,
  SearchPeopleDto,
  SearchPlacesDto,
  SmartSearchDto,
  mapPlaces,
} from './dto';
import { SearchSuggestionRequestDto, SearchSuggestionType } from './dto/search-suggestion.dto';
import { SearchResponseDto } from './response-dto';

@@ -41,6 +49,11 @@ export class SearchService {
    return this.personRepository.getByName(auth.user.id, dto.name, { withHidden: dto.withHidden });
  }

  async searchPlaces(dto: SearchPlacesDto): Promise<PlacesResponseDto[]> {
    const places = await this.searchRepository.searchPlaces(dto.name);
    return places.map((place) => mapPlaces(place));
  }

  async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
    await this.configCore.requireFeature(FeatureFlag.SEARCH);
    const options = { maxFields: 12, minAssetsPerField: 5 };

@@ -2,9 +2,11 @@ import {
  AuthDto,
  MetadataSearchDto,
  PersonResponseDto,
  PlacesResponseDto,
  SearchDto,
  SearchExploreResponseDto,
  SearchPeopleDto,
  SearchPlacesDto,
  SearchResponseDto,
  SearchService,
  SmartSearchDto,
@@ -48,6 +50,11 @@ export class SearchController {
    return this.service.searchPerson(auth, dto);
  }

  @Get('places')
  searchPlaces(@Query() dto: SearchPlacesDto): Promise<PlacesResponseDto[]> {
    return this.service.searchPlaces(dto);
  }

  @Get('suggestions')
  getSearchSuggestions(@Auth() auth: AuthDto, @Query() dto: SearchSuggestionRequestDto): Promise<string[]> {
    return this.service.getSearchSuggestions(auth, dto);

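A rough sketch of how a client could call the new endpoint; the /api/search/places path and the x-api-key header are assumptions based on how the other search routes are exposed, not something shown in this diff:

  // hypothetical client-side request
  const response = await fetch('/api/search/places?name=' + encodeURIComponent('paris'), {
    headers: { 'x-api-key': process.env.IMMICH_API_KEY ?? '' },
  });
  const places: PlacesResponseDto[] = await response.json();
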
@@ -1,10 +0,0 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';

@Entity('geodata_admin1')
export class GeodataAdmin1Entity {
  @PrimaryColumn({ type: 'varchar' })
  key!: string;

  @Column({ type: 'varchar' })
  name!: string;
}
@@ -1,10 +0,0 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';

@Entity('geodata_admin2')
export class GeodataAdmin2Entity {
  @PrimaryColumn({ type: 'varchar' })
  key!: string;

  @Column({ type: 'varchar' })
  name!: string;
}
@@ -1,6 +1,4 @@
import { GeodataAdmin1Entity } from '@app/infra/entities/geodata-admin1.entity';
import { GeodataAdmin2Entity } from '@app/infra/entities/geodata-admin2.entity';
import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm';
import { Column, Entity, PrimaryColumn } from 'typeorm';

@Entity('geodata_places', { synchronize: false })
export class GeodataPlacesEntity {
@@ -21,7 +19,7 @@ export class GeodataPlacesEntity {
  //   asExpression: 'll_to_earth((latitude)::double precision, (longitude)::double precision)',
  //   type: 'earth',
  // })
  earthCoord!: unknown;
  // earthCoord!: unknown;

  @Column({ type: 'char', length: 2 })
  countryCode!: string;
@@ -32,27 +30,14 @@ export class GeodataPlacesEntity {
  @Column({ type: 'varchar', length: 80, nullable: true })
  admin2Code!: string;

  @Column({
    type: 'varchar',
    generatedType: 'STORED',
    asExpression: `"countryCode" || '.' || "admin1Code"`,
    nullable: true,
  })
  admin1Key!: string;
  @Column({ type: 'varchar', nullable: true })
  admin1Name!: string;

  @ManyToOne(() => GeodataAdmin1Entity, { eager: true, nullable: true, createForeignKeyConstraints: false })
  admin1!: GeodataAdmin1Entity;
  @Column({ type: 'varchar', nullable: true })
  admin2Name!: string;

  @Column({
    type: 'varchar',
    generatedType: 'STORED',
    asExpression: `"countryCode" || '.' || "admin1Code" || '.' || "admin2Code"`,
    nullable: true,
  })
  admin2Key!: string;

  @ManyToOne(() => GeodataAdmin2Entity, { eager: true, nullable: true, createForeignKeyConstraints: false })
  admin2!: GeodataAdmin2Entity;
  @Column({ type: 'varchar', nullable: true })
  alternateNames!: string;

  @Column({ type: 'date' })
  modificationDate!: Date;

@@ -7,8 +7,6 @@ import { AssetStackEntity } from './asset-stack.entity';
import { AssetEntity } from './asset.entity';
import { AuditEntity } from './audit.entity';
import { ExifEntity } from './exif.entity';
import { GeodataAdmin1Entity } from './geodata-admin1.entity';
import { GeodataAdmin2Entity } from './geodata-admin2.entity';
import { GeodataPlacesEntity } from './geodata-places.entity';
import { LibraryEntity } from './library.entity';
import { MoveEntity } from './move.entity';
@@ -32,8 +30,6 @@ export * from './asset-stack.entity';
export * from './asset.entity';
export * from './audit.entity';
export * from './exif.entity';
export * from './geodata-admin1.entity';
export * from './geodata-admin2.entity';
export * from './geodata-places.entity';
export * from './library.entity';
export * from './move.entity';
@@ -59,8 +55,6 @@ export const databaseEntities = [
  AuditEntity,
  ExifEntity,
  GeodataPlacesEntity,
  GeodataAdmin1Entity,
  GeodataAdmin2Entity,
  MoveEntity,
  PartnerEntity,
  PersonEntity,

@@ -0,0 +1,152 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class GeodataLocationSearch1708059341865 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS pg_trgm`);
    await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS unaccent`);

    // https://stackoverflow.com/a/11007216
    await queryRunner.query(`
      CREATE OR REPLACE FUNCTION f_unaccent(text)
        RETURNS text
        LANGUAGE sql IMMUTABLE PARALLEL SAFE STRICT
      RETURN unaccent('unaccent', $1)`);

    await queryRunner.query(`ALTER TABLE geodata_places ADD COLUMN "admin1Name" varchar`);
    await queryRunner.query(`ALTER TABLE geodata_places ADD COLUMN "admin2Name" varchar`);

    await queryRunner.query(`
      UPDATE geodata_places
      SET "admin1Name" = admin1.name
      FROM geodata_admin1 admin1
      WHERE admin1.key = "admin1Key"`);

    await queryRunner.query(`
      UPDATE geodata_places
      SET "admin2Name" = admin2.name
      FROM geodata_admin2 admin2
      WHERE admin2.key = "admin2Key"`);

    await queryRunner.query(`DROP TABLE geodata_admin1 CASCADE`);
    await queryRunner.query(`DROP TABLE geodata_admin2 CASCADE`);

    await queryRunner.query(`
      ALTER TABLE geodata_places
      DROP COLUMN "admin1Key",
      DROP COLUMN "admin2Key"`);

    await queryRunner.query(`
      CREATE INDEX idx_geodata_places_name
      ON geodata_places
      USING gin (f_unaccent(name) gin_trgm_ops)`);

    await queryRunner.query(`
      CREATE INDEX idx_geodata_places_admin1_name
      ON geodata_places
      USING gin (f_unaccent("admin1Name") gin_trgm_ops)`);

    await queryRunner.query(`
      CREATE INDEX idx_geodata_places_admin2_name
      ON geodata_places
      USING gin (f_unaccent("admin2Name") gin_trgm_ops)`);

    await queryRunner.query(
      `
      DELETE FROM "typeorm_metadata"
      WHERE
        "type" = $1 AND
        "name" = $2 AND
        "database" = $3 AND
        "schema" = $4 AND
        "table" = $5`,
      ['GENERATED_COLUMN', 'admin1Key', 'immich', 'public', 'geodata_places'],
    );

    await queryRunner.query(
      `
      DELETE FROM "typeorm_metadata"
      WHERE
        "type" = $1 AND
        "name" = $2 AND
        "database" = $3 AND
        "schema" = $4 AND
        "table" = $5`,
      ['GENERATED_COLUMN', 'admin2Key', 'immich', 'public', 'geodata_places'],
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE TABLE "geodata_admin1" (
        "key" character varying NOT NULL,
        "name" character varying NOT NULL,
        CONSTRAINT "PK_3fe3a89c5aac789d365871cb172" PRIMARY KEY ("key")
      )`);

    await queryRunner.query(`
      CREATE TABLE "geodata_admin2" (
        "key" character varying NOT NULL,
        "name" character varying NOT NULL,
        CONSTRAINT "PK_1e3886455dbb684d6f6b4756726" PRIMARY KEY ("key")
      )`);

    await queryRunner.query(`
      ALTER TABLE geodata_places
      ADD COLUMN "admin1Key" character varying
        GENERATED ALWAYS AS ("countryCode" || '.' || "admin1Code") STORED,
      ADD COLUMN "admin2Key" character varying
        GENERATED ALWAYS AS ("countryCode" || '.' || "admin1Code" || '.' || "admin2Code") STORED`);

    await queryRunner.query(
      `
      INSERT INTO "geodata_admin1"
      SELECT DISTINCT
        "admin1Key" AS "key",
        "admin1Name" AS "name"
      FROM geodata_places
      WHERE "admin1Name" IS NOT NULL`,
    );

    await queryRunner.query(
      `
      INSERT INTO "geodata_admin2"
      SELECT DISTINCT
        "admin2Key" AS "key",
        "admin2Name" AS "name"
      FROM geodata_places
      WHERE "admin2Name" IS NOT NULL`,
    );

    await queryRunner.query(`
      UPDATE geodata_places
      SET "admin1Name" = admin1.name
      FROM geodata_admin1 admin1
      WHERE admin1.key = "admin1Key"`);

    await queryRunner.query(`
      UPDATE geodata_places
      SET "admin2Name" = admin2.name
      FROM geodata_admin2 admin2
      WHERE admin2.key = "admin2Key";`);

    await queryRunner.query(
      `
      INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value")
      VALUES ($1, $2, $3, $4, $5, $6)`,
      ['immich', 'public', 'geodata_places', 'GENERATED_COLUMN', 'admin1Key', '"countryCode" || \'.\' || "admin1Code"'],
    );

    await queryRunner.query(
      `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value")
      VALUES ($1, $2, $3, $4, $5, $6)`,
      [
        'immich',
        'public',
        'geodata_places',
        'GENERATED_COLUMN',
        'admin2Key',
        '"countryCode" || \'.\' || "admin1Code" || \'.\' || "admin2Code"',
      ],
    );
  }
}
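As a sanity check after this migration, the new trigram indexes can be exercised directly. A minimal sketch, assuming access to the application's TypeORM DataSource as dataSource and an arbitrary search term; it is not part of the migration itself:

  // hypothetical ad-hoc query using the f_unaccent() helper and pg_trgm operators created above
  const rows = await dataSource.query(
    `SELECT name, "admin1Name", "countryCode"
       FROM geodata_places
      WHERE f_unaccent(name) %>> f_unaccent($1)
      ORDER BY f_unaccent(name) <->>> f_unaccent($1)
      LIMIT 5`,
    ['sao paulo'],
  );
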
@@ -0,0 +1,18 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class GeonamesEnhancement1708116312820 implements MigrationInterface {
  name = 'GeonamesEnhancement1708116312820'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`ALTER TABLE geodata_places ADD COLUMN "alternateNames" varchar`);
    await queryRunner.query(`
      CREATE INDEX idx_geodata_places_admin2_alternate_names
      ON geodata_places
      USING gin (f_unaccent("alternateNames") gin_trgm_ops)`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`ALTER TABLE geodata_places DROP COLUMN "alternateNames"`);
  }

}
@@ -2,7 +2,7 @@ import {
  citiesFile,
  geodataAdmin1Path,
  geodataAdmin2Path,
  geodataCitites500Path,
  geodataCities500Path,
  geodataDatePath,
  GeoPoint,
  IMetadataRepository,
@@ -10,13 +10,7 @@ import {
  ISystemMetadataRepository,
  ReverseGeocodeResult,
} from '@app/domain';
import {
  ExifEntity,
  GeodataAdmin1Entity,
  GeodataAdmin2Entity,
  GeodataPlacesEntity,
  SystemMetadataKey,
} from '@app/infra/entities';
import { ExifEntity, GeodataPlacesEntity, SystemMetadataKey } from '@app/infra/entities';
import { ImmichLogger } from '@app/infra/logger';
import { Inject } from '@nestjs/common';
import { InjectDataSource, InjectRepository } from '@nestjs/typeorm';
@@ -26,19 +20,16 @@ import { getName } from 'i18n-iso-countries';
import { createReadStream, existsSync } from 'node:fs';
import { readFile } from 'node:fs/promises';
import * as readLine from 'node:readline';
import { DataSource, DeepPartial, QueryRunner, Repository } from 'typeorm';
import { DataSource, QueryRunner, Repository } from 'typeorm';
import { QueryDeepPartialEntity } from 'typeorm/query-builder/QueryPartialEntity.js';
import { DummyValue, GenerateSql } from '../infra.util';

type GeoEntity = GeodataPlacesEntity | GeodataAdmin1Entity | GeodataAdmin2Entity;
type GeoEntityClass = typeof GeodataPlacesEntity | typeof GeodataAdmin1Entity | typeof GeodataAdmin2Entity;

export class MetadataRepository implements IMetadataRepository {
  constructor(
    @InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
    @InjectRepository(GeodataPlacesEntity) private readonly geodataPlacesRepository: Repository<GeodataPlacesEntity>,
    @InjectRepository(GeodataAdmin1Entity) private readonly geodataAdmin1Repository: Repository<GeodataAdmin1Entity>,
    @InjectRepository(GeodataAdmin2Entity) private readonly geodataAdmin2Repository: Repository<GeodataAdmin2Entity>,
    @Inject(ISystemMetadataRepository) private readonly systemMetadataRepository: ISystemMetadataRepository,
    @Inject(ISystemMetadataRepository)
    private readonly systemMetadataRepository: ISystemMetadataRepository,
    @InjectDataSource() private dataSource: DataSource,
  ) {}

@@ -54,7 +45,6 @@ export class MetadataRepository implements IMetadataRepository {
      return;
    }

    this.logger.log('Importing geodata to database from file');
    await this.importGeodata();

    await this.systemMetadataRepository.set(SystemMetadataKey.REVERSE_GEOCODING_STATE, {
@@ -69,12 +59,14 @@ export class MetadataRepository implements IMetadataRepository {
    const queryRunner = this.dataSource.createQueryRunner();
    await queryRunner.connect();

    const admin1 = await this.loadAdmin(geodataAdmin1Path);
    const admin2 = await this.loadAdmin(geodataAdmin2Path);

    try {
      await queryRunner.startTransaction();

      await this.loadCities500(queryRunner);
      await this.loadAdmin1(queryRunner);
      await this.loadAdmin2(queryRunner);
      await queryRunner.manager.clear(GeodataPlacesEntity);
      await this.loadCities500(queryRunner, admin1, admin2);

      await queryRunner.commitTransaction();
    } catch (error) {
@@ -86,76 +78,73 @@ export class MetadataRepository implements IMetadataRepository {
    }
  }

  private async loadGeodataToTableFromFile<T extends GeoEntity>(
  private async loadGeodataToTableFromFile(
    queryRunner: QueryRunner,
    lineToEntityMapper: (lineSplit: string[]) => T,
    lineToEntityMapper: (lineSplit: string[]) => GeodataPlacesEntity,
    filePath: string,
    entity: GeoEntityClass,
  ) {
    if (!existsSync(filePath)) {
      this.logger.error(`Geodata file ${filePath} not found`);
      throw new Error(`Geodata file ${filePath} not found`);
    }
    await queryRunner.manager.clear(entity);

    const input = createReadStream(filePath);
    let buffer: DeepPartial<T>[] = [];
    const lineReader = readLine.createInterface({ input: input });
    let bufferGeodata: QueryDeepPartialEntity<GeodataPlacesEntity>[] = [];
    const lineReader = readLine.createInterface({ input });

    for await (const line of lineReader) {
      const lineSplit = line.split('\t');
      buffer.push(lineToEntityMapper(lineSplit));
      if (buffer.length > 1000) {
        await queryRunner.manager.save(buffer);
        buffer = [];
      const geoData = lineToEntityMapper(lineSplit);
      bufferGeodata.push(geoData);
      if (bufferGeodata.length > 1000) {
        await queryRunner.manager.upsert(GeodataPlacesEntity, bufferGeodata, ['id']);
        bufferGeodata = [];
      }
    }
    await queryRunner.manager.save(buffer);
    await queryRunner.manager.upsert(GeodataPlacesEntity, bufferGeodata, ['id']);
  }

  private async loadCities500(queryRunner: QueryRunner) {
    await this.loadGeodataToTableFromFile<GeodataPlacesEntity>(
  private async loadCities500(
    queryRunner: QueryRunner,
    admin1Map: Map<string, string>,
    admin2Map: Map<string, string>,
  ) {
    await this.loadGeodataToTableFromFile(
      queryRunner,
      (lineSplit: string[]) =>
        this.geodataPlacesRepository.create({
          id: Number.parseInt(lineSplit[0]),
          name: lineSplit[1],
          alternateNames: lineSplit[3],
          latitude: Number.parseFloat(lineSplit[4]),
          longitude: Number.parseFloat(lineSplit[5]),
          countryCode: lineSplit[8],
          admin1Code: lineSplit[10],
          admin2Code: lineSplit[11],
          modificationDate: lineSplit[18],
          admin1Name: admin1Map.get(`${lineSplit[8]}.${lineSplit[10]}`),
          admin2Name: admin2Map.get(`${lineSplit[8]}.${lineSplit[10]}.${lineSplit[11]}`),
        }),
      geodataCitites500Path,
      GeodataPlacesEntity,
      geodataCities500Path,
    );
  }

  private async loadAdmin1(queryRunner: QueryRunner) {
    await this.loadGeodataToTableFromFile<GeodataAdmin1Entity>(
      queryRunner,
      (lineSplit: string[]) =>
        this.geodataAdmin1Repository.create({
          key: lineSplit[0],
          name: lineSplit[1],
        }),
      geodataAdmin1Path,
      GeodataAdmin1Entity,
    );
  }
  private async loadAdmin(filePath: string) {
    if (!existsSync(filePath)) {
      this.logger.error(`Geodata file ${filePath} not found`);
      throw new Error(`Geodata file ${filePath} not found`);
    }

  private async loadAdmin2(queryRunner: QueryRunner) {
    await this.loadGeodataToTableFromFile<GeodataAdmin2Entity>(
      queryRunner,
      (lineSplit: string[]) =>
        this.geodataAdmin2Repository.create({
          key: lineSplit[0],
          name: lineSplit[1],
        }),
      geodataAdmin2Path,
      GeodataAdmin2Entity,
    );
    const input = createReadStream(filePath);
    const lineReader = readLine.createInterface({ input: input });

    const adminMap = new Map<string, string>();
    for await (const line of lineReader) {
      const lineSplit = line.split('\t');
      adminMap.set(lineSplit[0], lineSplit[1]);
    }

    return adminMap;
  }

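The loadAdmin maps replace the former geodata_admin1/geodata_admin2 join tables: the lookup keys are composed the same way the old generated admin1Key/admin2Key columns were ("countryCode.admin1Code" and "countryCode.admin1Code.admin2Code"). A small sketch of that lookup, with illustrative values only:

  // illustrative entries only: real data comes from admin1CodesASCII.txt and admin2Codes.txt
  const admin1 = new Map<string, string>([['US.CA', 'California']]);
  const admin2 = new Map<string, string>([['US.CA.075', 'City and County of San Francisco']]);
  admin1.get('US.CA'); // -> admin1Name
  admin2.get('US.CA.075'); // -> admin2Name
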
  async teardown() {
@@ -167,8 +156,6 @@ export class MetadataRepository implements IMetadataRepository {

    const response = await this.geodataPlacesRepository
      .createQueryBuilder('geoplaces')
      .leftJoinAndSelect('geoplaces.admin1', 'admin1')
      .leftJoinAndSelect('geoplaces.admin2', 'admin2')
      .where('earth_box(ll_to_earth(:latitude, :longitude), 25000) @> "earthCoord"', point)
      .orderBy('earth_distance(ll_to_earth(:latitude, :longitude), "earthCoord")')
      .limit(1)
@@ -183,9 +170,9 @@ export class MetadataRepository implements IMetadataRepository {

    this.logger.verbose(`Raw: ${JSON.stringify(response, null, 2)}`);

    const { countryCode, name: city, admin1, admin2 } = response;
    const { countryCode, name: city, admin1Name, admin2Name } = response;
    const country = getName(countryCode, 'en') ?? null;
    const stateParts = [admin2?.name, admin1?.name].filter((name) => !!name);
    const stateParts = [admin2Name, admin1Name].filter((name) => !!name);
    const state = stateParts.length > 0 ? stateParts.join(', ') : null;

    return { country, state, city };

@@ -12,7 +12,13 @@ import {
  SmartSearchOptions,
} from '@app/domain';
import { getCLIPModelInfo } from '@app/domain/smart-info/smart-info.constant';
import { AssetEntity, AssetFaceEntity, SmartInfoEntity, SmartSearchEntity } from '@app/infra/entities';
import {
  AssetEntity,
  AssetFaceEntity,
  GeodataPlacesEntity,
  SmartInfoEntity,
  SmartSearchEntity,
} from '@app/infra/entities';
import { ImmichLogger } from '@app/infra/logger';
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
@@ -31,6 +37,7 @@ export class SearchRepository implements ISearchRepository {
    @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
    @InjectRepository(AssetFaceEntity) private assetFaceRepository: Repository<AssetFaceEntity>,
    @InjectRepository(SmartSearchEntity) private smartSearchRepository: Repository<SmartSearchEntity>,
    @InjectRepository(GeodataPlacesEntity) private readonly geodataPlacesRepository: Repository<GeodataPlacesEntity>,
  ) {
    this.faceColumns = this.assetFaceRepository.manager.connection
      .getMetadata(AssetFaceEntity)
@@ -172,6 +179,27 @@ export class SearchRepository implements ISearchRepository {
    }));
  }

  @GenerateSql({ params: [DummyValue.STRING] })
  async searchPlaces(placeName: string): Promise<GeodataPlacesEntity[]> {
    return await this.geodataPlacesRepository
      .createQueryBuilder('geoplaces')
      .where(`f_unaccent(name) %>> f_unaccent(:placeName)`)
      .orWhere(`f_unaccent("admin2Name") %>> f_unaccent(:placeName)`)
      .orWhere(`f_unaccent("admin1Name") %>> f_unaccent(:placeName)`)
      .orWhere(`f_unaccent("alternateNames") %>> f_unaccent(:placeName)`)
      .orderBy(
        `
        COALESCE(f_unaccent(name) <->>> f_unaccent(:placeName), 0) +
        COALESCE(f_unaccent("admin2Name") <->>> f_unaccent(:placeName), 0) +
        COALESCE(f_unaccent("admin1Name") <->>> f_unaccent(:placeName), 0) +
        COALESCE(f_unaccent("alternateNames") <->>> f_unaccent(:placeName), 0)
        `,
      )
      .setParameters({ placeName })
      .limit(20)
      .getMany();
  }

  async upsert(smartInfo: Partial<SmartInfoEntity>, embedding?: Embedding): Promise<void> {
    await this.repository.upsert(smartInfo, { conflictPaths: ['assetId'] });
    if (!smartInfo.assetId || !embedding) {

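The operators in searchPlaces come from pg_trgm: %>> is the strict word-similarity match, which only passes rows above pg_trgm.strict_word_similarity_threshold, and <->>> is the corresponding distance, so the orderBy ranks rows by the summed distance across name, the admin names, and alternateNames (COALESCE handles the nullable columns). A hypothetical way to tighten matching for a session, assuming raw query access through the injected repository; this is not part of the change:

  // hypothetical: raise the strict word-similarity threshold (PostgreSQL default is 0.5)
  await geodataPlacesRepository.query(`SET pg_trgm.strict_word_similarity_threshold = 0.7`);
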
@@ -238,3 +238,37 @@ FROM
WHERE
  res.distance <= $3
COMMIT

-- SearchRepository.searchPlaces
SELECT
  "geoplaces"."id" AS "geoplaces_id",
  "geoplaces"."name" AS "geoplaces_name",
  "geoplaces"."longitude" AS "geoplaces_longitude",
  "geoplaces"."latitude" AS "geoplaces_latitude",
  "geoplaces"."countryCode" AS "geoplaces_countryCode",
  "geoplaces"."admin1Code" AS "geoplaces_admin1Code",
  "geoplaces"."admin2Code" AS "geoplaces_admin2Code",
  "geoplaces"."admin1Name" AS "geoplaces_admin1Name",
  "geoplaces"."admin2Name" AS "geoplaces_admin2Name",
  "geoplaces"."alternateNames" AS "geoplaces_alternateNames",
  "geoplaces"."modificationDate" AS "geoplaces_modificationDate"
FROM
  "geodata_places" "geoplaces"
WHERE
  f_unaccent (name) %>> f_unaccent ($1)
  OR f_unaccent ("admin2Name") %>> f_unaccent ($1)
  OR f_unaccent ("admin1Name") %>> f_unaccent ($1)
  OR f_unaccent ("alternateNames") %>> f_unaccent ($1)
ORDER BY
  COALESCE(f_unaccent (name) <->>> f_unaccent ($1), 0) + COALESCE(
    f_unaccent ("admin2Name") <->>> f_unaccent ($1),
    0
  ) + COALESCE(
    f_unaccent ("admin1Name") <->>> f_unaccent ($1),
    0
  ) + COALESCE(
    f_unaccent ("alternateNames") <->>> f_unaccent ($1),
    0
  ) ASC
LIMIT
  20