Merge remote-tracking branch 'origin/main' into feature/rearrange-buttons-2

Author: idubnori
Committed: 2025-11-17 09:35:03 +09:00
505 changed files with 34122 additions and 8163 deletions

.github/mise.toml (new file)

@@ -0,0 +1,10 @@
[tasks.install]
run = "pnpm install --filter github --frozen-lockfile"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."


@@ -165,7 +165,7 @@ jobs:
          fi
      - name: Publish Android Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: release-apk-signed
          path: mobile/build/app/outputs/flutter-apk/*.apk
@@ -188,8 +188,8 @@ jobs:
    needs: pre-job
    permissions:
      contents: read
-    # Run on main branch or workflow_dispatch
-    if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true && github.ref == 'refs/heads/main' }}
+    # Run on main branch or workflow_dispatch, or on PRs/other branches (build only, no upload)
+    if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
    runs-on: macos-latest
    steps:
@@ -303,12 +303,20 @@ jobs:
          APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
          APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
          ENVIRONMENT: ${{ inputs.environment || 'development' }}
+          BUNDLE_ID_SUFFIX: ${{ inputs.environment == 'production' && '' || 'development' }}
+          GITHUB_REF: ${{ github.ref }}
        working-directory: ./mobile/ios
        run: |
-          if [[ "$ENVIRONMENT" == "development" ]]; then
-            bundle exec fastlane gha_testflight_dev
+          # Only upload to TestFlight on main branch
+          if [[ "$GITHUB_REF" == "refs/heads/main" ]]; then
+            if [[ "$ENVIRONMENT" == "development" ]]; then
+              bundle exec fastlane gha_testflight_dev
+            else
+              bundle exec fastlane gha_release_prod
+            fi
          else
-            bundle exec fastlane gha_release_prod
+            # Build only, no TestFlight upload for non-main branches
+            bundle exec fastlane gha_build_only
          fi
      - name: Clean up keychain
@@ -317,7 +325,7 @@ jobs:
          security delete-keychain build.keychain || true
      - name: Upload IPA artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ios-release-ipa
          path: mobile/ios/Runner.ipa


@@ -84,7 +84,7 @@ jobs:
          token: ${{ steps.token.outputs.token }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
@@ -105,7 +105,7 @@ jobs:
      - name: Generate docker image tags
        id: metadata
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
        with:
          flavor: |
            latest=false


@@ -35,7 +35,7 @@ jobs:
    needs: [get_body, should_run]
    if: ${{ needs.should_run.outputs.should_run == 'true' }}
    container:
-      image: ghcr.io/immich-app/mdq:main@sha256:6b8450bfc06770af1af66bce9bf2ced7d1d9b90df1a59fc4c83a17777a9f6723
+      image: ghcr.io/immich-app/mdq:main@sha256:9c905a4ff69f00c4b2f98b40b6090ab3ab18d1a15ed1379733b8691aa1fcb271
    outputs:
      checked: ${{ steps.get_checkbox.outputs.checked }}
    steps:


@@ -57,7 +57,7 @@ jobs:
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+        uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
-        uses: github/codeql-action/autobuild@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+        uses: github/codeql-action/autobuild@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
      # Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
      #   ./location_of_script_within_repo/buildscript.sh
      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+        uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
        with:
          category: '/language:${{matrix.language}}'


@@ -85,7 +85,7 @@ jobs:
        run: pnpm build
      - name: Upload build output
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: docs-build-output
          path: docs/build/


@@ -174,7 +174,7 @@ jobs:
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        working-directory: 'deployment/modules/cloudflare/docs'
-        run: 'mise run tf apply'
+        run: 'mise run //deployment:tf apply'
      - name: Deploy Docs Subdomain Output
        id: docs-output
@@ -186,7 +186,7 @@ jobs:
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        working-directory: 'deployment/modules/cloudflare/docs'
        run: |
-          mise run tf output -- -json | jq -r '
+          mise run //deployment:tf output -- -json | jq -r '
            "projectName=\(.pages_project_name.value)",
            "subdomain=\(.immich_app_branch_subdomain.value)"
          ' >> $GITHUB_OUTPUT
@@ -211,7 +211,7 @@ jobs:
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        working-directory: 'deployment/modules/cloudflare/docs-release'
-        run: 'mise run tf apply'
+        run: 'mise run //deployment:tf apply'
      - name: Comment
        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0


@@ -39,7 +39,7 @@ jobs:
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        working-directory: 'deployment/modules/cloudflare/docs'
-        run: 'mise run tf destroy -- -refresh=false'
+        run: 'mise run //deployment:tf destroy -- -refresh=false'
      - name: Comment
        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0


@@ -62,7 +62,7 @@ jobs:
          ref: main
      - name: Install uv
-        uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
      - name: Setup pnpm
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
@@ -138,7 +138,7 @@ jobs:
          persist-credentials: false
      - name: Download APK
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: release-apk-signed
          github-token: ${{ steps.generate-token.outputs.token }}

.github/workflows/release-pr.yml (new file)

@@ -0,0 +1,170 @@
name: Manage release PR
on:
workflow_dispatch:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
permissions: {}
jobs:
bump:
runs-on: ubuntu-latest
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
ref: main
- name: Install uv
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Determine release type
id: bump-type
uses: ietf-tools/semver-action@c90370b2958652d71c06a3484129a4d423a6d8a8 # v1.11.0
with:
token: ${{ steps.generate-token.outputs.token }}
- name: Bump versions
env:
TYPE: ${{ steps.bump-type.outputs.bump }}
run: |
if [ "$TYPE" == "none" ]; then
exit 1 # TODO: Is there a cleaner way to abort the workflow?
fi
misc/release/pump-version.sh -s $TYPE -m true
- name: Manage Outline release document
id: outline
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
NEXT_VERSION: ${{ steps.bump-type.outputs.next }}
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const fs = require('fs');
const outlineKey = process.env.OUTLINE_API_KEY;
const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9'
const collectionId = 'e2910656-714c-4871-8721-447d9353bd73';
const baseUrl = 'https://outline.immich.cloud';
const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ parentDocumentId })
});
if (!listResponse.ok) {
throw new Error(`Outline list failed: ${listResponse.statusText}`);
}
const listData = await listResponse.json();
const allDocuments = listData.data || [];
const document = allDocuments.find(doc => doc.title === 'next');
let documentId;
let documentUrl;
let documentText;
if (!document) {
// Create new document
console.log('No existing document found. Creating new one...');
const notesTmpl = fs.readFileSync('misc/release/notes.tmpl', 'utf8');
const createResponse = await fetch(`${baseUrl}/api/documents.create`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
title: 'next',
text: notesTmpl,
collectionId: collectionId,
parentDocumentId: parentDocumentId,
publish: true
})
});
if (!createResponse.ok) {
throw new Error(`Failed to create document: ${createResponse.statusText}`);
}
const createData = await createResponse.json();
documentId = createData.data.id;
const urlId = createData.data.urlId;
documentUrl = `${baseUrl}/doc/next-${urlId}`;
documentText = createData.data.text || '';
console.log(`Created new document: ${documentUrl}`);
} else {
documentId = document.id;
const docPath = document.url;
documentUrl = `${baseUrl}${docPath}`;
documentText = document.text || '';
console.log(`Found existing document: ${documentUrl}`);
}
// Generate GitHub release notes
console.log('Generating GitHub release notes...');
const releaseNotesResponse = await github.rest.repos.generateReleaseNotes({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: `${process.env.NEXT_VERSION}`,
});
// Combine the content
const changelog = `
# ${process.env.NEXT_VERSION}
${documentText}
${releaseNotesResponse.data.body}
---
`
const existingChangelog = fs.existsSync('CHANGELOG.md') ? fs.readFileSync('CHANGELOG.md', 'utf8') : '';
fs.writeFileSync('CHANGELOG.md', changelog + existingChangelog, 'utf8');
core.setOutput('document_url', documentUrl);
- name: Create PR
id: create-pr
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.generate-token.outputs.token }}
commit-message: 'chore: release ${{ steps.bump-type.outputs.next }}'
title: 'chore: release ${{ steps.bump-type.outputs.next }}'
body: 'Release notes: ${{ steps.outline.outputs.document_url }}'
labels: 'changelog:skip'
branch: 'release/next'
draft: true


@@ -382,6 +382,7 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: false
+          submodules: 'recursive'
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
@@ -562,7 +563,7 @@ jobs:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Install uv
-        uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
        # with:

cli/mise.toml (new file)

@@ -0,0 +1,29 @@
[tasks.install]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks.build]
env._.path = "./node_modules/.bin"
run = "vite build"
[tasks.test]
env._.path = "./node_modules/.bin"
run = "vite"
[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."lint-fix"]
run = { task = "lint --fix" }
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"


@@ -1,6 +1,6 @@
{
  "name": "@immich/cli",
-  "version": "2.2.100",
+  "version": "2.2.101",
  "description": "Command Line Interface (CLI) for Immich",
  "type": "module",
  "exports": "./dist/index.js",
@@ -20,7 +20,7 @@
    "@types/lodash-es": "^4.17.12",
    "@types/micromatch": "^4.0.9",
    "@types/mock-fs": "^4.13.1",
-    "@types/node": "^22.18.12",
+    "@types/node": "^22.19.0",
    "@vitest/coverage-v8": "^3.0.0",
    "byte-size": "^9.0.0",
    "cli-progress": "^3.12.0",

deployment/mise.toml (new file)

@@ -0,0 +1,20 @@
[tools]
terragrunt = "0.91.2"
opentofu = "1.10.6"
[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"
[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"
[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"
[tasks."tf:init"]
run = { task = "tf init -- -reconfigure" }
dir = "{{cwd}}"


@@ -41,6 +41,7 @@ services:
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
+      - ../plugins:/build/corePlugin
    env_file:
      - .env
    environment:


@@ -83,7 +83,7 @@ services:
    container_name: immich_prometheus
    ports:
      - 9090:9090
-    image: prom/prometheus@sha256:23031bfe0e74a13004252caaa74eccd0d62b6c6e7a04711d5b8bf5b7e113adc7
+    image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus-data:/prometheus


@@ -10,16 +10,19 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
## Prerequisites

-You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
+You must install pgvector as it is a prerequisite for VectorChord.
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).

You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.

-:::note
-Immich is known to work with Postgres versions `>= 14, < 18`.
-Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
+:::note Supported versions
+Immich is known to work with Postgres versions `>= 14, < 19`.
+VectorChord is known to work with pgvector versions `>= 0.7, < 0.9`.
+The Immich server will check the VectorChord version on startup to ensure compatibility, and refuse to start if a compatible version is not found.
+The current accepted range for VectorChord is `>= 0.3, < 0.6`.
:::
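For illustration, the prerequisite steps above might look like this on Debian/Ubuntu; the Postgres major version (16) and the config file path are assumptions for the sketch:

```bash
# Install pgvector from the PostgreSQL Apt repository (NN = your Postgres version).
sudo apt install postgresql-16-pgvector

# After installing VectorChord per the linked instructions, preload it at server start.
# If shared_preload_libraries is already set, append vchord.so with a comma instead.
echo "shared_preload_libraries = 'vchord.so'" | sudo tee -a /etc/postgresql/16/main/postgresql.conf
sudo systemctl restart postgresql
```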
## Specifying the connection URL


@@ -12,3 +12,13 @@ pnpm run migrations:generate <migration-name>
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor.

The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.

+## Reverting a Migration
+
+If you need to undo the most recently applied migration (for example, while developing or testing schema changes), run:
+
+```bash
+pnpm run migrations:revert
+```
+
+This command rolls back the latest migration and returns the database schema to its previous state.


@@ -5,7 +5,7 @@ sidebar_position: 2
# Setup

:::note
-If there's a feature you're planning to work on, just give us a heads up in [Discord](https://discord.com/channels/979116623879368755/1071165397228855327) so we can:
+If there's a feature you're planning to work on, just give us a heads up in [#contributing](https://discord.com/channels/979116623879368755/1071165397228855327) on [our Discord](https://discord.immich.app) so we can:

1. Let you know if it's something we would accept into Immich
2. Provide any guidance on how something like that would ideally be implemented


@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"
```sql title="Count by tag"
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
-JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
+JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
```

```sql title="Count by tag (per user)"
SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
-JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
+JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
```


@@ -16,48 +16,76 @@ The default configuration looks like this:
```json ```json
{ {
"ffmpeg": {
"crf": 23,
"threads": 0,
"preset": "ultrafast",
"targetVideoCodec": "h264",
"acceptedVideoCodecs": ["h264"],
"targetAudioCodec": "aac",
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
"acceptedContainers": ["mov", "ogg", "webm"],
"targetResolution": "720",
"maxBitrate": "0",
"bframes": -1,
"refs": 0,
"gopSize": 0,
"temporalAQ": false,
"cqMode": "auto",
"twoPass": false,
"preferredHwDevice": "auto",
"transcode": "required",
"tonemap": "hable",
"accel": "disabled",
"accelDecode": false
},
"backup": { "backup": {
"database": { "database": {
"enabled": true,
"cronExpression": "0 02 * * *", "cronExpression": "0 02 * * *",
"enabled": true,
"keepLastAmount": 14 "keepLastAmount": 14
} }
}, },
"ffmpeg": {
"accel": "disabled",
"accelDecode": false,
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
"acceptedContainers": ["mov", "ogg", "webm"],
"acceptedVideoCodecs": ["h264"],
"bframes": -1,
"cqMode": "auto",
"crf": 23,
"gopSize": 0,
"maxBitrate": "0",
"preferredHwDevice": "auto",
"preset": "ultrafast",
"refs": 0,
"targetAudioCodec": "aac",
"targetResolution": "720",
"targetVideoCodec": "h264",
"temporalAQ": false,
"threads": 0,
"tonemap": "hable",
"transcode": "required",
"twoPass": false
},
"image": {
"colorspace": "p3",
"extractEmbedded": false,
"fullsize": {
"enabled": false,
"format": "jpeg",
"quality": 80
},
"preview": {
"format": "jpeg",
"quality": 80,
"size": 1440
},
"thumbnail": {
"format": "webp",
"quality": 80,
"size": 250
}
},
"job": { "job": {
"backgroundTask": { "backgroundTask": {
"concurrency": 5 "concurrency": 5
}, },
"smartSearch": { "faceDetection": {
"concurrency": 2 "concurrency": 2
}, },
"library": {
"concurrency": 5
},
"metadataExtraction": { "metadataExtraction": {
"concurrency": 5 "concurrency": 5
}, },
"faceDetection": { "migration": {
"concurrency": 2 "concurrency": 5
},
"notifications": {
"concurrency": 5
},
"ocr": {
"concurrency": 1
}, },
"search": { "search": {
"concurrency": 5 "concurrency": 5
@@ -65,20 +93,23 @@ The default configuration looks like this:
"sidecar": { "sidecar": {
"concurrency": 5 "concurrency": 5
}, },
"library": { "smartSearch": {
"concurrency": 5 "concurrency": 2
},
"migration": {
"concurrency": 5
}, },
"thumbnailGeneration": { "thumbnailGeneration": {
"concurrency": 3 "concurrency": 3
}, },
"videoConversion": { "videoConversion": {
"concurrency": 1 "concurrency": 1
}
},
"library": {
"scan": {
"cronExpression": "0 0 * * *",
"enabled": true
}, },
"notifications": { "watch": {
"concurrency": 5 "enabled": false
} }
}, },
"logging": { "logging": {
@@ -86,8 +117,11 @@ The default configuration looks like this:
"level": "log" "level": "log"
}, },
"machineLearning": { "machineLearning": {
"enabled": true, "availabilityChecks": {
"urls": ["http://immich-machine-learning:3003"], "enabled": true,
"interval": 30000,
"timeout": 2000
},
"clip": { "clip": {
"enabled": true, "enabled": true,
"modelName": "ViT-B-32__openai" "modelName": "ViT-B-32__openai"
@@ -96,27 +130,59 @@ The default configuration looks like this:
"enabled": true, "enabled": true,
"maxDistance": 0.01 "maxDistance": 0.01
}, },
"enabled": true,
"facialRecognition": { "facialRecognition": {
"enabled": true, "enabled": true,
"modelName": "buffalo_l",
"minScore": 0.7,
"maxDistance": 0.5, "maxDistance": 0.5,
"minFaces": 3 "minFaces": 3,
} "minScore": 0.7,
"modelName": "buffalo_l"
},
"ocr": {
"enabled": true,
"maxResolution": 736,
"minDetectionScore": 0.5,
"minRecognitionScore": 0.8,
"modelName": "PP-OCRv5_mobile"
},
"urls": ["http://immich-machine-learning:3003"]
}, },
"map": { "map": {
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
"enabled": true, "enabled": true,
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json", "lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
},
"reverseGeocoding": {
"enabled": true
}, },
"metadata": { "metadata": {
"faces": { "faces": {
"import": false "import": false
} }
}, },
"newVersionCheck": {
"enabled": true
},
"nightlyTasks": {
"clusterNewFaces": true,
"databaseCleanup": true,
"generateMemories": true,
"missingThumbnails": true,
"startTime": "00:00",
"syncQuotaUsage": true
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"host": "",
"ignoreCert": false,
"password": "",
"port": 587,
"secure": false,
"username": ""
}
}
},
"oauth": { "oauth": {
"autoLaunch": false, "autoLaunch": false,
"autoRegister": true, "autoRegister": true,
@@ -128,70 +194,44 @@ The default configuration looks like this:
"issuerUrl": "", "issuerUrl": "",
"mobileOverrideEnabled": false, "mobileOverrideEnabled": false,
"mobileRedirectUri": "", "mobileRedirectUri": "",
"profileSigningAlgorithm": "none",
"roleClaim": "immich_role",
"scope": "openid email profile", "scope": "openid email profile",
"signingAlgorithm": "RS256", "signingAlgorithm": "RS256",
"profileSigningAlgorithm": "none",
"storageLabelClaim": "preferred_username", "storageLabelClaim": "preferred_username",
"storageQuotaClaim": "immich_quota" "storageQuotaClaim": "immich_quota",
"timeout": 30000,
"tokenEndpointAuthMethod": "client_secret_post"
}, },
"passwordLogin": { "passwordLogin": {
"enabled": true "enabled": true
}, },
"reverseGeocoding": {
"enabled": true
},
"server": {
"externalDomain": "",
"loginPageMessage": "",
"publicUsers": true
},
"storageTemplate": { "storageTemplate": {
"enabled": false, "enabled": false,
"hashVerificationEnabled": true, "hashVerificationEnabled": true,
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}" "template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
}, },
"image": { "templates": {
"thumbnail": { "email": {
"format": "webp", "albumInviteTemplate": "",
"size": 250, "albumUpdateTemplate": "",
"quality": 80 "welcomeTemplate": ""
}, }
"preview": {
"format": "jpeg",
"size": 1440,
"quality": 80
},
"colorspace": "p3",
"extractEmbedded": false
},
"newVersionCheck": {
"enabled": true
},
"trash": {
"enabled": true,
"days": 30
}, },
"theme": { "theme": {
"customCss": "" "customCss": ""
}, },
"library": { "trash": {
"scan": { "days": 30,
"enabled": true, "enabled": true
"cronExpression": "0 0 * * *"
},
"watch": {
"enabled": false
}
},
"server": {
"externalDomain": "",
"loginPageMessage": ""
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"ignoreCert": false,
"host": "",
"port": 587,
"username": "",
"password": ""
}
}
}, },
"user": { "user": {
"deleteDelay": 7 "deleteDelay": 7


@@ -149,29 +149,31 @@ Redis (Sentinel) URL example JSON before encoding:
## Machine Learning

| Variable | Description | Default | Containers |
| :--- | :--- | :---: | :--- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
+| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
-| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spinned up while inferencing. | `1` | machine learning |
+| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing. | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
+| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |

\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
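As a rough illustration only, a few of these variables could be set on the machine-learning container; the values below are placeholders, not recommendations:

```bash
# Hypothetical overrides for the immich-machine-learning container
# (equivalent entries can go in the service's `environment:` block).
export MACHINE_LEARNING_MODEL_TTL=600                  # unload idle models after 10 minutes
export MACHINE_LEARNING_REQUEST_THREADS=4              # request thread pool size
export MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL="ViT-B-32__openai"
```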

docs/mise.toml (new file)

@@ -0,0 +1,25 @@
[tasks.install]
run = "pnpm install --filter documentation --frozen-lockfile"
[tasks.start]
env._.path = "./node_modules/.bin"
run = "docusaurus --port 3005"
[tasks.build]
env._.path = "./node_modules/.bin"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]
[tasks.preview]
env._.path = "./node_modules/.bin"
run = "docusaurus serve"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."


@@ -1,4 +1,8 @@
[
+  {
+    "label": "v2.2.3",
+    "url": "https://docs.v2.2.3.archive.immich.app"
+  },
  {
    "label": "v2.2.2",
    "url": "https://docs.v2.2.2.archive.immich.app"


@@ -35,7 +35,7 @@ services:
      - 2285:2285

  redis:
-    image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
+    image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb

  database:
    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338

e2e/mise.toml (new file)

@@ -0,0 +1,29 @@
[tasks.install]
run = "pnpm install --filter immich-e2e --frozen-lockfile"
[tasks.test]
env._.path = "./node_modules/.bin"
run = "vitest --run"
[tasks."test-web"]
env._.path = "./node_modules/.bin"
run = "playwright test"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."lint-fix"]
run = { task = "lint --fix" }
[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"


@@ -1,6 +1,6 @@
{
  "name": "immich-e2e",
-  "version": "2.2.2",
+  "version": "2.2.3",
  "description": "",
  "main": "index.js",
  "type": "module",
@@ -25,7 +25,7 @@
    "@playwright/test": "^1.44.1",
    "@socket.io/component-emitter": "^3.1.2",
    "@types/luxon": "^3.4.2",
-    "@types/node": "^22.18.12",
+    "@types/node": "^22.19.0",
    "@types/oidc-provider": "^9.0.0",
    "@types/pg": "^8.15.1",
    "@types/pngjs": "^6.0.4",


@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { readFile, writeFile } from 'node:fs/promises';
import { basename, join } from 'node:path';
-import sharp from 'sharp';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
}) as DateTime<true>;

const yesterday = today.minus({ days: 1 });
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
// Generate unique color to ensure different checksums for each image
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
// Create a 100x100 solid color JPEG using Sharp
const imageBytes = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
// Add random suffix to filename to avoid collisions
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
const filepath = join(tempDir, uniqueFilename);
await writeFile(filepath, imageBytes);
// Filter out undefined values before writing EXIF
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
await exiftool.write(filepath, cleanExifData);
// Re-read the image bytes after EXIF has been written
const finalImageBytes = await readFile(filepath);
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
};
describe('/asset', () => {
  let admin: LoginResponseDto;
  let websocket: Socket;
@@ -1249,411 +1214,6 @@
      });
    });
describe('EXIF metadata extraction', () => {
describe('Additional date tag extraction', () => {
describe('Date-time vs time-only tag handling', () => {
it('should fall back to file timestamps when only time-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
// Exclude all date-time tags to force fallback to file timestamps
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
SonyDateTime2: undefined,
GPSDateStamp: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should prefer DateTimeOriginal over time-only tags', async () => {
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal, not TimeCreated
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
describe('GPSDateTime tag extraction', () => {
it('should extract GPSDateTime with GPS coordinates', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: 37.7749,
GPSLongitude: -122.4194,
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T12:30:00.000Z').getTime(),
);
});
});
describe('CreateDate tag extraction', () => {
it('should extract CreateDate when available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
CreateDate: '2023:11:15 10:30:00',
// Exclude other higher priority date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T10:30:00.000Z').getTime(),
);
});
});
describe('GPSDateStamp tag extraction', () => {
it('should fall back to file timestamps when only date-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
GPSLatitude: 51.5074,
GPSLongitude: -0.1278,
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
DateTimeOriginal: undefined,
CreateDate: undefined,
CreationDate: undefined,
GPSDateTime: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
});
/*
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
*
* NOT WRITABLE to JPEG:
* - MediaCreateDate: Can be read from video files but not written to JPEG
* - DateTimeCreated: Read-only tag in JPEG format
* - DateTimeUTC: Cannot be written to JPEG files
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
* - SubSecMediaCreateDate: Tag not defined for JPEG format
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
*
* WRITABLE but NOT READABLE from JPEG:
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
* - SubSecCreateDate: Can be written but not read back from JPEG
*
* EFFECTIVELY TESTABLE TAGS (writable and readable):
* - DateTimeOriginal ✓
* - CreateDate ✓
* - CreationDate ✓
* - GPSDateTime ✓
*
* The metadata service correctly handles non-readable tags and will fall back to
* file timestamps when only non-readable tags are present.
*/
describe('Date tag priority order', () => {
it('should respect the complete date tag priority order', async () => {
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
const testCases = [
{
name: 'DateTimeOriginal has highest priority among testable tags',
exifData: {
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-04-04T04:00:00.000Z',
},
{
name: 'CreationDate when DateTimeOriginal missing',
exifData: {
CreationDate: '2023:05:05 05:00:00', // TESTABLE
CreateDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-05-05T05:00:00.000Z',
},
{
name: 'CreationDate when standard EXIF tags missing',
exifData: {
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-07-07T07:00:00.000Z',
},
{
name: 'GPSDateTime when no other testable date tags present',
exifData: {
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
Make: 'SONY',
},
expectedDate: '2023-10-10T10:00:00.000Z',
},
];
for (const testCase of testCases) {
const { imageBytes, filename } = await createTestImageWithExif(
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
testCase.exifData,
);
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
expect(
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
`Date mismatch for: ${testCase.name}`,
).toBe(new Date(testCase.expectedDate).getTime());
}
});
});
describe('Edge cases for date tag handling', () => {
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
// Intentionally no GPSTimeStamp
// Exclude all other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should handle all testable date tags present to verify complete priority order', async () => {
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
// All TESTABLE date tags to JPEG format (writable AND readable)
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
// Note: Excluded non-testable tags:
// SubSec tags: writable but not readable from JPEG
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal as it has the highest priority among testable tags
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-04-04T04:00:00.000Z').getTime(),
);
});
it('should use CreationDate when SubSec tags are missing', async () => {
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
CreationDate: '2023:07:07 07:00:00', // WRITABLE
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
// Exclude SubSec and standard EXIF tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use CreationDate when available
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-07-07T07:00:00.000Z').getTime(),
);
});
it('should skip invalid date formats and use next valid tag', async () => {
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
// Note: Testing invalid date handling with only WRITABLE tags
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Valid date
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should skip invalid dates and use the first valid one (GPSDateTime)
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
});
});
  describe('POST /assets/exist', () => {
    it('ignores invalid deviceAssetIds', async () => {
      const response = await utils.checkExistingAssets(user1.accessToken, {


@@ -1,4 +1,4 @@
-import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
+import { LoginResponseDto, QueueCommand, QueueName, updateConfig } from '@immich/sdk';
import { cpSync, rmSync } from 'node:fs';
import { readFile } from 'node:fs/promises';
import { basename } from 'node:path';
@@ -17,28 +17,28 @@ describe('/jobs', () => {
describe('PUT /jobs', () => { describe('PUT /jobs', () => {
afterEach(async () => { afterEach(async () => {
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, { await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
await utils.jobCommand(admin.accessToken, JobName.FaceDetection, { await utils.queueCommand(admin.accessToken, QueueName.FaceDetection, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
await utils.jobCommand(admin.accessToken, JobName.SmartSearch, { await utils.queueCommand(admin.accessToken, QueueName.SmartSearch, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, { await utils.queueCommand(admin.accessToken, QueueName.DuplicateDetection, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
@@ -59,8 +59,8 @@ describe('/jobs', () => {
it('should queue metadata extraction for missing assets', async () => { it('should queue metadata extraction for missing assets', async () => {
const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`; const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, { await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: JobCommand.Pause, command: QueueCommand.Pause,
force: false, force: false,
}); });
@@ -77,20 +77,20 @@ describe('/jobs', () => {
expect(asset.exifInfo?.make).toBeNull(); expect(asset.exifInfo?.make).toBeNull();
} }
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, { await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: JobCommand.Empty, command: QueueCommand.Empty,
force: false, force: false,
}); });
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, { await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, { await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: JobCommand.Start, command: QueueCommand.Start,
force: false, force: false,
}); });
@@ -124,8 +124,8 @@ describe('/jobs', () => {
cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path); cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, { await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
command: JobCommand.Start, command: QueueCommand.Start,
force: false, force: false,
}); });
@@ -144,8 +144,8 @@ describe('/jobs', () => {
it('should queue thumbnail extraction for assets missing thumbs', async () => { it('should queue thumbnail extraction for assets missing thumbs', async () => {
const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`; const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Pause, command: QueueCommand.Pause,
force: false, force: false,
}); });
@@ -153,32 +153,32 @@ describe('/jobs', () => {
assetData: { bytes: await readFile(path), filename: basename(path) }, assetData: { bytes: await readFile(path), filename: basename(path) },
}); });
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction); await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration); await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
const assetBefore = await utils.getAssetInfo(admin.accessToken, id); const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
expect(assetBefore.thumbhash).toBeNull(); expect(assetBefore.thumbhash).toBeNull();
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Empty, command: QueueCommand.Empty,
force: false, force: false,
}); });
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction); await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration); await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Start, command: QueueCommand.Start,
force: false, force: false,
}); });
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction); await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration); await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
const assetAfter = await utils.getAssetInfo(admin.accessToken, id); const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
expect(assetAfter.thumbhash).not.toBeNull(); expect(assetAfter.thumbhash).not.toBeNull();
@@ -193,26 +193,26 @@ describe('/jobs', () => {
assetData: { bytes: await readFile(path), filename: basename(path) }, assetData: { bytes: await readFile(path), filename: basename(path) },
}); });
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction); await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration); await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
const assetBefore = await utils.getAssetInfo(admin.accessToken, id); const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path); cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Resume, command: QueueCommand.Resume,
force: false, force: false,
}); });
// This runs the missing thumbnail job // This runs the missing thumbnail job
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, { await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
command: JobCommand.Start, command: QueueCommand.Start,
force: false, force: false,
}); });
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction); await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration); await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
const assetAfter = await utils.getAssetInfo(admin.accessToken, id); const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
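For reference, a minimal sketch of the same pause/upload/resume flow written directly against the renamed SDK surface used above. The accessToken is assumed to come from a prior login, and the header object mirrors what the asBearerAuth helper produces in these tests.
import { QueueCommand, QueueName, runQueueCommandLegacy } from '@immich/sdk';
const pauseAndRestartMetadataQueue = async (accessToken: string): Promise<void> => {
  const headers = { Authorization: `Bearer ${accessToken}` };
  // Pause the queue so freshly uploaded assets are stored without extracted metadata.
  await runQueueCommandLegacy(
    { name: QueueName.MetadataExtraction, queueCommandDto: { command: QueueCommand.Pause, force: false } },
    { headers },
  );
  // ... upload assets and assert exifInfo is still empty ...
  // Resume the queue, then trigger the missing-metadata run, as the tests above do.
  await runQueueCommandLegacy(
    { name: QueueName.MetadataExtraction, queueCommandDto: { command: QueueCommand.Resume, force: false } },
    { headers },
  );
  await runQueueCommandLegacy(
    { name: QueueName.MetadataExtraction, queueCommandDto: { command: QueueCommand.Start, force: false } },
    { headers },
  );
};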

View File

@@ -1,6 +1,6 @@
import { import {
JobName,
LoginResponseDto, LoginResponseDto,
QueueName,
createStack, createStack,
deleteUserAdmin, deleteUserAdmin,
getMyUser, getMyUser,
@@ -328,7 +328,7 @@ describe('/admin/users', () => {
{ headers: asBearerAuth(user.accessToken) }, { headers: asBearerAuth(user.accessToken) },
); );
await utils.waitForQueueFinish(admin.accessToken, JobName.BackgroundTask); await utils.waitForQueueFinish(admin.accessToken, QueueName.BackgroundTask);
const { status, body } = await request(app) const { status, body } = await request(app)
.delete(`/admin/users/${user.userId}`) .delete(`/admin/users/${user.userId}`)

View File

@@ -1,178 +0,0 @@
#!/usr/bin/env node
/**
* Script to generate test images with additional EXIF date tags
* This creates actual JPEG images with embedded metadata for testing
* Images are generated into e2e/test-assets/metadata/dates/
*/
import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';
interface TestImage {
filename: string;
description: string;
exifTags: Record<string, string>;
}
const testImages: TestImage[] = [
{
filename: 'time-created.jpg',
description: 'Image with TimeCreated tag',
exifTags: {
TimeCreated: '2023:11:15 14:30:00',
Make: 'Canon',
Model: 'EOS R5',
},
},
{
filename: 'gps-datetime.jpg',
description: 'Image with GPSDateTime and coordinates',
exifTags: {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: '37.7749',
GPSLongitude: '-122.4194',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'datetime-utc.jpg',
description: 'Image with DateTimeUTC tag',
exifTags: {
DateTimeUTC: '2023:11:15 10:30:00',
Make: 'Nikon',
Model: 'D850',
},
},
{
filename: 'gps-datestamp.jpg',
description: 'Image with GPSDateStamp and GPSTimeStamp',
exifTags: {
GPSDateStamp: '2023:11:15',
GPSTimeStamp: '08:30:00',
GPSLatitude: '51.5074',
GPSLongitude: '-0.1278',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'sony-datetime2.jpg',
description: 'Sony camera image with SonyDateTime2 tag',
exifTags: {
SonyDateTime2: '2023:11:15 06:30:00',
Make: 'SONY',
Model: 'ILCE-7RM5',
},
},
{
filename: 'date-priority-test.jpg',
description: 'Image with multiple date tags to test priority',
exifTags: {
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
DateTimeOriginal: '2023:02:02 02:00:00',
SubSecCreateDate: '2023:03:03 03:00:00',
CreateDate: '2023:04:04 04:00:00',
CreationDate: '2023:05:05 05:00:00',
DateTimeCreated: '2023:06:06 06:00:00',
TimeCreated: '2023:07:07 07:00:00',
GPSDateTime: '2023:08:08 08:00:00',
DateTimeUTC: '2023:09:09 09:00:00',
GPSDateStamp: '2023:10:10',
SonyDateTime2: '2023:11:11 11:00:00',
},
},
{
filename: 'new-tags-only.jpg',
description: 'Image with only additional date tags (no standard tags)',
exifTags: {
TimeCreated: '2023:12:01 15:45:30',
GPSDateTime: '2023:12:01 13:45:30Z',
DateTimeUTC: '2023:12:01 13:45:30',
GPSDateStamp: '2023:12:01',
SonyDateTime2: '2023:12:01 08:45:30',
GPSLatitude: '40.7128',
GPSLongitude: '-74.0060',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
];
const generateTestImages = async (): Promise<void> => {
// Target directory: e2e/test-assets/metadata/dates/
// Current file is in: e2e/src/
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
console.log('Generating test images with additional EXIF date tags...');
console.log(`Target directory: ${targetDir}`);
for (const image of testImages) {
try {
const imagePath = join(targetDir, image.filename);
// Create unique JPEG file using Sharp
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
const jpegData = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
writeFileSync(imagePath, jpegData);
// Build exiftool command to add EXIF data
const exifArgs = Object.entries(image.exifTags)
.map(([tag, value]) => `-${tag}="${value}"`)
.join(' ');
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
console.log(`Creating ${image.filename}: ${image.description}`);
execSync(command, { stdio: 'pipe' });
// Verify the tags were written
const verifyCommand = `exiftool -json "${imagePath}"`;
const result = execSync(verifyCommand, { encoding: 'utf8' });
const metadata = JSON.parse(result)[0];
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
// Log first date tag found for verification
const firstDateTag = Object.keys(image.exifTags).find(
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
);
if (firstDateTag && metadata[firstDateTag]) {
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
}
} catch (error) {
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
}
}
console.log('\nTest image generation complete!');
console.log('Files created in:', targetDir);
console.log('\nTo test these images:');
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};
export { generateTestImages };
// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
generateTestImages().catch(console.error);
}
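With this generator script removed, the tests above build their fixtures at runtime through createTestImageWithExif. That helper is not part of this diff; the sketch below is a hypothetical version showing one way it could work, reusing the same sharp + exiftool approach as the deleted script.
import { execFile } from 'node:child_process';
import { mkdtemp, readFile, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { promisify } from 'node:util';
import sharp from 'sharp';
const execFileAsync = promisify(execFile);
// Hypothetical helper: create a small JPEG and stamp the requested EXIF tags on it.
export const createTestImageWithExif = async (
  filename: string,
  tags: Record<string, string | undefined>,
): Promise<{ imageBytes: Buffer; filename: string }> => {
  const dir = await mkdtemp(join(tmpdir(), 'exif-'));
  const path = join(dir, filename);
  const jpeg = await sharp({
    create: { width: 100, height: 100, channels: 3, background: { r: 10, g: 20, b: 30 } },
  })
    .jpeg({ quality: 90 })
    .toBuffer();
  await writeFile(path, jpeg);
  // Only pass defined tags; entries set to undefined mean "do not write this tag".
  const args = Object.entries(tags)
    .filter(([, value]) => value !== undefined)
    .map(([tag, value]) => `-${tag}=${value}`);
  await execFileAsync('exiftool', [...args, '-overwrite_original', path]);
  return { imageBytes: await readFile(path), filename };
};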

View File

@@ -1,5 +1,4 @@
import { import {
AllJobStatusResponseDto,
AssetMediaCreateDto, AssetMediaCreateDto,
AssetMediaResponseDto, AssetMediaResponseDto,
AssetResponseDto, AssetResponseDto,
@@ -7,11 +6,12 @@ import {
CheckExistingAssetsDto, CheckExistingAssetsDto,
CreateAlbumDto, CreateAlbumDto,
CreateLibraryDto, CreateLibraryDto,
JobCommandDto,
JobName,
MetadataSearchDto, MetadataSearchDto,
Permission, Permission,
PersonCreateDto, PersonCreateDto,
QueueCommandDto,
QueueName,
QueuesResponseDto,
SharedLinkCreateDto, SharedLinkCreateDto,
UpdateLibraryDto, UpdateLibraryDto,
UserAdminCreateDto, UserAdminCreateDto,
@@ -27,14 +27,14 @@ import {
createStack, createStack,
createUserAdmin, createUserAdmin,
deleteAssets, deleteAssets,
getAllJobsStatus,
getAssetInfo, getAssetInfo,
getConfig, getConfig,
getConfigDefaults, getConfigDefaults,
getQueuesLegacy,
login, login,
runQueueCommandLegacy,
scanLibrary, scanLibrary,
searchAssets, searchAssets,
sendJobCommand,
setBaseUrl, setBaseUrl,
signUpAdmin, signUpAdmin,
tagAssets, tagAssets,
@@ -477,8 +477,8 @@ export const utils = {
tagAssets: (accessToken: string, tagId: string, assetIds: string[]) => tagAssets: (accessToken: string, tagId: string, assetIds: string[]) =>
tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }), tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }),
jobCommand: async (accessToken: string, jobName: JobName, jobCommandDto: JobCommandDto) => queueCommand: async (accessToken: string, name: QueueName, queueCommandDto: QueueCommandDto) =>
sendJobCommand({ id: jobName, jobCommandDto }, { headers: asBearerAuth(accessToken) }), runQueueCommandLegacy({ name, queueCommandDto }, { headers: asBearerAuth(accessToken) }),
setAuthCookies: async (context: BrowserContext, accessToken: string, domain = '127.0.0.1') => setAuthCookies: async (context: BrowserContext, accessToken: string, domain = '127.0.0.1') =>
await context.addCookies([ await context.addCookies([
@@ -524,13 +524,13 @@ export const utils = {
await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) }); await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) });
}, },
isQueueEmpty: async (accessToken: string, queue: keyof AllJobStatusResponseDto) => { isQueueEmpty: async (accessToken: string, queue: keyof QueuesResponseDto) => {
const queues = await getAllJobsStatus({ headers: asBearerAuth(accessToken) }); const queues = await getQueuesLegacy({ headers: asBearerAuth(accessToken) });
const jobCounts = queues[queue].jobCounts; const jobCounts = queues[queue].jobCounts;
return !jobCounts.active && !jobCounts.waiting; return !jobCounts.active && !jobCounts.waiting;
}, },
waitForQueueFinish: (accessToken: string, queue: keyof AllJobStatusResponseDto, ms?: number) => { waitForQueueFinish: (accessToken: string, queue: keyof QueuesResponseDto, ms?: number) => {
// eslint-disable-next-line no-async-promise-executor // eslint-disable-next-line no-async-promise-executor
return new Promise<void>(async (resolve, reject) => { return new Promise<void>(async (resolve, reject) => {
const timeout = setTimeout(() => reject(new Error('Timed out waiting for queue to empty')), ms || 10_000); const timeout = setTimeout(() => reject(new Error('Timed out waiting for queue to empty')), ms || 10_000);
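A short usage sketch of the renamed helpers above, as they would appear inside a test body. QueueName and QueueCommand are the @immich/sdk enums imported in the spec files, and the enum values line up with the string keys of QueuesResponseDto, which is consistent with the jobs tests passing either form to waitForQueueFinish.
// Inside a test body, with admin coming from the suite's setup:
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
  command: QueueCommand.Empty,
  force: false,
});
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
// isQueueEmpty reads the legacy queues endpoint and checks both active and waiting counts.
const thumbnailsIdle = await utils.isQueueEmpty(admin.accessToken, QueueName.ThumbnailGeneration);
expect(thumbnailsIdle).toBe(true);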

View File

@@ -52,7 +52,7 @@ test.describe('User Administration', () => {
await page.goto(`/admin/users/${user.userId}`); await page.goto(`/admin/users/${user.userId}`);
await page.getByRole('button', { name: 'Edit user' }).click(); await page.getByRole('button', { name: 'Edit' }).click();
await expect(page.getByLabel('Admin User')).not.toBeChecked(); await expect(page.getByLabel('Admin User')).not.toBeChecked();
await page.getByText('Admin User').click(); await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).toBeChecked(); await expect(page.getByLabel('Admin User')).toBeChecked();
@@ -77,7 +77,7 @@ test.describe('User Administration', () => {
await page.goto(`/admin/users/${user.userId}`); await page.goto(`/admin/users/${user.userId}`);
await page.getByRole('button', { name: 'Edit user' }).click(); await page.getByRole('button', { name: 'Edit' }).click();
await expect(page.getByLabel('Admin User')).toBeChecked(); await expect(page.getByLabel('Admin User')).toBeChecked();
await page.getByText('Admin User').click(); await page.getByText('Admin User').click();
await expect(page.getByLabel('Admin User')).not.toBeChecked(); await expect(page.getByLabel('Admin User')).not.toBeChecked();

View File

@@ -32,6 +32,7 @@
"add_to_album_toggle": "Toggle selection for {album}", "add_to_album_toggle": "Toggle selection for {album}",
"add_to_albums": "Add to albums", "add_to_albums": "Add to albums",
"add_to_albums_count": "Add to albums ({count})", "add_to_albums_count": "Add to albums ({count})",
"add_to_bottom_bar": "Add to",
"add_to_shared_album": "Add to shared album", "add_to_shared_album": "Add to shared album",
"add_upload_to_stack": "Add upload to stack", "add_upload_to_stack": "Add upload to stack",
"add_url": "Add URL", "add_url": "Add URL",
@@ -430,6 +431,7 @@
"age_months": "Age {months, plural, one {# month} other {# months}}", "age_months": "Age {months, plural, one {# month} other {# months}}",
"age_year_months": "Age 1 year, {months, plural, one {# month} other {# months}}", "age_year_months": "Age 1 year, {months, plural, one {# month} other {# months}}",
"age_years": "{years, plural, other {Age #}}", "age_years": "{years, plural, other {Age #}}",
"album": "Album",
"album_added": "Album added", "album_added": "Album added",
"album_added_notification_setting_description": "Receive an email notification when you are added to a shared album", "album_added_notification_setting_description": "Receive an email notification when you are added to a shared album",
"album_cover_updated": "Album cover updated", "album_cover_updated": "Album cover updated",
@@ -475,6 +477,7 @@
"allow_edits": "Allow edits", "allow_edits": "Allow edits",
"allow_public_user_to_download": "Allow public user to download", "allow_public_user_to_download": "Allow public user to download",
"allow_public_user_to_upload": "Allow public user to upload", "allow_public_user_to_upload": "Allow public user to upload",
"allowed": "Allowed",
"alt_text_qr_code": "QR code image", "alt_text_qr_code": "QR code image",
"anti_clockwise": "Anti-clockwise", "anti_clockwise": "Anti-clockwise",
"api_key": "API Key", "api_key": "API Key",
@@ -1196,6 +1199,8 @@
"import_path": "Import path", "import_path": "Import path",
"in_albums": "In {count, plural, one {# album} other {# albums}}", "in_albums": "In {count, plural, one {# album} other {# albums}}",
"in_archive": "In archive", "in_archive": "In archive",
"in_year": "In {year}",
"in_year_selector": "In",
"include_archived": "Include archived", "include_archived": "Include archived",
"include_shared_albums": "Include shared albums", "include_shared_albums": "Include shared albums",
"include_shared_partner_assets": "Include shared partner assets", "include_shared_partner_assets": "Include shared partner assets",
@@ -1232,6 +1237,7 @@
"language_setting_description": "Select your preferred language", "language_setting_description": "Select your preferred language",
"large_files": "Large Files", "large_files": "Large Files",
"last": "Last", "last": "Last",
"last_months": "{count, plural, one {Last month} other {Last # months}}",
"last_seen": "Last seen", "last_seen": "Last seen",
"latest_version": "Latest Version", "latest_version": "Latest Version",
"latitude": "Latitude", "latitude": "Latitude",
@@ -1314,6 +1320,10 @@
"main_menu": "Main menu", "main_menu": "Main menu",
"make": "Make", "make": "Make",
"manage_geolocation": "Manage location", "manage_geolocation": "Manage location",
"manage_media_access_rationale": "This permission is required for proper handling of moving assets to the trash and restoring them from it.",
"manage_media_access_settings": "Open settings",
"manage_media_access_subtitle": "Allow the Immich app to manage and move media files.",
"manage_media_access_title": "Media Management Access",
"manage_shared_links": "Manage shared links", "manage_shared_links": "Manage shared links",
"manage_sharing_with_partners": "Manage sharing with partners", "manage_sharing_with_partners": "Manage sharing with partners",
"manage_the_app_settings": "Manage the app settings", "manage_the_app_settings": "Manage the app settings",
@@ -1377,6 +1387,7 @@
"more": "More", "more": "More",
"move": "Move", "move": "Move",
"move_off_locked_folder": "Move out of locked folder", "move_off_locked_folder": "Move out of locked folder",
"move_to": "Move to",
"move_to_lock_folder_action_prompt": "{count} added to the locked folder", "move_to_lock_folder_action_prompt": "{count} added to the locked folder",
"move_to_locked_folder": "Move to locked folder", "move_to_locked_folder": "Move to locked folder",
"move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder", "move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder",
@@ -1406,6 +1417,7 @@
"new_pin_code": "New PIN code", "new_pin_code": "New PIN code",
"new_pin_code_subtitle": "This is your first time accessing the locked folder. Create a PIN code to securely access this page", "new_pin_code_subtitle": "This is your first time accessing the locked folder. Create a PIN code to securely access this page",
"new_timeline": "New Timeline", "new_timeline": "New Timeline",
"new_update": "New update",
"new_user_created": "New user created", "new_user_created": "New user created",
"new_version_available": "NEW VERSION AVAILABLE", "new_version_available": "NEW VERSION AVAILABLE",
"newest_first": "Newest first", "newest_first": "Newest first",
@@ -1421,6 +1433,7 @@
"no_cast_devices_found": "No cast devices found", "no_cast_devices_found": "No cast devices found",
"no_checksum_local": "No checksum available - cannot fetch local assets", "no_checksum_local": "No checksum available - cannot fetch local assets",
"no_checksum_remote": "No checksum available - cannot fetch remote asset", "no_checksum_remote": "No checksum available - cannot fetch remote asset",
"no_devices": "No authorized devices",
"no_duplicates_found": "No duplicates were found.", "no_duplicates_found": "No duplicates were found.",
"no_exif_info_available": "No exif info available", "no_exif_info_available": "No exif info available",
"no_explore_results_message": "Upload more photos to explore your collection.", "no_explore_results_message": "Upload more photos to explore your collection.",
@@ -1437,6 +1450,7 @@
"no_results_description": "Try a synonym or more general keyword", "no_results_description": "Try a synonym or more general keyword",
"no_shared_albums_message": "Create an album to share photos and videos with people in your network", "no_shared_albums_message": "Create an album to share photos and videos with people in your network",
"no_uploads_in_progress": "No uploads in progress", "no_uploads_in_progress": "No uploads in progress",
"not_allowed": "Not allowed",
"not_available": "N/A", "not_available": "N/A",
"not_in_any_album": "Not in any album", "not_in_any_album": "Not in any album",
"not_selected": "Not selected", "not_selected": "Not selected",
@@ -1547,6 +1561,8 @@
"photos_count": "{count, plural, one {{count, number} Photo} other {{count, number} Photos}}", "photos_count": "{count, plural, one {{count, number} Photo} other {{count, number} Photos}}",
"photos_from_previous_years": "Photos from previous years", "photos_from_previous_years": "Photos from previous years",
"pick_a_location": "Pick a location", "pick_a_location": "Pick a location",
"pick_custom_range": "Custom range",
"pick_date_range": "Select a date range",
"pin_code_changed_successfully": "Successfully changed PIN code", "pin_code_changed_successfully": "Successfully changed PIN code",
"pin_code_reset_successfully": "Successfully reset PIN code", "pin_code_reset_successfully": "Successfully reset PIN code",
"pin_code_setup_successfully": "Successfully setup a PIN code", "pin_code_setup_successfully": "Successfully setup a PIN code",
@@ -2029,6 +2045,7 @@
"third_party_resources": "Third-Party Resources", "third_party_resources": "Third-Party Resources",
"time": "Time", "time": "Time",
"time_based_memories": "Time-based memories", "time_based_memories": "Time-based memories",
"time_based_memories_duration": "Number of seconds to display each image.",
"timeline": "Timeline", "timeline": "Timeline",
"timezone": "Timezone", "timezone": "Timezone",
"to_archive": "Archive", "to_archive": "Archive",
@@ -2169,6 +2186,7 @@
"welcome": "Welcome", "welcome": "Welcome",
"welcome_to_immich": "Welcome to Immich", "welcome_to_immich": "Welcome to Immich",
"wifi_name": "Wi-Fi Name", "wifi_name": "Wi-Fi Name",
"workflow": "Workflow",
"wrong_pin_code": "Wrong PIN code", "wrong_pin_code": "Wrong PIN code",
"year": "Year", "year": "Year",
"years_ago": "{years, plural, one {# year} other {# years}} ago", "years_ago": "{years, plural, one {# year} other {# years}} ago",

View File

@@ -13,6 +13,8 @@ from rich.logging import RichHandler
from uvicorn import Server from uvicorn import Server
from uvicorn.workers import UvicornWorker from uvicorn.workers import UvicornWorker
from .schemas import ModelPrecision
class ClipSettings(BaseModel): class ClipSettings(BaseModel):
textual: str | None = None textual: str | None = None
@@ -24,6 +26,11 @@ class FacialRecognitionSettings(BaseModel):
detection: str | None = None detection: str | None = None
class OcrSettings(BaseModel):
recognition: str | None = None
detection: str | None = None
class PreloadModelData(BaseModel): class PreloadModelData(BaseModel):
clip_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__CLIP", None) clip_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__CLIP", None)
facial_recognition_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION", None) facial_recognition_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION", None)
@@ -37,6 +44,7 @@ class PreloadModelData(BaseModel):
del os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION"] del os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION"]
clip: ClipSettings = ClipSettings() clip: ClipSettings = ClipSettings()
facial_recognition: FacialRecognitionSettings = FacialRecognitionSettings() facial_recognition: FacialRecognitionSettings = FacialRecognitionSettings()
ocr: OcrSettings = OcrSettings()
class MaxBatchSize(BaseModel): class MaxBatchSize(BaseModel):
@@ -70,6 +78,7 @@ class Settings(BaseSettings):
rknn_threads: int = 1 rknn_threads: int = 1
preload: PreloadModelData | None = None preload: PreloadModelData | None = None
max_batch_size: MaxBatchSize | None = None max_batch_size: MaxBatchSize | None = None
openvino_precision: ModelPrecision = ModelPrecision.FP32
@property @property
def device_id(self) -> str: def device_id(self) -> str:

View File

@@ -103,6 +103,20 @@ async def preload_models(preload: PreloadModelData) -> None:
ModelTask.FACIAL_RECOGNITION, ModelTask.FACIAL_RECOGNITION,
) )
if preload.ocr.detection is not None:
await load_models(
preload.ocr.detection,
ModelType.DETECTION,
ModelTask.OCR,
)
if preload.ocr.recognition is not None:
await load_models(
preload.ocr.recognition,
ModelType.RECOGNITION,
ModelTask.OCR,
)
if preload.clip_fallback is not None: if preload.clip_fallback is not None:
log.warning( log.warning(
"Deprecated env variable: 'MACHINE_LEARNING_PRELOAD__CLIP'. " "Deprecated env variable: 'MACHINE_LEARNING_PRELOAD__CLIP'. "

View File

@@ -78,6 +78,14 @@ _INSIGHTFACE_MODELS = {
_PADDLE_MODELS = { _PADDLE_MODELS = {
"PP-OCRv5_server", "PP-OCRv5_server",
"PP-OCRv5_mobile", "PP-OCRv5_mobile",
"CH__PP-OCRv5_server",
"CH__PP-OCRv5_mobile",
"EL__PP-OCRv5_mobile",
"EN__PP-OCRv5_mobile",
"ESLAV__PP-OCRv5_mobile",
"KOREAN__PP-OCRv5_mobile",
"LATIN__PP-OCRv5_mobile",
"TH__PP-OCRv5_mobile",
} }
SUPPORTED_PROVIDERS = [ SUPPORTED_PROVIDERS = [

View File

@@ -6,7 +6,7 @@ from numpy.typing import NDArray
from PIL import Image from PIL import Image
from rapidocr.ch_ppocr_det.utils import DBPostProcess from rapidocr.ch_ppocr_det.utils import DBPostProcess
from rapidocr.inference_engine.base import FileInfo, InferSession from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput from rapidocr.utils.download_file import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType from rapidocr.utils.typings import ModelType as RapidModelType
@@ -23,7 +23,7 @@ class TextDetector(InferenceModel):
identity = (ModelType.DETECTION, ModelTask.OCR) identity = (ModelType.DETECTION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None: def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX) super().__init__(model_name.split("__")[-1], **model_kwargs, model_format=ModelFormat.ONNX)
self.max_resolution = 736 self.max_resolution = 736
self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32) self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0) self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)

View File

@@ -6,7 +6,7 @@ from PIL import Image
from rapidocr.ch_ppocr_rec import TextRecInput from rapidocr.ch_ppocr_rec import TextRecInput
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
from rapidocr.inference_engine.base import FileInfo, InferSession from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput from rapidocr.utils.download_file import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangRec, OCRVersion, TaskType from rapidocr.utils.typings import EngineType, LangRec, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType from rapidocr.utils.typings import ModelType as RapidModelType
from rapidocr.utils.vis_res import VisRes from rapidocr.utils.vis_res import VisRes
@@ -25,6 +25,7 @@ class TextRecognizer(InferenceModel):
identity = (ModelType.RECOGNITION, ModelTask.OCR) identity = (ModelType.RECOGNITION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None: def __init__(self, model_name: str, **model_kwargs: Any) -> None:
self.language = LangRec[model_name.split("__")[0]] if "__" in model_name else LangRec.CH
self.min_score = model_kwargs.get("minScore", 0.9) self.min_score = model_kwargs.get("minScore", 0.9)
self._empty: TextRecognitionOutput = { self._empty: TextRecognitionOutput = {
"box": np.empty(0, dtype=np.float32), "box": np.empty(0, dtype=np.float32),
@@ -41,7 +42,7 @@ class TextRecognizer(InferenceModel):
engine_type=EngineType.ONNXRUNTIME, engine_type=EngineType.ONNXRUNTIME,
ocr_version=OCRVersion.PPOCRV5, ocr_version=OCRVersion.PPOCRV5,
task_type=TaskType.REC, task_type=TaskType.REC,
lang_type=LangRec.CH, lang_type=self.language,
model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER, model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
) )
) )
@@ -61,6 +62,7 @@ class TextRecognizer(InferenceModel):
session=session.session, session=session.session,
rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6, rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6,
rec_img_shape=(3, 48, 320), rec_img_shape=(3, 48, 320),
lang_type=self.language,
) )
) )
return session return session

View File

@@ -20,8 +20,8 @@ class TextRecognitionOutput(TypedDict):
# RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes # RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes
class OcrOptions(dict[str, Any]): class OcrOptions(dict[str, Any]):
def __init__(self, **options: Any) -> None: def __init__(self, lang_type: LangRec | None = None, **options: Any) -> None:
super().__init__(**options) super().__init__(**options)
self.engine_type = EngineType.ONNXRUNTIME self.engine_type = EngineType.ONNXRUNTIME
self.lang_type = LangRec.CH self.lang_type = lang_type
self.font_path = None self.font_path = None

View File

@@ -46,6 +46,11 @@ class ModelSource(StrEnum):
PADDLE = "paddle" PADDLE = "paddle"
class ModelPrecision(StrEnum):
FP16 = "FP16"
FP32 = "FP32"
ModelIdentity = tuple[ModelType, ModelTask] ModelIdentity = tuple[ModelType, ModelTask]

View File

@@ -93,10 +93,12 @@ class OrtSession:
case "CUDAExecutionProvider" | "ROCMExecutionProvider": case "CUDAExecutionProvider" | "ROCMExecutionProvider":
options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id} options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
case "OpenVINOExecutionProvider": case "OpenVINOExecutionProvider":
openvino_dir = self.model_path.parent / "openvino"
device = f"GPU.{settings.device_id}"
options = { options = {
"device_type": f"GPU.{settings.device_id}", "device_type": device,
"precision": "FP32", "precision": settings.openvino_precision.value,
"cache_dir": (self.model_path.parent / "openvino").as_posix(), "cache_dir": openvino_dir.as_posix(),
} }
case "CoreMLExecutionProvider": case "CoreMLExecutionProvider":
options = { options = {

View File

@@ -1,6 +1,6 @@
[project] [project]
name = "immich-ml" name = "immich-ml"
version = "2.2.2" version = "2.2.3"
description = "" description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }] authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.10,<4.0" requires-python = ">=3.10,<4.0"

View File

@@ -26,7 +26,7 @@ from immich_ml.models.clip.textual import MClipTextualEncoder, OpenClipTextualEn
from immich_ml.models.clip.visual import OpenClipVisualEncoder from immich_ml.models.clip.visual import OpenClipVisualEncoder
from immich_ml.models.facial_recognition.detection import FaceDetector from immich_ml.models.facial_recognition.detection import FaceDetector
from immich_ml.models.facial_recognition.recognition import FaceRecognizer from immich_ml.models.facial_recognition.recognition import FaceRecognizer
from immich_ml.schemas import ModelFormat, ModelTask, ModelType from immich_ml.schemas import ModelFormat, ModelPrecision, ModelTask, ModelType
from immich_ml.sessions.ann import AnnSession from immich_ml.sessions.ann import AnnSession
from immich_ml.sessions.ort import OrtSession from immich_ml.sessions.ort import OrtSession
from immich_ml.sessions.rknn import RknnSession, run_inference from immich_ml.sessions.rknn import RknnSession, run_inference
@@ -240,11 +240,16 @@ class TestOrtSession:
@pytest.mark.ov_device_ids(["GPU.0", "CPU"]) @pytest.mark.ov_device_ids(["GPU.0", "CPU"])
def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None: def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None:
model_path = "/cache/ViT-B-32__openai/model.onnx" model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"]) session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"])
assert session.provider_options == [ assert session.provider_options == [
{"device_type": "GPU.0", "precision": "FP32", "cache_dir": "/cache/ViT-B-32__openai/openvino"}, {
"device_type": "GPU.0",
"precision": "FP32",
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
},
{"arena_extend_strategy": "kSameAsRequested"}, {"arena_extend_strategy": "kSameAsRequested"},
] ]
@@ -262,6 +267,21 @@ class TestOrtSession:
} }
] ]
def test_sets_openvino_to_fp16_if_enabled(self, mocker: MockerFixture) -> None:
model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
mocker.patch.object(settings, "openvino_precision", ModelPrecision.FP16)
session = OrtSession(model_path, providers=["OpenVINOExecutionProvider"])
assert session.provider_options == [
{
"device_type": "GPU.1",
"precision": "FP16",
"cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
}
]
def test_sets_provider_options_for_cuda(self) -> None: def test_sets_provider_options_for_cuda(self) -> None:
os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1" os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
@@ -417,7 +437,7 @@ class TestRknnSession:
session.run(None, input_feed) session.run(None, input_feed)
rknn_session.return_value.put.assert_called_once_with([input1, input2]) rknn_session.return_value.put.assert_called_once_with([input1, input2])
np_spy.call_count == 2 assert np_spy.call_count == 2
np_spy.assert_has_calls([mock.call(input1), mock.call(input2)]) np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])
@@ -925,11 +945,34 @@ class TestCache:
any_order=True, any_order=True,
) )
async def test_preloads_ocr_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
settings = Settings()
assert settings.preload is not None
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
model_cache = ModelCache()
monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
await preload_models(settings.preload)
mock_get_model.assert_has_calls(
[
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
],
any_order=True,
)
async def test_preloads_all_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None: async def test_preloads_all_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL"] = "ViT-B-32__openai" os.environ["MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL"] = "ViT-B-32__openai"
os.environ["MACHINE_LEARNING_PRELOAD__CLIP__VISUAL"] = "ViT-B-32__openai" os.environ["MACHINE_LEARNING_PRELOAD__CLIP__VISUAL"] = "ViT-B-32__openai"
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION"] = "buffalo_s" os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION"] = "buffalo_s"
os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION"] = "buffalo_s" os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION"] = "buffalo_s"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"
settings = Settings() settings = Settings()
assert settings.preload is not None assert settings.preload is not None
@@ -937,6 +980,8 @@ class TestCache:
assert settings.preload.clip.textual == "ViT-B-32__openai" assert settings.preload.clip.textual == "ViT-B-32__openai"
assert settings.preload.facial_recognition.recognition == "buffalo_s" assert settings.preload.facial_recognition.recognition == "buffalo_s"
assert settings.preload.facial_recognition.detection == "buffalo_s" assert settings.preload.facial_recognition.detection == "buffalo_s"
assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"
model_cache = ModelCache() model_cache = ModelCache()
monkeypatch.setattr("immich_ml.main.model_cache", model_cache) monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
@@ -948,6 +993,8 @@ class TestCache:
mock.call("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH), mock.call("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH),
mock.call("buffalo_s", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION), mock.call("buffalo_s", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION),
mock.call("buffalo_s", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION), mock.call("buffalo_s", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION),
mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
], ],
any_order=True, any_order=True,
) )

View File

@@ -3,12 +3,12 @@
# #
# Pump one or both of the server/mobile versions in appropriate files # Pump one or both of the server/mobile versions in appropriate files
# #
# usage: './scripts/pump-version.sh -s <major|minor|patch> <-m> # usage: './scripts/pump-version.sh -s <major|minor|patch> <-m> <true|false>
# #
# examples: # examples:
# ./scripts/pump-version.sh -s major # 1.0.0+50 => 2.0.0+50 # ./scripts/pump-version.sh -s major # 1.0.0+50 => 2.0.0+50
# ./scripts/pump-version.sh -s minor -m # 1.0.0+50 => 1.1.0+51 # ./scripts/pump-version.sh -s minor -m true # 1.0.0+50 => 1.1.0+51
# ./scripts/pump-version.sh -m # 1.0.0+50 => 1.0.0+51 # ./scripts/pump-version.sh -m true # 1.0.0+50 => 1.0.0+51
# #
SERVER_PUMP="false" SERVER_PUMP="false"
@@ -88,7 +88,6 @@ if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
fi fi
sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

515
mise.toml
View File

@@ -1,7 +1,9 @@
experimental_monorepo_root = true
[tools] [tools]
node = "24.11.0" node = "24.11.0"
flutter = "3.35.7" flutter = "3.35.7"
pnpm = "10.19.0" pnpm = "10.20.0"
terragrunt = "0.91.2" terragrunt = "0.91.2"
opentofu = "1.10.6" opentofu = "1.10.6"
@@ -14,514 +16,21 @@ postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"
experimental = true experimental = true
pin = true pin = true
# .github # SDK tasks
[tasks."github:install"]
run = "pnpm install --filter github --frozen-lockfile"
[tasks."github:format"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --check ."
[tasks."github:format-fix"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --write ."
# @immich/cli
[tasks."cli:install"]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks."cli:build"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite build"
[tasks."cli:test"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite"
[tasks."cli:lint"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."cli:lint-fix"]
run = "mise run cli:lint --fix"
[tasks."cli:format"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --check ."
[tasks."cli:format-fix"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --write ."
[tasks."cli:check"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "tsc --noEmit"
# @immich/sdk
[tasks."sdk:install"] [tasks."sdk:install"]
dir = "open-api/typescript-sdk"
run = "pnpm install --filter @immich/sdk --frozen-lockfile" run = "pnpm install --filter @immich/sdk --frozen-lockfile"
[tasks."sdk:build"] [tasks."sdk:build"]
env._.path = "./open-api/typescript-sdk/node_modules/.bin" dir = "open-api/typescript-sdk"
dir = "./open-api/typescript-sdk" env._.path = "./node_modules/.bin"
run = "tsc" run = "tsc"
# docs # i18n tasks
[tasks."docs:install"]
run = "pnpm install --filter documentation --frozen-lockfile"
[tasks."docs:start"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus --port 3005"
[tasks."docs:build"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]
[tasks."docs:preview"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus serve"
[tasks."docs:format"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --check ."
[tasks."docs:format-fix"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --write ."
# e2e
[tasks."e2e:install"]
run = "pnpm install --filter immich-e2e --frozen-lockfile"
[tasks."e2e:test"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "vitest --run"
[tasks."e2e:test-web"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "playwright test"
[tasks."e2e:format"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --check ."
[tasks."e2e:format-fix"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --write ."
[tasks."e2e:lint"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."e2e:lint-fix"]
run = "mise run e2e:lint --fix"
[tasks."e2e:check"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "tsc --noEmit"
# i18n
[tasks."i18n:format"] [tasks."i18n:format"]
run = "mise run i18n:format-fix" dir = "i18n"
run = { task = ":i18n:format-fix" }
[tasks."i18n:format-fix"] [tasks."i18n:format-fix"]
run = "pnpm dlx sort-json ./i18n/*.json" dir = "i18n"
run = "pnpm dlx sort-json *.json"
# server
[tasks."server:install"]
run = "pnpm install --filter immich --frozen-lockfile"
[tasks."server:build"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "nest build"
[tasks."server:test"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.mjs"
[tasks."server:test-medium"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.medium.mjs"
[tasks."server:format"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --check ."
[tasks."server:format-fix"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --write ."
[tasks."server:lint"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0"
[tasks."server:lint-fix"]
run = "mise run server:lint --fix"
[tasks."server:check"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "tsc --noEmit"
[tasks."server:sql"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:open-api"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:migrations"]
dir = "server"
run = "node ./dist/bin/migrations.js"
description = "Run database migration commands (create, generate, run, debug, or query)"
[tasks."server:schema-drop"]
run = "mise run server:migrations query 'DROP schema public cascade; CREATE schema public;'"
[tasks."server:schema-reset"]
run = "mise run server:schema-drop && mise run server:migrations run"
[tasks."server:email-dev"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "email dev -p 3050 --dir src/emails"
[tasks."server:checklist"]
run = [
"mise run server:install",
"mise run server:format",
"mise run server:lint",
"mise run server:check",
"mise run server:test-medium --run",
"mise run server:test --run",
]
# web
[tasks."web:install"]
run = "pnpm install --filter immich-web --frozen-lockfile"
[tasks."web:svelte-kit-sync"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "svelte-kit sync"
[tasks."web:build"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:build-stats"]
env.BUILD_STATS = "true"
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:preview"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite preview"
[tasks."web:start"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vite dev --host 0.0.0.0 --port 3000"
[tasks."web:test"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vitest"
[tasks."web:format"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --check ."
[tasks."web:format-fix"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --write ."
[tasks."web:lint"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint . --max-warnings 0 --concurrency 4"
[tasks."web:lint-fix"]
run = "mise run web:lint --fix"
[tasks."web:check"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "tsc --noEmit"
[tasks."web:check-svelte"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "svelte-check --no-tsconfig --fail-on-warnings"
[tasks."web:checklist"]
run = [
"mise run web:install",
"mise run web:format",
"mise run web:check",
"mise run web:test --run",
"mise run web:lint",
]
# mobile
[tasks."mobile:codegen:dart"]
alias = "mobile:codegen"
description = "Execute build_runner to auto-generate dart code"
dir = "mobile"
sources = [
"pubspec.yaml",
"build.yaml",
"lib/**/*.dart",
"infrastructure/**/*.drift",
]
outputs = { auto = true }
run = "dart run build_runner build --delete-conflicting-outputs"
[tasks."mobile:codegen:pigeon"]
alias = "mobile:pigeon"
description = "Generate pigeon platform code"
dir = "mobile"
depends = [
"mobile:pigeon:native-sync",
"mobile:pigeon:thumbnail",
"mobile:pigeon:background-worker",
"mobile:pigeon:background-worker-lock",
"mobile:pigeon:connectivity",
]
[tasks."mobile:codegen:translation"]
alias = "mobile:translation"
description = "Generate translations from i18n JSONs"
dir = "mobile"
run = [
{ task = "i18n:format-fix" },
{ tasks = [
"mobile:i18n:loader",
"mobile:i18n:keys",
] },
]
[tasks."mobile:codegen:app-icon"]
description = "Generate app icons"
dir = "mobile"
run = "flutter pub run flutter_launcher_icons:main"
[tasks."mobile:codegen:splash"]
description = "Generate splash screen"
dir = "mobile"
run = "flutter pub run flutter_native_splash:create"
[tasks."mobile:test"]
description = "Run mobile tests"
dir = "mobile"
run = "flutter test"
[tasks."mobile:lint"]
description = "Analyze Dart code"
dir = "mobile"
depends = ["mobile:analyze:dart", "mobile:analyze:dcm"]
[tasks."mobile:lint-fix"]
description = "Auto-fix Dart code"
dir = "mobile"
depends = ["mobile:analyze:fix:dart", "mobile:analyze:fix:dcm"]
[tasks."mobile:format"]
description = "Format Dart code"
dir = "mobile"
run = "dart format --set-exit-if-changed $(find lib -name '*.dart' -not \\( -name '*.g.dart' -o -name '*.drift.dart' -o -name '*.gr.dart' \\))"
[tasks."mobile:build:android"]
description = "Build Android release"
dir = "mobile"
run = "flutter build appbundle"
[tasks."mobile:drift:migration"]
alias = "mobile:migration"
description = "Generate database migrations"
dir = "mobile"
run = "dart run drift_dev make-migrations"
# mobile internal tasks
[tasks."mobile:pigeon:native-sync"]
description = "Generate native sync API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/native_sync_api.dart"]
outputs = [
"lib/platform/native_sync_api.g.dart",
"ios/Runner/Sync/Messages.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/sync/Messages.g.kt",
]
run = [
"dart run pigeon --input pigeon/native_sync_api.dart",
"dart format lib/platform/native_sync_api.g.dart",
]
[tasks."mobile:pigeon:thumbnail"]
description = "Generate thumbnail API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/thumbnail_api.dart"]
outputs = [
"lib/platform/thumbnail_api.g.dart",
"ios/Runner/Images/Thumbnails.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/images/Thumbnails.g.kt",
]
run = [
"dart run pigeon --input pigeon/thumbnail_api.dart",
"dart format lib/platform/thumbnail_api.g.dart",
]
[tasks."mobile:pigeon:background-worker"]
description = "Generate background worker API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/background_worker_api.dart"]
outputs = [
"lib/platform/background_worker_api.g.dart",
"ios/Runner/Background/BackgroundWorker.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorker.g.kt",
]
run = [
"dart run pigeon --input pigeon/background_worker_api.dart",
"dart format lib/platform/background_worker_api.g.dart",
]
[tasks."mobile:pigeon:background-worker-lock"]
description = "Generate background worker lock API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/background_worker_lock_api.dart"]
outputs = [
"lib/platform/background_worker_lock_api.g.dart",
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorkerLock.g.kt",
]
run = [
"dart run pigeon --input pigeon/background_worker_lock_api.dart",
"dart format lib/platform/background_worker_lock_api.g.dart",
]
[tasks."mobile:pigeon:connectivity"]
description = "Generate connectivity API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/connectivity_api.dart"]
outputs = [
"lib/platform/connectivity_api.g.dart",
"ios/Runner/Connectivity/Connectivity.g.swift",
"android/app/src/main/kotlin/app/alextran/immich/connectivity/Connectivity.g.kt",
]
run = [
"dart run pigeon --input pigeon/connectivity_api.dart",
"dart format lib/platform/connectivity_api.g.dart",
]
[tasks."mobile:i18n:loader"]
description = "Generate i18n loader"
dir = "mobile"
hide = true
sources = ["i18n/"]
outputs = "lib/generated/codegen_loader.g.dart"
run = [
"dart run easy_localization:generate -S ../i18n",
"dart format lib/generated/codegen_loader.g.dart",
]
[tasks."mobile:i18n:keys"]
description = "Generate i18n keys"
dir = "mobile"
hide = true
sources = ["i18n/en.json"]
outputs = "lib/generated/intl_keys.g.dart"
run = [
"dart run bin/generate_keys.dart",
"dart format lib/generated/intl_keys.g.dart",
]
[tasks."mobile:analyze:dart"]
description = "Run Dart analysis"
dir = "mobile"
hide = true
run = "dart analyze --fatal-infos"
[tasks."mobile:analyze:dcm"]
description = "Run Dart Code Metrics"
dir = "mobile"
hide = true
run = "dcm analyze lib --fatal-style --fatal-warnings"
[tasks."mobile:analyze:fix:dart"]
description = "Auto-fix Dart analysis"
dir = "mobile"
hide = true
run = "dart fix --apply"
[tasks."mobile:analyze:fix:dcm"]
description = "Auto-fix Dart Code Metrics"
dir = "mobile"
hide = true
run = "dcm fix lib"
# docs deployment
[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"
[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"
[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"
[tasks."tf:init"]
run = "mise run tf init -- -reconfigure"
dir = "{{cwd}}"

View File

@@ -143,7 +143,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
        val mediaUrls = call.argument<List<String>>("mediaUrls")
        if (mediaUrls != null) {
          if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
            moveToTrash(mediaUrls, result)
          } else {
            result.error("PERMISSION_DENIED", "Media permission required", null)
          }
@@ -155,15 +155,23 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
      "restoreFromTrash" -> {
        val fileName = call.argument<String>("fileName")
        val type = call.argument<Int>("type")
+       val mediaId = call.argument<String>("mediaId")
        if (fileName != null && type != null) {
          if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
            restoreFromTrash(fileName, type, result)
          } else {
            result.error("PERMISSION_DENIED", "Media permission required", null)
          }
-       } else {
-         result.error("INVALID_NAME", "The file name is not specified.", null)
-       }
+       } else if (mediaId != null && type != null) {
+         if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
+           restoreFromTrashById(mediaId, type, result)
+         } else {
+           result.error("PERMISSION_DENIED", "Media permission required", null)
+         }
+       } else {
+         result.error("INVALID_PARAMS", "Required params are not specified.", null)
+       }
      }
@@ -175,6 +183,17 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
        }
      }
+     "hasManageMediaPermission" -> {
+       if (hasManageMediaPermission()) {
+         Log.i("Manage storage permission", "Permission already granted")
+         result.success(true)
+       } else {
+         result.success(false)
+       }
+     }
+     "manageMediaPermission" -> requestManageMediaPermission(result)
      else -> result.notImplemented()
    }
  }
@@ -224,25 +243,47 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
  }

+ @RequiresApi(Build.VERSION_CODES.R)
+ private fun restoreFromTrashById(mediaId: String, type: Int, result: Result) {
+   val id = mediaId.toLongOrNull()
+   if (id == null) {
+     result.error("INVALID_ID", "The file id is not a valid number: $mediaId", null)
+     return
+   }
+   if (!isInTrash(id)) {
+     result.error("TrashNotFound", "Item with id=$id not found in trash", null)
+     return
+   }
+   val uri = ContentUris.withAppendedId(contentUriForType(type), id)
+   try {
+     Log.i(TAG, "restoreFromTrashById: uri=$uri (type=$type,id=$id)")
+     restoreUris(listOf(uri), result)
+   } catch (e: Exception) {
+     Log.w(TAG, "restoreFromTrashById failed", e)
+   }
+ }
+
  @RequiresApi(Build.VERSION_CODES.R)
  private fun toggleTrash(contentUris: List<Uri>, isTrashed: Boolean, result: Result) {
    val activity = activityBinding?.activity
    val contentResolver = context?.contentResolver
    if (activity == null || contentResolver == null) {
      result.error("TrashError", "Activity or ContentResolver not available", null)
      return
    }
    try {
      val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
      pendingResult = result // Store for onActivityResult
      activity.startIntentSenderForResult(
        pendingIntent.intentSender,
        trashRequestCode,
        null, 0, 0, 0
      )
    } catch (e: Exception) {
      Log.e("TrashError", "Error creating or starting trash request", e)
      result.error("TrashError", "Error creating or starting trash request", null)
    }
  }
@@ -264,14 +305,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
    contentResolver.query(queryUri, projection, queryArgs, null)?.use { cursor ->
      if (cursor.moveToFirst()) {
        val id = cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.Files.FileColumns._ID))
-       // same order as AssetType from dart
-       val contentUri = when (type) {
-         1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
-         2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
-         3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
-         else -> queryUri
-       }
-       return ContentUris.withAppendedId(contentUri, id)
+       return ContentUris.withAppendedId(contentUriForType(type), id)
      }
    }
    return null
@@ -315,6 +349,40 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
    }
    return false
  }

+ @RequiresApi(Build.VERSION_CODES.R)
+ private fun isInTrash(id: Long): Boolean {
+   val contentResolver = context?.contentResolver ?: return false
+   val filesUri = MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
+   val args = Bundle().apply {
+     putString(ContentResolver.QUERY_ARG_SQL_SELECTION, "${MediaStore.Files.FileColumns._ID}=?")
+     putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, arrayOf(id.toString()))
+     putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
+     putInt(ContentResolver.QUERY_ARG_LIMIT, 1)
+   }
+   return contentResolver.query(filesUri, arrayOf(MediaStore.Files.FileColumns._ID), args, null)
+     ?.use { it.moveToFirst() } == true
+ }
+
+ @RequiresApi(Build.VERSION_CODES.R)
+ private fun restoreUris(uris: List<Uri>, result: Result) {
+   if (uris.isEmpty()) {
+     result.error("TrashError", "No URIs to restore", null)
+     return
+   }
+   Log.i(TAG, "restoreUris: count=${uris.size}, first=${uris.first()}")
+   toggleTrash(uris, false, result)
+ }
+
+ @RequiresApi(Build.VERSION_CODES.Q)
+ private fun contentUriForType(type: Int): Uri =
+   when (type) {
+     // same order as AssetType from dart
+     1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
+     2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
+     3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
+     else -> MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
+   }
}

private const val TAG = "BackgroundServicePlugin"
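
The new mediaId branch above resolves a MediaStore content Uri from the numeric id and funnels it through the same createTrashRequest consent flow used for trashing. Below is a minimal, self-contained Kotlin sketch of that flow, assuming API 30 or later and an Activity; the function name, the image-only Uri, and the request code are illustrative and not part of the plugin.

import android.app.Activity
import android.content.ContentUris
import android.os.Build
import android.provider.MediaStore
import androidx.annotation.RequiresApi

// Minimal sketch: ask the system to untrash one MediaStore image by its numeric id.
// Without the MANAGE_MEDIA grant the user is shown a confirmation dialog first.
@RequiresApi(Build.VERSION_CODES.R)
fun restoreImageById(activity: Activity, id: Long, requestCode: Int) {
    val uri = ContentUris.withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id)
    // isTrashed = false means "take it out of the trash".
    val pending = MediaStore.createTrashRequest(activity.contentResolver, listOf(uri), false)
    // The outcome (RESULT_OK / RESULT_CANCELED) is delivered to onActivityResult with requestCode.
    activity.startIntentSenderForResult(pending.intentSender, requestCode, null, 0, 0, 0)
}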

View File

@@ -305,6 +305,7 @@ interface NativeSyncApi {
  fun getAssetsForAlbum(albumId: String, updatedTimeCond: Long?): List<PlatformAsset>
  fun hashAssets(assetIds: List<String>, allowNetworkAccess: Boolean, callback: (Result<List<HashResult>>) -> Unit)
  fun cancelHashing()
+ fun getTrashedAssets(): Map<String, List<PlatformAsset>>

  companion object {
    /** The codec used by NativeSyncApi. */
@@ -483,6 +484,21 @@ interface NativeSyncApi {
          channel.setMessageHandler(null)
        }
      }
+     run {
+       val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets$separatedMessageChannelSuffix", codec, taskQueue)
+       if (api != null) {
+         channel.setMessageHandler { _, reply ->
+           val wrapped: List<Any?> = try {
+             listOf(api.getTrashedAssets())
+           } catch (exception: Throwable) {
+             MessagesPigeonUtils.wrapError(exception)
+           }
+           reply.reply(wrapped)
+         }
+       } else {
+         channel.setMessageHandler(null)
+       }
+     }
    }
  }
}
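
The generated handler above wraps the synchronous getTrashedAssets() call and replies with either a one-element success list or the error list produced by MessagesPigeonUtils.wrapError. The standalone Kotlin sketch below only approximates that reply convention for illustration; the helper name and the exact error-triple layout are assumptions, not the generated code.

// Approximation of the Pigeon reply convention:
// success -> a one-element list with the value; failure -> [code, message, details].
fun <T> wrapCall(block: () -> T): List<Any?> =
    try {
        listOf(block())
    } catch (e: Throwable) {
        listOf(e.javaClass.simpleName, e.message, null)
    }

fun main() {
    println(wrapCall { mapOf("album-1" to emptyList<String>()) })  // [{album-1=[]}]
    println(wrapCall<Int> { error("boom") })                       // [IllegalStateException, boom, null]
}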

View File

@@ -21,4 +21,9 @@ class NativeSyncApiImpl26(context: Context) : NativeSyncApiImplBase(context), Na
  override fun getMediaChanges(): SyncDelta {
    throw IllegalStateException("Method not supported on this Android version.")
  }
+
+ override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
+   // Method not supported on this Android version.
+   return emptyMap()
+ }
}

View File

@@ -1,7 +1,9 @@
 package app.alextran.immich.sync

+import android.content.ContentResolver
 import android.content.Context
 import android.os.Build
+import android.os.Bundle
 import android.provider.MediaStore
 import androidx.annotation.RequiresApi
 import androidx.annotation.RequiresExtension
@@ -86,4 +88,29 @@ class NativeSyncApiImpl30(context: Context) : NativeSyncApiImplBase(context), Na
    // Unmounted volumes are handled in dart when the album is removed
    return SyncDelta(hasChanges, changed, deleted, assetAlbums)
  }
+
+ override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
+   val result = LinkedHashMap<String, MutableList<PlatformAsset>>()
+   val volumes = MediaStore.getExternalVolumeNames(ctx)
+   for (volume in volumes) {
+     val queryArgs = Bundle().apply {
+       putString(ContentResolver.QUERY_ARG_SQL_SELECTION, MEDIA_SELECTION)
+       putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, MEDIA_SELECTION_ARGS)
+       putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
+     }
+     getCursor(volume, queryArgs).use { cursor ->
+       getAssets(cursor).forEach { res ->
+         if (res is AssetResult.ValidAsset) {
+           result.getOrPut(res.albumId) { mutableListOf() }.add(res.asset)
+         }
+       }
+     }
+   }
+   return result.mapValues { it.value.toList() }
+ }
}
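
getTrashedAssets above walks every external volume and keeps only rows the system has flagged as trashed (QUERY_ARG_MATCH_TRASHED with MATCH_ONLY), grouped per album. A standalone Kotlin sketch of the same query shape, assuming API 30 or later and using a plain _ID/BUCKET_ID projection rather than the plugin's MEDIA_SELECTION filter:

import android.content.Context
import android.os.Build
import android.os.Bundle
import android.provider.MediaStore
import androidx.annotation.RequiresApi

// Sketch: ids of trashed MediaStore items, grouped by bucket (album) id, across all volumes.
@RequiresApi(Build.VERSION_CODES.R)
fun trashedByBucket(context: Context): Map<String, List<Long>> {
    val grouped = LinkedHashMap<String, MutableList<Long>>()
    val projection = arrayOf(
        MediaStore.Files.FileColumns._ID,
        MediaStore.Files.FileColumns.BUCKET_ID,
    )
    for (volume in MediaStore.getExternalVolumeNames(context)) {
        val args = Bundle().apply {
            putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
        }
        context.contentResolver.query(MediaStore.Files.getContentUri(volume), projection, args, null)?.use { c ->
            val idCol = c.getColumnIndexOrThrow(MediaStore.Files.FileColumns._ID)
            val bucketCol = c.getColumnIndexOrThrow(MediaStore.Files.FileColumns.BUCKET_ID)
            while (c.moveToNext()) {
                grouped.getOrPut(c.getString(bucketCol) ?: "unknown") { mutableListOf() }.add(c.getLong(idCol))
            }
        }
    }
    return grouped
}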

View File

@@ -4,6 +4,8 @@ import android.annotation.SuppressLint
 import android.content.ContentUris
 import android.content.Context
 import android.database.Cursor
+import android.net.Uri
+import android.os.Bundle
 import android.provider.MediaStore
 import android.util.Base64
 import androidx.core.database.getStringOrNull
@@ -81,6 +83,16 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
      sortOrder,
    )

+ protected fun getCursor(
+   volume: String,
+   queryArgs: Bundle
+ ): Cursor? = ctx.contentResolver.query(
+   MediaStore.Files.getContentUri(volume),
+   ASSET_PROJECTION,
+   queryArgs,
+   null
+ )
+
  protected fun getAssets(cursor: Cursor?): Sequence<AssetResult> {
    return sequence {
      cursor?.use { c ->
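
The new getCursor overload takes every query parameter as a single Bundle, which is what lets callers such as getTrashedAssets attach extras like QUERY_ARG_MATCH_TRASHED alongside the selection. A hedged Kotlin sketch of building such a Bundle, with an illustrative selection, sort order, and limit; the column choices are examples, not the plugin's:

import android.content.ContentResolver
import android.os.Build
import android.os.Bundle
import android.provider.MediaStore
import androidx.annotation.RequiresApi

// Sketch: Bundle-style query args selecting recent images, newest first, capped at `limit` rows.
@RequiresApi(Build.VERSION_CODES.R)
fun recentImagesArgs(sinceSeconds: Long, limit: Int): Bundle = Bundle().apply {
    putString(
        ContentResolver.QUERY_ARG_SQL_SELECTION,
        "${MediaStore.Files.FileColumns.MEDIA_TYPE} = ? AND ${MediaStore.Files.FileColumns.DATE_ADDED} > ?"
    )
    putStringArray(
        ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS,
        arrayOf(MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE.toString(), sinceSeconds.toString())
    )
    putString(ContentResolver.QUERY_ARG_SQL_SORT_ORDER, "${MediaStore.Files.FileColumns.DATE_ADDED} DESC")
    putInt(ContentResolver.QUERY_ARG_LIMIT, limit)
}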

View File

@@ -35,8 +35,8 @@ platform :android do
      task: 'bundle',
      build_type: 'Release',
      properties: {
-       "android.injected.version.code" => 3025,
-       "android.injected.version.name" => "2.2.2",
+       "android.injected.version.code" => 3026,
+       "android.injected.version.name" => "2.2.3",
      }
    )
    upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

File diff suppressed because one or more lines are too long

View File

@@ -3,7 +3,7 @@
 	archiveVersion = 1;
 	classes = {
 	};
-	objectVersion = 77;
+	objectVersion = 54;
 	objects = {

 /* Begin PBXBuildFile section */
@@ -32,6 +32,9 @@
 		FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */; };
 		FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */; };
 		FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */; };
+		FEE084F82EC172460045228E /* SQLiteData in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084F72EC172460045228E /* SQLiteData */; };
+		FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */; };
+		FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FC2EC1725A0045228E /* StructuredFieldValues */; };
 /* End PBXBuildFile section */

 /* Begin PBXContainerItemProxy section */
@@ -153,6 +156,13 @@
 			path = WidgetExtension;
 			sourceTree = "<group>";
 		};
+		FEE084F22EC172080045228E /* Schemas */ = {
+			isa = PBXFileSystemSynchronizedRootGroup;
+			exceptions = (
+			);
+			path = Schemas;
+			sourceTree = "<group>";
+		};
 /* End PBXFileSystemSynchronizedRootGroup section */

 /* Begin PBXFrameworksBuildPhase section */
@@ -160,6 +170,9 @@
 			isa = PBXFrameworksBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				FEE084F82EC172460045228E /* SQLiteData in Frameworks */,
+				FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */,
+				FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */,
 				D218389C4A4C4693F141F7D1 /* Pods_Runner.framework in Frameworks */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
@@ -254,6 +267,7 @@
 		97C146F01CF9000F007C117D /* Runner */ = {
 			isa = PBXGroup;
 			children = (
+				FEE084F22EC172080045228E /* Schemas */,
 				B231F52D2E93A44A00BC45D1 /* Core */,
 				B25D37792E72CA15008B6CA7 /* Connectivity */,
 				B21E34A62E5AF9760031FDB9 /* Background */,
@@ -341,6 +355,7 @@
 			fileSystemSynchronizedGroups = (
 				B231F52D2E93A44A00BC45D1 /* Core */,
 				B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
+				FEE084F22EC172080045228E /* Schemas */,
 			);
 			name = Runner;
 			productName = Runner;
@@ -419,6 +434,10 @@
 				Base,
 			);
 			mainGroup = 97C146E51CF9000F007C117D;
+			packageReferences = (
+				FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */,
+				FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */,
+			);
 			preferredProjectObjectVersion = 77;
 			productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
 			projectDirPath = "";
@@ -1201,6 +1220,43 @@
 			defaultConfigurationName = Release;
 		};
 /* End XCConfigurationList section */

+/* Begin XCRemoteSwiftPackageReference section */
+		FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */ = {
+			isa = XCRemoteSwiftPackageReference;
+			repositoryURL = "https://github.com/pointfreeco/sqlite-data";
+			requirement = {
+				kind = upToNextMajorVersion;
+				minimumVersion = 1.3.0;
+			};
+		};
+		FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */ = {
+			isa = XCRemoteSwiftPackageReference;
+			repositoryURL = "https://github.com/apple/swift-http-structured-headers.git";
+			requirement = {
+				kind = upToNextMajorVersion;
+				minimumVersion = 1.5.0;
+			};
+		};
+/* End XCRemoteSwiftPackageReference section */
+
+/* Begin XCSwiftPackageProductDependency section */
+		FEE084F72EC172460045228E /* SQLiteData */ = {
+			isa = XCSwiftPackageProductDependency;
+			package = FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */;
+			productName = SQLiteData;
+		};
+		FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */ = {
+			isa = XCSwiftPackageProductDependency;
+			package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
+			productName = RawStructuredFieldValues;
+		};
+		FEE084FC2EC1725A0045228E /* StructuredFieldValues */ = {
+			isa = XCSwiftPackageProductDependency;
+			package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
+			productName = StructuredFieldValues;
+		};
+/* End XCSwiftPackageProductDependency section */
 	};
 	rootObject = 97C146E61CF9000F007C117D /* Project object */;
 }

View File

@@ -0,0 +1,177 @@
{
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
"pins" : [
{
"identity" : "combine-schedulers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/combine-schedulers",
"state" : {
"revision" : "fd16d76fd8b9a976d88bfb6cacc05ca8d19c91b6",
"version" : "1.1.0"
}
},
{
"identity" : "grdb.swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/groue/GRDB.swift",
"state" : {
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
"version" : "7.8.0"
}
},
{
"identity" : "opencombine",
"kind" : "remoteSourceControl",
"location" : "https://github.com/OpenCombine/OpenCombine.git",
"state" : {
"revision" : "8576f0d579b27020beccbccc3ea6844f3ddfc2c2",
"version" : "0.14.0"
}
},
{
"identity" : "sqlite-data",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/sqlite-data",
"state" : {
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
"version" : "1.3.0"
}
},
{
"identity" : "swift-case-paths",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-case-paths",
"state" : {
"revision" : "6989976265be3f8d2b5802c722f9ba168e227c71",
"version" : "1.7.2"
}
},
{
"identity" : "swift-clocks",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-clocks",
"state" : {
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
"version" : "1.0.6"
}
},
{
"identity" : "swift-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-collections",
"state" : {
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
"version" : "1.3.0"
}
},
{
"identity" : "swift-concurrency-extras",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
"state" : {
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
"version" : "1.3.2"
}
},
{
"identity" : "swift-custom-dump",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-custom-dump",
"state" : {
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
"version" : "1.3.3"
}
},
{
"identity" : "swift-dependencies",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-dependencies",
"state" : {
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
"version" : "1.10.0"
}
},
{
"identity" : "swift-http-structured-headers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-http-structured-headers.git",
"state" : {
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
"version" : "1.5.0"
}
},
{
"identity" : "swift-identified-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-identified-collections",
"state" : {
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
"version" : "1.1.1"
}
},
{
"identity" : "swift-perception",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-perception",
"state" : {
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
"version" : "2.0.9"
}
},
{
"identity" : "swift-sharing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-sharing",
"state" : {
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
"version" : "2.7.4"
}
},
{
"identity" : "swift-snapshot-testing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
"state" : {
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
"version" : "1.18.7"
}
},
{
"identity" : "swift-structured-queries",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-structured-queries",
"state" : {
"revision" : "9c84335373bae5f5c9f7b5f0adf3ae10f2cab5b9",
"version" : "0.25.2"
}
},
{
"identity" : "swift-syntax",
"kind" : "remoteSourceControl",
"location" : "https://github.com/swiftlang/swift-syntax",
"state" : {
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
"version" : "602.0.0"
}
},
{
"identity" : "swift-tagged",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-tagged",
"state" : {
"revision" : "3907a9438f5b57d317001dc99f3f11b46882272b",
"version" : "0.10.0"
}
},
{
"identity" : "xctest-dynamic-overlay",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
"state" : {
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
"version" : "1.7.0"
}
}
],
"version" : 3
}

View File

@@ -0,0 +1,168 @@
{
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
"pins" : [
{
"identity" : "combine-schedulers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/combine-schedulers",
"state" : {
"revision" : "5928286acce13def418ec36d05a001a9641086f2",
"version" : "1.0.3"
}
},
{
"identity" : "grdb.swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/groue/GRDB.swift",
"state" : {
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
"version" : "7.8.0"
}
},
{
"identity" : "sqlite-data",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/sqlite-data",
"state" : {
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
"version" : "1.3.0"
}
},
{
"identity" : "swift-case-paths",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-case-paths",
"state" : {
"revision" : "6989976265be3f8d2b5802c722f9ba168e227c71",
"version" : "1.7.2"
}
},
{
"identity" : "swift-clocks",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-clocks",
"state" : {
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
"version" : "1.0.6"
}
},
{
"identity" : "swift-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-collections",
"state" : {
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
"version" : "1.3.0"
}
},
{
"identity" : "swift-concurrency-extras",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
"state" : {
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
"version" : "1.3.2"
}
},
{
"identity" : "swift-custom-dump",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-custom-dump",
"state" : {
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
"version" : "1.3.3"
}
},
{
"identity" : "swift-dependencies",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-dependencies",
"state" : {
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
"version" : "1.10.0"
}
},
{
"identity" : "swift-http-structured-headers",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-http-structured-headers.git",
"state" : {
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
"version" : "1.5.0"
}
},
{
"identity" : "swift-identified-collections",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-identified-collections",
"state" : {
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
"version" : "1.1.1"
}
},
{
"identity" : "swift-perception",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-perception",
"state" : {
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
"version" : "2.0.9"
}
},
{
"identity" : "swift-sharing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-sharing",
"state" : {
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
"version" : "2.7.4"
}
},
{
"identity" : "swift-snapshot-testing",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
"state" : {
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
"version" : "1.18.7"
}
},
{
"identity" : "swift-structured-queries",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-structured-queries",
"state" : {
"revision" : "1447ea20550f6f02c4b48cc80931c3ed40a9c756",
"version" : "0.25.0"
}
},
{
"identity" : "swift-syntax",
"kind" : "remoteSourceControl",
"location" : "https://github.com/swiftlang/swift-syntax",
"state" : {
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
"version" : "602.0.0"
}
},
{
"identity" : "swift-tagged",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/swift-tagged",
"state" : {
"revision" : "3907a9438f5b57d317001dc99f3f11b46882272b",
"version" : "0.10.0"
}
},
{
"identity" : "xctest-dynamic-overlay",
"kind" : "remoteSourceControl",
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
"state" : {
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
"version" : "1.7.0"
}
}
],
"version" : 3
}

View File

@@ -0,0 +1,177 @@
import SQLiteData
struct Endpoint: Codable {
let url: URL
let status: Status
enum Status: String, Codable {
case loading, valid, error, unknown
}
}
enum StoreKey: Int, CaseIterable, QueryBindable {
// MARK: - Int
case _version = 0
static let version = Typed<Int>(rawValue: ._version)
case _deviceIdHash = 3
static let deviceIdHash = Typed<Int>(rawValue: ._deviceIdHash)
case _backupTriggerDelay = 8
static let backupTriggerDelay = Typed<Int>(rawValue: ._backupTriggerDelay)
case _tilesPerRow = 103
static let tilesPerRow = Typed<Int>(rawValue: ._tilesPerRow)
case _groupAssetsBy = 105
static let groupAssetsBy = Typed<Int>(rawValue: ._groupAssetsBy)
case _uploadErrorNotificationGracePeriod = 106
static let uploadErrorNotificationGracePeriod = Typed<Int>(rawValue: ._uploadErrorNotificationGracePeriod)
case _thumbnailCacheSize = 110
static let thumbnailCacheSize = Typed<Int>(rawValue: ._thumbnailCacheSize)
case _imageCacheSize = 111
static let imageCacheSize = Typed<Int>(rawValue: ._imageCacheSize)
case _albumThumbnailCacheSize = 112
static let albumThumbnailCacheSize = Typed<Int>(rawValue: ._albumThumbnailCacheSize)
case _selectedAlbumSortOrder = 113
static let selectedAlbumSortOrder = Typed<Int>(rawValue: ._selectedAlbumSortOrder)
case _logLevel = 115
static let logLevel = Typed<Int>(rawValue: ._logLevel)
case _mapRelativeDate = 119
static let mapRelativeDate = Typed<Int>(rawValue: ._mapRelativeDate)
case _mapThemeMode = 124
static let mapThemeMode = Typed<Int>(rawValue: ._mapThemeMode)
// MARK: - String
case _assetETag = 1
static let assetETag = Typed<String>(rawValue: ._assetETag)
case _currentUser = 2
static let currentUser = Typed<String>(rawValue: ._currentUser)
case _deviceId = 4
static let deviceId = Typed<String>(rawValue: ._deviceId)
case _accessToken = 11
static let accessToken = Typed<String>(rawValue: ._accessToken)
case _serverEndpoint = 12
static let serverEndpoint = Typed<String>(rawValue: ._serverEndpoint)
case _sslClientCertData = 15
static let sslClientCertData = Typed<String>(rawValue: ._sslClientCertData)
case _sslClientPasswd = 16
static let sslClientPasswd = Typed<String>(rawValue: ._sslClientPasswd)
case _themeMode = 102
static let themeMode = Typed<String>(rawValue: ._themeMode)
case _customHeaders = 127
static let customHeaders = Typed<[String: String]>(rawValue: ._customHeaders)
case _primaryColor = 128
static let primaryColor = Typed<String>(rawValue: ._primaryColor)
case _preferredWifiName = 133
static let preferredWifiName = Typed<String>(rawValue: ._preferredWifiName)
// MARK: - Endpoint
case _externalEndpointList = 135
static let externalEndpointList = Typed<[Endpoint]>(rawValue: ._externalEndpointList)
// MARK: - URL
case _localEndpoint = 134
static let localEndpoint = Typed<URL>(rawValue: ._localEndpoint)
case _serverUrl = 10
static let serverUrl = Typed<URL>(rawValue: ._serverUrl)
// MARK: - Date
case _backupFailedSince = 5
static let backupFailedSince = Typed<Date>(rawValue: ._backupFailedSince)
// MARK: - Bool
case _backupRequireWifi = 6
static let backupRequireWifi = Typed<Bool>(rawValue: ._backupRequireWifi)
case _backupRequireCharging = 7
static let backupRequireCharging = Typed<Bool>(rawValue: ._backupRequireCharging)
case _autoBackup = 13
static let autoBackup = Typed<Bool>(rawValue: ._autoBackup)
case _backgroundBackup = 14
static let backgroundBackup = Typed<Bool>(rawValue: ._backgroundBackup)
case _loadPreview = 100
static let loadPreview = Typed<Bool>(rawValue: ._loadPreview)
case _loadOriginal = 101
static let loadOriginal = Typed<Bool>(rawValue: ._loadOriginal)
case _dynamicLayout = 104
static let dynamicLayout = Typed<Bool>(rawValue: ._dynamicLayout)
case _backgroundBackupTotalProgress = 107
static let backgroundBackupTotalProgress = Typed<Bool>(rawValue: ._backgroundBackupTotalProgress)
case _backgroundBackupSingleProgress = 108
static let backgroundBackupSingleProgress = Typed<Bool>(rawValue: ._backgroundBackupSingleProgress)
case _storageIndicator = 109
static let storageIndicator = Typed<Bool>(rawValue: ._storageIndicator)
case _advancedTroubleshooting = 114
static let advancedTroubleshooting = Typed<Bool>(rawValue: ._advancedTroubleshooting)
case _preferRemoteImage = 116
static let preferRemoteImage = Typed<Bool>(rawValue: ._preferRemoteImage)
case _loopVideo = 117
static let loopVideo = Typed<Bool>(rawValue: ._loopVideo)
case _mapShowFavoriteOnly = 118
static let mapShowFavoriteOnly = Typed<Bool>(rawValue: ._mapShowFavoriteOnly)
case _selfSignedCert = 120
static let selfSignedCert = Typed<Bool>(rawValue: ._selfSignedCert)
case _mapIncludeArchived = 121
static let mapIncludeArchived = Typed<Bool>(rawValue: ._mapIncludeArchived)
case _ignoreIcloudAssets = 122
static let ignoreIcloudAssets = Typed<Bool>(rawValue: ._ignoreIcloudAssets)
case _selectedAlbumSortReverse = 123
static let selectedAlbumSortReverse = Typed<Bool>(rawValue: ._selectedAlbumSortReverse)
case _mapwithPartners = 125
static let mapwithPartners = Typed<Bool>(rawValue: ._mapwithPartners)
case _enableHapticFeedback = 126
static let enableHapticFeedback = Typed<Bool>(rawValue: ._enableHapticFeedback)
case _dynamicTheme = 129
static let dynamicTheme = Typed<Bool>(rawValue: ._dynamicTheme)
case _colorfulInterface = 130
static let colorfulInterface = Typed<Bool>(rawValue: ._colorfulInterface)
case _syncAlbums = 131
static let syncAlbums = Typed<Bool>(rawValue: ._syncAlbums)
case _autoEndpointSwitching = 132
static let autoEndpointSwitching = Typed<Bool>(rawValue: ._autoEndpointSwitching)
case _loadOriginalVideo = 136
static let loadOriginalVideo = Typed<Bool>(rawValue: ._loadOriginalVideo)
case _manageLocalMediaAndroid = 137
static let manageLocalMediaAndroid = Typed<Bool>(rawValue: ._manageLocalMediaAndroid)
case _readonlyModeEnabled = 138
static let readonlyModeEnabled = Typed<Bool>(rawValue: ._readonlyModeEnabled)
case _autoPlayVideo = 139
static let autoPlayVideo = Typed<Bool>(rawValue: ._autoPlayVideo)
case _photoManagerCustomFilter = 1000
static let photoManagerCustomFilter = Typed<Bool>(rawValue: ._photoManagerCustomFilter)
case _betaPromptShown = 1001
static let betaPromptShown = Typed<Bool>(rawValue: ._betaPromptShown)
case _betaTimeline = 1002
static let betaTimeline = Typed<Bool>(rawValue: ._betaTimeline)
case _enableBackup = 1003
static let enableBackup = Typed<Bool>(rawValue: ._enableBackup)
case _useWifiForUploadVideos = 1004
static let useWifiForUploadVideos = Typed<Bool>(rawValue: ._useWifiForUploadVideos)
case _useWifiForUploadPhotos = 1005
static let useWifiForUploadPhotos = Typed<Bool>(rawValue: ._useWifiForUploadPhotos)
case _needBetaMigration = 1006
static let needBetaMigration = Typed<Bool>(rawValue: ._needBetaMigration)
case _shouldResetSync = 1007
static let shouldResetSync = Typed<Bool>(rawValue: ._shouldResetSync)
struct Typed<T>: RawRepresentable {
let rawValue: StoreKey
@_transparent
init(rawValue value: StoreKey) {
self.rawValue = value
}
}
}
enum BackupSelection: Int, QueryBindable {
case selected, none, excluded
}
enum AvatarColor: Int, QueryBindable {
case primary, pink, red, yellow, blue, green, purple, orange, gray, amber
}
enum AlbumUserRole: Int, QueryBindable {
case editor, viewer
}
enum MemoryType: Int, QueryBindable {
case onThisDay
}
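
StoreKey pairs each persisted integer id with a phantom-typed Typed<T> wrapper, so reads and writes stay type-checked while only the raw id reaches SQLite. A hypothetical Kotlin analogue of the same pattern, purely illustrative and not code from the app:

// Hypothetical analogue of StoreKey.Typed: the raw id is what gets persisted,
// the phantom type parameter keeps call sites type-safe without casts.
class TypedKey<T> private constructor(val id: Int) {
    companion object {
        val version = TypedKey<Int>(0)
        val serverUrl = TypedKey<String>(10)
        val autoBackup = TypedKey<Boolean>(13)
    }
}

class TypedStore {
    private val values = mutableMapOf<Int, Any?>()
    fun <T> set(key: TypedKey<T>, value: T) { values[key.id] = value }
    @Suppress("UNCHECKED_CAST")
    fun <T> get(key: TypedKey<T>): T? = values[key.id] as T?
}

fun main() {
    val store = TypedStore()
    store.set(TypedKey.autoBackup, true)
    val enabled: Boolean? = store.get(TypedKey.autoBackup)  // typed read, no cast at the call site
    println(enabled)
}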

View File

@@ -0,0 +1,146 @@
import SQLiteData
enum StoreError: Error {
case invalidJSON(String)
case invalidURL(String)
case encodingFailed
}
protocol StoreConvertible {
associatedtype StorageType
static func fromValue(_ value: StorageType) throws(StoreError) -> Self
static func toValue(_ value: Self) throws(StoreError) -> StorageType
}
extension Int: StoreConvertible {
static func fromValue(_ value: Int) -> Int { value }
static func toValue(_ value: Int) -> Int { value }
}
extension Bool: StoreConvertible {
static func fromValue(_ value: Int) -> Bool { value == 1 }
static func toValue(_ value: Bool) -> Int { value ? 1 : 0 }
}
extension Date: StoreConvertible {
static func fromValue(_ value: Int) -> Date { Date(timeIntervalSince1970: TimeInterval(value) / 1000) }
static func toValue(_ value: Date) -> Int { Int(value.timeIntervalSince1970 * 1000) }
}
extension String: StoreConvertible {
static func fromValue(_ value: String) -> String { value }
static func toValue(_ value: String) -> String { value }
}
extension URL: StoreConvertible {
static func fromValue(_ value: String) throws(StoreError) -> URL {
guard let url = URL(string: value) else {
throw StoreError.invalidURL(value)
}
return url
}
static func toValue(_ value: URL) -> String { value.absoluteString }
}
extension StoreConvertible where Self: Codable, StorageType == String {
static var jsonDecoder: JSONDecoder { JSONDecoder() }
static var jsonEncoder: JSONEncoder { JSONEncoder() }
static func fromValue(_ value: String) throws(StoreError) -> Self {
do {
return try jsonDecoder.decode(Self.self, from: Data(value.utf8))
} catch {
throw StoreError.invalidJSON(value)
}
}
static func toValue(_ value: Self) throws(StoreError) -> String {
let encoded: Data
do {
encoded = try jsonEncoder.encode(value)
} catch {
throw StoreError.encodingFailed
}
guard let string = String(data: encoded, encoding: .utf8) else {
throw StoreError.encodingFailed
}
return string
}
}
extension Array: StoreConvertible where Element: Codable {
typealias StorageType = String
}
extension Dictionary: StoreConvertible where Key == String, Value: Codable {
typealias StorageType = String
}
class StoreRepository {
private let db: DatabasePool
init(db: DatabasePool) {
self.db = db
}
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == Int {
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
return try T.fromValue(value)
}
return nil
}
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == String {
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
return try T.fromValue(value)
}
return nil
}
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == Int {
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
return try T.fromValue(value)
}
return nil
}
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == String {
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
return try T.fromValue(value)
}
return nil
}
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == Int {
let value = try T.toValue(value)
try db.write { conn in
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
}
}
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == String {
let value = try T.toValue(value)
try db.write { conn in
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
}
}
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == Int {
let value = try T.toValue(value)
try await db.write { conn in
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
}
}
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == String {
let value = try T.toValue(value)
try await db.write { conn in
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
}
}
}
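
StoreRepository persists every value in either the intValue or stringValue column: booleans become 0/1 integers and dates become epoch milliseconds, presumably so this Swift layer reads the same rows the Flutter side writes. A hypothetical Kotlin mirror of those two encodings, for illustration only:

import java.util.Date

// Hypothetical mirror of the StoreConvertible encodings above.
fun boolToStored(value: Boolean): Long = if (value) 1L else 0L
fun boolFromStored(value: Long): Boolean = value == 1L

fun dateToStored(value: Date): Long = value.time   // epoch milliseconds
fun dateFromStored(value: Long): Date = Date(value)

fun main() {
    println(boolToStored(true))     // 1
    println(dateFromStored(0L))     // Jan 1 1970 (rendering is locale dependent)
}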

View File

@@ -0,0 +1,237 @@
import GRDB
import SQLiteData
@Table("asset_face_entity")
struct AssetFace {
let id: String
let assetId: String
let personId: String?
let imageWidth: Int
let imageHeight: Int
let boundingBoxX1: Int
let boundingBoxY1: Int
let boundingBoxX2: Int
let boundingBoxY2: Int
let sourceType: String
}
@Table("auth_user_entity")
struct AuthUser {
let id: String
let name: String
let email: String
let isAdmin: Bool
let hasProfileImage: Bool
let profileChangedAt: Date
let avatarColor: AvatarColor
let quotaSizeInBytes: Int
let quotaUsageInBytes: Int
let pinCode: String?
}
@Table("local_album_entity")
struct LocalAlbum {
let id: String
let backupSelection: BackupSelection
let linkedRemoteAlbumId: String?
let marker_: Bool?
let name: String
let isIosSharedAlbum: Bool
let updatedAt: Date
}
@Table("local_album_asset_entity")
struct LocalAlbumAsset {
let id: ID
let marker_: String?
@Selection
struct ID {
let assetId: String
let albumId: String
}
}
@Table("local_asset_entity")
struct LocalAsset {
let id: String
let checksum: String?
let createdAt: Date
let durationInSeconds: Int?
let height: Int?
let isFavorite: Bool
let name: String
let orientation: String
let type: Int
let updatedAt: Date
let width: Int?
}
@Table("memory_asset_entity")
struct MemoryAsset {
let id: ID
@Selection
struct ID {
let assetId: String
let albumId: String
}
}
@Table("memory_entity")
struct Memory {
let id: String
let createdAt: Date
let updatedAt: Date
let deletedAt: Date?
let ownerId: String
let type: MemoryType
let data: String
let isSaved: Bool
let memoryAt: Date
let seenAt: Date?
let showAt: Date?
let hideAt: Date?
}
@Table("partner_entity")
struct Partner {
let id: ID
let inTimeline: Bool
@Selection
struct ID {
let sharedById: String
let sharedWithId: String
}
}
@Table("person_entity")
struct Person {
let id: String
let createdAt: Date
let updatedAt: Date
let ownerId: String
let name: String
let faceAssetId: String?
let isFavorite: Bool
let isHidden: Bool
let color: String?
let birthDate: Date?
}
@Table("remote_album_entity")
struct RemoteAlbum {
let id: String
let createdAt: Date
let description: String?
let isActivityEnabled: Bool
let name: String
let order: Int
let ownerId: String
let thumbnailAssetId: String?
let updatedAt: Date
}
@Table("remote_album_asset_entity")
struct RemoteAlbumAsset {
let id: ID
@Selection
struct ID {
let assetId: String
let albumId: String
}
}
@Table("remote_album_user_entity")
struct RemoteAlbumUser {
let id: ID
let role: AlbumUserRole
@Selection
struct ID {
let albumId: String
let userId: String
}
}
@Table("remote_asset_entity")
struct RemoteAsset {
let id: String
let checksum: String?
let deletedAt: Date?
let isFavorite: Int
let libraryId: String?
let livePhotoVideoId: String?
let localDateTime: Date?
let orientation: String
let ownerId: String
let stackId: String?
let visibility: Int
}
@Table("remote_exif_entity")
struct RemoteExif {
@Column(primaryKey: true)
let assetId: String
let city: String?
let state: String?
let country: String?
let dateTimeOriginal: Date?
let description: String?
let height: Int?
let width: Int?
let exposureTime: String?
let fNumber: Double?
let fileSize: Int?
let focalLength: Double?
let latitude: Double?
let longitude: Double?
let iso: Int?
let make: String?
let model: String?
let lens: String?
let orientation: String?
let timeZone: String?
let rating: Int?
let projectionType: String?
}
@Table("stack_entity")
struct Stack {
let id: String
let createdAt: Date
let updatedAt: Date
let ownerId: String
let primaryAssetId: String
}
@Table("store_entity")
struct Store {
let id: StoreKey
let stringValue: String?
let intValue: Int?
}
@Table("user_entity")
struct User {
let id: String
let name: String
let email: String
let hasProfileImage: Bool
let profileChangedAt: Date
let avatarColor: AvatarColor
}
@Table("user_metadata_entity")
struct UserMetadata {
let id: ID
let value: Data
@Selection
struct ID {
let userId: String
let key: Date
}
}

View File

@@ -364,6 +364,7 @@ protocol NativeSyncApi {
  func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
  func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
  func cancelHashing() throws
+ func getTrashedAssets() throws -> [String: [PlatformAsset]]
}

/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
@@ -532,5 +533,20 @@ class NativeSyncApiSetup {
    } else {
      cancelHashingChannel.setMessageHandler(nil)
    }
+   let getTrashedAssetsChannel = taskQueue == nil
+     ? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
+     : FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
+   if let api = api {
+     getTrashedAssetsChannel.setMessageHandler { _, reply in
+       do {
+         let result = try api.getTrashedAssets()
+         reply(wrapResult(result))
+       } catch {
+         reply(wrapError(error))
+       }
+     }
+   } else {
+     getTrashedAssetsChannel.setMessageHandler(nil)
+   }
  }
}

View File

@@ -3,15 +3,15 @@ import CryptoKit
struct AssetWrapper: Hashable, Equatable {
  let asset: PlatformAsset
  init(with asset: PlatformAsset) {
    self.asset = asset
  }
  func hash(into hasher: inout Hasher) {
    hasher.combine(self.asset.id)
  }
  static func == (lhs: AssetWrapper, rhs: AssetWrapper) -> Bool {
    return lhs.asset.id == rhs.asset.id
  }
@@ -19,31 +19,31 @@ struct AssetWrapper: Hashable, Equatable {
class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
  static let name = "NativeSyncApi"
  static func register(with registrar: any FlutterPluginRegistrar) {
    let instance = NativeSyncApiImpl()
    NativeSyncApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance)
    registrar.publish(instance)
  }
  func detachFromEngine(for registrar: any FlutterPluginRegistrar) {
    super.detachFromEngine()
  }
  private let defaults: UserDefaults
  private let changeTokenKey = "immich:changeToken"
  private let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
  private let recoveredAlbumSubType = 1000000219
  private var hashTask: Task<Void?, Error>?
  private static let hashCancelledCode = "HASH_CANCELLED"
  private static let hashCancelled = Result<[HashResult], Error>.failure(PigeonError(code: hashCancelledCode, message: "Hashing cancelled", details: nil))
  init(with defaults: UserDefaults = .standard) {
    self.defaults = defaults
  }
  @available(iOS 16, *)
  private func getChangeToken() -> PHPersistentChangeToken? {
    guard let data = defaults.data(forKey: changeTokenKey) else {
@@ -51,7 +51,7 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
    }
    return try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: data)
  }
  @available(iOS 16, *)
  private func saveChangeToken(token: PHPersistentChangeToken) -> Void {
    guard let data = try? NSKeyedArchiver.archivedData(withRootObject: token, requiringSecureCoding: true) else {
@@ -59,18 +59,18 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
    }
    defaults.set(data, forKey: changeTokenKey)
  }
  func clearSyncCheckpoint() -> Void {
    defaults.removeObject(forKey: changeTokenKey)
  }
  func checkpointSync() {
    guard #available(iOS 16, *) else {
      return
    }
    saveChangeToken(token: PHPhotoLibrary.shared().currentChangeToken)
  }
  func shouldFullSync() -> Bool {
    guard #available(iOS 16, *),
          PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized,
@@ -78,36 +78,36 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
      // When we do not have access to photo library, older iOS version or No token available, fallback to full sync
      return true
    }
    guard let _ = try? PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken) else {
      // Cannot fetch persistent changes
      return true
    }
    return false
  }
  func getAlbums() throws -> [PlatformAlbum] {
    var albums: [PlatformAlbum] = []
    albumTypes.forEach { type in
      let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
      for i in 0..<collections.count {
        let album = collections.object(at: i)
        // Ignore recovered album
        if(album.assetCollectionSubtype.rawValue == self.recoveredAlbumSubType) {
          continue;
        }
        let options = PHFetchOptions()
        options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
        options.includeHiddenAssets = false
        let assets = getAssetsFromAlbum(in: album, options: options)
        let isCloud = album.assetCollectionSubtype == .albumCloudShared || album.assetCollectionSubtype == .albumMyPhotoStream
        var domainAlbum = PlatformAlbum(
          id: album.localIdentifier,
          name: album.localizedTitle!,
@@ -115,57 +115,57 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
          isCloud: isCloud,
          assetCount: Int64(assets.count)
        )
        if let firstAsset = assets.firstObject {
          domainAlbum.updatedAt = firstAsset.modificationDate.map { Int64($0.timeIntervalSince1970) }
        }
        albums.append(domainAlbum)
      }
    }
    return albums.sorted { $0.id < $1.id }
  }
  func getMediaChanges() throws -> SyncDelta {
    guard #available(iOS 16, *) else {
      throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature requires iOS 16 or later.", details: nil)
    }
    guard PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized else {
      throw PigeonError(code: "NO_AUTH", message: "No photo library access", details: nil)
    }
    guard let storedToken = getChangeToken() else {
      // No token exists, definitely need a full sync
      print("MediaManager::getMediaChanges: No token found")
      throw PigeonError(code: "NO_TOKEN", message: "No stored change token", details: nil)
    }
    let currentToken = PHPhotoLibrary.shared().currentChangeToken
    if storedToken == currentToken {
      return SyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
    }
    do {
      let changes = try PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken)
      var updatedAssets: Set<AssetWrapper> = []
      var deletedAssets: Set<String> = []
      for change in changes {
        guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
        let updated = details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers)
        deletedAssets.formUnion(details.deletedLocalIdentifiers)
        if (updated.isEmpty) { continue }
        let options = PHFetchOptions()
        options.includeHiddenAssets = false
        let result = PHAsset.fetchAssets(withLocalIdentifiers: Array(updated), options: options)
        for i in 0..<result.count {
          let asset = result.object(at: i)
          // Asset wrapper only uses the id for comparison. Multiple change can contain the same asset, skip duplicate changes
          let predicate = PlatformAsset(
            id: asset.localIdentifier,
@@ -178,25 +178,25 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
          if (updatedAssets.contains(AssetWrapper(with: predicate))) {
            continue
          }
          let domainAsset = AssetWrapper(with: asset.toPlatformAsset())
          updatedAssets.insert(domainAsset)
        }
      }
      let updates = Array(updatedAssets.map { $0.asset })
      return SyncDelta(hasChanges: true, updates: updates, deletes: Array(deletedAssets), assetAlbums: buildAssetAlbumsMap(assets: updates))
    }
  }
  private func buildAssetAlbumsMap(assets: Array<PlatformAsset>) -> [String: [String]] {
    guard !assets.isEmpty else {
      return [:]
    }
    var albumAssets: [String: [String]] = [:]
    for type in albumTypes {
      let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
      collections.enumerateObjects { (album, _, _) in
@@ -211,13 +211,13 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
    }
    return albumAssets
  }
  func getAssetIdsForAlbum(albumId: String) throws -> [String] {
    let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
    guard let album = collections.firstObject else {
      return []
    }
    var ids: [String] = []
    let options = PHFetchOptions()
    options.includeHiddenAssets = false
@@ -227,13 +227,13 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
    }
    return ids
  }
  func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64 {
    let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
    guard let album = collections.firstObject else {
      return 0
    }
    let date = NSDate(timeIntervalSince1970: TimeInterval(timestamp))
    let options = PHFetchOptions()
    options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
@@ -241,32 +241,32 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
    let assets = getAssetsFromAlbum(in: album, options: options)
    return Int64(assets.count)
  }
  func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset] {
    let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
    guard let album = collections.firstObject else {
      return []
    }
    let options = PHFetchOptions()
    options.includeHiddenAssets = false
    if(updatedTimeCond != nil) {
      let date = NSDate(timeIntervalSince1970: TimeInterval(updatedTimeCond!))
      options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
    }
    let result = getAssetsFromAlbum(in: album, options: options)
    if(result.count == 0) {
      return []
    }
    var assets: [PlatformAsset] = []
    result.enumerateObjects { (asset, _, _) in
      assets.append(asset.toPlatformAsset())
    }
    return assets
  }
  func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void) {
    if let prevTask = hashTask {
      prevTask.cancel()
@@ -284,11 +284,11 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
      missingAssetIds.remove(asset.localIdentifier)
assets.append(asset) assets.append(asset)
} }
if Task.isCancelled { if Task.isCancelled {
return self?.completeWhenActive(for: completion, with: Self.hashCancelled) return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
} }
await withTaskGroup(of: HashResult?.self) { taskGroup in await withTaskGroup(of: HashResult?.self) { taskGroup in
var results = [HashResult]() var results = [HashResult]()
results.reserveCapacity(assets.count) results.reserveCapacity(assets.count)
@@ -301,28 +301,28 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
return await self.hashAsset(asset, allowNetworkAccess: allowNetworkAccess) return await self.hashAsset(asset, allowNetworkAccess: allowNetworkAccess)
} }
} }
for await result in taskGroup { for await result in taskGroup {
guard let result = result else { guard let result = result else {
return self?.completeWhenActive(for: completion, with: Self.hashCancelled) return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
} }
results.append(result) results.append(result)
} }
for missing in missingAssetIds { for missing in missingAssetIds {
results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil)) results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil))
} }
return self?.completeWhenActive(for: completion, with: .success(results)) return self?.completeWhenActive(for: completion, with: .success(results))
} }
} }
} }
func cancelHashing() { func cancelHashing() {
hashTask?.cancel() hashTask?.cancel()
hashTask = nil hashTask = nil
} }
private func hashAsset(_ asset: PHAsset, allowNetworkAccess: Bool) async -> HashResult? { private func hashAsset(_ asset: PHAsset, allowNetworkAccess: Bool) async -> HashResult? {
class RequestRef { class RequestRef {
var id: PHAssetResourceDataRequestID? var id: PHAssetResourceDataRequestID?
@@ -332,21 +332,21 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
if Task.isCancelled { if Task.isCancelled {
return nil return nil
} }
guard let resource = asset.getResource() else { guard let resource = asset.getResource() else {
return HashResult(assetId: asset.localIdentifier, error: "Cannot get asset resource", hash: nil) return HashResult(assetId: asset.localIdentifier, error: "Cannot get asset resource", hash: nil)
} }
if Task.isCancelled { if Task.isCancelled {
return nil return nil
} }
let options = PHAssetResourceRequestOptions() let options = PHAssetResourceRequestOptions()
options.isNetworkAccessAllowed = allowNetworkAccess options.isNetworkAccessAllowed = allowNetworkAccess
return await withCheckedContinuation { continuation in return await withCheckedContinuation { continuation in
var hasher = Insecure.SHA1() var hasher = Insecure.SHA1()
requestRef.id = PHAssetResourceManager.default().requestData( requestRef.id = PHAssetResourceManager.default().requestData(
for: resource, for: resource,
options: options, options: options,
@@ -377,7 +377,11 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
PHAssetResourceManager.default().cancelDataRequest(requestId) PHAssetResourceManager.default().cancelDataRequest(requestId)
}) })
} }
func getTrashedAssets() throws -> [String: [PlatformAsset]] {
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature is not supported on iOS.", details: nil)
}
private func getAssetsFromAlbum(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> { private func getAssetsFromAlbum(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
// Ensure we actually get all assets for the Recents album // Ensure we actually get all assets for the Recents album
if (album.assetCollectionSubtype == .smartAlbumUserLibrary) { if (album.assetCollectionSubtype == .smartAlbumUserLibrary) {

View File

@@ -32,6 +32,17 @@ platform :ios do
) )
end end
# Helper method to get version from pubspec.yaml
def get_version_from_pubspec
require 'yaml'
pubspec_path = File.join(Dir.pwd, "../..", "pubspec.yaml")
pubspec = YAML.load_file(pubspec_path)
version_string = pubspec['version']
version_string ? version_string.split('+').first : nil
end
# Helper method to configure code signing for all targets # Helper method to configure code signing for all targets
def configure_code_signing(bundle_id_suffix: "") def configure_code_signing(bundle_id_suffix: "")
bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}" bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
@@ -101,7 +112,7 @@ platform :ios do
workspace: "Runner.xcworkspace", workspace: "Runner.xcworkspace",
configuration: configuration, configuration: configuration,
export_method: "app-store", export_method: "app-store",
xcargs: "CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual", xcargs: "-skipMacroValidation CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
export_options: { export_options: {
provisioningProfiles: { provisioningProfiles: {
"#{app_identifier}" => "#{app_identifier} AppStore", "#{app_identifier}" => "#{app_identifier} AppStore",
@@ -158,7 +169,8 @@ platform :ios do
# Build and upload with version number # Build and upload with version number
build_and_upload( build_and_upload(
api_key: api_key, api_key: api_key,
version_number: "2.1.0" version_number: get_version_from_pubspec,
distribute_external: false,
) )
end end
@@ -168,8 +180,9 @@ platform :ios do
path: "./Runner.xcodeproj", path: "./Runner.xcodeproj",
targets: ["Runner", "ShareExtension", "WidgetExtension"] targets: ["Runner", "ShareExtension", "WidgetExtension"]
) )
increment_version_number( increment_version_number(
version_number: "2.2.2" version_number: get_version_from_pubspec
) )
increment_build_number( increment_build_number(
build_number: latest_testflight_build_number + 1, build_number: latest_testflight_build_number + 1,
@@ -182,7 +195,7 @@ platform :ios do
configuration: "Release", configuration: "Release",
export_method: "app-store", export_method: "app-store",
skip_package_ipa: false, skip_package_ipa: false,
xcargs: "-allowProvisioningUpdates", xcargs: "-skipMacroValidation -allowProvisioningUpdates",
export_options: { export_options: {
method: "app-store", method: "app-store",
signingStyle: "automatic", signingStyle: "automatic",
@@ -197,4 +210,37 @@ platform :ios do
) )
end end
desc "iOS Build Only (no TestFlight upload)"
lane :gha_build_only do
# Use the same build process as production, just skip the upload
# This ensures PR builds validate the same way as production builds
# Install provisioning profiles (use development profiles for PR builds)
install_provisioning_profile(path: "profile_dev.mobileprovision")
install_provisioning_profile(path: "profile_dev_share.mobileprovision")
install_provisioning_profile(path: "profile_dev_widget.mobileprovision")
# Configure code signing for dev bundle IDs
configure_code_signing(bundle_id_suffix: "development")
# Build the app (same as gha_testflight_dev but without upload)
build_app(
scheme: "Runner",
workspace: "Runner.xcworkspace",
configuration: "Release",
export_method: "app-store",
skip_package_ipa: true,
xcargs: "-skipMacroValidation CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
export_options: {
provisioningProfiles: {
"#{BASE_BUNDLE_ID}.development" => "#{BASE_BUNDLE_ID}.development AppStore",
"#{BASE_BUNDLE_ID}.development.ShareExtension" => "#{BASE_BUNDLE_ID}.development.ShareExtension AppStore",
"#{BASE_BUNDLE_ID}.development.Widget" => "#{BASE_BUNDLE_ID}.development.Widget AppStore"
},
signingStyle: "manual",
signingCertificate: CODE_SIGN_IDENTITY
}
)
end
end end

View File

@@ -58,3 +58,6 @@ const int kPhotoTabIndex = 0;
const int kSearchTabIndex = 1; const int kSearchTabIndex = 1;
const int kAlbumTabIndex = 2; const int kAlbumTabIndex = 2;
const int kLibraryTabIndex = 3; const int kLibraryTabIndex = 3;
// Workaround for SQLite's variable limit (SQLITE_MAX_VARIABLE_NUMBER = 32766)
const int kDriftMaxChunk = 32000;
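// A minimal usage sketch for kDriftMaxChunk (illustrative only: `forEachIdSlice` and `runSlice`
// are made-up names, and slices() needs `import 'package:collection/collection.dart';`): split a
// large id set so each statement binds at most kDriftMaxChunk variables and therefore stays
// under SQLITE_MAX_VARIABLE_NUMBER.
Future<void> forEachIdSlice(Iterable<String> ids, Future<void> Function(List<String> slice) runSlice) async {
  for (final slice in ids.toSet().slices(kDriftMaxChunk)) {
    await runSlice(slice);
  }
}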

View File

@@ -239,7 +239,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? []; final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
return _ref return _ref
?.read(uploadServiceProvider) ?.read(uploadServiceProvider)
.startBackupWithHttpClient(currentUser.id, networkCapabilities.hasWifi, _cancellationToken); .startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
}, },
(error, stack) { (error, stack) {
dPrint(() => "Error in backup zone $error, $stack"); dPrint(() => "Error in backup zone $error, $stack");

View File

@@ -2,8 +2,10 @@ import 'package:flutter/services.dart';
import 'package:immich_mobile/constants/constants.dart'; import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart'; import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart'; import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart'; import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:logging/logging.dart'; import 'package:logging/logging.dart';
@@ -13,6 +15,7 @@ class HashService {
final int _batchSize; final int _batchSize;
final DriftLocalAlbumRepository _localAlbumRepository; final DriftLocalAlbumRepository _localAlbumRepository;
final DriftLocalAssetRepository _localAssetRepository; final DriftLocalAssetRepository _localAssetRepository;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final NativeSyncApi _nativeSyncApi; final NativeSyncApi _nativeSyncApi;
final bool Function()? _cancelChecker; final bool Function()? _cancelChecker;
final _log = Logger('HashService'); final _log = Logger('HashService');
@@ -20,11 +23,13 @@ class HashService {
HashService({ HashService({
required DriftLocalAlbumRepository localAlbumRepository, required DriftLocalAlbumRepository localAlbumRepository,
required DriftLocalAssetRepository localAssetRepository, required DriftLocalAssetRepository localAssetRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required NativeSyncApi nativeSyncApi, required NativeSyncApi nativeSyncApi,
bool Function()? cancelChecker, bool Function()? cancelChecker,
int? batchSize, int? batchSize,
}) : _localAlbumRepository = localAlbumRepository, }) : _localAlbumRepository = localAlbumRepository,
_localAssetRepository = localAssetRepository, _localAssetRepository = localAssetRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_cancelChecker = cancelChecker, _cancelChecker = cancelChecker,
_nativeSyncApi = nativeSyncApi, _nativeSyncApi = nativeSyncApi,
_batchSize = batchSize ?? kBatchHashFileLimit; _batchSize = batchSize ?? kBatchHashFileLimit;
@@ -49,6 +54,14 @@ class HashService {
await _hashAssets(album, assetsToHash); await _hashAssets(album, assetsToHash);
} }
} }
if (CurrentPlatform.isAndroid && localAlbums.isNotEmpty) {
final backupAlbumIds = localAlbums.map((e) => e.id);
final trashedToHash = await _trashedLocalAssetRepository.getAssetsToHash(backupAlbumIds);
if (trashedToHash.isNotEmpty) {
final pseudoAlbum = LocalAlbum(id: '-pseudoAlbum', name: 'Trash', updatedAt: DateTime.now());
await _hashAssets(pseudoAlbum, trashedToHash, isTrashed: true);
}
}
} on PlatformException catch (e) { } on PlatformException catch (e) {
if (e.code == _kHashCancelledCode) { if (e.code == _kHashCancelledCode) {
_log.warning("Hashing cancelled by platform"); _log.warning("Hashing cancelled by platform");
@@ -65,7 +78,7 @@ class HashService {
/// Processes a list of [LocalAsset]s, storing their hash and updating the assets in the DB /// Processes a list of [LocalAsset]s, storing their hash and updating the assets in the DB
/// with hash for those that were successfully hashed. Hashes are looked up in a table /// with hash for those that were successfully hashed. Hashes are looked up in a table
/// [LocalAssetHashEntity] by local id. Only missing entries are newly hashed and added to the DB. /// [LocalAssetHashEntity] by local id. Only missing entries are newly hashed and added to the DB.
Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash) async { Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash, {bool isTrashed = false}) async {
final toHash = <String, LocalAsset>{}; final toHash = <String, LocalAsset>{};
for (final asset in assetsToHash) { for (final asset in assetsToHash) {
@@ -76,16 +89,16 @@ class HashService {
toHash[asset.id] = asset; toHash[asset.id] = asset;
if (toHash.length == _batchSize) { if (toHash.length == _batchSize) {
await _processBatch(album, toHash); await _processBatch(album, toHash, isTrashed);
toHash.clear(); toHash.clear();
} }
} }
await _processBatch(album, toHash); await _processBatch(album, toHash, isTrashed);
} }
/// Processes a batch of assets. /// Processes a batch of assets.
Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash) async { Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash, bool isTrashed) async {
if (toHash.isEmpty) { if (toHash.isEmpty) {
return; return;
} }
@@ -120,7 +133,10 @@ class HashService {
} }
_log.fine("Hashed ${hashed.length}/${toHash.length} assets"); _log.fine("Hashed ${hashed.length}/${toHash.length} assets");
if (isTrashed) {
await _localAssetRepository.updateHashes(hashed); await _trashedLocalAssetRepository.updateHashes(hashed);
} else {
await _localAssetRepository.updateHashes(hashed);
}
} }
} }
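// Illustrative batching helper mirroring the _hashAssets flow above: buffer up to `batchSize`
// items, flush each full buffer, then flush the remainder at the end (equivalent to the trailing
// _processBatch call). `processInBatches` and `flush` are made-up names for this sketch.
Future<void> processInBatches<T>(Iterable<T> items, int batchSize, Future<void> Function(List<T> batch) flush) async {
  final buffer = <T>[];
  for (final item in items) {
    buffer.add(item);
    if (buffer.length == batchSize) {
      await flush(List.of(buffer));
      buffer.clear();
    }
  }
  if (buffer.isNotEmpty) {
    await flush(buffer);
  }
}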

View File

@@ -4,9 +4,14 @@ import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart'; import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart'; import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart'; import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart'; import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart'; import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/utils/datetime_helpers.dart'; import 'package:immich_mobile/utils/datetime_helpers.dart';
import 'package:immich_mobile/utils/diff.dart'; import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart'; import 'package:logging/logging.dart';
@@ -14,15 +19,34 @@ import 'package:logging/logging.dart';
class LocalSyncService { class LocalSyncService {
final DriftLocalAlbumRepository _localAlbumRepository; final DriftLocalAlbumRepository _localAlbumRepository;
final NativeSyncApi _nativeSyncApi; final NativeSyncApi _nativeSyncApi;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final LocalFilesManagerRepository _localFilesManager;
final StorageRepository _storageRepository;
final Logger _log = Logger("DeviceSyncService"); final Logger _log = Logger("DeviceSyncService");
LocalSyncService({required DriftLocalAlbumRepository localAlbumRepository, required NativeSyncApi nativeSyncApi}) LocalSyncService({
: _localAlbumRepository = localAlbumRepository, required DriftLocalAlbumRepository localAlbumRepository,
_nativeSyncApi = nativeSyncApi; required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required LocalFilesManagerRepository localFilesManager,
required StorageRepository storageRepository,
required NativeSyncApi nativeSyncApi,
}) : _localAlbumRepository = localAlbumRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_localFilesManager = localFilesManager,
_storageRepository = storageRepository,
_nativeSyncApi = nativeSyncApi;
Future<void> sync({bool full = false}) async { Future<void> sync({bool full = false}) async {
final Stopwatch stopwatch = Stopwatch()..start(); final Stopwatch stopwatch = Stopwatch()..start();
try { try {
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
final hasPermission = await _localFilesManager.hasManageMediaPermission();
if (hasPermission) {
await _syncTrashedAssets();
} else {
_log.warning("syncTrashedAssets cannot proceed because MANAGE_MEDIA permission is missing");
}
}
if (full || await _nativeSyncApi.shouldFullSync()) { if (full || await _nativeSyncApi.shouldFullSync()) {
_log.fine("Full sync request from ${full ? "user" : "native"}"); _log.fine("Full sync request from ${full ? "user" : "native"}");
return await fullSync(); return await fullSync();
@@ -69,7 +93,6 @@ class LocalSyncService {
await updateAlbum(dbAlbum, album); await updateAlbum(dbAlbum, album);
} }
} }
await _nativeSyncApi.checkpointSync(); await _nativeSyncApi.checkpointSync();
} catch (e, s) { } catch (e, s) {
_log.severe("Error performing device sync", e, s); _log.severe("Error performing device sync", e, s);
@@ -273,6 +296,48 @@ class LocalSyncService {
bool _albumsEqual(LocalAlbum a, LocalAlbum b) { bool _albumsEqual(LocalAlbum a, LocalAlbum b) {
return a.name == b.name && a.assetCount == b.assetCount && a.updatedAt.isAtSameMomentAs(b.updatedAt); return a.name == b.name && a.assetCount == b.assetCount && a.updatedAt.isAtSameMomentAs(b.updatedAt);
} }
Future<void> _syncTrashedAssets() async {
final trashedAssetMap = await _nativeSyncApi.getTrashedAssets();
await processTrashedAssets(trashedAssetMap);
}
@visibleForTesting
Future<void> processTrashedAssets(Map<String, List<PlatformAsset>> trashedAssetMap) async {
if (trashedAssetMap.isEmpty) {
_log.info("syncTrashedAssets, No trashed assets found");
}
final trashedAssets = trashedAssetMap.cast<String, List<Object?>>().entries.expand(
(entry) => entry.value.cast<PlatformAsset>().toTrashedAssets(entry.key),
);
_log.fine("syncTrashedAssets, trashedAssets: ${trashedAssets.map((e) => e.asset.id)}");
await _trashedLocalAssetRepository.processTrashSnapshot(trashedAssets);
final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
if (assetsToRestore.isNotEmpty) {
final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
} else {
_log.info("syncTrashedAssets, No remote assets found for restoration");
}
final localAssetsToTrash = await _trashedLocalAssetRepository.getToTrash();
if (localAssetsToTrash.isNotEmpty) {
final mediaUrls = await Future.wait(
localAssetsToTrash.values
.expand((e) => e)
.map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
);
_log.info("Moving assets to trash: ${mediaUrls.join(", ")}");
final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
if (result) {
await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
}
} else {
_log.info("syncTrashedAssets, No assets found in backup-enabled albums to move to trash");
}
}
} }
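// Hedged sketch of the per-asset decision that processTrashedAssets above implements via
// getToRestore()/getToTrash(), scoped to assets in backup-selected albums: restore a
// device-trashed asset whose checksum still matches a live remote asset; move a local asset
// to the device trash once its remote copy is deleted. `TrashDecision` and `decideTrashAction`
// are illustrative names only, not part of the app.
enum TrashDecision { restore, moveToTrash, none }

TrashDecision decideTrashAction({
  required bool isInDeviceTrash,
  required bool hasRemoteMatch,
  required bool remoteDeleted,
}) {
  if (isInDeviceTrash && hasRemoteMatch && !remoteDeleted) {
    return TrashDecision.restore;
  }
  if (!isInDeviceTrash && hasRemoteMatch && remoteDeleted) {
    return TrashDecision.moveToTrash;
  }
  return TrashDecision.none;
}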
extension on Iterable<PlatformAlbum> { extension on Iterable<PlatformAlbum> {
@@ -290,20 +355,26 @@ extension on Iterable<PlatformAlbum> {
extension on Iterable<PlatformAsset> { extension on Iterable<PlatformAsset> {
List<LocalAsset> toLocalAssets() { List<LocalAsset> toLocalAssets() {
return map( return map((e) => e.toLocalAsset()).toList();
(e) => LocalAsset( }
id: e.id,
name: e.name, Iterable<TrashedAsset> toTrashedAssets(String albumId) {
checksum: null, return map((e) => (albumId: albumId, asset: e.toLocalAsset()));
type: AssetType.values.elementAtOrNull(e.type) ?? AssetType.other,
createdAt: tryFromSecondsSinceEpoch(e.createdAt, isUtc: true) ?? DateTime.timestamp(),
updatedAt: tryFromSecondsSinceEpoch(e.updatedAt, isUtc: true) ?? DateTime.timestamp(),
width: e.width,
height: e.height,
durationInSeconds: e.durationInSeconds,
orientation: e.orientation,
isFavorite: e.isFavorite,
),
).toList();
} }
} }
extension on PlatformAsset {
LocalAsset toLocalAsset() => LocalAsset(
id: id,
name: name,
checksum: null,
type: AssetType.values.elementAtOrNull(type) ?? AssetType.other,
createdAt: tryFromSecondsSinceEpoch(createdAt, isUtc: true) ?? DateTime.timestamp(),
updatedAt: tryFromSecondsSinceEpoch(updatedAt, isUtc: true) ?? DateTime.timestamp(),
width: width,
height: height,
durationInSeconds: durationInSeconds,
isFavorite: isFavorite,
orientation: orientation,
);
}

View File

@@ -1,8 +1,15 @@
import 'dart:async'; import 'dart:async';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart'; import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:logging/logging.dart'; import 'package:logging/logging.dart';
import 'package:openapi/api.dart'; import 'package:openapi/api.dart';
@@ -11,14 +18,26 @@ class SyncStreamService {
final SyncApiRepository _syncApiRepository; final SyncApiRepository _syncApiRepository;
final SyncStreamRepository _syncStreamRepository; final SyncStreamRepository _syncStreamRepository;
final DriftLocalAssetRepository _localAssetRepository;
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
final LocalFilesManagerRepository _localFilesManager;
final StorageRepository _storageRepository;
final bool Function()? _cancelChecker; final bool Function()? _cancelChecker;
SyncStreamService({ SyncStreamService({
required SyncApiRepository syncApiRepository, required SyncApiRepository syncApiRepository,
required SyncStreamRepository syncStreamRepository, required SyncStreamRepository syncStreamRepository,
required DriftLocalAssetRepository localAssetRepository,
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
required LocalFilesManagerRepository localFilesManager,
required StorageRepository storageRepository,
bool Function()? cancelChecker, bool Function()? cancelChecker,
}) : _syncApiRepository = syncApiRepository, }) : _syncApiRepository = syncApiRepository,
_syncStreamRepository = syncStreamRepository, _syncStreamRepository = syncStreamRepository,
_localAssetRepository = localAssetRepository,
_trashedLocalAssetRepository = trashedLocalAssetRepository,
_localFilesManager = localFilesManager,
_storageRepository = storageRepository,
_cancelChecker = cancelChecker; _cancelChecker = cancelChecker;
bool get isCancelled => _cancelChecker?.call() ?? false; bool get isCancelled => _cancelChecker?.call() ?? false;
@@ -83,7 +102,18 @@ class SyncStreamService {
case SyncEntityType.partnerDeleteV1: case SyncEntityType.partnerDeleteV1:
return _syncStreamRepository.deletePartnerV1(data.cast()); return _syncStreamRepository.deletePartnerV1(data.cast());
case SyncEntityType.assetV1: case SyncEntityType.assetV1:
return _syncStreamRepository.updateAssetsV1(data.cast()); final remoteSyncAssets = data.cast<SyncAssetV1>();
await _syncStreamRepository.updateAssetsV1(remoteSyncAssets);
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
final hasPermission = await _localFilesManager.hasManageMediaPermission();
if (hasPermission) {
await _handleRemoteTrashed(remoteSyncAssets.where((e) => e.deletedAt != null).map((e) => e.checksum));
await _applyRemoteRestoreToLocal();
} else {
_logger.warning("Trashed asset sync cannot proceed because MANAGE_MEDIA permission is missing");
}
}
return;
case SyncEntityType.assetDeleteV1: case SyncEntityType.assetDeleteV1:
return _syncStreamRepository.deleteAssetsV1(data.cast()); return _syncStreamRepository.deleteAssetsV1(data.cast());
case SyncEntityType.assetExifV1: case SyncEntityType.assetExifV1:
@@ -212,4 +242,36 @@ class SyncStreamService {
_logger.severe("Error processing AssetUploadReadyV1 websocket batch events", error, stackTrace); _logger.severe("Error processing AssetUploadReadyV1 websocket batch events", error, stackTrace);
} }
} }
Future<void> _handleRemoteTrashed(Iterable<String> checksums) async {
if (checksums.isEmpty) {
return Future.value();
} else {
final localAssetsToTrash = await _localAssetRepository.getAssetsFromBackupAlbums(checksums);
if (localAssetsToTrash.isNotEmpty) {
final mediaUrls = await Future.wait(
localAssetsToTrash.values
.expand((e) => e)
.map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
);
_logger.info("Moving assets to trash: ${mediaUrls.join(", ")}");
final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
if (result) {
await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
}
} else {
_logger.info("No assets found in backup-enabled albums for checksums: $checksums");
}
}
}
Future<void> _applyRemoteRestoreToLocal() async {
final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
if (assetsToRestore.isNotEmpty) {
final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
} else {
_logger.info("No remote assets found for restoration");
}
}
} }
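// Minimal sketch of the filter feeding _handleRemoteTrashed above: only sync assets the server
// reports as deleted (deletedAt != null) contribute their checksums to the local move-to-trash
// pass. The record parameter type is a stand-in for SyncAssetV1; the function name is made up.
Iterable<String> remotelyTrashedChecksums(Iterable<({String checksum, DateTime? deletedAt})> assets) =>
    assets.where((a) => a.deletedAt != null).map((a) => a.checksum);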

View File

@@ -0,0 +1,40 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)')
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)')
class TrashedLocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
const TrashedLocalAssetEntity();
TextColumn get id => text()();
TextColumn get albumId => text()();
TextColumn get checksum => text().nullable()();
BoolColumn get isFavorite => boolean().withDefault(const Constant(false))();
IntColumn get orientation => integer().withDefault(const Constant(0))();
@override
Set<Column> get primaryKey => {id, albumId};
}
extension TrashedLocalAssetEntityDataDomainExtension on TrashedLocalAssetEntityData {
LocalAsset toLocalAsset() => LocalAsset(
id: id,
name: name,
checksum: checksum,
type: type,
createdAt: createdAt,
updatedAt: updatedAt,
durationInSeconds: durationInSeconds,
isFavorite: isFavorite,
height: height,
width: width,
orientation: orientation,
);
}

File diff suppressed because it is too large

View File

@@ -10,6 +10,7 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart'; import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart'; import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart'; import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory.entity.dart'; import 'package:immich_mobile/infrastructure/entities/memory.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart'; import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart'; import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
@@ -62,6 +63,7 @@ class IsarDatabaseRepository implements IDatabaseRepository {
PersonEntity, PersonEntity,
AssetFaceEntity, AssetFaceEntity,
StoreEntity, StoreEntity,
TrashedLocalAssetEntity,
], ],
include: {'package:immich_mobile/infrastructure/entities/merged_asset.drift'}, include: {'package:immich_mobile/infrastructure/entities/merged_asset.drift'},
) )
@@ -93,7 +95,7 @@ class Drift extends $Drift implements IDatabaseRepository {
} }
@override @override
int get schemaVersion => 12; int get schemaVersion => 13;
@override @override
MigrationStrategy get migration => MigrationStrategy( MigrationStrategy get migration => MigrationStrategy(
@@ -178,6 +180,11 @@ class Drift extends $Drift implements IDatabaseRepository {
); );
} }
}, },
from12To13: (m, v13) async {
await m.create(v13.trashedLocalAssetEntity);
await m.createIndex(v13.idxTrashedLocalAssetChecksum);
await m.createIndex(v13.idxTrashedLocalAssetAlbum);
},
), ),
); );

View File

@@ -37,9 +37,11 @@ import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.da
as i17; as i17;
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart' import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
as i18; as i18;
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart' import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart'
as i19; as i19;
import 'package:drift/internal/modular.dart' as i20; import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
as i20;
import 'package:drift/internal/modular.dart' as i21;
abstract class $Drift extends i0.GeneratedDatabase { abstract class $Drift extends i0.GeneratedDatabase {
$Drift(i0.QueryExecutor e) : super(e); $Drift(i0.QueryExecutor e) : super(e);
@@ -77,9 +79,11 @@ abstract class $Drift extends i0.GeneratedDatabase {
late final i17.$AssetFaceEntityTable assetFaceEntity = i17 late final i17.$AssetFaceEntityTable assetFaceEntity = i17
.$AssetFaceEntityTable(this); .$AssetFaceEntityTable(this);
late final i18.$StoreEntityTable storeEntity = i18.$StoreEntityTable(this); late final i18.$StoreEntityTable storeEntity = i18.$StoreEntityTable(this);
i19.MergedAssetDrift get mergedAssetDrift => i20.ReadDatabaseContainer( late final i19.$TrashedLocalAssetEntityTable trashedLocalAssetEntity = i19
.$TrashedLocalAssetEntityTable(this);
i20.MergedAssetDrift get mergedAssetDrift => i21.ReadDatabaseContainer(
this, this,
).accessor<i19.MergedAssetDrift>(i19.MergedAssetDrift.new); ).accessor<i20.MergedAssetDrift>(i20.MergedAssetDrift.new);
@override @override
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables => Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>(); allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
@@ -108,7 +112,10 @@ abstract class $Drift extends i0.GeneratedDatabase {
personEntity, personEntity,
assetFaceEntity, assetFaceEntity,
storeEntity, storeEntity,
trashedLocalAssetEntity,
i11.idxLatLng, i11.idxLatLng,
i19.idxTrashedLocalAssetChecksum,
i19.idxTrashedLocalAssetAlbum,
]; ];
@override @override
i0.StreamQueryUpdateRules i0.StreamQueryUpdateRules
@@ -336,4 +343,9 @@ class $DriftManager {
i17.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity); i17.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
i18.$$StoreEntityTableTableManager get storeEntity => i18.$$StoreEntityTableTableManager get storeEntity =>
i18.$$StoreEntityTableTableManager(_db, _db.storeEntity); i18.$$StoreEntityTableTableManager(_db, _db.storeEntity);
i19.$$TrashedLocalAssetEntityTableTableManager get trashedLocalAssetEntity =>
i19.$$TrashedLocalAssetEntityTableTableManager(
_db,
_db.trashedLocalAssetEntity,
);
} }

View File

@@ -5037,6 +5037,454 @@ final class Schema12 extends i0.VersionedSchema {
); );
} }
final class Schema13 extends i0.VersionedSchema {
Schema13({required super.database}) : super(version: 13);
@override
late final List<i1.DatabaseSchemaEntity> entities = [
userEntity,
remoteAssetEntity,
stackEntity,
localAssetEntity,
remoteAlbumEntity,
localAlbumEntity,
localAlbumAssetEntity,
idxLocalAssetChecksum,
idxRemoteAssetOwnerChecksum,
uQRemoteAssetsOwnerChecksum,
uQRemoteAssetsOwnerLibraryChecksum,
idxRemoteAssetChecksum,
authUserEntity,
userMetadataEntity,
partnerEntity,
remoteExifEntity,
remoteAlbumAssetEntity,
remoteAlbumUserEntity,
memoryEntity,
memoryAssetEntity,
personEntity,
assetFaceEntity,
storeEntity,
trashedLocalAssetEntity,
idxLatLng,
idxTrashedLocalAssetChecksum,
idxTrashedLocalAssetAlbum,
];
late final Shape20 userEntity = Shape20(
source: i0.VersionedTable(
entityName: 'user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_3,
_column_84,
_column_85,
_column_91,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape17 remoteAssetEntity = Shape17(
source: i0.VersionedTable(
entityName: 'remote_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_13,
_column_14,
_column_15,
_column_16,
_column_17,
_column_18,
_column_19,
_column_20,
_column_21,
_column_86,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape3 stackEntity = Shape3(
source: i0.VersionedTable(
entityName: 'stack_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
attachedDatabase: database,
),
alias: null,
);
late final Shape2 localAssetEntity = Shape2(
source: i0.VersionedTable(
entityName: 'local_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_22,
_column_14,
_column_23,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape9 remoteAlbumEntity = Shape9(
source: i0.VersionedTable(
entityName: 'remote_album_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_56,
_column_9,
_column_5,
_column_15,
_column_57,
_column_58,
_column_59,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape19 localAlbumEntity = Shape19(
source: i0.VersionedTable(
entityName: 'local_album_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_5,
_column_31,
_column_32,
_column_90,
_column_33,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape22 localAlbumAssetEntity = Shape22(
source: i0.VersionedTable(
entityName: 'local_album_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
columns: [_column_34, _column_35, _column_33],
attachedDatabase: database,
),
alias: null,
);
final i1.Index idxLocalAssetChecksum = i1.Index(
'idx_local_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
);
final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
'idx_remote_asset_owner_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
);
final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
'UQ_remote_assets_owner_checksum',
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
);
final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
'UQ_remote_assets_owner_library_checksum',
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
);
final i1.Index idxRemoteAssetChecksum = i1.Index(
'idx_remote_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
);
late final Shape21 authUserEntity = Shape21(
source: i0.VersionedTable(
entityName: 'auth_user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_3,
_column_2,
_column_84,
_column_85,
_column_92,
_column_93,
_column_7,
_column_94,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape4 userMetadataEntity = Shape4(
source: i0.VersionedTable(
entityName: 'user_metadata_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(user_id, "key")'],
columns: [_column_25, _column_26, _column_27],
attachedDatabase: database,
),
alias: null,
);
late final Shape5 partnerEntity = Shape5(
source: i0.VersionedTable(
entityName: 'partner_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
columns: [_column_28, _column_29, _column_30],
attachedDatabase: database,
),
alias: null,
);
late final Shape8 remoteExifEntity = Shape8(
source: i0.VersionedTable(
entityName: 'remote_exif_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id)'],
columns: [
_column_36,
_column_37,
_column_38,
_column_39,
_column_40,
_column_41,
_column_11,
_column_10,
_column_42,
_column_43,
_column_44,
_column_45,
_column_46,
_column_47,
_column_48,
_column_49,
_column_50,
_column_51,
_column_52,
_column_53,
_column_54,
_column_55,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape7 remoteAlbumAssetEntity = Shape7(
source: i0.VersionedTable(
entityName: 'remote_album_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
columns: [_column_36, _column_60],
attachedDatabase: database,
),
alias: null,
);
late final Shape10 remoteAlbumUserEntity = Shape10(
source: i0.VersionedTable(
entityName: 'remote_album_user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
columns: [_column_60, _column_25, _column_61],
attachedDatabase: database,
),
alias: null,
);
late final Shape11 memoryEntity = Shape11(
source: i0.VersionedTable(
entityName: 'memory_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_9,
_column_5,
_column_18,
_column_15,
_column_8,
_column_62,
_column_63,
_column_64,
_column_65,
_column_66,
_column_67,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape12 memoryAssetEntity = Shape12(
source: i0.VersionedTable(
entityName: 'memory_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
columns: [_column_36, _column_68],
attachedDatabase: database,
),
alias: null,
);
late final Shape14 personEntity = Shape14(
source: i0.VersionedTable(
entityName: 'person_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_9,
_column_5,
_column_15,
_column_1,
_column_69,
_column_71,
_column_72,
_column_73,
_column_74,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape15 assetFaceEntity = Shape15(
source: i0.VersionedTable(
entityName: 'asset_face_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_36,
_column_76,
_column_77,
_column_78,
_column_79,
_column_80,
_column_81,
_column_82,
_column_83,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape18 storeEntity = Shape18(
source: i0.VersionedTable(
entityName: 'store_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [_column_87, _column_88, _column_89],
attachedDatabase: database,
),
alias: null,
);
late final Shape23 trashedLocalAssetEntity = Shape23(
source: i0.VersionedTable(
entityName: 'trashed_local_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id, album_id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_95,
_column_22,
_column_14,
_column_23,
],
attachedDatabase: database,
),
alias: null,
);
final i1.Index idxLatLng = i1.Index(
'idx_lat_lng',
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
);
final i1.Index idxTrashedLocalAssetChecksum = i1.Index(
'idx_trashed_local_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)',
);
final i1.Index idxTrashedLocalAssetAlbum = i1.Index(
'idx_trashed_local_asset_album',
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)',
);
}
class Shape23 extends i0.VersionedTable {
Shape23({required super.source, required super.alias}) : super.aliased();
i1.GeneratedColumn<String> get name =>
columnsByName['name']! as i1.GeneratedColumn<String>;
i1.GeneratedColumn<int> get type =>
columnsByName['type']! as i1.GeneratedColumn<int>;
i1.GeneratedColumn<DateTime> get createdAt =>
columnsByName['created_at']! as i1.GeneratedColumn<DateTime>;
i1.GeneratedColumn<DateTime> get updatedAt =>
columnsByName['updated_at']! as i1.GeneratedColumn<DateTime>;
i1.GeneratedColumn<int> get width =>
columnsByName['width']! as i1.GeneratedColumn<int>;
i1.GeneratedColumn<int> get height =>
columnsByName['height']! as i1.GeneratedColumn<int>;
i1.GeneratedColumn<int> get durationInSeconds =>
columnsByName['duration_in_seconds']! as i1.GeneratedColumn<int>;
i1.GeneratedColumn<String> get id =>
columnsByName['id']! as i1.GeneratedColumn<String>;
i1.GeneratedColumn<String> get albumId =>
columnsByName['album_id']! as i1.GeneratedColumn<String>;
i1.GeneratedColumn<String> get checksum =>
columnsByName['checksum']! as i1.GeneratedColumn<String>;
i1.GeneratedColumn<bool> get isFavorite =>
columnsByName['is_favorite']! as i1.GeneratedColumn<bool>;
i1.GeneratedColumn<int> get orientation =>
columnsByName['orientation']! as i1.GeneratedColumn<int>;
}
i1.GeneratedColumn<String> _column_95(String aliasedName) =>
i1.GeneratedColumn<String>(
'album_id',
aliasedName,
false,
type: i1.DriftSqlType.string,
);
i0.MigrationStepWithVersion migrationSteps({ i0.MigrationStepWithVersion migrationSteps({
required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2, required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3, required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
@@ -5049,6 +5497,7 @@ i0.MigrationStepWithVersion migrationSteps({
required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10, required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11, required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12, required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
}) { }) {
return (currentVersion, database) async { return (currentVersion, database) async {
switch (currentVersion) { switch (currentVersion) {
@@ -5107,6 +5556,11 @@ i0.MigrationStepWithVersion migrationSteps({
final migrator = i1.Migrator(database, schema); final migrator = i1.Migrator(database, schema);
await from11To12(migrator, schema); await from11To12(migrator, schema);
return 12; return 12;
case 12:
final schema = Schema13(database: database);
final migrator = i1.Migrator(database, schema);
await from12To13(migrator, schema);
return 13;
default: default:
throw ArgumentError.value('Unknown migration from $currentVersion'); throw ArgumentError.value('Unknown migration from $currentVersion');
} }
@@ -5125,6 +5579,7 @@ i1.OnUpgrade stepByStep({
required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10, required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11, required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12, required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
}) => i0.VersionedSchema.stepByStepHelper( }) => i0.VersionedSchema.stepByStepHelper(
step: migrationSteps( step: migrationSteps(
from1To2: from1To2, from1To2: from1To2,
@@ -5138,5 +5593,6 @@ i1.OnUpgrade stepByStep({
from9To10: from9To10, from9To10: from9To10,
from10To11: from10To11, from10To11: from10To11,
from11To12: from11To12, from11To12: from11To12,
from12To13: from12To13,
), ),
); );

View File

@@ -1,4 +1,6 @@
import 'package:collection/collection.dart';
import 'package:drift/drift.dart'; import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart'; import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart'; import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart'; import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
@@ -8,6 +10,7 @@ import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
class DriftLocalAssetRepository extends DriftDatabaseRepository { class DriftLocalAssetRepository extends DriftDatabaseRepository {
final Drift _db; final Drift _db;
const DriftLocalAssetRepository(this._db) : super(_db); const DriftLocalAssetRepository(this._db) : super(_db);
SingleOrNullSelectable<LocalAsset?> _assetSelectable(String id) { SingleOrNullSelectable<LocalAsset?> _assetSelectable(String id) {
@@ -95,4 +98,32 @@ class DriftLocalAssetRepository extends DriftDatabaseRepository {
} }
return query.map((localAlbum) => localAlbum.toDto()).get(); return query.map((localAlbum) => localAlbum.toDto()).get();
} }
Future<Map<String, List<LocalAsset>>> getAssetsFromBackupAlbums(Iterable<String> checksums) async {
if (checksums.isEmpty) {
return {};
}
final result = <String, List<LocalAsset>>{};
for (final slice in checksums.toSet().slices(kDriftMaxChunk)) {
final rows =
await (_db.select(_db.localAlbumAssetEntity).join([
innerJoin(_db.localAlbumEntity, _db.localAlbumAssetEntity.albumId.equalsExp(_db.localAlbumEntity.id)),
innerJoin(_db.localAssetEntity, _db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id)),
])..where(
_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected) &
_db.localAssetEntity.checksum.isIn(slice),
))
.get();
for (final row in rows) {
final albumId = row.readTable(_db.localAlbumAssetEntity).albumId;
final assetData = row.readTable(_db.localAssetEntity);
final asset = assetData.toDto();
(result[albumId] ??= <LocalAsset>[]).add(asset);
}
}
return result;
}
} }

View File

@@ -12,6 +12,7 @@ import 'package:maplibre_gl/maplibre_gl.dart';
class RemoteAssetRepository extends DriftDatabaseRepository { class RemoteAssetRepository extends DriftDatabaseRepository {
final Drift _db; final Drift _db;
const RemoteAssetRepository(this._db) : super(_db); const RemoteAssetRepository(this._db) : super(_db);
/// For testing purposes /// For testing purposes

View File

@@ -0,0 +1,252 @@
import 'package:collection/collection.dart';
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
typedef TrashedAsset = ({String albumId, LocalAsset asset});
class DriftTrashedLocalAssetRepository extends DriftDatabaseRepository {
final Drift _db;
const DriftTrashedLocalAssetRepository(this._db) : super(_db);
Future<void> updateHashes(Map<String, String> hashes) {
if (hashes.isEmpty) {
return Future.value();
}
return _db.batch((batch) async {
for (final entry in hashes.entries) {
batch.update(
_db.trashedLocalAssetEntity,
TrashedLocalAssetEntityCompanion(checksum: Value(entry.value)),
where: (e) => e.id.equals(entry.key),
);
}
});
}
Future<List<LocalAsset>> getAssetsToHash(Iterable<String> albumIds) {
final query = _db.trashedLocalAssetEntity.select()..where((r) => r.albumId.isIn(albumIds) & r.checksum.isNull());
return query.map((row) => row.toLocalAsset()).get();
}
Future<Iterable<LocalAsset>> getToRestore() async {
final selectedAlbumIds = (_db.selectOnly(_db.localAlbumEntity)
..addColumns([_db.localAlbumEntity.id])
..where(_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected)));
final rows =
await (_db.select(_db.trashedLocalAssetEntity).join([
innerJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.checksum.equalsExp(_db.trashedLocalAssetEntity.checksum),
),
])..where(
_db.trashedLocalAssetEntity.albumId.isInQuery(selectedAlbumIds) &
_db.remoteAssetEntity.deletedAt.isNull(),
))
.get();
return rows.map((result) => result.readTable(_db.trashedLocalAssetEntity).toLocalAsset());
}
/// Applies the resulting snapshot of trashed assets:
/// - upserts incoming rows
/// - deletes rows that are not present in the snapshot
Future<void> processTrashSnapshot(Iterable<TrashedAsset> trashedAssets) async {
if (trashedAssets.isEmpty) {
await _db.delete(_db.trashedLocalAssetEntity).go();
return;
}
final assetIds = trashedAssets.map((e) => e.asset.id).toSet();
Map<String, String> localChecksumById = await _getCachedChecksums(assetIds);
return _db.transaction(() async {
await _db.batch((batch) {
for (final item in trashedAssets) {
final effectiveChecksum = localChecksumById[item.asset.id] ?? item.asset.checksum;
final companion = TrashedLocalAssetEntityCompanion.insert(
id: item.asset.id,
albumId: item.albumId,
checksum: Value(effectiveChecksum),
name: item.asset.name,
type: item.asset.type,
createdAt: Value(item.asset.createdAt),
updatedAt: Value(item.asset.updatedAt),
width: Value(item.asset.width),
height: Value(item.asset.height),
durationInSeconds: Value(item.asset.durationInSeconds),
isFavorite: Value(item.asset.isFavorite),
orientation: Value(item.asset.orientation),
);
batch.insert<$TrashedLocalAssetEntityTable, TrashedLocalAssetEntityData>(
_db.trashedLocalAssetEntity,
companion,
onConflict: DoUpdate((_) => companion, where: (old) => old.updatedAt.isNotValue(item.asset.updatedAt)),
);
}
});
if (assetIds.length <= kDriftMaxChunk) {
await (_db.delete(_db.trashedLocalAssetEntity)..where((row) => row.id.isNotIn(assetIds))).go();
} else {
final existingIds = await (_db.selectOnly(
_db.trashedLocalAssetEntity,
)..addColumns([_db.trashedLocalAssetEntity.id])).map((r) => r.read(_db.trashedLocalAssetEntity.id)!).get();
final idToDelete = existingIds.where((id) => !assetIds.contains(id));
for (final slice in idToDelete.slices(kDriftMaxChunk)) {
await (_db.delete(_db.trashedLocalAssetEntity)..where((t) => t.id.isIn(slice))).go();
}
}
});
}
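// The reconciliation above, sketched on in-memory maps (illustrative only; `knownChecksums`
// stands in for the checksums _getCachedChecksums resolves by asset id): every snapshot row is
// upserted, a checksum already known for that id is preferred over the incoming value, and ids
// no longer present in the snapshot simply drop out of the result.
Map<String, String?> reconcileTrashSnapshotSketch(Map<String, String?> snapshot, Map<String, String> knownChecksums) {
  final next = <String, String?>{};
  snapshot.forEach((id, incomingChecksum) {
    next[id] = knownChecksums[id] ?? incomingChecksum;
  });
  return next;
}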
Stream<int> watchCount() {
return (_db.selectOnly(_db.trashedLocalAssetEntity)..addColumns([_db.trashedLocalAssetEntity.id.count()]))
.watchSingle()
.map((row) => row.read<int>(_db.trashedLocalAssetEntity.id.count()) ?? 0);
}
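/// Emits the number of trashed local assets that already have a checksum.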
Stream<int> watchHashedCount() {
return (_db.selectOnly(_db.trashedLocalAssetEntity)
..addColumns([_db.trashedLocalAssetEntity.id.count()])
..where(_db.trashedLocalAssetEntity.checksum.isNotNull()))
.watchSingle()
.map((row) => row.read<int>(_db.trashedLocalAssetEntity.id.count()) ?? 0);
}
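/// Moves the given assets (grouped by album id) into the trashed table and
/// removes them from the local asset table.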
Future<void> trashLocalAsset(Map<String, List<LocalAsset>> assetsByAlbums) async {
if (assetsByAlbums.isEmpty) {
return;
}
final companions = <TrashedLocalAssetEntityCompanion>[];
final idToDelete = <String>{};
for (final entry in assetsByAlbums.entries) {
for (final asset in entry.value) {
idToDelete.add(asset.id);
companions.add(
TrashedLocalAssetEntityCompanion(
id: Value(asset.id),
name: Value(asset.name),
albumId: Value(entry.key),
checksum: Value(asset.checksum),
type: Value(asset.type),
width: Value(asset.width),
height: Value(asset.height),
durationInSeconds: Value(asset.durationInSeconds),
isFavorite: Value(asset.isFavorite),
orientation: Value(asset.orientation),
createdAt: Value(asset.createdAt),
updatedAt: Value(asset.updatedAt),
),
);
}
}
await _db.transaction(() async {
for (final companion in companions) {
await _db.into(_db.trashedLocalAssetEntity).insertOnConflictUpdate(companion);
}
for (final slice in idToDelete.slices(kDriftMaxChunk)) {
await (_db.delete(_db.localAssetEntity)..where((t) => t.id.isIn(slice))).go();
}
});
}
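/// Moves the trashed rows with the given ids back into the local asset table
/// and removes them from the trashed table.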
Future<void> applyRestoredAssets(List<String> idList) async {
if (idList.isEmpty) {
return;
}
final trashedAssets = <TrashedLocalAssetEntityData>[];
for (final slice in idList.slices(kDriftMaxChunk)) {
final q = _db.select(_db.trashedLocalAssetEntity)..where((t) => t.id.isIn(slice));
trashedAssets.addAll(await q.get());
}
if (trashedAssets.isEmpty) {
return;
}
final companions = trashedAssets.map((e) {
return LocalAssetEntityCompanion.insert(
id: e.id,
name: e.name,
type: e.type,
createdAt: Value(e.createdAt),
updatedAt: Value(e.updatedAt),
width: Value(e.width),
height: Value(e.height),
durationInSeconds: Value(e.durationInSeconds),
checksum: Value(e.checksum),
isFavorite: Value(e.isFavorite),
orientation: Value(e.orientation),
);
});
await _db.transaction(() async {
for (final companion in companions) {
await _db.into(_db.localAssetEntity).insertOnConflictUpdate(companion);
}
for (final slice in idList.slices(kDriftMaxChunk)) {
await (_db.delete(_db.trashedLocalAssetEntity)..where((t) => t.id.isIn(slice))).go();
}
});
}
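/// Returns, grouped by album id, the local assets from backup-selected albums
/// whose remote counterpart has been trashed (deletedAt is set).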
Future<Map<String, List<LocalAsset>>> getToTrash() async {
final result = <String, List<LocalAsset>>{};
final rows =
await (_db.select(_db.localAlbumAssetEntity).join([
innerJoin(_db.localAlbumEntity, _db.localAlbumAssetEntity.albumId.equalsExp(_db.localAlbumEntity.id)),
innerJoin(_db.localAssetEntity, _db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id)),
leftOuterJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.checksum.equalsExp(_db.localAssetEntity.checksum),
),
])..where(
_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected) &
_db.remoteAssetEntity.deletedAt.isNotNull(),
))
.get();
for (final row in rows) {
final albumId = row.readTable(_db.localAlbumAssetEntity).albumId;
final asset = row.readTable(_db.localAssetEntity).toDto();
(result[albumId] ??= <LocalAsset>[]).add(asset);
}
return result;
}
// Attempts to reuse checksums already stored for the matching local assets so they do not need to be re-hashed.
Future<Map<String, String>> _getCachedChecksums(Set<String> assetIds) async {
final localChecksumById = <String, String>{};
for (final slice in assetIds.slices(kDriftMaxChunk)) {
final rows =
await (_db.selectOnly(_db.localAssetEntity)
..where(_db.localAssetEntity.id.isIn(slice) & _db.localAssetEntity.checksum.isNotNull())
..addColumns([_db.localAssetEntity.id, _db.localAssetEntity.checksum]))
.get();
for (final r in rows) {
localChecksumById[r.read(_db.localAssetEntity.id)!] = r.read(_db.localAssetEntity.checksum)!;
}
}
return localChecksumById;
}
}

View File

@@ -65,7 +65,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
if (Store.isBetaTimelineEnabled) { if (Store.isBetaTimelineEnabled) {
bool syncSuccess = false; bool syncSuccess = false;
await Future.wait([ await Future.wait([
backgroundManager.syncLocal(), backgroundManager.syncLocal(full: true),
backgroundManager.syncRemote().then((success) => syncSuccess = success), backgroundManager.syncRemote().then((success) => syncSuccess = success),
]); ]);

View File

@@ -562,4 +562,32 @@ class NativeSyncApi {
return; return;
} }
} }
Future<Map<String, List<PlatformAsset>>> getTrashedAssets() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else if (pigeonVar_replyList[0] == null) {
throw PlatformException(
code: 'null-error',
message: 'Host platform returned null value for non-null return value.',
);
} else {
return (pigeonVar_replyList[0] as Map<Object?, Object?>?)!.cast<String, List<PlatformAsset>>();
}
}
} }

View File

@@ -3,21 +3,13 @@ import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store; import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart'; import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart'; import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart'; import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/datetime_extensions.dart'; import 'package:immich_mobile/widgets/activities/comment_bubble.dart';
import 'package:immich_mobile/models/activities/activity.model.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/like_activity_action_button.widget.dart'; import 'package:immich_mobile/presentation/widgets/action_buttons/like_activity_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/album/drift_activity_text_field.dart'; import 'package:immich_mobile/presentation/widgets/album/drift_activity_text_field.dart';
import 'package:immich_mobile/providers/activity.provider.dart'; import 'package:immich_mobile/providers/activity.provider.dart';
import 'package:immich_mobile/providers/activity_service.provider.dart';
import 'package:immich_mobile/providers/image/immich_remote_thumbnail_provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart'; import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/widgets/activities/dismissible_activity.dart';
import 'package:immich_mobile/widgets/common/user_circle_avatar.dart';
@RoutePage() @RoutePage()
class DriftActivitiesPage extends HookConsumerWidget { class DriftActivitiesPage extends HookConsumerWidget {
@@ -27,10 +19,8 @@ class DriftActivitiesPage extends HookConsumerWidget {
@override @override
Widget build(BuildContext context, WidgetRef ref) { Widget build(BuildContext context, WidgetRef ref) {
final asset = ref.read(currentAssetNotifier) as RemoteAsset?; final activityNotifier = ref.read(albumActivityProvider(album.id).notifier);
final activities = ref.watch(albumActivityProvider(album.id));
final activityNotifier = ref.read(albumActivityProvider(album.id, asset?.id).notifier);
final activities = ref.watch(albumActivityProvider(album.id, asset?.id));
final listViewScrollController = useScrollController(); final listViewScrollController = useScrollController();
void scrollToBottom() { void scrollToBottom() {
@@ -46,7 +36,7 @@ class DriftActivitiesPage extends HookConsumerWidget {
overrides: [currentRemoteAlbumScopedProvider.overrideWithValue(album)], overrides: [currentRemoteAlbumScopedProvider.overrideWithValue(album)],
child: Scaffold( child: Scaffold(
appBar: AppBar( appBar: AppBar(
title: asset == null ? Text(album.name) : null, title: Text(album.name),
actions: [const LikeActivityActionButton(menuItem: true)], actions: [const LikeActivityActionButton(menuItem: true)],
actionsPadding: const EdgeInsets.only(right: 8), actionsPadding: const EdgeInsets.only(right: 8),
), ),
@@ -57,7 +47,7 @@ class DriftActivitiesPage extends HookConsumerWidget {
activityWidgets.add( activityWidgets.add(
Padding( Padding(
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 6), padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 6),
child: _CommentBubble(activity: activity), child: CommentBubble(activity: activity),
), ),
); );
} }
@@ -91,139 +81,3 @@ class DriftActivitiesPage extends HookConsumerWidget {
); );
} }
} }
class _CommentBubble extends ConsumerWidget {
final Activity activity;
const _CommentBubble({required this.activity});
@override
Widget build(BuildContext context, WidgetRef ref) {
final user = ref.watch(currentUserProvider);
final album = ref.watch(currentRemoteAlbumProvider)!;
final isOwn = activity.user.id == user?.id;
final canDelete = isOwn || album.ownerId == user?.id;
final hasAsset = activity.assetId != null && activity.assetId!.isNotEmpty;
final isLike = activity.type == ActivityType.like;
final bgColor = isOwn ? context.colorScheme.primaryContainer : context.colorScheme.surfaceContainer;
final activityNotifier = ref.read(albumActivityProvider(album.id, activity.assetId).notifier);
Future<void> openAssetViewer() async {
final activityService = ref.read(activityServiceProvider);
final route = await activityService.buildAssetViewerRoute(activity.assetId!, ref);
if (route != null) await context.pushRoute(route);
}
Widget avatar() {
if (isOwn) {
return const SizedBox.shrink();
}
return UserCircleAvatar(user: activity.user, size: 28, radius: 14);
}
Widget? thumbnail() {
if (!hasAsset) {
return null;
}
return ConstrainedBox(
constraints: const BoxConstraints(maxWidth: 150, maxHeight: 150),
child: Stack(
children: [
GestureDetector(
onTap: openAssetViewer,
child: ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(10)),
child: Image(
image: ImmichRemoteThumbnailProvider(assetId: activity.assetId!),
fit: BoxFit.cover,
),
),
),
if (isLike)
Positioned(
right: 6,
bottom: 6,
child: Container(
padding: const EdgeInsets.all(4),
decoration: BoxDecoration(color: Colors.white.withValues(alpha: 0.7), shape: BoxShape.circle),
child: Icon(Icons.favorite, color: Colors.red[600], size: 18),
),
),
],
),
);
}
// Likes Album widget (for likes without asset)
Widget? likesToAlbum() {
if (!isLike || hasAsset) {
return null;
}
return Container(
padding: const EdgeInsets.all(8),
decoration: BoxDecoration(color: Colors.white.withValues(alpha: 0.7), shape: BoxShape.circle),
child: Icon(Icons.favorite, color: Colors.red[600], size: 18),
);
}
Widget? commentBubble() {
if (activity.comment == null || activity.comment!.isEmpty) {
return null;
}
return ConstrainedBox(
constraints: BoxConstraints(maxWidth: MediaQuery.of(context).size.width * 0.5),
child: Container(
padding: const EdgeInsets.all(10),
decoration: BoxDecoration(color: bgColor, borderRadius: const BorderRadius.all(Radius.circular(12))),
child: Text(
activity.comment ?? '',
style: context.textTheme.bodyLarge?.copyWith(color: context.colorScheme.onSurface),
),
),
);
}
// Combined content widgets
final List<Widget> contentChildren = [thumbnail(), likesToAlbum(), commentBubble()].whereType<Widget>().toList();
return DismissibleActivity(
onDismiss: canDelete ? (id) async => await activityNotifier.removeActivity(id) : null,
activity.id,
Align(
alignment: isOwn ? Alignment.centerRight : Alignment.centerLeft,
child: ConstrainedBox(
constraints: BoxConstraints(maxWidth: MediaQuery.of(context).size.width * 0.86),
child: Container(
margin: const EdgeInsets.symmetric(vertical: 6, horizontal: 10),
child: Row(
mainAxisSize: MainAxisSize.min,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
if (!isOwn) ...[avatar(), const SizedBox(width: 8)],
// Content column
Column(
crossAxisAlignment: isOwn ? CrossAxisAlignment.end : CrossAxisAlignment.start,
children: [
...contentChildren.map((w) => Padding(padding: const EdgeInsets.only(bottom: 8.0), child: w)),
Text(
'${activity.user.name}${activity.createdAt.timeAgo()}',
style: context.textTheme.labelMedium?.copyWith(
color: context.colorScheme.onSurface.withValues(alpha: 0.6),
),
),
],
),
if (isOwn) const SizedBox(width: 8),
],
),
),
),
),
);
}
}

View File

@@ -15,6 +15,7 @@ import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/models/search/search_filter.model.dart'; import 'package:immich_mobile/models/search/search_filter.model.dart';
import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart'; import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/general_bottom_sheet.widget.dart'; import 'package:immich_mobile/presentation/widgets/bottom_sheet/general_bottom_sheet.widget.dart';
import 'package:immich_mobile/presentation/widgets/search/quick_date_picker.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart'; import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart'; import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/search/search_input_focus.provider.dart'; import 'package:immich_mobile/providers/search/search_input_focus.provider.dart';
@@ -54,6 +55,7 @@ class DriftSearchPage extends HookConsumerWidget {
); );
final previousFilter = useState<SearchFilter?>(null); final previousFilter = useState<SearchFilter?>(null);
final dateInputFilter = useState<DateFilterInputModel?>(null);
final peopleCurrentFilterWidget = useState<Widget?>(null); final peopleCurrentFilterWidget = useState<Widget?>(null);
final dateRangeCurrentFilterWidget = useState<Widget?>(null); final dateRangeCurrentFilterWidget = useState<Widget?>(null);
@@ -245,19 +247,54 @@ class DriftSearchPage extends HookConsumerWidget {
); );
} }
datePicked(DateFilterInputModel? selectedDate) {
dateInputFilter.value = selectedDate;
if (selectedDate == null) {
filter.value = filter.value.copyWith(date: SearchDateFilter());
dateRangeCurrentFilterWidget.value = null;
unawaited(search());
return;
}
final date = selectedDate.asDateTimeRange();
filter.value = filter.value.copyWith(
date: SearchDateFilter(
takenAfter: date.start,
takenBefore: date.end.add(const Duration(hours: 23, minutes: 59, seconds: 59)),
),
);
dateRangeCurrentFilterWidget.value = Text(
selectedDate.asHumanReadable(context),
style: context.textTheme.labelLarge,
);
unawaited(search());
}
showDatePicker() async { showDatePicker() async {
final firstDate = DateTime(1900); final firstDate = DateTime(1900);
final lastDate = DateTime.now(); final lastDate = DateTime.now();
var dateRange = DateTimeRange(
start: filter.value.date.takenAfter ?? lastDate,
end: filter.value.date.takenBefore ?? lastDate,
);
// datePicked() may push the end date past now, which makes the date range picker fail an assertion.
// Fix up the end date so that it is at most now.
if (dateRange.end.isAfter(lastDate)) {
dateRange = DateTimeRange(start: dateRange.start, end: lastDate);
}
final date = await showDateRangePicker( final date = await showDateRangePicker(
context: context, context: context,
firstDate: firstDate, firstDate: firstDate,
lastDate: lastDate, lastDate: lastDate,
currentDate: DateTime.now(), currentDate: DateTime.now(),
initialDateRange: DateTimeRange( initialDateRange: dateRange,
start: filter.value.date.takenAfter ?? lastDate,
end: filter.value.date.takenBefore ?? lastDate,
),
helpText: 'search_filter_date_title'.t(context: context), helpText: 'search_filter_date_title'.t(context: context),
cancelText: 'cancel'.t(context: context), cancelText: 'cancel'.t(context: context),
confirmText: 'select'.t(context: context), confirmText: 'select'.t(context: context),
@@ -271,40 +308,32 @@ class DriftSearchPage extends HookConsumerWidget {
); );
if (date == null) { if (date == null) {
filter.value = filter.value.copyWith(date: SearchDateFilter()); datePicked(null);
dateRangeCurrentFilterWidget.value = null;
unawaited(search());
return;
}
filter.value = filter.value.copyWith(
date: SearchDateFilter(
takenAfter: date.start,
takenBefore: date.end.add(const Duration(hours: 23, minutes: 59, seconds: 59)),
),
);
// If date range is less than 24 hours, set the end date to the end of the day
if (date.end.difference(date.start).inHours < 24) {
dateRangeCurrentFilterWidget.value = Text(
DateFormat.yMMMd().format(date.start.toLocal()),
style: context.textTheme.labelLarge,
);
} else { } else {
dateRangeCurrentFilterWidget.value = Text( datePicked(CustomDateFilter.fromRange(date));
'search_filter_date_interval'.t( }
context: context, }
args: {
"start": DateFormat.yMMMd().format(date.start.toLocal()), showQuickDatePicker() {
"end": DateFormat.yMMMd().format(date.end.toLocal()), showFilterBottomSheet(
context: context,
child: FilterBottomSheetScaffold(
title: "pick_date_range".tr(),
expanded: true,
onClear: () => datePicked(null),
child: QuickDatePicker(
currentInput: dateInputFilter.value,
onRequestPicker: () {
context.pop();
showDatePicker();
},
onSelect: (date) {
context.pop();
datePicked(date);
}, },
), ),
style: context.textTheme.labelLarge, ),
); );
}
unawaited(search());
} }
// MEDIA PICKER // MEDIA PICKER
@@ -589,7 +618,7 @@ class DriftSearchPage extends HookConsumerWidget {
), ),
SearchFilterChip( SearchFilterChip(
icon: Icons.date_range_outlined, icon: Icons.date_range_outlined,
onTap: showDatePicker, onTap: showQuickDatePicker,
label: 'search_filter_date'.t(context: context), label: 'search_filter_date'.t(context: context),
currentFilter: dateRangeCurrentFilterWidget.value, currentFilter: dateRangeCurrentFilterWidget.value,
), ),

View File

@@ -0,0 +1,191 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/base_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/unarchive_action_button.widget.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/presentation/widgets/album/album_selector.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/archive_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/move_to_lock_folder_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
enum AddToMenuItem { album, archive, unarchive, lockedFolder }
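/// Bottom bar action button that opens an "Add to" menu for the current asset:
/// add it to an album, archive/unarchive it, or move it to the locked folder.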
class AddActionButton extends ConsumerWidget {
const AddActionButton({super.key});
Future<void> _showAddOptions(BuildContext context, WidgetRef ref) async {
final asset = ref.read(currentAssetNotifier);
if (asset == null) return;
final user = ref.read(currentUserProvider);
final isOwner = asset is RemoteAsset && asset.ownerId == user?.id;
final isInLockedView = ref.watch(inLockedViewProvider);
final isArchived = asset is RemoteAsset && asset.visibility == AssetVisibility.archive;
final hasRemote = asset is RemoteAsset;
final showArchive = isOwner && !isInLockedView && hasRemote && !isArchived;
final showUnarchive = isOwner && !isInLockedView && hasRemote && isArchived;
final menuItemHeight = 30.0;
final List<PopupMenuEntry<AddToMenuItem>> items = [
PopupMenuItem(
enabled: false,
textStyle: context.textTheme.labelMedium,
height: 40,
child: Text("add_to_bottom_bar".tr()),
),
PopupMenuItem(
height: menuItemHeight,
value: AddToMenuItem.album,
child: ListTile(leading: const Icon(Icons.photo_album_outlined), title: Text("album".tr())),
),
const PopupMenuDivider(),
PopupMenuItem(enabled: false, textStyle: context.textTheme.labelMedium, height: 40, child: Text("move_to".tr())),
if (isOwner) ...[
if (showArchive)
PopupMenuItem(
height: menuItemHeight,
value: AddToMenuItem.archive,
child: ListTile(leading: const Icon(Icons.archive_outlined), title: Text("archive".tr())),
),
if (showUnarchive)
PopupMenuItem(
height: menuItemHeight,
value: AddToMenuItem.unarchive,
child: ListTile(leading: const Icon(Icons.unarchive_outlined), title: Text("unarchive".tr())),
),
PopupMenuItem(
height: menuItemHeight,
value: AddToMenuItem.lockedFolder,
child: ListTile(leading: const Icon(Icons.lock_outline), title: Text("locked_folder".tr())),
),
],
];
final AddToMenuItem? selected = await showMenu<AddToMenuItem>(
context: context,
color: context.themeData.scaffoldBackgroundColor,
position: _menuPosition(context),
items: items,
);
if (selected == null) {
return;
}
switch (selected) {
case AddToMenuItem.album:
_openAlbumSelector(context, ref);
break;
case AddToMenuItem.archive:
await performArchiveAction(context, ref, source: ActionSource.viewer);
break;
case AddToMenuItem.unarchive:
await performUnArchiveAction(context, ref, source: ActionSource.viewer);
break;
case AddToMenuItem.lockedFolder:
await performMoveToLockFolderAction(context, ref, source: ActionSource.viewer);
break;
}
}
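// Computes the anchor rect for the popup menu: directly above the action button,
// extending up to the button height plus a fixed 200 px above it.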
RelativeRect _menuPosition(BuildContext context) {
final renderObject = context.findRenderObject();
if (renderObject is! RenderBox) {
return RelativeRect.fill;
}
final size = renderObject.size;
final position = renderObject.localToGlobal(Offset.zero);
return RelativeRect.fromLTRB(position.dx, position.dy - size.height - 200, position.dx + size.width, position.dy);
}
void _openAlbumSelector(BuildContext context, WidgetRef ref) {
final currentAsset = ref.read(currentAssetNotifier);
if (currentAsset == null) {
ImmichToast.show(context: context, msg: "Cannot load asset information.", toastType: ToastType.error);
return;
}
final List<Widget> slivers = [
AlbumSelector(onAlbumSelected: (album) => _addCurrentAssetToAlbum(context, ref, album)),
];
showModalBottomSheet(
context: context,
isScrollControlled: true,
backgroundColor: Colors.transparent,
builder: (_) {
return BaseBottomSheet(
actions: const [],
slivers: slivers,
initialChildSize: 0.6,
minChildSize: 0.3,
maxChildSize: 0.95,
expand: false,
backgroundColor: context.isDarkTheme ? Colors.black : Colors.white,
);
},
);
}
Future<void> _addCurrentAssetToAlbum(BuildContext context, WidgetRef ref, RemoteAlbum album) async {
final latest = ref.read(currentAssetNotifier);
if (latest == null) {
ImmichToast.show(context: context, msg: "Cannot load asset information.", toastType: ToastType.error);
return;
}
final addedCount = await ref.read(remoteAlbumProvider.notifier).addAssets(album.id, [latest.remoteId!]);
if (!context.mounted) {
return;
}
if (addedCount == 0) {
ImmichToast.show(
context: context,
msg: 'add_to_album_bottom_sheet_already_exists'.tr(namedArgs: {'album': album.name}),
);
} else {
ImmichToast.show(
context: context,
msg: 'add_to_album_bottom_sheet_added'.tr(namedArgs: {'album': album.name}),
);
}
if (!context.mounted) {
return;
}
await Navigator.of(context).maybePop();
}
@override
Widget build(BuildContext context, WidgetRef ref) {
final asset = ref.watch(currentAssetNotifier);
if (asset == null) {
return const SizedBox.shrink();
}
return Builder(
builder: (buttonContext) {
return BaseActionButton(
iconData: Icons.add,
label: "add_to_bottom_bar".tr(),
onPressed: () => _showAddOptions(buttonContext, ref),
);
},
);
}
}

View File

@@ -10,33 +10,36 @@ import 'package:immich_mobile/providers/infrastructure/action.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart'; import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart'; import 'package:immich_mobile/widgets/common/immich_toast.dart';
// Allows the archive action to be performed from different sources without duplicating code.
Future<void> performArchiveAction(BuildContext context, WidgetRef ref, {required ActionSource source}) async {
if (!context.mounted) return;
final result = await ref.read(actionProvider.notifier).archive(source);
ref.read(multiSelectProvider.notifier).reset();
if (source == ActionSource.viewer) {
EventStream.shared.emit(const ViewerReloadAssetEvent());
}
final successMessage = 'archive_action_prompt'.t(context: context, args: {'count': result.count.toString()});
if (context.mounted) {
ImmichToast.show(
context: context,
msg: result.success ? successMessage : 'scaffold_body_error_occurred'.t(context: context),
gravity: ToastGravity.BOTTOM,
toastType: result.success ? ToastType.success : ToastType.error,
);
}
}
class ArchiveActionButton extends ConsumerWidget { class ArchiveActionButton extends ConsumerWidget {
final ActionSource source; final ActionSource source;
const ArchiveActionButton({super.key, required this.source}); const ArchiveActionButton({super.key, required this.source});
void _onTap(BuildContext context, WidgetRef ref) async { Future<void> _onTap(BuildContext context, WidgetRef ref) async {
if (!context.mounted) { await performArchiveAction(context, ref, source: source);
return;
}
final result = await ref.read(actionProvider.notifier).archive(source);
ref.read(multiSelectProvider.notifier).reset();
if (source == ActionSource.viewer) {
EventStream.shared.emit(const ViewerReloadAssetEvent());
}
final successMessage = 'archive_action_prompt'.t(context: context, args: {'count': result.count.toString()});
if (context.mounted) {
ImmichToast.show(
context: context,
msg: result.success ? successMessage : 'scaffold_body_error_occurred'.t(context: context),
gravity: ToastGravity.BOTTOM,
toastType: result.success ? ToastType.success : ToastType.error,
);
}
} }
@override @override

View File

@@ -10,36 +10,39 @@ import 'package:immich_mobile/providers/infrastructure/action.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart'; import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart'; import 'package:immich_mobile/widgets/common/immich_toast.dart';
// Reusable helper: moves assets to the locked folder from any source (e.g. called from a menu).
Future<void> performMoveToLockFolderAction(BuildContext context, WidgetRef ref, {required ActionSource source}) async {
if (!context.mounted) return;
final result = await ref.read(actionProvider.notifier).moveToLockFolder(source);
ref.read(multiSelectProvider.notifier).reset();
if (source == ActionSource.viewer) {
EventStream.shared.emit(const ViewerReloadAssetEvent());
}
final successMessage = 'move_to_lock_folder_action_prompt'.t(
context: context,
args: {'count': result.count.toString()},
);
if (context.mounted) {
ImmichToast.show(
context: context,
msg: result.success ? successMessage : 'scaffold_body_error_occurred'.t(context: context),
gravity: ToastGravity.BOTTOM,
toastType: result.success ? ToastType.success : ToastType.error,
);
}
}
class MoveToLockFolderActionButton extends ConsumerWidget { class MoveToLockFolderActionButton extends ConsumerWidget {
final ActionSource source; final ActionSource source;
const MoveToLockFolderActionButton({super.key, required this.source}); const MoveToLockFolderActionButton({super.key, required this.source});
void _onTap(BuildContext context, WidgetRef ref) async { Future<void> _onTap(BuildContext context, WidgetRef ref) async {
if (!context.mounted) { await performMoveToLockFolderAction(context, ref, source: source);
return;
}
final result = await ref.read(actionProvider.notifier).moveToLockFolder(source);
ref.read(multiSelectProvider.notifier).reset();
if (source == ActionSource.viewer) {
EventStream.shared.emit(const ViewerReloadAssetEvent());
}
final successMessage = 'move_to_lock_folder_action_prompt'.t(
context: context,
args: {'count': result.count.toString()},
);
if (context.mounted) {
ImmichToast.show(
context: context,
msg: result.success ? successMessage : 'scaffold_body_error_occurred'.t(context: context),
gravity: ToastGravity.BOTTOM,
toastType: result.success ? ToastType.success : ToastType.error,
);
}
} }
@override @override

View File

@@ -1,3 +1,5 @@
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:fluttertoast/fluttertoast.dart'; import 'package:fluttertoast/fluttertoast.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart';
@@ -7,30 +9,39 @@ import 'package:immich_mobile/presentation/widgets/action_buttons/base_action_bu
import 'package:immich_mobile/providers/infrastructure/action.provider.dart'; import 'package:immich_mobile/providers/infrastructure/action.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart'; import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart'; import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/domain/utils/event_stream.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
// Allows the unarchive action to be performed from different sources without duplicating code.
Future<void> performUnArchiveAction(BuildContext context, WidgetRef ref, {required ActionSource source}) async {
if (!context.mounted) return;
final result = await ref.read(actionProvider.notifier).unArchive(source);
ref.read(multiSelectProvider.notifier).reset();
if (source == ActionSource.viewer) {
EventStream.shared.emit(const ViewerReloadAssetEvent());
}
final successMessage = 'unarchive_action_prompt'.t(context: context, args: {'count': result.count.toString()});
if (context.mounted) {
ImmichToast.show(
context: context,
msg: result.success ? successMessage : 'scaffold_body_error_occurred'.t(context: context),
gravity: ToastGravity.BOTTOM,
toastType: result.success ? ToastType.success : ToastType.error,
);
}
}
class UnArchiveActionButton extends ConsumerWidget { class UnArchiveActionButton extends ConsumerWidget {
final ActionSource source; final ActionSource source;
const UnArchiveActionButton({super.key, required this.source}); const UnArchiveActionButton({super.key, required this.source});
void _onTap(BuildContext context, WidgetRef ref) async { Future<void> _onTap(BuildContext context, WidgetRef ref) async {
if (!context.mounted) { await performUnArchiveAction(context, ref, source: source);
return;
}
final result = await ref.read(actionProvider.notifier).unArchive(source);
ref.read(multiSelectProvider.notifier).reset();
final successMessage = 'unarchive_action_prompt'.t(context: context, args: {'count': result.count.toString()});
if (context.mounted) {
ImmichToast.show(
context: context,
msg: result.success ? successMessage : 'scaffold_body_error_occurred'.t(context: context),
gravity: ToastGravity.BOTTOM,
toastType: result.success ? ToastType.success : ToastType.error,
);
}
} }
@override @override

View File

@@ -3,14 +3,12 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart'; import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart'; import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart'; import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/widgets/activities/comment_bubble.dart';
import 'package:immich_mobile/presentation/widgets/album/drift_activity_text_field.dart'; import 'package:immich_mobile/presentation/widgets/album/drift_activity_text_field.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart'; import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
import 'package:immich_mobile/providers/activity.provider.dart'; import 'package:immich_mobile/providers/activity.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart'; import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart'; import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/widgets/activities/activity_tile.dart';
import 'package:immich_mobile/widgets/activities/dismissible_activity.dart';
class ActivitiesBottomSheet extends HookConsumerWidget { class ActivitiesBottomSheet extends HookConsumerWidget {
final DraggableScrollableController controller; final DraggableScrollableController controller;
@@ -28,7 +26,6 @@ class ActivitiesBottomSheet extends HookConsumerWidget {
Widget build(BuildContext context, WidgetRef ref) { Widget build(BuildContext context, WidgetRef ref) {
final album = ref.watch(currentRemoteAlbumProvider)!; final album = ref.watch(currentRemoteAlbumProvider)!;
final asset = ref.watch(currentAssetNotifier) as RemoteAsset?; final asset = ref.watch(currentAssetNotifier) as RemoteAsset?;
final user = ref.watch(currentUserProvider);
final activityNotifier = ref.read(albumActivityProvider(album.id, asset?.id).notifier); final activityNotifier = ref.read(albumActivityProvider(album.id, asset?.id).notifier);
final activities = ref.watch(albumActivityProvider(album.id, asset?.id)); final activities = ref.watch(albumActivityProvider(album.id, asset?.id));
@@ -47,16 +44,9 @@ class ActivitiesBottomSheet extends HookConsumerWidget {
return const SizedBox.shrink(); return const SizedBox.shrink();
} }
final activity = data[data.length - 1 - index]; final activity = data[data.length - 1 - index];
final canDelete = activity.user.id == user?.id || album.ownerId == user?.id;
return Padding( return Padding(
padding: const EdgeInsets.symmetric(vertical: 1), padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
child: DismissibleActivity( child: CommentBubble(activity: activity, isAssetActivity: true),
activity.id,
ActivityTile(activity, isBottomSheet: true),
onDismiss: canDelete
? (activityId) async => await activityNotifier.removeActivity(activity.id)
: null,
),
); );
}, childCount: data.length + 1), }, childCount: data.length + 1),
); );

View File

@@ -627,10 +627,10 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
// Rebuild the widget when the asset viewer state changes // Rebuild the widget when the asset viewer state changes
// Using multiple selectors to avoid unnecessary rebuilds for other state changes // Using multiple selectors to avoid unnecessary rebuilds for other state changes
ref.watch(assetViewerProvider.select((s) => s.showingBottomSheet)); ref.watch(assetViewerProvider.select((s) => s.showingBottomSheet));
ref.watch(assetViewerProvider.select((s) => s.showingControls));
ref.watch(assetViewerProvider.select((s) => s.backgroundOpacity)); ref.watch(assetViewerProvider.select((s) => s.backgroundOpacity));
ref.watch(assetViewerProvider.select((s) => s.stackIndex)); ref.watch(assetViewerProvider.select((s) => s.stackIndex));
ref.watch(isPlayingMotionVideoProvider); ref.watch(isPlayingMotionVideoProvider);
final showingControls = ref.watch(assetViewerProvider.select((s) => s.showingControls));
// Listen for casting changes and send initial asset to the cast provider // Listen for casting changes and send initial asset to the cast provider
ref.listen(castProvider.select((value) => value.isCasting), (_, isCasting) async { ref.listen(castProvider.select((value) => value.isCasting), (_, isCasting) async {
@@ -663,7 +663,14 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
appBar: const ViewerTopAppBar(), appBar: const ViewerTopAppBar(),
extendBody: true, extendBody: true,
extendBodyBehindAppBar: true, extendBodyBehindAppBar: true,
floatingActionButton: const DownloadStatusFloatingButton(), floatingActionButton: IgnorePointer(
ignoring: !showingControls,
child: AnimatedOpacity(
opacity: showingControls ? 1.0 : 0.0,
duration: Durations.short2,
child: const DownloadStatusFloatingButton(),
),
),
body: Stack( body: Stack(
children: [ children: [
PhotoViewGallery.builder( PhotoViewGallery.builder(

View File

@@ -127,13 +127,18 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
if (exifInfo == null) { if (exifInfo == null) {
return null; return null;
} }
final fNumber = exifInfo.fNumber.isNotEmpty ? 'ƒ/${exifInfo.fNumber}' : null;
final exposureTime = exifInfo.exposureTime.isNotEmpty ? exifInfo.exposureTime : null; final exposureTime = exifInfo.exposureTime.isNotEmpty ? exifInfo.exposureTime : null;
final focalLength = exifInfo.focalLength.isNotEmpty ? '${exifInfo.focalLength} mm' : null;
final iso = exifInfo.iso != null ? 'ISO ${exifInfo.iso}' : null; final iso = exifInfo.iso != null ? 'ISO ${exifInfo.iso}' : null;
return [exposureTime, iso].where((spec) => spec != null && spec.isNotEmpty).join(_kSeparator);
}
return [fNumber, exposureTime, focalLength, iso].where((spec) => spec != null && spec.isNotEmpty).join(_kSeparator); String? _getLensInfoSubtitle(ExifInfo? exifInfo) {
if (exifInfo == null) {
return null;
}
final fNumber = exifInfo.fNumber.isNotEmpty ? 'ƒ/${exifInfo.fNumber}' : null;
final focalLength = exifInfo.focalLength.isNotEmpty ? '${exifInfo.focalLength} mm' : null;
return [fNumber, focalLength].where((spec) => spec != null && spec.isNotEmpty).join(_kSeparator);
} }
Future<void> _editDateTime(BuildContext context, WidgetRef ref) async { Future<void> _editDateTime(BuildContext context, WidgetRef ref) async {
@@ -141,20 +146,20 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
} }
Widget _buildAppearsInList(WidgetRef ref, BuildContext context) { Widget _buildAppearsInList(WidgetRef ref, BuildContext context) {
final aseet = ref.watch(currentAssetNotifier); final asset = ref.watch(currentAssetNotifier);
if (aseet == null) { if (asset == null) {
return const SizedBox.shrink(); return const SizedBox.shrink();
} }
if (!aseet.hasRemote) { if (!asset.hasRemote) {
return const SizedBox.shrink(); return const SizedBox.shrink();
} }
String? remoteAssetId; String? remoteAssetId;
if (aseet is RemoteAsset) { if (asset is RemoteAsset) {
remoteAssetId = aseet.id; remoteAssetId = asset.id;
} else if (aseet is LocalAsset) { } else if (asset is LocalAsset) {
remoteAssetId = aseet.remoteAssetId; remoteAssetId = asset.remoteAssetId;
} }
if (remoteAssetId == null) { if (remoteAssetId == null) {
@@ -217,6 +222,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
final exifInfo = ref.watch(currentAssetExifProvider).valueOrNull; final exifInfo = ref.watch(currentAssetExifProvider).valueOrNull;
final cameraTitle = _getCameraInfoTitle(exifInfo); final cameraTitle = _getCameraInfoTitle(exifInfo);
final lensTitle = exifInfo?.lens != null && exifInfo!.lens!.isNotEmpty ? exifInfo.lens : null;
final isOwner = ref.watch(currentUserProvider)?.id == (asset is RemoteAsset ? asset.ownerId : null); final isOwner = ref.watch(currentUserProvider)?.id == (asset is RemoteAsset ? asset.ownerId : null);
// Build file info tile based on asset type // Build file info tile based on asset type
@@ -287,12 +293,23 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
_SheetTile( _SheetTile(
title: cameraTitle, title: cameraTitle,
titleStyle: context.textTheme.labelLarge, titleStyle: context.textTheme.labelLarge,
leading: Icon(Icons.camera_outlined, size: 24, color: context.textTheme.labelLarge?.color), leading: Icon(Icons.camera_alt_outlined, size: 24, color: context.textTheme.labelLarge?.color),
subtitle: _getCameraInfoSubtitle(exifInfo), subtitle: _getCameraInfoSubtitle(exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith( subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155), color: context.textTheme.bodyMedium?.color?.withAlpha(155),
), ),
), ),
// Lens info
if (lensTitle != null)
_SheetTile(
title: lensTitle,
titleStyle: context.textTheme.labelLarge,
leading: Icon(Icons.camera_outlined, size: 24, color: context.textTheme.labelLarge?.color),
subtitle: _getLensInfoSubtitle(exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
),
// Appears in (Albums) // Appears in (Albums)
_buildAppearsInList(ref, context), _buildAppearsInList(ref, context),
// padding at the bottom to avoid cut-off // padding at the bottom to avoid cut-off

View File

@@ -86,13 +86,9 @@ class _BaseDraggableScrollableSheetState extends ConsumerState<BaseBottomSheet>
SliverToBoxAdapter( SliverToBoxAdapter(
child: Column( child: Column(
children: [ children: [
SizedBox( SingleChildScrollView(
height: 115, scrollDirection: Axis.horizontal,
child: ListView( child: Row(crossAxisAlignment: CrossAxisAlignment.start, children: widget.actions),
shrinkWrap: true,
scrollDirection: Axis.horizontal,
children: widget.actions,
),
), ),
const Divider(indent: 16, endIndent: 16), const Divider(indent: 16, endIndent: 16),
const SizedBox(height: 16), const SizedBox(height: 16),

View File

@@ -0,0 +1,208 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
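/// A date filter chosen in the quick date picker: a relative month range, a whole
/// year, or a custom range, convertible to a [DateTimeRange] and a readable label.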
sealed class DateFilterInputModel {
DateTimeRange<DateTime> asDateTimeRange();
String asHumanReadable(BuildContext context) {
// General implementation for arbitrary date ranges.
// If the range spans less than 24 hours, show a single date instead of an interval.
final date = asDateTimeRange();
if (date.end.difference(date.start).inHours < 24) {
return DateFormat.yMMMd().format(date.start.toLocal());
} else {
return 'search_filter_date_interval'.t(
context: context,
args: {
"start": DateFormat.yMMMd().format(date.start.toLocal()),
"end": DateFormat.yMMMd().format(date.end.toLocal()),
},
);
}
}
}
class RecentMonthRangeFilter extends DateFilterInputModel {
final int monthDelta;
RecentMonthRangeFilter(this.monthDelta);
@override
DateTimeRange<DateTime> asDateTimeRange() {
final now = DateTime.now();
// Note that DateTime's constructor handles month underflow: month 0 or a negative month rolls into the previous year.
final from = DateTime(now.year, now.month - monthDelta, 1);
return DateTimeRange<DateTime>(start: from, end: now);
}
@override
String asHumanReadable(BuildContext context) {
return 'last_months'.t(context: context, args: {"count": monthDelta.toString()});
}
}
class YearFilter extends DateFilterInputModel {
final int year;
YearFilter(this.year);
@override
DateTimeRange<DateTime> asDateTimeRange() {
final now = DateTime.now();
final from = DateTime(year, 1, 1);
if (now.year == year) {
// Do not go beyond today if the user picks the current year.
return DateTimeRange<DateTime>(start: from, end: now);
}
final to = DateTime(year, 12, 31, 23, 59, 59);
return DateTimeRange<DateTime>(start: from, end: to);
}
@override
String asHumanReadable(BuildContext context) {
return 'in_year'.tr(namedArgs: {"year": year.toString()});
}
}
class CustomDateFilter extends DateFilterInputModel {
final DateTime start;
final DateTime end;
CustomDateFilter(this.start, this.end);
factory CustomDateFilter.fromRange(DateTimeRange<DateTime> range) {
return CustomDateFilter(range.start, range.end);
}
@override
DateTimeRange<DateTime> asDateTimeRange() {
return DateTimeRange<DateTime>(start: start, end: end);
}
}
enum _QuickPickerType { last1Month, last3Months, last9Months, year, custom }
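/// Quick date picker shown in the search filter bottom sheet: offers "last N months"
/// presets, a year dropdown, and access to the full date range picker.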
class QuickDatePicker extends HookWidget {
QuickDatePicker({super.key, required this.currentInput, required this.onSelect, required this.onRequestPicker})
: _selection = _selectionFromModel(currentInput),
_initialYear = _initialYearFromModel(currentInput);
final Function() onRequestPicker;
final Function(DateFilterInputModel range) onSelect;
final DateFilterInputModel? currentInput;
final _QuickPickerType? _selection;
final int _initialYear;
// Generate a list of recent years from 2000 to the current year (including the current one)
final List<int> _recentYears = List.generate(1 + DateTime.now().year - 2000, (index) {
return index + 2000;
});
static int _initialYearFromModel(DateFilterInputModel? model) {
return model?.asDateTimeRange().start.year ?? DateTime.now().year;
}
static _QuickPickerType? _selectionFromModel(DateFilterInputModel? model) {
if (model is RecentMonthRangeFilter) {
return switch (model.monthDelta) {
1 => _QuickPickerType.last1Month,
3 => _QuickPickerType.last3Months,
9 => _QuickPickerType.last9Months,
_ => _QuickPickerType.custom,
};
} else if (model is YearFilter) {
return _QuickPickerType.year;
} else if (model is CustomDateFilter) {
return _QuickPickerType.custom;
}
return null;
}
Text _monthLabel(BuildContext context, int monthsFromNow) =>
const Text('last_months').t(context: context, args: {"count": monthsFromNow.toString()});
Widget _yearPicker(BuildContext context) {
final size = MediaQuery.of(context).size;
return Row(
children: [
const Text("in_year_selector").tr(),
const SizedBox(width: 15),
Expanded(
child: DropdownMenu(
initialSelection: _initialYear,
menuStyle: MenuStyle(maximumSize: WidgetStateProperty.all(Size(size.width, size.height * 0.5))),
dropdownMenuEntries: _recentYears.map((e) => DropdownMenuEntry(value: e, label: e.toString())).toList(),
onSelected: (year) {
if (year == null) return;
onSelect(YearFilter(year));
},
),
),
],
);
}
// The exact date picker should always be selectable: even when it is already toggled,
// it should reopen the full date picker. RadioListTile does not do that by default,
// so we wrap it in an InkWell.
Widget _exactPicker(BuildContext context) {
final hasPreviousInput = currentInput != null && currentInput is CustomDateFilter;
return InkWell(
onTap: onRequestPicker,
child: IgnorePointer(
ignoring: true,
child: RadioListTile(
title: const Text('pick_custom_range').tr(),
subtitle: hasPreviousInput ? Text(currentInput!.asHumanReadable(context)) : null,
secondary: hasPreviousInput ? const Icon(Icons.edit) : null,
value: _QuickPickerType.custom,
toggleable: true,
),
),
);
}
@override
Widget build(BuildContext context) {
return Padding(
padding: EdgeInsets.only(bottom: MediaQuery.of(context).viewInsets.bottom),
child: Scrollbar(
// Depending on the screen size the last option might get cut off.
// Show the scrollbar as a clear visual cue that there are more options to scroll to.
// When the screen is large enough the scrollbar is hidden automatically.
trackVisibility: true,
thumbVisibility: true,
child: SingleChildScrollView(
child: RadioGroup(
onChanged: (value) {
if (value == null) return;
final _ = switch (value) {
_QuickPickerType.custom => onRequestPicker(),
_QuickPickerType.last1Month => onSelect(RecentMonthRangeFilter(1)),
_QuickPickerType.last3Months => onSelect(RecentMonthRangeFilter(3)),
_QuickPickerType.last9Months => onSelect(RecentMonthRangeFilter(9)),
// When a year is selected from the dropdown, it triggers onSelect() on its own.
// Here we only handle the radio button itself being selected, which can only ever be the initial year.
_QuickPickerType.year => onSelect(YearFilter(_initialYear)),
};
},
groupValue: _selection,
child: Column(
children: [
RadioListTile(title: _monthLabel(context, 1), value: _QuickPickerType.last1Month, toggleable: true),
RadioListTile(title: _monthLabel(context, 3), value: _QuickPickerType.last3Months, toggleable: true),
RadioListTile(title: _monthLabel(context, 9), value: _QuickPickerType.last9Months, toggleable: true),
RadioListTile(title: _yearPicker(context), value: _QuickPickerType.year, toggleable: true),
_exactPicker(context),
],
),
),
),
),
);
}
}

View File

@@ -1,3 +1,4 @@
import 'package:collection/collection.dart';
import 'package:immich_mobile/models/activities/activity.model.dart'; import 'package:immich_mobile/models/activities/activity.model.dart';
import 'package:immich_mobile/providers/activity_service.provider.dart'; import 'package:immich_mobile/providers/activity_service.provider.dart';
import 'package:immich_mobile/providers/activity_statistics.provider.dart'; import 'package:immich_mobile/providers/activity_statistics.provider.dart';
@@ -16,13 +17,20 @@ class AlbumActivity extends _$AlbumActivity {
Future<void> removeActivity(String id) async { Future<void> removeActivity(String id) async {
if (await ref.watch(activityServiceProvider).removeActivity(id)) { if (await ref.watch(activityServiceProvider).removeActivity(id)) {
final activities = state.valueOrNull ?? []; final removedActivity = _removeFromState(id);
final removedActivity = activities.firstWhere((a) => a.id == id); if (removedActivity == null) {
activities.remove(removedActivity); return;
state = AsyncData(activities); }
// Decrement activity count only for comments
if (assetId != null) {
ref.read(albumActivityProvider(albumId).notifier)._removeFromState(id);
}
if (removedActivity.type == ActivityType.comment) { if (removedActivity.type == ActivityType.comment) {
ref.watch(activityStatisticsProvider(albumId, assetId).notifier).removeActivity(); ref.watch(activityStatisticsProvider(albumId, assetId).notifier).removeActivity();
if (assetId != null) {
ref.watch(activityStatisticsProvider(albumId).notifier).removeActivity();
}
} }
} }
} }
@@ -30,8 +38,10 @@ class AlbumActivity extends _$AlbumActivity {
Future<void> addLike() async { Future<void> addLike() async {
final activity = await ref.watch(activityServiceProvider).addActivity(albumId, ActivityType.like, assetId: assetId); final activity = await ref.watch(activityServiceProvider).addActivity(albumId, ActivityType.like, assetId: assetId);
if (activity.hasValue) { if (activity.hasValue) {
final activities = state.asData?.value ?? []; _addToState(activity.requireValue);
state = AsyncData([...activities, activity.requireValue]); if (assetId != null) {
ref.read(albumActivityProvider(albumId).notifier)._addToState(activity.requireValue);
}
} }
} }
@@ -41,8 +51,10 @@ class AlbumActivity extends _$AlbumActivity {
.addActivity(albumId, ActivityType.comment, assetId: assetId, comment: comment); .addActivity(albumId, ActivityType.comment, assetId: assetId, comment: comment);
if (activity.hasValue) { if (activity.hasValue) {
final activities = state.valueOrNull ?? []; _addToState(activity.requireValue);
state = AsyncData([...activities, activity.requireValue]); if (assetId != null) {
ref.read(albumActivityProvider(albumId).notifier)._addToState(activity.requireValue);
}
ref.watch(activityStatisticsProvider(albumId, assetId).notifier).addActivity(); ref.watch(activityStatisticsProvider(albumId, assetId).notifier).addActivity();
// The previous addActivity call would increase the count of an asset if assetId != null // The previous addActivity call would increase the count of an asset if assetId != null
// To also increase the activity count of the album, calling it once again with assetId set to null // To also increase the activity count of the album, calling it once again with assetId set to null
@@ -51,6 +63,29 @@ class AlbumActivity extends _$AlbumActivity {
} }
} }
} }
void _addToState(Activity activity) {
final activities = state.valueOrNull ?? [];
if (activities.any((a) => a.id == activity.id)) {
return;
}
state = AsyncData([...activities, activity]);
}
Activity? _removeFromState(String id) {
final activities = state.valueOrNull;
if (activities == null) {
return null;
}
final activity = activities.firstWhereOrNull((a) => a.id == id);
if (activity == null) {
return null;
}
final updated = [...activities]..remove(activity);
state = AsyncData(updated);
return activity;
}
} }
/// Mock class for testing /// Mock class for testing

View File

@@ -6,7 +6,7 @@ part of 'activity.provider.dart';
// RiverpodGenerator // RiverpodGenerator
// ************************************************************************** // **************************************************************************
String _$albumActivityHash() => r'3b0d7acee4d41c84b3f220784c3b904c83f836e6'; String _$albumActivityHash() => r'154e8ae98da3efc142369eae46d4005468fd67da';
/// Copied from Dart SDK /// Copied from Dart SDK
class _SystemHash { class _SystemHash {

View File

@@ -2,6 +2,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/services/asset.service.dart'; import 'package:immich_mobile/domain/services/asset.service.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/remote_asset.repository.dart'; import 'package:immich_mobile/infrastructure/repositories/remote_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart'; import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart'; import 'package:immich_mobile/providers/user.provider.dart';
@@ -13,6 +14,10 @@ final remoteAssetRepositoryProvider = Provider<RemoteAssetRepository>(
(ref) => RemoteAssetRepository(ref.watch(driftProvider)), (ref) => RemoteAssetRepository(ref.watch(driftProvider)),
); );
final trashedLocalAssetRepository = Provider<DriftTrashedLocalAssetRepository>(
(ref) => DriftTrashedLocalAssetRepository(ref.watch(driftProvider)),
);
final assetServiceProvider = Provider( final assetServiceProvider = Provider(
(ref) => AssetService( (ref) => AssetService(
remoteAssetRepository: ref.watch(remoteAssetRepositoryProvider), remoteAssetRepository: ref.watch(remoteAssetRepositoryProvider),

View File

@@ -10,11 +10,17 @@ import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
 import 'package:immich_mobile/providers/infrastructure/cancel.provider.dart';
 import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
 import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
+import 'package:immich_mobile/providers/infrastructure/storage.provider.dart';
+import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
 final syncStreamServiceProvider = Provider(
   (ref) => SyncStreamService(
     syncApiRepository: ref.watch(syncApiRepositoryProvider),
     syncStreamRepository: ref.watch(syncStreamRepositoryProvider),
+    localAssetRepository: ref.watch(localAssetRepository),
+    trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepository),
+    localFilesManager: ref.watch(localFilesManagerRepositoryProvider),
+    storageRepository: ref.watch(storageRepositoryProvider),
     cancelChecker: ref.watch(cancellationProvider),
   ),
 );
@@ -26,6 +32,9 @@ final syncStreamRepositoryProvider = Provider((ref) => SyncStreamRepository(ref.
 final localSyncServiceProvider = Provider(
   (ref) => LocalSyncService(
     localAlbumRepository: ref.watch(localAlbumRepository),
+    trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepository),
+    localFilesManager: ref.watch(localFilesManagerRepositoryProvider),
+    storageRepository: ref.watch(storageRepositoryProvider),
     nativeSyncApi: ref.watch(nativeSyncApiProvider),
   ),
 );
@@ -35,5 +44,6 @@ final hashServiceProvider = Provider(
     localAlbumRepository: ref.watch(localAlbumRepository),
     localAssetRepository: ref.watch(localAssetRepository),
     nativeSyncApi: ref.watch(nativeSyncApiProvider),
+    trashedLocalAssetRepository: ref.watch(trashedLocalAssetRepository),
   ),
 );

View File

@@ -0,0 +1,12 @@
+import 'package:async/async.dart';
+import 'package:hooks_riverpod/hooks_riverpod.dart';
+import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
+
+typedef TrashedAssetsCount = ({int total, int hashed});
+
+final trashedAssetsCountProvider = StreamProvider<TrashedAssetsCount>((ref) {
+  final repo = ref.watch(trashedLocalAssetRepository);
+  final total$ = repo.watchCount();
+  final hashed$ = repo.watchHashedCount();
+  return StreamZip<int>([total$, hashed$]).map((values) => (total: values[0], hashed: values[1]));
+});
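
For context, a minimal sketch of how a widget could consume the new trashedAssetsCountProvider; the widget name and layout below are hypothetical and not part of this diff:

import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';

// Hypothetical consumer widget; assumes trashedAssetsCountProvider is
// importable from the provider file added above.
class TrashedAssetsCountLabel extends ConsumerWidget {
  const TrashedAssetsCountLabel({super.key});

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final count = ref.watch(trashedAssetsCountProvider);
    return count.when(
      data: (value) => Text('${value.hashed}/${value.total} trashed assets hashed'),
      loading: () => const SizedBox.shrink(),
      error: (_, __) => const SizedBox.shrink(),
    );
  }
}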

View File

@@ -67,7 +67,7 @@ class ServerInfoNotifier extends StateNotifier<ServerInfo> {
       return;
     }
-    if (clientVersion < serverVersion) {
+    if (clientVersion < serverVersion && clientVersion.differenceType(serverVersion) != SemVerType.patch) {
       state = state.copyWith(versionStatus: VersionStatus.clientOutOfDate);
       return;
     }
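
The added differenceType check means a patch-level version gap no longer marks the client as out of date. A minimal sketch of the rule, using plain (major, minor, patch) records instead of the app's own version and SemVerType types (names and signature below are illustrative, not from this diff):

// Illustrative only: approximates the condition above without the app's types.
bool shouldFlagClientOutOfDate((int, int, int) client, (int, int, int) server) {
  // A pure patch-level gap is ignored; only major or minor gaps flag the client.
  if (client.$1 != server.$1) return client.$1 < server.$1;
  return client.$2 < server.$2;
}

// shouldFlagClientOutOfDate((1, 122, 1), (1, 122, 3)) == false  (patch gap only)
// shouldFlagClientOutOfDate((1, 122, 3), (1, 123, 0)) == true   (minor gap)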

View File

@@ -1,6 +1,6 @@
 import 'package:hooks_riverpod/hooks_riverpod.dart';
-import 'package:immich_mobile/services/trash.service.dart';
 import 'package:immich_mobile/entities/asset.entity.dart';
+import 'package:immich_mobile/services/trash.service.dart';
 import 'package:logging/logging.dart';
 class TrashNotifier extends StateNotifier<bool> {

View File

@@ -89,9 +89,16 @@ class AssetMediaRepository {
       return null;
     }
-    // titleAsync gets the correct original filename for some assets on iOS
-    // otherwise using the `entity.title` would return a random GUID
-    return await entity.titleAsync;
+    try {
+      // titleAsync gets the correct original filename for some assets on iOS
+      // otherwise using the `entity.title` would return a random GUID
+      final originalFilename = await entity.titleAsync;
+      // treat empty filename as missing
+      return originalFilename.isNotEmpty ? originalFilename : null;
+    } catch (e) {
+      _log.warning("Failed to get original filename for asset: $id. Error: $e");
+      return null;
+    }
   }
   // TODO: make this more efficient

View File

@@ -8,7 +8,7 @@ import 'package:openapi/api.dart';
 final folderApiRepositoryProvider = Provider((ref) => FolderApiRepository(ref.watch(apiServiceProvider).viewApi));
 class FolderApiRepository extends ApiRepository {
-  final ViewApi _api;
+  final ViewsApi _api;
   final Logger _log = Logger("FolderApiRepository");
   FolderApiRepository(this._api);

View File

@@ -1,13 +1,16 @@
 import 'package:hooks_riverpod/hooks_riverpod.dart';
+import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
 import 'package:immich_mobile/services/local_files_manager.service.dart';
+import 'package:logging/logging.dart';
 final localFilesManagerRepositoryProvider = Provider(
   (ref) => LocalFilesManagerRepository(ref.watch(localFileManagerServiceProvider)),
 );
 class LocalFilesManagerRepository {
-  const LocalFilesManagerRepository(this._service);
+  LocalFilesManagerRepository(this._service);
+  final Logger _logger = Logger('SyncStreamService');
   final LocalFilesManagerService _service;
   Future<bool> moveToTrash(List<String> mediaUrls) async {
@@ -21,4 +24,26 @@ class LocalFilesManagerRepository {
   Future<bool> requestManageMediaPermission() async {
     return await _service.requestManageMediaPermission();
   }
+  Future<bool> hasManageMediaPermission() async {
+    return await _service.hasManageMediaPermission();
+  }
+  Future<bool> manageMediaPermission() async {
+    return await _service.manageMediaPermission();
+  }
+  Future<List<String>> restoreAssetsFromTrash(Iterable<LocalAsset> assets) async {
+    final restoredIds = <String>[];
+    for (final asset in assets) {
+      _logger.info("Restoring from trash, localId: ${asset.id}, remoteId: ${asset.checksum}");
+      try {
+        await _service.restoreFromTrashById(asset.id, asset.type.index);
+        restoredIds.add(asset.id);
+      } catch (e) {
+        _logger.warning("Restoring failure: $e");
+      }
+    }
+    return restoredIds;
+  }
 }
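
A hypothetical call site for the new restoreAssetsFromTrash helper; the function name, logger name, and the trashedAssets argument are made up for the example, and only the repository API itself comes from this diff:

import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:logging/logging.dart';

// Illustrative only: restores a batch of trashed assets and logs how many
// the platform actually restored.
Future<void> restoreTrashedAssets(Ref ref, Iterable<LocalAsset> trashedAssets) async {
  final repo = ref.read(localFilesManagerRepositoryProvider);
  final restoredIds = await repo.restoreAssetsFromTrash(trashedAssets);
  Logger('TrashRestore').info('Restored ${restoredIds.length} of ${trashedAssets.length} assets from local trash');
}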

View File

@@ -313,6 +313,7 @@ class AppRouter extends RootStackRouter {
       settings: page,
       pageBuilder: (_, __, ___) => child,
       opaque: false,
+      transitionsBuilder: TransitionsBuilders.fadeIn,
     ),
   ),
 ),

Some files were not shown because too many files have changed in this diff.