Mirror of https://github.com/immich-app/immich.git, synced 2025-12-09 17:23:13 +03:00

Compare commits: 1 commit on chore/requ...pnpm

| Author | SHA1 | Date |
|---|---|---|
|  | 345e14921c |  |

@@ -4,7 +4,6 @@
design/
docker/
Dockerfile
!docker/scripts
docs/
!docs/package.json
@@ -20,7 +19,6 @@ mobile/
cli/coverage/
cli/dist/
cli/node_modules/
cli/Dockerfile
open-api/typescript-sdk/build/
open-api/typescript-sdk/node_modules/
@@ -31,11 +29,9 @@ server/upload/
server/src/queries
server/dist/
server/www/
server/Dockerfile
web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env
web/Dockerfile

.github/.nvmrc (2 changed lines, vendored)
@@ -1 +1 @@
22.17.0
22.16.0

.github/package-lock.json (6 changed lines, generated, vendored)
@@ -9,9 +9,9 @@
}
},
"node_modules/prettier": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.1.tgz",
"integrity": "sha512-5xGWRa90Sp2+x1dQtNpIpeOQpTDBs9cZDmA/qs2vDNN2i18PdapqY7CmBeyLlMuGqXJRIOPaCaVZTLNQRWUH/A==",
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {

.github/workflows/build-mobile.yml (54 changed lines, vendored)
@@ -58,7 +58,7 @@ jobs:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: mich
runs-on: macos-14
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -66,40 +66,24 @@ jobs:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: 'zulu'
java-version: '17'
- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: build-mobile-gradle-${{ runner.os }}-main
cache: 'gradle'
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
cache: true
- name: Setup Android SDK
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
with:
packages: ''
- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: echo $KEY_JKS | base64 -d > android/key.jks
- name: Get Packages
working-directory: ./mobile
@@ -119,30 +103,12 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
run: |
if [[ $IS_MAIN == 'true' ]]; then
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
else
flutter build apk --debug --split-per-abi --target-platform android-arm64
fi
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4
if: github.ref == 'refs/heads/main'
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}

.github/workflows/check-team-approval.yml (82 changed lines, vendored)
@@ -1,82 +0,0 @@
name: Check Team Approval
on:
workflow_call:
jobs:
check-approval:
runs-on: ubuntu-latest
permissions:
pull-requests: read
contents: read
steps:
- name: Check for team/admin review
id: check-review
uses: actions/github-script@v7
with:
script: |
const { owner, repo } = context.repo;
const prNumber = context.payload.pull_request.number;
console.log(`Checking reviews for PR #${prNumber}`);
try {
// Fetch the users.json file from immich-app/devtools repository
const { data: usersFile } = await github.rest.repos.getContent({
owner: 'immich-app',
repo: 'devtools',
path: 'tf/deployment/data/users.json'
});
const usersData = JSON.parse(Buffer.from(usersFile.content, 'base64').toString());
console.log(`Loaded ${usersData.length} users from devtools repo`);
// Create a map of GitHub IDs to user roles for efficient lookup
const userRoles = new Map();
for (const user of usersData) {
if (user.github && user.github.id && (user.role === 'team' || user.role === 'admin')) {
userRoles.set(user.github.id, {
username: user.github.username,
role: user.role
});
}
}
console.log(`Found ${userRoles.size} team/admin users`);
// Get all reviews for the pull request
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: prNumber
});
console.log(`Found ${reviews.length} reviews`);
// Check if any review is from a team/admin member
let hasValidReview = false;
for (const review of reviews) {
console.log(`Review by ${review.user.login} (ID: ${review.user.id}): state=${review.state}`);
// Check if the reviewer is a team/admin member and the review is approved
const userInfo = userRoles.get(review.user.id);
if (userInfo && review.state === 'APPROVED') {
console.log(`✅ Found approved review from ${userInfo.role} member: ${review.user.login}`);
hasValidReview = true;
break;
}
}
if (!hasValidReview) {
console.log('❌ No approved review from team/admin member found');
core.setFailed('This pull request requires an approved review from a team or admin member');
} else {
console.log('✅ Required team/admin member review found');
}
} catch (error) {
console.error('Error checking reviews:', error);
core.setFailed(`Failed to check reviews: ${error.message}`);
}

.github/workflows/cli.yml (19 changed lines, vendored)
@@ -33,21 +33,22 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
run: pnpm install --frozen-lockfile
- name: Build SDK
run: npm run build --prefix ../open-api/typescript-sdk/
- run: npm ci
- run: npm run build
- run: npm publish
run: pnpm --dir ../open-api/typescript-sdk/ build
- run: pnpm build
- run: pnpm publish
if: ${{ github.event_name == 'release' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -70,7 +71,7 @@ jobs:
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
@@ -99,7 +100,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Build and push image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64

.github/workflows/codeql-analysis.yml (6 changed lines, vendored)
@@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
category: '/language:${{matrix.language}}'

.github/workflows/docker.yml (4 changed lines, vendored)
@@ -177,7 +177,7 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
with:
needs: ${{ toJSON(needs) }}
@@ -188,6 +188,6 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
with:
needs: ${{ toJSON(needs) }}

.github/workflows/fix-format.yml (6 changed lines, vendored)
@@ -28,12 +28,14 @@ jobs:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Fix formatting
run: make install-all && make format-all

.github/workflows/pr-label-validation.yml (2 changed lines, vendored)
@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
with:
mode: exactly
count: 1

.github/workflows/prepare-release.yml (2 changed lines, vendored)
@@ -100,7 +100,7 @@ jobs:
name: release-apk-signed
- name: Create draft release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}

.github/workflows/required-reviewers.yml (11 changed lines, vendored)
@@ -1,11 +0,0 @@
name: Required Reviewers Check
on:
pull_request_review:
jobs:
check-member-review:
uses: ./.github/workflows/check-team-approval.yml
permissions:
pull-requests: read
contents: read

.github/workflows/sdk.yml (12 changed lines, vendored)
@@ -20,18 +20,20 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Install deps
run: npm ci
run: pnpm install --frozen-lockfile
- name: Build
run: npm run build
run: pnpm build
- name: Publish
run: npm publish
run: pnpm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

.github/workflows/static_analysis.yml (6 changed lines, vendored)
@@ -49,7 +49,7 @@ jobs:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -109,7 +109,7 @@ jobs:
working-directory: ./mobile
- name: Run DCM
run: dcm analyze lib --fatal-style --fatal-warnings
run: dcm analyze lib
working-directory: ./mobile
zizmor:
@@ -134,7 +134,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
sarif_file: results.sarif
category: zizmor

.github/workflows/test.yml (196 changed lines, vendored)
@@ -80,30 +80,32 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run npm install
run: npm ci
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: npm test
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests:
@@ -123,34 +125,36 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: npm ci
run: pnpm install --frozen-lockfile
- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests-win:
@@ -170,27 +174,29 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: npm ci
run: pnpm install --frozen-lockfile
# Skip linter & formatter in Windows test.
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
web-lint:
@@ -210,30 +216,32 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run npm install
run: npm ci
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run linter
run: npm run lint:p
run: pnpm lint:p
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: npm run check:svelte
run: pnpm check:svelte
if: ${{ !cancelled() }}
web-unit-tests:
@@ -253,26 +261,28 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run npm install
run: npm ci
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run tsc
run: npm run check:typescript
run: pnpm run check:typescript
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: pnpm run test
if: ${{ !cancelled() }}
i18n-tests:
@@ -288,18 +298,20 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Install dependencies
run: npm --prefix=web ci
run: pnpm install --frozen-lockfile
- name: Format
run: npm --prefix=web run format:i18n
run: pnpm --dir=web format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -334,32 +346,34 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
server-medium-tests:
@@ -379,18 +393,20 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run npm install
run: npm ci
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run medium tests
run: npm run test:medium
run: pnpm test:medium
if: ${{ !cancelled() }}
e2e-tests-server-cli:
@@ -414,25 +430,27 @@ jobs:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Run setup cli
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./cli
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Docker build
@@ -440,7 +458,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
e2e-tests-web:
@@ -464,20 +482,22 @@ jobs:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
@@ -499,7 +519,7 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
with:
needs: ${{ toJSON(needs) }}
@@ -516,7 +536,7 @@ jobs:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -584,18 +604,20 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Run npm install
run: npm ci
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
shellcheck:
@@ -609,7 +631,7 @@ jobs:
persist-credentials: false
- name: Run ShellCheck
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
uses: ludeeus/action-shellcheck@master
with:
ignore_paths: >-
**/open-api/**
@@ -627,18 +649,20 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Install server dependencies
run: npm --prefix=server ci
run: pnpm install --frozen-lockfile
- name: Build the app
run: npm --prefix=server run build
run: pnpm --dir=server build
- name: Run API generation
run: make open-api
@@ -668,7 +692,7 @@ jobs:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:1f5583fe3397210a0fbc7f11b0cec18bacc4a99e3e8ea0548e9bd6bcf26ec37a
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
@@ -690,28 +714,30 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
cache: 'pnpm'
- name: Install server dependencies
run: npm ci
run: pnpm install --frozen-lockfile
- name: Build the app
run: npm run build
run: pnpm build
- name: Run existing migrations
run: npm run migrations:run
run: pnpm migrations:run
- name: Test npm run schema:reset command works
run: npm run schema:reset
- name: Test pnpm schema:reset command works
run: pnpm schema:reset
- name: Generate new migrations
continue-on-error: true
run: npm run migrations:generate src/TestMigration
run: pnpm migrations:generate src/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -730,7 +756,7 @@ jobs:
exit 1
- name: Run SQL generation
run: npm run sync:sql
run: pnpm sync:sql
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich

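The same npm-to-pnpm conversion recurs in every job of this workflow. Pulled out of context, the recurring setup sequence on the pnpm side looks roughly like this (a sketch assembled from the steps shown above, not a verbatim excerpt of any single job):

```yaml
# Recurring pattern from the jobs above (illustrative assembly, not a complete job)
steps:
  - name: Setup pnpm
    uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

  - name: Setup Node
    uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
    with:
      node-version-file: './server/.nvmrc'
      cache: 'pnpm'

  - name: Run pnpm install
    run: pnpm install --frozen-lockfile
```

Note that `pnpm/action-setup` runs before `actions/setup-node` so that the `cache: 'pnpm'` option can locate the pnpm store.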
.github/workflows/weblate-lock.yml (2 changed lines, vendored)
@@ -52,6 +52,6 @@ jobs:
permissions: {}
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
with:
needs: ${{ toJSON(needs) }}

Makefile (58 changed lines)
@@ -1,33 +1,27 @@
dev:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down
dev-down:
docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans
dev-update:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
dev-scale:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
.PHONY: e2e
e2e:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-down:
docker compose -f ./e2e/docker-compose.yml down --remove-orphans
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
prod:
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
prod-scale:
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
.PHONY: open-api
open-api:
@@ -40,7 +34,7 @@ open-api-typescript:
cd ./open-api && bash ./bin/generate-open-api.sh typescript
sql:
npm --prefix server run sync:sql
pnpm --dir server run sync:sql
attach-server:
docker exec -it docker_immich-server_1 sh
@@ -51,30 +45,30 @@ renovate:
MODULES = e2e server web cli sdk docs .github
audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
pnpm --dir $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
pnpm --dir $(subst sdk,open-api/typescript-sdk,$*) i
ci-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) ci
pnpm --dir $(subst sdk,open-api/typescript-sdk,$*) install --frozen-lockfile
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
pnpm --dir $(subst sdk,open-api/typescript-sdk,$*) build
format-%:
npm --prefix $* run format:fix
pnpm --dir $* format:fix
lint-%:
npm --prefix $* run lint:fix
pnpm --dir $* lint:fix
check-%:
npm --prefix $* run check
pnpm --dir $* check
check-web:
npm --prefix web run check:typescript
npm --prefix web run check:svelte
pnpm --dir web check:typescript
pnpm --dir web check:svelte
test-%:
npm --prefix $* run test
pnpm --dir $* test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
npm --prefix e2e run test
npm --prefix e2e run test:web
pnpm --dir e2e test
pnpm --dir e2e test:web
test-medium:
docker run \
--rm \
@@ -84,12 +78,12 @@ test-medium:
-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
-e NODE_ENV=development \
immich-server:latest \
-c "npm ci && npm run test:medium -- --run"
-c "pnpm install --frozen-lockfile && pnpm test:medium -- --run"
test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium"
docker exec -it immich_server /bin/sh -c "pnpm test:medium"
build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
install-all: $(foreach M,$(filter-out .github,$(MODULES)),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
@@ -99,11 +93,9 @@ hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
clean:
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true
setup-dev: install-server install-sdk build-sdk install-web
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true

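The `@trap '…' EXIT` recipes in this Makefile exist so that the compose stack is torn down whenever the foreground `docker compose up` finishes, replacing the older `… || make dev-down` form, which only ran the cleanup when `up` exited with an error. A minimal sketch of the pattern with hypothetical target and file names:

```make
# Illustrative sketch (hypothetical names): the whole recipe runs in one shell, so the
# EXIT trap fires when that shell finishes -- whether `up` returned normally, failed,
# or was interrupted -- and always runs the matching `down` target.
demo:
	@trap 'make demo-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/demo.yml up --remove-orphans

demo-down:
	docker compose -f ./docker/demo.yml down --remove-orphans
```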
@@ -9,7 +9,6 @@ upload/**
.prettierignore
.prettierrc
Dockerfile
package-lock.json
tsconfig.json
vite.config.ts
vitest.config.ts

@@ -1 +1 @@
22.17.0
22.16.0

@@ -2,17 +2,17 @@ FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
RUN npm ci
RUN pnpm install --frozen-lockfile
COPY open-api/typescript-sdk/ ./
RUN npm run build
RUN pnpm build
WORKDIR /usr/src/app
COPY cli/package.json cli/package-lock.json ./
RUN npm ci
COPY cli/package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
COPY cli .
RUN npm run build
RUN pnpm build
WORKDIR /import

@@ -6,8 +6,8 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma
Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:
$ npm install
$ npm run build
$ pnpm install
$ pnpm build
Then, to build the open-api client run the following in the open-api folder:
@@ -15,8 +15,8 @@ Then, to build the open-api client run the following in the open-api folder:
To run the Immich CLI from source, run the following in the cli folder:
$ npm install
$ npm run build
$ pnpm install
$ pnpm build
$ ts-node .
You'll need ts-node, the easiest way to install it is to use npm:
@@ -25,6 +25,6 @@ You'll need ts-node, the easiest way to install it is to use npm:
You can also build and install the CLI using
$ npm run build
$ npm install -g .
$ pnpm build
$ pnpm install -g .

@@ -1,2 +0,0 @@
#!/usr/bin/env node
import '../dist/index.js';

cli/package-lock.json (4617 changed lines, generated): file diff suppressed because it is too large.

@@ -5,7 +5,7 @@
"type": "module",
"exports": "./dist/index.js",
"bin": {
"immich": "./bin/immich"
"immich": "dist/index.js"
},
"license": "GNU Affero General Public License version 3",
"keywords": [
@@ -15,13 +15,13 @@
"devDependencies": {
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.8.0",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@immich/sdk": "workspace:^",
"@types/byte-size": "^8.1.0",
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.15.33",
"@types/node": "^22.15.32",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
@@ -69,6 +69,6 @@
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.17.0"
"node": "22.16.0"
}
}

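`workspace:^` is pnpm's workspace protocol: the dependency is linked from the local monorepo during development and rewritten to a normal semver range when the package is published. It presumes a `pnpm-workspace.yaml` at the repository root along the lines of the sketch below (the package list is illustrative and not taken from this commit):

```yaml
# pnpm-workspace.yaml -- illustrative sketch, not part of this diff
packages:
  - 'cli'
  - 'e2e'
  - 'open-api/typescript-sdk'
  - 'server'
  - 'web'
```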
@@ -134,7 +134,6 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# immich-prometheus:
# container_name: immich_prometheus
@@ -75,7 +75,6 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -83,7 +82,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:7a34573f0b9c952286b33d537f233cd5b708e12263733aa646e50c33f598f16c
image: prom/prometheus@sha256:9abc6cf6aea7710d163dbb28d8eeb7dc5baef01e38fa4cd146a406dd9f07f70d
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -67,7 +67,6 @@ services:
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb
restart: always
volumes:

@@ -1 +1 @@
22.17.0
22.16.0

@@ -490,7 +490,7 @@ You can also scan the Postgres database file structure for errors:
<details>
<summary>Scan for file structure errors</summary>
```bash
docker exec -it immich_postgres pg_amcheck --username=<DB_USERNAME> --heapallindexed --parent-check --rootdescend --progress --all --install-missing
docker exec -it immich_postgres pg_amcheck --username=postgres --heapallindexed --parent-check --rootdescend --progress --all --install-missing
```
A normal result will end something like this and return with an exit code of `0`:

@@ -57,7 +57,7 @@ Then please follow the steps in the following section for restoring the database
<TabItem value="Linux system" label="Linux system" default>
```bash title='Backup'
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
```
```bash title='Restore'
@@ -79,7 +79,7 @@ docker compose up -d # Start remainder of Immich apps
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
```powershell title='Backup'
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres))
```
```powershell title='Restore'

@@ -150,10 +150,12 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:**
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.
@@ -199,6 +201,7 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.

@@ -20,6 +20,7 @@ Immich supports 3rd party authentication via [OpenID Connect][oidc] (OIDC), an i
Before enabling OAuth in Immich, a new client application needs to be configured in the 3rd-party authentication server. While the specifics of this setup vary from provider to provider, the general approach should be the same.
1. Create a new (Client) Application
1. The **Provider** type should be `OpenID Connect` or `OAuth2`
2. The **Client type** should be `Confidential`
3. The **Application** type should be `Web`
@@ -28,6 +29,7 @@ Before enabling OAuth in Immich, a new client application needs to be configured
2. Configure Redirect URIs/Origins
The **Sign-in redirect URIs** should include:
- `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
- `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
- `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client
@@ -35,17 +37,21 @@ Before enabling OAuth in Immich, a new client application needs to be configured
Redirect URIs should contain all the domains you will be using to access Immich. Some examples include:
Mobile
- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)
Localhost
- `http://localhost:2283/auth/login`
- `http://localhost:2283/user-settings`
Local IP
- `http://192.168.0.200:2283/auth/login`
- `http://192.168.0.200:2283/user-settings`
Hostname
- `https://immich.example.com/auth/login`
- `https://immich.example.com/user-settings`

@@ -199,11 +199,13 @@ To use your SSH key for commit signing, see the [GitHub guide on SSH commit sign
When the Dev Container starts, it automatically:
1. **Runs post-create script** (`container-server-post-create.sh`):
- Adjusts file permissions for the `node` user
- Installs dependencies: `npm install` in all packages
- Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`
2. **Starts development servers** via VS Code tasks:
- `Immich API Server (Nest)` - API server with hot-reloading on port 2283
- `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
- Both servers watch for file changes and recompile automatically
@@ -333,12 +335,14 @@ make install-all # Install all dependencies
The Dev Container is pre-configured for debugging:
1. **API Server Debugging**:
- Set breakpoints in VS Code
- Press `F5` or use "Run and Debug" panel
- Select "Attach to Server" configuration
- Debug port: 9231
2. **Worker Debugging**:
- Use "Attach to Workers" configuration
- Debug port: 9230

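The "Attach to Server" and "Attach to Workers" entries mentioned above are standard VS Code Node.js attach configurations. A sketch of what such an entry typically looks like for the API server debug port is shown below; the actual `launch.json` shipped with the repository may differ:

```json
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Attach to Server",
      "type": "node",
      "request": "attach",
      "port": 9231,
      "restart": true
    }
  ]
}
```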
@@ -424,6 +428,7 @@ While the Dev Container focuses on server and web development, you can connect m
```
2. **Configure mobile app**:
- Server URL: `http://YOUR_IP:2283/api`
- Ensure firewall allows port 2283

@@ -52,9 +52,9 @@ REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"
### Local
# Backup Immich database
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
# For deduplicating backup programs such as Borg or Restic, compressing the content can increase backup size by making it harder to deduplicate. If you are using a different program or still prefer to compress, you can use the following command instead:
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
### Append to local Borg repository
borg create "$BACKUP_PATH/immich-borg::{now}" "$UPLOAD_LOCATION" --exclude "$UPLOAD_LOCATION"/thumbs/ --exclude "$UPLOAD_LOCATION"/encoded-video/

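A script like the one excerpted above is usually run unattended; one common way is a root crontab entry (the script path below is hypothetical, not part of this diff):

```bash
# Illustrative crontab entry (hypothetical path): run the backup script nightly at 02:00
0 2 * * * /usr/local/bin/immich-backup.sh >> /var/log/immich-backup.log 2>&1
```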
@@ -75,6 +75,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
5. Click "**Save Changes**", you will be prompted to edit stack UI labels, just leave this blank and click "**Ok**"
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:
- `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place the **absolute** location here > For example my _"images"_ share has a folder within it called _"immich"_. If I browse to this directory in the terminal and type `pwd` the output is `/mnt/user/images/immich`. This is the exact value I need to enter as my `UPLOAD_LOCATION`
- `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Do also create the `postgresql` folder, by running `mkdir /mnt/user/{share_location}/postgresql/data`. If left at default it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder which it doesn't have permissions to, resulting in this container continuously restarting.

docs/package-lock.json (1531 changed lines, generated): file diff suppressed because it is too large.

@@ -16,9 +16,8 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "~3.8.0",
"@docusaurus/preset-classic": "~3.8.0",
"@docusaurus/theme-common": "~3.8.0",
"@docusaurus/core": "~3.7.0",
"@docusaurus/preset-classic": "~3.7.0",
"@mdi/js": "^7.3.67",
"@mdi/react": "^1.6.1",
"@mdx-js/react": "^3.0.0",
@@ -27,7 +26,6 @@
"clsx": "^2.0.0",
"docusaurus-lunr-search": "^3.3.2",
"docusaurus-preset-openapi": "^0.7.5",
"lunr": "^2.3.9",
"postcss": "^8.4.25",
"prism-react-renderer": "^2.3.1",
"raw-loader": "^4.0.2",
@@ -37,7 +35,7 @@
"url": "^0.11.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "~3.8.0",
"@docusaurus/module-type-aliases": "~3.7.0",
"@docusaurus/tsconfig": "^3.7.0",
"@docusaurus/types": "^3.7.0",
"prettier": "^3.2.4",
@@ -59,6 +57,6 @@
"node": ">=20"
},
"volta": {
"node": "22.17.0"
"node": "22.16.0"
}
}

@@ -58,12 +58,6 @@ const guides: CommunityGuidesProps[] = [
description: 'Access Immich with an end-to-end encrypted connection.',
url: 'https://meshnet.nordvpn.com/how-to/remote-files-media-access/immich-remote-access',
},
{
title: 'Trust Self Signed Certificates with Immich - OAuth Setup',
description:
'Set up Certificate Authority trust with Immich, and your private OAuth2/OpenID service, while using a private CA for HTTPS commication.',
url: 'https://github.com/immich-app/immich/discussions/18614',
},
];
function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {

@@ -1 +1 @@
22.17.0
22.16.0

e2e/package-lock.json (7469 changed lines, generated): file diff suppressed because it is too large.

@@ -21,11 +21,11 @@
"devDependencies": {
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.8.0",
"@immich/cli": "file:../cli",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@immich/cli": "workspace:^",
"@immich/sdk": "workspace:^",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^22.15.33",
"@types/node": "^22.15.32",
"@types/oidc-provider": "^9.0.0",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",
@@ -44,7 +44,6 @@
"pngjs": "^7.0.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"sharp": "^0.34.0",
"socket.io-client": "^4.7.4",
"supertest": "^7.0.0",
"typescript": "^5.3.3",
@@ -53,6 +52,6 @@
"vitest": "^3.0.0"
},
"volta": {
"node": "22.17.0"
"node": "22.16.0"
}
}

@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { readFile, writeFile } from 'node:fs/promises';
import { basename, join } from 'node:path';
import sharp from 'sharp';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
// Generate unique color to ensure different checksums for each image
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
// Create a 100x100 solid color JPEG using Sharp
const imageBytes = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
// Add random suffix to filename to avoid collisions
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
const filepath = join(tempDir, uniqueFilename);
await writeFile(filepath, imageBytes);
// Filter out undefined values before writing EXIF
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
await exiftool.write(filepath, cleanExifData);
// Re-read the image bytes after EXIF has been written
const finalImageBytes = await readFile(filepath);
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
};
describe('/asset', () => {
let admin: LoginResponseDto;
let websocket: Socket;
@@ -1225,411 +1190,6 @@ describe('/asset', () => {
});
});
describe('EXIF metadata extraction', () => {
describe('Additional date tag extraction', () => {
describe('Date-time vs time-only tag handling', () => {
it('should fall back to file timestamps when only time-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
// Exclude all date-time tags to force fallback to file timestamps
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
SonyDateTime2: undefined,
GPSDateStamp: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should prefer DateTimeOriginal over time-only tags', async () => {
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal, not TimeCreated
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
describe('GPSDateTime tag extraction', () => {
it('should extract GPSDateTime with GPS coordinates', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: 37.7749,
GPSLongitude: -122.4194,
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T12:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CreateDate tag extraction', () => {
|
||||
it('should extract CreateDate when available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
|
||||
CreateDate: '2023:11:15 10:30:00',
|
||||
// Exclude other higher priority date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T10:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateStamp tag extraction', () => {
|
||||
it('should fall back to file timestamps when only date-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
|
||||
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
|
||||
GPSLatitude: 51.5074,
|
||||
GPSLongitude: -0.1278,
|
||||
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
|
||||
DateTimeOriginal: undefined,
|
||||
CreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/*
|
||||
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
|
||||
*
|
||||
* NOT WRITABLE to JPEG:
|
||||
* - MediaCreateDate: Can be read from video files but not written to JPEG
|
||||
* - DateTimeCreated: Read-only tag in JPEG format
|
||||
* - DateTimeUTC: Cannot be written to JPEG files
|
||||
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
|
||||
* - SubSecMediaCreateDate: Tag not defined for JPEG format
|
||||
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
|
||||
*
|
||||
* WRITABLE but NOT READABLE from JPEG:
|
||||
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
|
||||
* - SubSecCreateDate: Can be written but not read back from JPEG
|
||||
*
|
||||
* EFFECTIVELY TESTABLE TAGS (writable and readable):
|
||||
* - DateTimeOriginal ✓
|
||||
* - CreateDate ✓
|
||||
* - CreationDate ✓
|
||||
* - GPSDateTime ✓
|
||||
*
|
||||
* The metadata service correctly handles non-readable tags and will fall back to
|
||||
* file timestamps when only non-readable tags are present.
|
||||
*/
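// A minimal sketch of the fallback the note above describes, illustrative only:
// walk a priority-ordered list of date-time tags, skip missing or unparsable
// values, and fall back to the file timestamp. The tag list is limited to the
// four tags this spec can exercise; the real metadata service reads a longer
// EXIF_DATE_TAGS list via exiftool and handles timezones, which is glossed
// over here by treating values as UTC.
const pickDateTimeOriginal = (tags: Record<string, string | undefined>, fileCreatedAt: Date): Date => {
  const priority = ['DateTimeOriginal', 'CreateDate', 'CreationDate', 'GPSDateTime'];
  for (const tag of priority) {
    const value = tags[tag];
    if (!value) {
      continue;
    }
    // EXIF dates look like "YYYY:MM:DD HH:mm:ss"; rewrite the date part to ISO.
    const iso = value.replace(/^(\d{4}):(\d{2}):(\d{2})/, '$1-$2-$3').replace(' ', 'T');
    const parsed = new Date(iso.endsWith('Z') ? iso : `${iso}Z`);
    // Invalid values (e.g. a month of 13) parse to NaN and are skipped.
    if (!Number.isNaN(parsed.getTime())) {
      return parsed;
    }
  }
  // No usable date-time tag: fall back to the file timestamp, as the tests assert.
  return fileCreatedAt;
};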
|
||||
|
||||
describe('Date tag priority order', () => {
|
||||
it('should respect the complete date tag priority order', async () => {
|
||||
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
|
||||
const testCases = [
|
||||
{
|
||||
name: 'DateTimeOriginal has highest priority among testable tags',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-04-04T04:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreateDate when DateTimeOriginal missing',
|
||||
exifData: {
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-05-05T05:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when standard EXIF tags missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-07-07T07:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'GPSDateTime when no other testable date tags present',
|
||||
exifData: {
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
Make: 'SONY',
|
||||
},
|
||||
expectedDate: '2023-10-10T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const { imageBytes, filename } = await createTestImageWithExif(
|
||||
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
|
||||
testCase.exifData,
|
||||
);
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
|
||||
expect(
|
||||
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
|
||||
`Date mismatch for: ${testCase.name}`,
|
||||
).toBe(new Date(testCase.expectedDate).getTime());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases for date tag handling', () => {
|
||||
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
|
||||
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Intentionally no GPSTimeStamp
|
||||
// Exclude all other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle all testable date tags present to verify complete priority order', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
|
||||
// All TESTABLE date tags to JPEG format (writable AND readable)
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
// Note: Excluded non-testable tags:
|
||||
// SubSec tags: writable but not readable from JPEG
|
||||
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
|
||||
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal as it has the highest priority among testable tags
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-04-04T04:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use CreationDate when SubSec tags are missing', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
|
||||
CreationDate: '2023:07:07 07:00:00', // WRITABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
|
||||
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
|
||||
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
|
||||
// Exclude SubSec and standard EXIF tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use CreationDate when available
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-07-07T07:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip invalid date formats and use next valid tag', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
|
||||
// Note: Testing invalid date handling with only WRITABLE tags
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
|
||||
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid date (month 13), should be skipped
|
||||
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should skip invalid dates and use the first valid one (GPSDateTime)
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets/exist', () => {
|
||||
it('ignores invalid deviceAssetIds', async () => {
|
||||
const response = await utils.checkExistingAssets(user1.accessToken, {
|
||||
|
||||
146 e2e/src/api/specs/auth.e2e-spec.ts Normal file
@@ -0,0 +1,146 @@
|
||||
import { LoginResponseDto, login, signUpAdmin } from '@immich/sdk';
|
||||
import { loginDto, signupDto } from 'src/fixtures';
|
||||
import { errorDto, loginResponseDto, signupResponseDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
const { email, password } = signupDto.admin;
|
||||
|
||||
describe(`/auth/admin-sign-up`, () => {
|
||||
beforeEach(async () => {
|
||||
await utils.resetDatabase();
|
||||
});
|
||||
|
||||
describe('POST /auth/admin-sign-up', () => {
|
||||
it(`should sign up the admin`, async () => {
|
||||
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(signupResponseDto.admin);
|
||||
});
|
||||
|
||||
it('should not allow a second admin to sign up', async () => {
|
||||
await signUpAdmin({ signUpDto: signupDto.admin });
|
||||
|
||||
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.alreadyHasAdmin);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('/auth/*', () => {
|
||||
let admin: LoginResponseDto;
|
||||
|
||||
beforeEach(async () => {
|
||||
await utils.resetDatabase();
|
||||
await signUpAdmin({ signUpDto: signupDto.admin });
|
||||
admin = await login({ loginCredentialDto: loginDto.admin });
|
||||
});
|
||||
|
||||
describe(`POST /auth/login`, () => {
|
||||
it('should reject an incorrect password', async () => {
|
||||
const { status, body } = await request(app).post('/auth/login').send({ email, password: 'incorrect' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.incorrectLogin);
|
||||
});
|
||||
|
||||
it('should accept a correct password', async () => {
|
||||
const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(loginResponseDto.admin);
|
||||
|
||||
const token = body.accessToken;
|
||||
expect(token).toBeDefined();
|
||||
|
||||
const cookies = headers['set-cookie'];
|
||||
expect(cookies).toHaveLength(3);
|
||||
expect(cookies[0].split(';').map((item) => item.trim())).toEqual([
|
||||
`immich_access_token=${token}`,
|
||||
'Max-Age=34560000',
|
||||
'Path=/',
|
||||
expect.stringContaining('Expires='),
|
||||
'HttpOnly',
|
||||
'SameSite=Lax',
|
||||
]);
|
||||
expect(cookies[1].split(';').map((item) => item.trim())).toEqual([
|
||||
'immich_auth_type=password',
|
||||
'Max-Age=34560000',
|
||||
'Path=/',
|
||||
expect.stringContaining('Expires='),
|
||||
'HttpOnly',
|
||||
'SameSite=Lax',
|
||||
]);
|
||||
expect(cookies[2].split(';').map((item) => item.trim())).toEqual([
|
||||
'immich_is_authenticated=true',
|
||||
'Max-Age=34560000',
|
||||
'Path=/',
|
||||
expect.stringContaining('Expires='),
|
||||
'SameSite=Lax',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /auth/validateToken', () => {
|
||||
it('should reject an invalid token', async () => {
|
||||
const { status, body } = await request(app).post(`/auth/validateToken`).set('Authorization', 'Bearer 123');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.invalidToken);
|
||||
});
|
||||
|
||||
it('should accept a valid token', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/validateToken`)
|
||||
.send({})
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({ authStatus: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /auth/change-password', () => {
|
||||
it('should require the current password', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/change-password`)
|
||||
.send({ password: 'wrong-password', newPassword: 'Password1234' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.wrongPassword);
|
||||
});
|
||||
|
||||
it('should change the password', async () => {
|
||||
const { status } = await request(app)
|
||||
.post(`/auth/change-password`)
|
||||
.send({ password, newPassword: 'Password1234' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
|
||||
await login({
|
||||
loginCredentialDto: {
|
||||
email: 'admin@immich.cloud',
|
||||
password: 'Password1234',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /auth/logout', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post(`/auth/logout`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should logout the user', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/logout`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
successful: true,
|
||||
redirectUri: '/auth/login?autoLaunch=0',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -6,7 +6,7 @@ import {
|
||||
createMemory,
|
||||
getMemory,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
@@ -17,6 +17,7 @@ describe('/memories', () => {
|
||||
let user: LoginResponseDto;
|
||||
let adminAsset: AssetMediaResponseDto;
|
||||
let userAsset1: AssetMediaResponseDto;
|
||||
let userAsset2: AssetMediaResponseDto;
|
||||
let userMemory: MemoryResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
@@ -24,9 +25,10 @@ describe('/memories', () => {
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
[adminAsset, userAsset1] = await Promise.all([
|
||||
[adminAsset, userAsset1, userAsset2] = await Promise.all([
|
||||
utils.createAsset(admin.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
]);
|
||||
userMemory = await createMemory(
|
||||
{
|
||||
@@ -41,7 +43,121 @@ describe('/memories', () => {
|
||||
);
|
||||
});
|
||||
|
||||
describe('GET /memories', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/memories');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /memories', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/memories');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should validate data when type is on this day', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: {},
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
});
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(
|
||||
errorDto.badRequest(['data.year must be a positive number', 'data.year must be an integer number']),
|
||||
);
|
||||
});
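// A minimal sketch of DTO constraints that would produce the two messages
// asserted above, in class-validator style. The class name and exact shape are
// assumptions (the real server-side DTO is not part of this diff), and the
// "data." prefix in the messages would come from nested validation of the
// parent create-memory DTO.
import { IsInt, IsPositive } from 'class-validator';

class OnThisDayDataSketch {
  @IsPositive()
  @IsInt()
  year!: number;
}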
|
||||
|
||||
it('should create a new memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
isSaved: false,
|
||||
memoryAt: expect.any(String),
|
||||
ownerId: user.userId,
|
||||
assets: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a new memory (with assets)', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
assetIds: [userAsset1.id, userAsset2.id],
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
id: expect.any(String),
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({ id: userAsset1.id }),
|
||||
expect.objectContaining({ id: userAsset2.id }),
|
||||
]),
|
||||
});
|
||||
expect(body.assets).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should create a new memory and ignore assets the user does not have access to', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
assetIds: [userAsset1.id, adminAsset.id],
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
id: expect.any(String),
|
||||
assets: [expect.objectContaining({ id: userAsset1.id })],
|
||||
});
|
||||
expect(body.assets).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/memories/${uuidDto.invalid}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${userMemory.id}`)
|
||||
@@ -60,6 +176,22 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('PUT /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/memories/${uuidDto.invalid}`).send({ isSaved: true });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${uuidDto.invalid}`)
|
||||
.send({ isSaved: true })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}`)
|
||||
@@ -86,6 +218,23 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('PUT /memories/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${uuidDto.invalid}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
@@ -95,6 +244,15 @@ describe('/memories', () => {
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should require a valid asset id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require asset access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
@@ -121,6 +279,23 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /memories/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${uuidDto.invalid}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
@@ -130,6 +305,15 @@ describe('/memories', () => {
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should require a valid asset id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should only remove assets in the memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
@@ -156,6 +340,21 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/memories/${uuidDto.invalid}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}`)
|
||||
|
||||
@@ -117,13 +117,6 @@ describe('/shared-links', () => {
|
||||
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`);
|
||||
expect(resp.status).toBe(200);
|
||||
expect(resp.header['content-type']).toContain('text/html');
|
||||
expect(resp.text).toContain(`<meta property="og:image" content="http://127.0.0.1:2285`);
|
||||
});
|
||||
|
||||
it('should fall back to my.immich.app og:image meta tag for shared asset if Host header is not present', async () => {
|
||||
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`).set('Host', '');
|
||||
expect(resp.status).toBe(200);
|
||||
expect(resp.header['content-type']).toContain('text/html');
|
||||
expect(resp.text).toContain(`<meta property="og:image" content="https://my.immich.app`);
|
||||
});
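// The fallback these two assertions pin down, reduced to a standalone sketch.
// The server-side helper is not part of this diff, so its name and exact
// behaviour here are assumptions: use the incoming Host header when present,
// otherwise the public my.immich.app domain for the og:image URL.
const ogImageOrigin = (hostHeader?: string): string =>
  hostHeader ? `http://${hostHeader}` : 'https://my.immich.app';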
|
||||
|
||||
|
||||
230 e2e/src/api/specs/timeline.e2e-spec.ts Normal file
@@ -0,0 +1,230 @@
|
||||
import {
|
||||
AssetMediaResponseDto,
|
||||
AssetVisibility,
|
||||
LoginResponseDto,
|
||||
SharedLinkType,
|
||||
TimeBucketAssetResponseDto,
|
||||
} from '@immich/sdk';
|
||||
import { DateTime } from 'luxon';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
// TODO this should probably be a test util function
|
||||
const today = DateTime.fromObject({
|
||||
year: 2023,
|
||||
month: 11,
|
||||
day: 3,
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
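// A small helper along the lines the TODO above suggests; the name is an
// assumption and it is not part of this change. It reuses the luxon DateTime
// import already present in this spec.
const makeFixedDates = (year = 2023, month = 11, day = 3) => {
  const today = DateTime.fromObject({ year, month, day }) as DateTime<true>;
  return { today, yesterday: today.minus({ days: 1 }) };
};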
|
||||
|
||||
describe('/timeline', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let user: LoginResponseDto;
|
||||
let timeBucketUser: LoginResponseDto;
|
||||
|
||||
let user1Assets: AssetMediaResponseDto[];
|
||||
let user2Assets: AssetMediaResponseDto[];
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
admin = await utils.adminSetup({ onboarding: false });
|
||||
[user, timeBucketUser] = await Promise.all([
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('1')),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
|
||||
]);
|
||||
|
||||
user1Assets = await Promise.all([
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken, {
|
||||
isFavorite: true,
|
||||
fileCreatedAt: yesterday.toISO(),
|
||||
fileModifiedAt: yesterday.toISO(),
|
||||
assetData: { filename: 'example.mp4' },
|
||||
}),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
]);
|
||||
|
||||
user2Assets = await Promise.all([
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-10').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-12').toISOString() }),
|
||||
]);
|
||||
|
||||
await utils.deleteAssets(timeBucketUser.accessToken, [user2Assets[4].id]);
|
||||
});
|
||||
|
||||
describe('GET /timeline/buckets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/timeline/buckets');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should get time buckets by month', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ count: 3, timeBucket: '1970-02-01' },
|
||||
{ count: 1, timeBucket: '1970-01-01' },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not allow access for unrelated shared links', async () => {
|
||||
const sharedLink = await utils.createSharedLink(user.accessToken, {
|
||||
type: SharedLinkType.Individual,
|
||||
assetIds: user1Assets.map(({ id }) => id),
|
||||
});
|
||||
|
||||
const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and archived', async () => {
|
||||
const req1 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ withPartners: true, visibility: AssetVisibility.Archive });
|
||||
|
||||
expect(req1.status).toBe(400);
|
||||
expect(req1.body).toEqual(errorDto.badRequest());
|
||||
|
||||
const req2 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.query({ withPartners: true, visibility: undefined });
|
||||
|
||||
expect(req2.status).toBe(400);
|
||||
expect(req2.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and favorite', async () => {
|
||||
const req1 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ withPartners: true, isFavorite: true });
|
||||
|
||||
expect(req1.status).toBe(400);
|
||||
expect(req1.body).toEqual(errorDto.badRequest());
|
||||
|
||||
const req2 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ withPartners: true, isFavorite: false });
|
||||
|
||||
expect(req2.status).toBe(400);
|
||||
expect(req2.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and trash', async () => {
|
||||
const req = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.query({ withPartners: true, isTrashed: true });
|
||||
|
||||
expect(req.status).toBe(400);
|
||||
expect(req.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /timeline/bucket', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/timeline/bucket').query({
|
||||
timeBucket: '1900-01-01',
|
||||
});
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should handle 5 digit years', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/bucket')
|
||||
.query({ timeBucket: '012345-01-01' })
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
city: [],
|
||||
country: [],
|
||||
duration: [],
|
||||
id: [],
|
||||
visibility: [],
|
||||
isFavorite: [],
|
||||
isImage: [],
|
||||
isTrashed: [],
|
||||
livePhotoVideoId: [],
|
||||
fileCreatedAt: [],
|
||||
localOffsetHours: [],
|
||||
ownerId: [],
|
||||
projectionType: [],
|
||||
ratio: [],
|
||||
status: [],
|
||||
thumbhash: [],
|
||||
});
|
||||
});
|
||||
|
||||
// TODO enable date string validation while still accepting 5 digit years
|
||||
// it('should fail if time bucket is invalid', async () => {
|
||||
// const { status, body } = await request(app)
|
||||
// .get('/timeline/bucket')
|
||||
// .set('Authorization', `Bearer ${user.accessToken}`)
|
||||
// .query({ timeBucket: 'foo' });
|
||||
|
||||
// expect(status).toBe(400);
|
||||
// expect(body).toEqual(errorDto.badRequest);
|
||||
// });
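// One possible shape for the validation the TODO above asks for; illustrative
// only, not the server's actual validator. It keeps long-year buckets such as
// '012345-01-01' (exercised above) working while rejecting values like 'foo'.
const isValidTimeBucket = (value: string): boolean =>
  /^\d{4,6}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d{3})?Z?)?$/.test(value);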
|
||||
|
||||
it('should return time bucket', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/bucket')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ timeBucket: '1970-02-10' });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
city: [],
|
||||
country: [],
|
||||
duration: [],
|
||||
id: [],
|
||||
visibility: [],
|
||||
isFavorite: [],
|
||||
isImage: [],
|
||||
isTrashed: [],
|
||||
livePhotoVideoId: [],
|
||||
fileCreatedAt: [],
|
||||
localOffsetHours: [],
|
||||
ownerId: [],
|
||||
projectionType: [],
|
||||
ratio: [],
|
||||
status: [],
|
||||
thumbhash: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return time bucket in trash', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/bucket')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ timeBucket: '1970-02-01T00:00:00.000Z', isTrashed: true });
|
||||
|
||||
expect(status).toBe(200);
|
||||
|
||||
const timeBucket: TimeBucketAssetResponseDto = body;
|
||||
expect(timeBucket.isTrashed).toEqual([true]);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,178 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Script to generate test images with additional EXIF date tags
|
||||
* This creates actual JPEG images with embedded metadata for testing
|
||||
* Images are generated into e2e/test-assets/metadata/dates/
|
||||
*/
|
||||
|
||||
import { execSync } from 'node:child_process';
|
||||
import { writeFileSync } from 'node:fs';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import sharp from 'sharp';
|
||||
|
||||
interface TestImage {
|
||||
filename: string;
|
||||
description: string;
|
||||
exifTags: Record<string, string>;
|
||||
}
|
||||
|
||||
const testImages: TestImage[] = [
|
||||
{
|
||||
filename: 'time-created.jpg',
|
||||
description: 'Image with TimeCreated tag',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:11:15 14:30:00',
|
||||
Make: 'Canon',
|
||||
Model: 'EOS R5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datetime.jpg',
|
||||
description: 'Image with GPSDateTime and coordinates',
|
||||
exifTags: {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: '37.7749',
|
||||
GPSLongitude: '-122.4194',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'datetime-utc.jpg',
|
||||
description: 'Image with DateTimeUTC tag',
|
||||
exifTags: {
|
||||
DateTimeUTC: '2023:11:15 10:30:00',
|
||||
Make: 'Nikon',
|
||||
Model: 'D850',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datestamp.jpg',
|
||||
description: 'Image with GPSDateStamp and GPSTimeStamp',
|
||||
exifTags: {
|
||||
GPSDateStamp: '2023:11:15',
|
||||
GPSTimeStamp: '08:30:00',
|
||||
GPSLatitude: '51.5074',
|
||||
GPSLongitude: '-0.1278',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'sony-datetime2.jpg',
|
||||
description: 'Sony camera image with SonyDateTime2 tag',
|
||||
exifTags: {
|
||||
SonyDateTime2: '2023:11:15 06:30:00',
|
||||
Make: 'SONY',
|
||||
Model: 'ILCE-7RM5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'date-priority-test.jpg',
|
||||
description: 'Image with multiple date tags to test priority',
|
||||
exifTags: {
|
||||
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
|
||||
DateTimeOriginal: '2023:02:02 02:00:00',
|
||||
SubSecCreateDate: '2023:03:03 03:00:00',
|
||||
CreateDate: '2023:04:04 04:00:00',
|
||||
CreationDate: '2023:05:05 05:00:00',
|
||||
DateTimeCreated: '2023:06:06 06:00:00',
|
||||
TimeCreated: '2023:07:07 07:00:00',
|
||||
GPSDateTime: '2023:08:08 08:00:00',
|
||||
DateTimeUTC: '2023:09:09 09:00:00',
|
||||
GPSDateStamp: '2023:10:10',
|
||||
SonyDateTime2: '2023:11:11 11:00:00',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'new-tags-only.jpg',
|
||||
description: 'Image with only additional date tags (no standard tags)',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:12:01 15:45:30',
|
||||
GPSDateTime: '2023:12:01 13:45:30Z',
|
||||
DateTimeUTC: '2023:12:01 13:45:30',
|
||||
GPSDateStamp: '2023:12:01',
|
||||
SonyDateTime2: '2023:12:01 08:45:30',
|
||||
GPSLatitude: '40.7128',
|
||||
GPSLongitude: '-74.0060',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const generateTestImages = async (): Promise<void> => {
|
||||
// Target directory: e2e/test-assets/metadata/dates/
|
||||
// Current file is in: e2e/src/
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
|
||||
|
||||
console.log('Generating test images with additional EXIF date tags...');
|
||||
console.log(`Target directory: ${targetDir}`);
|
||||
|
||||
for (const image of testImages) {
|
||||
try {
|
||||
const imagePath = join(targetDir, image.filename);
|
||||
|
||||
// Create unique JPEG file using Sharp
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
const jpegData = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
writeFileSync(imagePath, jpegData);
|
||||
|
||||
// Build exiftool command to add EXIF data
|
||||
const exifArgs = Object.entries(image.exifTags)
|
||||
.map(([tag, value]) => `-${tag}="${value}"`)
|
||||
.join(' ');
|
||||
|
||||
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
|
||||
|
||||
console.log(`Creating ${image.filename}: ${image.description}`);
|
||||
execSync(command, { stdio: 'pipe' });
|
||||
|
||||
// Verify the tags were written
|
||||
const verifyCommand = `exiftool -json "${imagePath}"`;
|
||||
const result = execSync(verifyCommand, { encoding: 'utf8' });
|
||||
const metadata = JSON.parse(result)[0];
|
||||
|
||||
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
|
||||
|
||||
// Log first date tag found for verification
|
||||
const firstDateTag = Object.keys(image.exifTags).find(
|
||||
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
|
||||
);
|
||||
if (firstDateTag && metadata[firstDateTag]) {
|
||||
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\nTest image generation complete!');
|
||||
console.log('Files created in:', targetDir);
|
||||
console.log('\nTo test these images:');
|
||||
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
|
||||
};
|
||||
|
||||
export { generateTestImages };
|
||||
|
||||
// Run the generator if this file is executed directly
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
generateTestImages().catch(console.error);
|
||||
}
|
||||
Submodule e2e/test-assets updated: 18736fc27a...8885d6d01c
@@ -427,7 +427,6 @@
|
||||
"app_settings": "App Settings",
|
||||
"appears_in": "Appears in",
|
||||
"archive": "Archive",
|
||||
"archive_action_prompt": "{count} added to Archive",
|
||||
"archive_or_unarchive_photo": "Archive or unarchive photo",
|
||||
"archive_page_no_archived_assets": "No archived assets found",
|
||||
"archive_page_title": "Archive ({count})",
|
||||
@@ -703,7 +702,7 @@
|
||||
"daily_title_text_date": "E, MMM dd",
|
||||
"daily_title_text_date_year": "E, MMM dd, yyyy",
|
||||
"dark": "Dark",
|
||||
"dark_theme": "Toggle dark theme",
|
||||
"darkTheme": "Toggle dark theme",
|
||||
"date_after": "Date after",
|
||||
"date_and_time": "Date and Time",
|
||||
"date_before": "Date before",
|
||||
@@ -984,7 +983,6 @@
|
||||
"failed_to_load_assets": "Failed to load assets",
|
||||
"failed_to_load_folder": "Failed to load folder",
|
||||
"favorite": "Favorite",
|
||||
"favorite_action_prompt": "{count} added to Favorites",
|
||||
"favorite_or_unfavorite_photo": "Favorite or unfavorite photo",
|
||||
"favorites": "Favorites",
|
||||
"favorites_page_no_favorites": "No favorite assets found",
|
||||
@@ -1247,7 +1245,6 @@
|
||||
"more": "More",
|
||||
"move": "Move",
|
||||
"move_off_locked_folder": "Move out of locked folder",
|
||||
"move_to_lock_folder_action_prompt": "{count} added to the locked folder",
|
||||
"move_to_locked_folder": "Move to locked folder",
|
||||
"move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder",
|
||||
"moved_to_archive": "Moved {count, plural, one {# asset} other {# assets}} to archive",
|
||||
@@ -1498,7 +1495,6 @@
|
||||
"remove_deleted_assets": "Remove Deleted Assets",
|
||||
"remove_from_album": "Remove from album",
|
||||
"remove_from_favorites": "Remove from favorites",
|
||||
"remove_from_lock_folder_action_prompt": "{count} removed from the locked folder",
|
||||
"remove_from_locked_folder": "Remove from locked folder",
|
||||
"remove_from_locked_folder_confirmation": "Are you sure you want to move these photos and videos out of the locked folder? They will be visible in your library.",
|
||||
"remove_from_shared_link": "Remove from shared link",
|
||||
|
||||
@@ -59,7 +59,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends g++
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9653efd4380d5a0e5511e337dcfc3b8ba5bc4e6ea7fa3be7716598261d5503fa /uv /uvx /bin/
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:cda0fdc9b6066975ba4c791597870d18bc3a441dfc18ab24c5e888c16e15780c /uv /uvx /bin/
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||
|
||||
@@ -146,7 +146,6 @@ dart_code_metrics:
|
||||
# - no-empty-block
|
||||
# - no-equal-then-else
|
||||
# - prefer-correct-test-file-name
|
||||
- prefer-const-border-radius
|
||||
# - prefer-match-file-name
|
||||
# - prefer-return-await
|
||||
# - avoid-self-assignment
|
||||
@@ -291,8 +290,7 @@ dart_code_metrics:
|
||||
# Style
|
||||
# - prefer-trailing-comma
|
||||
# - unnecessary-trailing-comma
|
||||
- prefer-declaring-const-constructor
|
||||
# - prefer-declaring-const-constructor
|
||||
# - prefer-single-widget-per-file
|
||||
- prefer-switch-expression
|
||||
# - prefer-prefixed-global-constants
|
||||
# - prefer-correct-callback-field-name
|
||||
|
||||
@@ -2,6 +2,4 @@ org.gradle.jvmargs=-Xmx4096M
|
||||
android.useAndroidX=true
|
||||
android.enableJetifier=true
|
||||
android.nonTransitiveRClass=false
|
||||
android.nonFinalResIds=false
|
||||
org.gradle.caching=true
|
||||
org.gradle.parallel=true
|
||||
android.nonFinalResIds=false
|
||||
2 mobile/drift_schemas/main/drift_schema_v1.json generated
File diff suppressed because one or more lines are too long
@@ -7,7 +7,7 @@ import 'general_helper.dart';
|
||||
class ImmichTestLoginHelper {
|
||||
final WidgetTester tester;
|
||||
|
||||
const ImmichTestLoginHelper(this.tester);
|
||||
ImmichTestLoginHelper(this.tester);
|
||||
|
||||
Future<void> waitForLoginScreen() async {
|
||||
await pumpUntilFound(tester, find.text("Login"));
|
||||
@@ -60,11 +60,11 @@ class ImmichTestLoginHelper {
|
||||
await tester.tap(button);
|
||||
}
|
||||
|
||||
Future<void> assertLoginSuccess() async {
|
||||
Future<void> assertLoginSuccess({int timeoutSeconds = 15}) async {
|
||||
await pumpUntilFound(tester, find.text("home_page_building_timeline".tr()));
|
||||
}
|
||||
|
||||
Future<void> assertLoginFailed() async {
|
||||
Future<void> assertLoginFailed({int timeoutSeconds = 15}) async {
|
||||
await pumpUntilFound(tester, find.text("login_form_failed_login".tr()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ import WidgetKit
|
||||
|
||||
func buildEntry(
|
||||
api: ImmichAPI,
|
||||
asset: Asset,
|
||||
asset: SearchResult,
|
||||
dateOffset: Int,
|
||||
subtitle: String? = nil
|
||||
)
|
||||
@@ -15,8 +15,7 @@ func buildEntry(
|
||||
to: Date.now
|
||||
)!
|
||||
let image = try await api.fetchImage(asset: asset)
|
||||
|
||||
return ImageEntry(date: entryDate, image: image, subtitle: subtitle, deepLink: asset.deepLink)
|
||||
return ImageEntry(date: entryDate, image: image, subtitle: subtitle)
|
||||
}
|
||||
|
||||
func generateRandomEntries(
|
||||
|
||||
@@ -6,7 +6,6 @@ struct ImageEntry: TimelineEntry {
|
||||
var image: UIImage?
|
||||
var subtitle: String? = nil
|
||||
var error: WidgetError? = nil
|
||||
var deepLink: URL? = nil
|
||||
|
||||
// Resizes the stored image to a maximum width of 450 pixels
|
||||
mutating func resize() {
|
||||
@@ -55,7 +54,6 @@ struct ImmichWidgetView: View {
|
||||
}
|
||||
.padding(16)
|
||||
}
|
||||
.widgetURL(entry.deepLink)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,13 +43,9 @@ enum AssetType: String, Codable {
|
||||
case other = "OTHER"
|
||||
}
|
||||
|
||||
struct Asset: Codable {
|
||||
struct SearchResult: Codable {
|
||||
let id: String
|
||||
let type: AssetType
|
||||
|
||||
var deepLink: URL? {
|
||||
return URL(string: "immich://asset?id=\(id)")
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchFilters: Codable {
|
||||
@@ -60,7 +56,7 @@ struct SearchFilters: Codable {
|
||||
|
||||
struct MemoryResult: Codable {
|
||||
let id: String
|
||||
var assets: [Asset]
|
||||
var assets: [SearchResult]
|
||||
let type: String
|
||||
|
||||
struct MemoryData: Codable {
|
||||
@@ -131,7 +127,7 @@ class ImmichAPI {
|
||||
}
|
||||
|
||||
func fetchSearchResults(with filters: SearchFilters) async throws
|
||||
-> [Asset]
|
||||
-> [SearchResult]
|
||||
{
|
||||
// get URL
|
||||
guard
|
||||
@@ -151,7 +147,7 @@ class ImmichAPI {
|
||||
let (data, _) = try await URLSession.shared.data(for: request)
|
||||
|
||||
// decode data
|
||||
return try JSONDecoder().decode([Asset].self, from: data)
|
||||
return try JSONDecoder().decode([SearchResult].self, from: data)
|
||||
}
|
||||
|
||||
func fetchMemory(for date: Date) async throws -> [MemoryResult] {
|
||||
@@ -176,7 +172,7 @@ class ImmichAPI {
|
||||
return try JSONDecoder().decode([MemoryResult].self, from: data)
|
||||
}
|
||||
|
||||
func fetchImage(asset: Asset) async throws(WidgetError) -> UIImage {
|
||||
func fetchImage(asset: SearchResult) async throws(WidgetError) -> UIImage {
|
||||
let thumbnailParams = [URLQueryItem(name: "size", value: "preview")]
|
||||
let assetEndpoint = "/assets/" + asset.id + "/thumbnail"
|
||||
|
||||
|
||||
@@ -12,5 +12,3 @@ enum TextSearchType {
|
||||
enum AssetVisibilityEnum { timeline, hidden, archive, locked }
|
||||
|
||||
enum SortUserBy { id }
|
||||
|
||||
enum ActionSource { timeline, viewer }
|
||||
|
||||
@@ -4,8 +4,6 @@ sealed class ImmichErrors {
|
||||
}
|
||||
|
||||
class NoResponseDtoError extends ImmichErrors implements Exception {
|
||||
const NoResponseDtoError();
|
||||
|
||||
@override
|
||||
String toString() => "Response Dto is null";
|
||||
}
|
||||
|
||||
@@ -1,89 +0,0 @@
enum AlbumAssetOrder {
// do not change this order!
asc,
desc,
}

enum AlbumUserRole {
// do not change this order!
editor,
viewer,
}

// Model for an album stored in the server
class Album {
final String id;
final String name;
final String ownerId;
final String description;
final DateTime createdAt;
final DateTime updatedAt;
final String? thumbnailAssetId;
final bool isActivityEnabled;
final AlbumAssetOrder order;
final int assetCount;
final String ownerName;

const Album({
required this.id,
required this.name,
required this.ownerId,
required this.description,
required this.createdAt,
required this.updatedAt,
this.thumbnailAssetId,
required this.isActivityEnabled,
required this.order,
required this.assetCount,
required this.ownerName,
});

@override
String toString() {
return '''Album {
id: $id,
name: $name,
ownerId: $ownerId,
description: $description,
createdAt: $createdAt,
updatedAt: $updatedAt,
isActivityEnabled: $isActivityEnabled,
order: $order,
thumbnailAssetId: ${thumbnailAssetId ?? "<NA>"}
assetCount: $assetCount
ownerName: $ownerName
}''';
}

@override
bool operator ==(Object other) {
if (other is! Album) return false;
if (identical(this, other)) return true;
return id == other.id &&
name == other.name &&
ownerId == other.ownerId &&
description == other.description &&
createdAt == other.createdAt &&
updatedAt == other.updatedAt &&
thumbnailAssetId == other.thumbnailAssetId &&
isActivityEnabled == other.isActivityEnabled &&
order == other.order &&
assetCount == other.assetCount &&
ownerName == other.ownerName;
}

@override
int get hashCode {
return id.hashCode ^
name.hashCode ^
ownerId.hashCode ^
description.hashCode ^
createdAt.hashCode ^
updatedAt.hashCode ^
thumbnailAssetId.hashCode ^
isActivityEnabled.hashCode ^
order.hashCode ^
assetCount.hashCode ^
ownerName.hashCode;
}
}
@@ -8,18 +8,16 @@ enum AssetVisibility {
}

// Model for an asset stored in the server
class RemoteAsset extends BaseAsset {
class Asset extends BaseAsset {
final String id;
final String? localId;
final String? thumbHash;
final AssetVisibility visibility;
final String ownerId;

const RemoteAsset({
const Asset({
required this.id,
this.localId,
required super.name,
required this.ownerId,
required super.checksum,
required super.type,
required super.createdAt,
@@ -39,17 +37,16 @@ class RemoteAsset extends BaseAsset {
@override
String toString() {
return '''Asset {
id: $id,
name: $name,
ownerId: $ownerId,
type: $type,
createdAt: $createdAt,
updatedAt: $updatedAt,
width: ${width ?? "<NA>"},
height: ${height ?? "<NA>"},
durationInSeconds: ${durationInSeconds ?? "<NA>"},
localId: ${localId ?? "<NA>"},
isFavorite: $isFavorite,
id: $id,
name: $name,
type: $type,
createdAt: $createdAt,
updatedAt: $updatedAt,
width: ${width ?? "<NA>"},
height: ${height ?? "<NA>"},
durationInSeconds: ${durationInSeconds ?? "<NA>"},
localId: ${localId ?? "<NA>"},
isFavorite: $isFavorite,
thumbHash: ${thumbHash ?? "<NA>"},
visibility: $visibility,
}''';
@@ -57,11 +54,10 @@ class RemoteAsset extends BaseAsset {

@override
bool operator ==(Object other) {
if (other is! RemoteAsset) return false;
if (other is! Asset) return false;
if (identical(this, other)) return true;
return super == other &&
id == other.id &&
ownerId == other.ownerId &&
localId == other.localId &&
thumbHash == other.thumbHash &&
visibility == other.visibility;
@@ -71,7 +67,6 @@ class RemoteAsset extends BaseAsset {
int get hashCode =>
super.hashCode ^
id.hashCode ^
ownerId.hashCode ^
localId.hashCode ^
thumbHash.hashCode ^
visibility.hashCode;
@@ -1,4 +1,4 @@
part 'remote_asset.model.dart';
part 'asset.model.dart';
part 'local_asset.model.dart';

enum AssetType {

@@ -1,7 +1,7 @@
import 'dart:convert';

class Person {
const Person({
Person({
required this.id,
this.birthDate,
required this.isHidden,

@@ -2,8 +2,10 @@ import 'dart:async';

import 'package:collection/collection.dart';
import 'package:flutter/widgets.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/local_album.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/services/store.service.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
@@ -15,16 +17,22 @@ class LocalSyncService {
final DriftLocalAlbumRepository _localAlbumRepository;
final NativeSyncApi _nativeSyncApi;
final Platform _platform;
final StoreService _storeService;
final Logger _log = Logger("DeviceSyncService");

LocalSyncService({
required DriftLocalAlbumRepository localAlbumRepository,
required NativeSyncApi nativeSyncApi,
required StoreService storeService,
Platform? platform,
}) : _localAlbumRepository = localAlbumRepository,
_nativeSyncApi = nativeSyncApi,
_storeService = storeService,
_platform = platform ?? const LocalPlatform();

bool get _ignoreIcloudAssets =>
_storeService.get(StoreKey.ignoreIcloudAssets, false) == true;

Future<void> sync({bool full = false}) async {
final Stopwatch stopwatch = Stopwatch()..start();
try {
@@ -76,7 +84,11 @@ class LocalSyncService {
);
continue;
}
await updateAlbum(dbAlbum, album);
if (_ignoreIcloudAssets) {
await removeAlbum(dbAlbum);
} else {
await updateAlbum(dbAlbum, album);
}
}
}

@@ -94,7 +106,12 @@ class LocalSyncService {
try {
final Stopwatch stopwatch = Stopwatch()..start();

final deviceAlbums = await _nativeSyncApi.getAlbums();
List<PlatformAlbum> deviceAlbums =
List.of(await _nativeSyncApi.getAlbums());
if (_platform.isIOS && _ignoreIcloudAssets) {
deviceAlbums.removeWhere((album) => album.isCloud);
}

final dbAlbums =
await _localAlbumRepository.getAll(sortBy: {SortLocalAlbumsBy.id});

@@ -1,60 +0,0 @@
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/infrastructure/repositories/remote_album.repository.dart';
import 'package:immich_mobile/models/albums/album_search.model.dart';
import 'package:immich_mobile/utils/remote_album.utils.dart';

class RemoteAlbumService {
final DriftRemoteAlbumRepository _repository;

const RemoteAlbumService(this._repository);

Future<List<Album>> getAll() {
return _repository.getAll();
}

List<Album> sortAlbums(
List<Album> albums,
RemoteAlbumSortMode sortMode, {
bool isReverse = false,
}) {
return sortMode.sortFn(albums, isReverse);
}

List<Album> searchAlbums(
List<Album> albums,
String query,
String? userId, [
QuickFilterMode filterMode = QuickFilterMode.all,
]) {
final lowerQuery = query.toLowerCase();
List<Album> filtered = albums;

// Apply text search filter
if (query.isNotEmpty) {
filtered = filtered
.where(
(album) =>
album.name.toLowerCase().contains(lowerQuery) ||
album.description.toLowerCase().contains(lowerQuery),
)
.toList();
}

if (userId != null) {
switch (filterMode) {
case QuickFilterMode.myAlbums:
filtered =
filtered.where((album) => album.ownerId == userId).toList();
break;
case QuickFilterMode.sharedWithMe:
filtered =
filtered.where((album) => album.ownerId != userId).toList();
break;
case QuickFilterMode.all:
break;
}
}

return filtered;
}
}
@@ -76,76 +76,11 @@ class SyncStreamService {
case SyncEntityType.assetExifV1:
return _syncStreamRepository.updateAssetsExifV1(data.cast());
case SyncEntityType.partnerAssetV1:
return _syncStreamRepository.updateAssetsV1(
data.cast(),
debugLabel: 'partner',
);
case SyncEntityType.partnerAssetBackfillV1:
return _syncStreamRepository.updateAssetsV1(
data.cast(),
debugLabel: 'partner backfill',
);
return _syncStreamRepository.updatePartnerAssetsV1(data.cast());
case SyncEntityType.partnerAssetDeleteV1:
return _syncStreamRepository.deleteAssetsV1(
data.cast(),
debugLabel: "partner",
);
return _syncStreamRepository.deletePartnerAssetsV1(data.cast());
case SyncEntityType.partnerAssetExifV1:
return _syncStreamRepository.updateAssetsExifV1(
data.cast(),
debugLabel: 'partner',
);
case SyncEntityType.partnerAssetExifBackfillV1:
return _syncStreamRepository.updateAssetsExifV1(
data.cast(),
debugLabel: 'partner backfill',
);
case SyncEntityType.albumV1:
return _syncStreamRepository.updateAlbumsV1(data.cast());
case SyncEntityType.albumDeleteV1:
return _syncStreamRepository.deleteAlbumsV1(data.cast());
case SyncEntityType.albumUserV1:
return _syncStreamRepository.updateAlbumUsersV1(data.cast());
case SyncEntityType.albumUserBackfillV1:
return _syncStreamRepository.updateAlbumUsersV1(
data.cast(),
debugLabel: 'backfill',
);
case SyncEntityType.albumUserDeleteV1:
return _syncStreamRepository.deleteAlbumUsersV1(data.cast());
case SyncEntityType.albumAssetV1:
return _syncStreamRepository.updateAssetsV1(
data.cast(),
debugLabel: 'album',
);
case SyncEntityType.albumAssetBackfillV1:
return _syncStreamRepository.updateAssetsV1(
data.cast(),
debugLabel: 'album backfill',
);
case SyncEntityType.albumAssetExifV1:
return _syncStreamRepository.updateAssetsExifV1(
data.cast(),
debugLabel: 'album',
);
case SyncEntityType.albumAssetExifBackfillV1:
return _syncStreamRepository.updateAssetsExifV1(
data.cast(),
debugLabel: 'album backfill',
);
case SyncEntityType.albumToAssetV1:
return _syncStreamRepository.updateAlbumToAssetsV1(data.cast());
case SyncEntityType.albumToAssetBackfillV1:
return _syncStreamRepository.updateAlbumToAssetsV1(
data.cast(),
debugLabel: 'backfill',
);
case SyncEntityType.albumToAssetDeleteV1:
return _syncStreamRepository.deleteAlbumToAssetsV1(data.cast());
// No-op. SyncAckV1 entities are checkpoints in the sync stream
// to acknowledge that the client has processed all the backfill events
case SyncEntityType.syncAckV1:
return;
return _syncStreamRepository.updatePartnerAssetsExifV1(data.cast());
default:
_logger.warning("Unknown sync data type: $type");
}

@@ -45,13 +45,6 @@ class TimelineFactory {
bucketSource: () =>
_timelineRepository.watchLocalBucket(albumId, groupBy: groupBy),
);

TimelineService remoteAlbum({required String albumId}) => TimelineService(
assetSource: (offset, count) => _timelineRepository
.getRemoteBucketAssets(albumId, offset: offset, count: count),
bucketSource: () =>
_timelineRepository.watchRemoteBucket(albumId, groupBy: groupBy),
);
}

class TimelineService {
@@ -64,7 +57,7 @@ class TimelineService {
}) : _assetSource = assetSource,
_bucketSource = bucketSource {
_bucketSubscription =
_bucketSource().listen((_) => unawaited(reloadBucket()));
_bucketSource().listen((_) => unawaited(_reloadBucket()));
}

final AsyncMutex _mutex = AsyncMutex();
@@ -74,7 +67,7 @@ class TimelineService {

Stream<List<Bucket>> Function() get watchBuckets => _bucketSource;

Future<void> reloadBucket() => _mutex.run(() async {
Future<void> _reloadBucket() => _mutex.run(() async {
_buffer = await _assetSource(_bufferOffset, _buffer.length);
});

@@ -554,12 +554,18 @@ class Asset {
}""";
}

static getVisibility(AssetVisibility visibility) => switch (visibility) {
AssetVisibility.archive => AssetVisibilityEnum.archive,
AssetVisibility.hidden => AssetVisibilityEnum.hidden,
AssetVisibility.locked => AssetVisibilityEnum.locked,
AssetVisibility.timeline || _ => AssetVisibilityEnum.timeline,
};
static getVisibility(AssetVisibility visibility) {
switch (visibility) {
case AssetVisibility.timeline:
return AssetVisibilityEnum.timeline;
case AssetVisibility.archive:
return AssetVisibilityEnum.archive;
case AssetVisibility.hidden:
return AssetVisibilityEnum.hidden;
case AssetVisibility.locked:
return AssetVisibilityEnum.locked;
}
}
}

enum AssetType {

@@ -11,7 +11,7 @@ class SSLClientCertStoreVal {
final Uint8List data;
final String? password;

const SSLClientCertStoreVal(this.data, this.password);
SSLClientCertStoreVal(this.data, this.password);

void save() {
final b64Str = base64Encode(data);

@@ -1,5 +1,5 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/local_album.model.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class LocalAlbumEntity extends Table with DriftDefaultsMixin {

@@ -3,7 +3,7 @@
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart'
as i1;
import 'package:immich_mobile/domain/models/album/local_album.model.dart' as i2;
import 'package:immich_mobile/domain/models/local_album.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart'
as i3;
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;

@@ -1,34 +0,0 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class RemoteAlbumEntity extends Table with DriftDefaultsMixin {
const RemoteAlbumEntity();

TextColumn get id => text()();

TextColumn get name => text()();

TextColumn get description => text().withDefault(const Constant(''))();

DateTimeColumn get createdAt => dateTime().withDefault(currentDateAndTime)();

DateTimeColumn get updatedAt => dateTime().withDefault(currentDateAndTime)();

TextColumn get ownerId =>
text().references(UserEntity, #id, onDelete: KeyAction.cascade)();

TextColumn get thumbnailAssetId => text()
.references(RemoteAssetEntity, #id, onDelete: KeyAction.setNull)
.nullable()();

BoolColumn get isActivityEnabled =>
boolean().withDefault(const Constant(true))();

IntColumn get order => intEnum<AlbumAssetOrder>()();

@override
Set<Column> get primaryKey => {id};
}
@@ -1,946 +0,0 @@
|
||||
// dart format width=80
|
||||
// ignore_for_file: type=lint
|
||||
import 'package:drift/drift.dart' as i0;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
as i1;
|
||||
import 'package:immich_mobile/domain/models/album/album.model.dart' as i2;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart'
|
||||
as i3;
|
||||
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'
|
||||
as i5;
|
||||
import 'package:drift/internal/modular.dart' as i6;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
|
||||
as i7;
|
||||
|
||||
typedef $$RemoteAlbumEntityTableCreateCompanionBuilder
|
||||
= i1.RemoteAlbumEntityCompanion Function({
|
||||
required String id,
|
||||
required String name,
|
||||
i0.Value<String> description,
|
||||
i0.Value<DateTime> createdAt,
|
||||
i0.Value<DateTime> updatedAt,
|
||||
required String ownerId,
|
||||
i0.Value<String?> thumbnailAssetId,
|
||||
i0.Value<bool> isActivityEnabled,
|
||||
required i2.AlbumAssetOrder order,
|
||||
});
|
||||
typedef $$RemoteAlbumEntityTableUpdateCompanionBuilder
|
||||
= i1.RemoteAlbumEntityCompanion Function({
|
||||
i0.Value<String> id,
|
||||
i0.Value<String> name,
|
||||
i0.Value<String> description,
|
||||
i0.Value<DateTime> createdAt,
|
||||
i0.Value<DateTime> updatedAt,
|
||||
i0.Value<String> ownerId,
|
||||
i0.Value<String?> thumbnailAssetId,
|
||||
i0.Value<bool> isActivityEnabled,
|
||||
i0.Value<i2.AlbumAssetOrder> order,
|
||||
});
|
||||
|
||||
final class $$RemoteAlbumEntityTableReferences extends i0.BaseReferences<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumEntityTable,
|
||||
i1.RemoteAlbumEntityData> {
|
||||
$$RemoteAlbumEntityTableReferences(
|
||||
super.$_db, super.$_table, super.$_typedResult);
|
||||
|
||||
static i5.$UserEntityTable _ownerIdTable(i0.GeneratedDatabase db) =>
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.ownerId,
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity')
|
||||
.id));
|
||||
|
||||
i5.$$UserEntityTableProcessedTableManager get ownerId {
|
||||
final $_column = $_itemColumn<String>('owner_id')!;
|
||||
|
||||
final manager = i5
|
||||
.$$UserEntityTableTableManager(
|
||||
$_db,
|
||||
i6.ReadDatabaseContainer($_db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_ownerIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
|
||||
static i7.$RemoteAssetEntityTable _thumbnailAssetIdTable(
|
||||
i0.GeneratedDatabase db) =>
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.thumbnailAssetId,
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.id));
|
||||
|
||||
i7.$$RemoteAssetEntityTableProcessedTableManager? get thumbnailAssetId {
|
||||
final $_column = $_itemColumn<String>('thumbnail_asset_id');
|
||||
if ($_column == null) return null;
|
||||
final manager = i7
|
||||
.$$RemoteAssetEntityTableTableManager(
|
||||
$_db,
|
||||
i6.ReadDatabaseContainer($_db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_thumbnailAssetIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableFilterComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumEntityTable> {
|
||||
$$RemoteAlbumEntityTableFilterComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnFilters<String> get id => $composableBuilder(
|
||||
column: $table.id, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<String> get name => $composableBuilder(
|
||||
column: $table.name, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<String> get description => $composableBuilder(
|
||||
column: $table.description,
|
||||
builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<DateTime> get createdAt => $composableBuilder(
|
||||
column: $table.createdAt, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<DateTime> get updatedAt => $composableBuilder(
|
||||
column: $table.updatedAt, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<bool> get isActivityEnabled => $composableBuilder(
|
||||
column: $table.isActivityEnabled,
|
||||
builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnWithTypeConverterFilters<i2.AlbumAssetOrder, i2.AlbumAssetOrder, int>
|
||||
get order => $composableBuilder(
|
||||
column: $table.order,
|
||||
builder: (column) => i0.ColumnWithTypeConverterFilters(column));
|
||||
|
||||
i5.$$UserEntityTableFilterComposer get ownerId {
|
||||
final i5.$$UserEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.ownerId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$UserEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i7.$$RemoteAssetEntityTableFilterComposer get thumbnailAssetId {
|
||||
final i7.$$RemoteAssetEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.thumbnailAssetId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i7.$$RemoteAssetEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableOrderingComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumEntityTable> {
|
||||
$$RemoteAlbumEntityTableOrderingComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnOrderings<String> get id => $composableBuilder(
|
||||
column: $table.id, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<String> get name => $composableBuilder(
|
||||
column: $table.name, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<String> get description => $composableBuilder(
|
||||
column: $table.description,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<DateTime> get createdAt => $composableBuilder(
|
||||
column: $table.createdAt,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<DateTime> get updatedAt => $composableBuilder(
|
||||
column: $table.updatedAt,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<bool> get isActivityEnabled => $composableBuilder(
|
||||
column: $table.isActivityEnabled,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get order => $composableBuilder(
|
||||
column: $table.order, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i5.$$UserEntityTableOrderingComposer get ownerId {
|
||||
final i5.$$UserEntityTableOrderingComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.ownerId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$UserEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i7.$$RemoteAssetEntityTableOrderingComposer get thumbnailAssetId {
|
||||
final i7.$$RemoteAssetEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.thumbnailAssetId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i7.$$RemoteAssetEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableAnnotationComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumEntityTable> {
|
||||
$$RemoteAlbumEntityTableAnnotationComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.GeneratedColumn<String> get id =>
|
||||
$composableBuilder(column: $table.id, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<String> get name =>
|
||||
$composableBuilder(column: $table.name, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<String> get description => $composableBuilder(
|
||||
column: $table.description, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<DateTime> get createdAt =>
|
||||
$composableBuilder(column: $table.createdAt, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<DateTime> get updatedAt =>
|
||||
$composableBuilder(column: $table.updatedAt, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<bool> get isActivityEnabled => $composableBuilder(
|
||||
column: $table.isActivityEnabled, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumnWithTypeConverter<i2.AlbumAssetOrder, int> get order =>
|
||||
$composableBuilder(column: $table.order, builder: (column) => column);
|
||||
|
||||
i5.$$UserEntityTableAnnotationComposer get ownerId {
|
||||
final i5.$$UserEntityTableAnnotationComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.ownerId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$UserEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i7.$$RemoteAssetEntityTableAnnotationComposer get thumbnailAssetId {
|
||||
final i7.$$RemoteAssetEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.thumbnailAssetId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i7.$$RemoteAssetEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumEntityTable,
|
||||
i1.RemoteAlbumEntityData,
|
||||
i1.$$RemoteAlbumEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumEntityData, i1.$$RemoteAlbumEntityTableReferences),
|
||||
i1.RemoteAlbumEntityData,
|
||||
i0.PrefetchHooks Function({bool ownerId, bool thumbnailAssetId})> {
|
||||
$$RemoteAlbumEntityTableTableManager(
|
||||
i0.GeneratedDatabase db, i1.$RemoteAlbumEntityTable table)
|
||||
: super(i0.TableManagerState(
|
||||
db: db,
|
||||
table: table,
|
||||
createFilteringComposer: () =>
|
||||
i1.$$RemoteAlbumEntityTableFilterComposer($db: db, $table: table),
|
||||
createOrderingComposer: () => i1
|
||||
.$$RemoteAlbumEntityTableOrderingComposer($db: db, $table: table),
|
||||
createComputedFieldComposer: () =>
|
||||
i1.$$RemoteAlbumEntityTableAnnotationComposer(
|
||||
$db: db, $table: table),
|
||||
updateCompanionCallback: ({
|
||||
i0.Value<String> id = const i0.Value.absent(),
|
||||
i0.Value<String> name = const i0.Value.absent(),
|
||||
i0.Value<String> description = const i0.Value.absent(),
|
||||
i0.Value<DateTime> createdAt = const i0.Value.absent(),
|
||||
i0.Value<DateTime> updatedAt = const i0.Value.absent(),
|
||||
i0.Value<String> ownerId = const i0.Value.absent(),
|
||||
i0.Value<String?> thumbnailAssetId = const i0.Value.absent(),
|
||||
i0.Value<bool> isActivityEnabled = const i0.Value.absent(),
|
||||
i0.Value<i2.AlbumAssetOrder> order = const i0.Value.absent(),
|
||||
}) =>
|
||||
i1.RemoteAlbumEntityCompanion(
|
||||
id: id,
|
||||
name: name,
|
||||
description: description,
|
||||
createdAt: createdAt,
|
||||
updatedAt: updatedAt,
|
||||
ownerId: ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled,
|
||||
order: order,
|
||||
),
|
||||
createCompanionCallback: ({
|
||||
required String id,
|
||||
required String name,
|
||||
i0.Value<String> description = const i0.Value.absent(),
|
||||
i0.Value<DateTime> createdAt = const i0.Value.absent(),
|
||||
i0.Value<DateTime> updatedAt = const i0.Value.absent(),
|
||||
required String ownerId,
|
||||
i0.Value<String?> thumbnailAssetId = const i0.Value.absent(),
|
||||
i0.Value<bool> isActivityEnabled = const i0.Value.absent(),
|
||||
required i2.AlbumAssetOrder order,
|
||||
}) =>
|
||||
i1.RemoteAlbumEntityCompanion.insert(
|
||||
id: id,
|
||||
name: name,
|
||||
description: description,
|
||||
createdAt: createdAt,
|
||||
updatedAt: updatedAt,
|
||||
ownerId: ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled,
|
||||
order: order,
|
||||
),
|
||||
withReferenceMapper: (p0) => p0
|
||||
.map((e) => (
|
||||
e.readTable(table),
|
||||
i1.$$RemoteAlbumEntityTableReferences(db, table, e)
|
||||
))
|
||||
.toList(),
|
||||
prefetchHooksCallback: ({ownerId = false, thumbnailAssetId = false}) {
|
||||
return i0.PrefetchHooks(
|
||||
db: db,
|
||||
explicitlyWatchedTables: [],
|
||||
addJoins: <
|
||||
T extends i0.TableManagerState<
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic>>(state) {
|
||||
if (ownerId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.ownerId,
|
||||
referencedTable:
|
||||
i1.$$RemoteAlbumEntityTableReferences._ownerIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumEntityTableReferences
|
||||
._ownerIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
if (thumbnailAssetId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.thumbnailAssetId,
|
||||
referencedTable: i1.$$RemoteAlbumEntityTableReferences
|
||||
._thumbnailAssetIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumEntityTableReferences
|
||||
._thumbnailAssetIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
|
||||
return state;
|
||||
},
|
||||
getPrefetchedDataCallback: (items) async {
|
||||
return [];
|
||||
},
|
||||
);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
typedef $$RemoteAlbumEntityTableProcessedTableManager
|
||||
= i0.ProcessedTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumEntityTable,
|
||||
i1.RemoteAlbumEntityData,
|
||||
i1.$$RemoteAlbumEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumEntityData, i1.$$RemoteAlbumEntityTableReferences),
|
||||
i1.RemoteAlbumEntityData,
|
||||
i0.PrefetchHooks Function({bool ownerId, bool thumbnailAssetId})>;
|
||||
|
||||
class $RemoteAlbumEntityTable extends i3.RemoteAlbumEntity
|
||||
with i0.TableInfo<$RemoteAlbumEntityTable, i1.RemoteAlbumEntityData> {
|
||||
@override
|
||||
final i0.GeneratedDatabase attachedDatabase;
|
||||
final String? _alias;
|
||||
$RemoteAlbumEntityTable(this.attachedDatabase, [this._alias]);
|
||||
static const i0.VerificationMeta _idMeta = const i0.VerificationMeta('id');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> id = i0.GeneratedColumn<String>(
|
||||
'id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string, requiredDuringInsert: true);
|
||||
static const i0.VerificationMeta _nameMeta =
|
||||
const i0.VerificationMeta('name');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> name = i0.GeneratedColumn<String>(
|
||||
'name', aliasedName, false,
|
||||
type: i0.DriftSqlType.string, requiredDuringInsert: true);
|
||||
static const i0.VerificationMeta _descriptionMeta =
|
||||
const i0.VerificationMeta('description');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> description =
|
||||
i0.GeneratedColumn<String>('description', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: const i4.Constant(''));
|
||||
static const i0.VerificationMeta _createdAtMeta =
|
||||
const i0.VerificationMeta('createdAt');
|
||||
@override
|
||||
late final i0.GeneratedColumn<DateTime> createdAt =
|
||||
i0.GeneratedColumn<DateTime>('created_at', aliasedName, false,
|
||||
type: i0.DriftSqlType.dateTime,
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: i4.currentDateAndTime);
|
||||
static const i0.VerificationMeta _updatedAtMeta =
|
||||
const i0.VerificationMeta('updatedAt');
|
||||
@override
|
||||
late final i0.GeneratedColumn<DateTime> updatedAt =
|
||||
i0.GeneratedColumn<DateTime>('updated_at', aliasedName, false,
|
||||
type: i0.DriftSqlType.dateTime,
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: i4.currentDateAndTime);
|
||||
static const i0.VerificationMeta _ownerIdMeta =
|
||||
const i0.VerificationMeta('ownerId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> ownerId = i0.GeneratedColumn<String>(
|
||||
'owner_id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: true,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES user_entity (id) ON DELETE CASCADE'));
|
||||
static const i0.VerificationMeta _thumbnailAssetIdMeta =
|
||||
const i0.VerificationMeta('thumbnailAssetId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> thumbnailAssetId =
|
||||
i0.GeneratedColumn<String>('thumbnail_asset_id', aliasedName, true,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES remote_asset_entity (id) ON DELETE SET NULL'));
|
||||
static const i0.VerificationMeta _isActivityEnabledMeta =
|
||||
const i0.VerificationMeta('isActivityEnabled');
|
||||
@override
|
||||
late final i0.GeneratedColumn<bool> isActivityEnabled =
|
||||
i0.GeneratedColumn<bool>('is_activity_enabled', aliasedName, false,
|
||||
type: i0.DriftSqlType.bool,
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'CHECK ("is_activity_enabled" IN (0, 1))'),
|
||||
defaultValue: const i4.Constant(true));
|
||||
@override
|
||||
late final i0.GeneratedColumnWithTypeConverter<i2.AlbumAssetOrder, int>
|
||||
order = i0.GeneratedColumn<int>('order', aliasedName, false,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: true)
|
||||
.withConverter<i2.AlbumAssetOrder>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder);
|
||||
@override
|
||||
List<i0.GeneratedColumn> get $columns => [
|
||||
id,
|
||||
name,
|
||||
description,
|
||||
createdAt,
|
||||
updatedAt,
|
||||
ownerId,
|
||||
thumbnailAssetId,
|
||||
isActivityEnabled,
|
||||
order
|
||||
];
|
||||
@override
|
||||
String get aliasedName => _alias ?? actualTableName;
|
||||
@override
|
||||
String get actualTableName => $name;
|
||||
static const String $name = 'remote_album_entity';
|
||||
@override
|
||||
i0.VerificationContext validateIntegrity(
|
||||
i0.Insertable<i1.RemoteAlbumEntityData> instance,
|
||||
{bool isInserting = false}) {
|
||||
final context = i0.VerificationContext();
|
||||
final data = instance.toColumns(true);
|
||||
if (data.containsKey('id')) {
|
||||
context.handle(_idMeta, id.isAcceptableOrUnknown(data['id']!, _idMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_idMeta);
|
||||
}
|
||||
if (data.containsKey('name')) {
|
||||
context.handle(
|
||||
_nameMeta, name.isAcceptableOrUnknown(data['name']!, _nameMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_nameMeta);
|
||||
}
|
||||
if (data.containsKey('description')) {
|
||||
context.handle(
|
||||
_descriptionMeta,
|
||||
description.isAcceptableOrUnknown(
|
||||
data['description']!, _descriptionMeta));
|
||||
}
|
||||
if (data.containsKey('created_at')) {
|
||||
context.handle(_createdAtMeta,
|
||||
createdAt.isAcceptableOrUnknown(data['created_at']!, _createdAtMeta));
|
||||
}
|
||||
if (data.containsKey('updated_at')) {
|
||||
context.handle(_updatedAtMeta,
|
||||
updatedAt.isAcceptableOrUnknown(data['updated_at']!, _updatedAtMeta));
|
||||
}
|
||||
if (data.containsKey('owner_id')) {
|
||||
context.handle(_ownerIdMeta,
|
||||
ownerId.isAcceptableOrUnknown(data['owner_id']!, _ownerIdMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_ownerIdMeta);
|
||||
}
|
||||
if (data.containsKey('thumbnail_asset_id')) {
|
||||
context.handle(
|
||||
_thumbnailAssetIdMeta,
|
||||
thumbnailAssetId.isAcceptableOrUnknown(
|
||||
data['thumbnail_asset_id']!, _thumbnailAssetIdMeta));
|
||||
}
|
||||
if (data.containsKey('is_activity_enabled')) {
|
||||
context.handle(
|
||||
_isActivityEnabledMeta,
|
||||
isActivityEnabled.isAcceptableOrUnknown(
|
||||
data['is_activity_enabled']!, _isActivityEnabledMeta));
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<i0.GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
i1.RemoteAlbumEntityData map(Map<String, dynamic> data,
|
||||
{String? tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
|
||||
return i1.RemoteAlbumEntityData(
|
||||
id: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}id'])!,
|
||||
name: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}name'])!,
|
||||
description: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}description'])!,
|
||||
createdAt: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.dateTime, data['${effectivePrefix}created_at'])!,
|
||||
updatedAt: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.dateTime, data['${effectivePrefix}updated_at'])!,
|
||||
ownerId: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}owner_id'])!,
|
||||
thumbnailAssetId: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.string, data['${effectivePrefix}thumbnail_asset_id']),
|
||||
isActivityEnabled: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.bool, data['${effectivePrefix}is_activity_enabled'])!,
|
||||
order: i1.$RemoteAlbumEntityTable.$converterorder.fromSql(attachedDatabase
|
||||
.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}order'])!),
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
$RemoteAlbumEntityTable createAlias(String alias) {
|
||||
return $RemoteAlbumEntityTable(attachedDatabase, alias);
|
||||
}
|
||||
|
||||
static i0.JsonTypeConverter2<i2.AlbumAssetOrder, int, int> $converterorder =
|
||||
const i0.EnumIndexConverter<i2.AlbumAssetOrder>(
|
||||
i2.AlbumAssetOrder.values);
|
||||
@override
|
||||
bool get withoutRowId => true;
|
||||
@override
|
||||
bool get isStrict => true;
|
||||
}
|
||||
|
||||
class RemoteAlbumEntityData extends i0.DataClass
|
||||
implements i0.Insertable<i1.RemoteAlbumEntityData> {
|
||||
final String id;
|
||||
final String name;
|
||||
final String description;
|
||||
final DateTime createdAt;
|
||||
final DateTime updatedAt;
|
||||
final String ownerId;
|
||||
final String? thumbnailAssetId;
|
||||
final bool isActivityEnabled;
|
||||
final i2.AlbumAssetOrder order;
|
||||
const RemoteAlbumEntityData(
|
||||
{required this.id,
|
||||
required this.name,
|
||||
required this.description,
|
||||
required this.createdAt,
|
||||
required this.updatedAt,
|
||||
required this.ownerId,
|
||||
this.thumbnailAssetId,
|
||||
required this.isActivityEnabled,
|
||||
required this.order});
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
map['id'] = i0.Variable<String>(id);
|
||||
map['name'] = i0.Variable<String>(name);
|
||||
map['description'] = i0.Variable<String>(description);
|
||||
map['created_at'] = i0.Variable<DateTime>(createdAt);
|
||||
map['updated_at'] = i0.Variable<DateTime>(updatedAt);
|
||||
map['owner_id'] = i0.Variable<String>(ownerId);
|
||||
if (!nullToAbsent || thumbnailAssetId != null) {
|
||||
map['thumbnail_asset_id'] = i0.Variable<String>(thumbnailAssetId);
|
||||
}
|
||||
map['is_activity_enabled'] = i0.Variable<bool>(isActivityEnabled);
|
||||
{
|
||||
map['order'] = i0.Variable<int>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder.toSql(order));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
factory RemoteAlbumEntityData.fromJson(Map<String, dynamic> json,
|
||||
{i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return RemoteAlbumEntityData(
|
||||
id: serializer.fromJson<String>(json['id']),
|
||||
name: serializer.fromJson<String>(json['name']),
|
||||
description: serializer.fromJson<String>(json['description']),
|
||||
createdAt: serializer.fromJson<DateTime>(json['createdAt']),
|
||||
updatedAt: serializer.fromJson<DateTime>(json['updatedAt']),
|
||||
ownerId: serializer.fromJson<String>(json['ownerId']),
|
||||
thumbnailAssetId: serializer.fromJson<String?>(json['thumbnailAssetId']),
|
||||
isActivityEnabled: serializer.fromJson<bool>(json['isActivityEnabled']),
|
||||
order: i1.$RemoteAlbumEntityTable.$converterorder
|
||||
.fromJson(serializer.fromJson<int>(json['order'])),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return <String, dynamic>{
|
||||
'id': serializer.toJson<String>(id),
|
||||
'name': serializer.toJson<String>(name),
|
||||
'description': serializer.toJson<String>(description),
|
||||
'createdAt': serializer.toJson<DateTime>(createdAt),
|
||||
'updatedAt': serializer.toJson<DateTime>(updatedAt),
|
||||
'ownerId': serializer.toJson<String>(ownerId),
|
||||
'thumbnailAssetId': serializer.toJson<String?>(thumbnailAssetId),
|
||||
'isActivityEnabled': serializer.toJson<bool>(isActivityEnabled),
|
||||
'order': serializer.toJson<int>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder.toJson(order)),
|
||||
};
|
||||
}
|
||||
|
||||
i1.RemoteAlbumEntityData copyWith(
|
||||
{String? id,
|
||||
String? name,
|
||||
String? description,
|
||||
DateTime? createdAt,
|
||||
DateTime? updatedAt,
|
||||
String? ownerId,
|
||||
i0.Value<String?> thumbnailAssetId = const i0.Value.absent(),
|
||||
bool? isActivityEnabled,
|
||||
i2.AlbumAssetOrder? order}) =>
|
||||
i1.RemoteAlbumEntityData(
|
||||
id: id ?? this.id,
|
||||
name: name ?? this.name,
|
||||
description: description ?? this.description,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
updatedAt: updatedAt ?? this.updatedAt,
|
||||
ownerId: ownerId ?? this.ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId.present
|
||||
? thumbnailAssetId.value
|
||||
: this.thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled ?? this.isActivityEnabled,
|
||||
order: order ?? this.order,
|
||||
);
|
||||
RemoteAlbumEntityData copyWithCompanion(i1.RemoteAlbumEntityCompanion data) {
|
||||
return RemoteAlbumEntityData(
|
||||
id: data.id.present ? data.id.value : this.id,
|
||||
name: data.name.present ? data.name.value : this.name,
|
||||
description:
|
||||
data.description.present ? data.description.value : this.description,
|
||||
createdAt: data.createdAt.present ? data.createdAt.value : this.createdAt,
|
||||
updatedAt: data.updatedAt.present ? data.updatedAt.value : this.updatedAt,
|
||||
ownerId: data.ownerId.present ? data.ownerId.value : this.ownerId,
|
||||
thumbnailAssetId: data.thumbnailAssetId.present
|
||||
? data.thumbnailAssetId.value
|
||||
: this.thumbnailAssetId,
|
||||
isActivityEnabled: data.isActivityEnabled.present
|
||||
? data.isActivityEnabled.value
|
||||
: this.isActivityEnabled,
|
||||
order: data.order.present ? data.order.value : this.order,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumEntityData(')
|
||||
..write('id: $id, ')
|
||||
..write('name: $name, ')
|
||||
..write('description: $description, ')
|
||||
..write('createdAt: $createdAt, ')
|
||||
..write('updatedAt: $updatedAt, ')
|
||||
..write('ownerId: $ownerId, ')
|
||||
..write('thumbnailAssetId: $thumbnailAssetId, ')
|
||||
..write('isActivityEnabled: $isActivityEnabled, ')
|
||||
..write('order: $order')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => Object.hash(id, name, description, createdAt, updatedAt,
|
||||
ownerId, thumbnailAssetId, isActivityEnabled, order);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
(other is i1.RemoteAlbumEntityData &&
|
||||
other.id == this.id &&
|
||||
other.name == this.name &&
|
||||
other.description == this.description &&
|
||||
other.createdAt == this.createdAt &&
|
||||
other.updatedAt == this.updatedAt &&
|
||||
other.ownerId == this.ownerId &&
|
||||
other.thumbnailAssetId == this.thumbnailAssetId &&
|
||||
other.isActivityEnabled == this.isActivityEnabled &&
|
||||
other.order == this.order);
|
||||
}
|
||||
|
||||
class RemoteAlbumEntityCompanion
|
||||
extends i0.UpdateCompanion<i1.RemoteAlbumEntityData> {
|
||||
final i0.Value<String> id;
|
||||
final i0.Value<String> name;
|
||||
final i0.Value<String> description;
|
||||
final i0.Value<DateTime> createdAt;
|
||||
final i0.Value<DateTime> updatedAt;
|
||||
final i0.Value<String> ownerId;
|
||||
final i0.Value<String?> thumbnailAssetId;
|
||||
final i0.Value<bool> isActivityEnabled;
|
||||
final i0.Value<i2.AlbumAssetOrder> order;
|
||||
const RemoteAlbumEntityCompanion({
|
||||
this.id = const i0.Value.absent(),
|
||||
this.name = const i0.Value.absent(),
|
||||
this.description = const i0.Value.absent(),
|
||||
this.createdAt = const i0.Value.absent(),
|
||||
this.updatedAt = const i0.Value.absent(),
|
||||
this.ownerId = const i0.Value.absent(),
|
||||
this.thumbnailAssetId = const i0.Value.absent(),
|
||||
this.isActivityEnabled = const i0.Value.absent(),
|
||||
this.order = const i0.Value.absent(),
|
||||
});
|
||||
RemoteAlbumEntityCompanion.insert({
|
||||
required String id,
|
||||
required String name,
|
||||
this.description = const i0.Value.absent(),
|
||||
this.createdAt = const i0.Value.absent(),
|
||||
this.updatedAt = const i0.Value.absent(),
|
||||
required String ownerId,
|
||||
this.thumbnailAssetId = const i0.Value.absent(),
|
||||
this.isActivityEnabled = const i0.Value.absent(),
|
||||
required i2.AlbumAssetOrder order,
|
||||
}) : id = i0.Value(id),
|
||||
name = i0.Value(name),
|
||||
ownerId = i0.Value(ownerId),
|
||||
order = i0.Value(order);
|
||||
static i0.Insertable<i1.RemoteAlbumEntityData> custom({
|
||||
i0.Expression<String>? id,
|
||||
i0.Expression<String>? name,
|
||||
i0.Expression<String>? description,
|
||||
i0.Expression<DateTime>? createdAt,
|
||||
i0.Expression<DateTime>? updatedAt,
|
||||
i0.Expression<String>? ownerId,
|
||||
i0.Expression<String>? thumbnailAssetId,
|
||||
i0.Expression<bool>? isActivityEnabled,
|
||||
i0.Expression<int>? order,
|
||||
}) {
|
||||
return i0.RawValuesInsertable({
|
||||
if (id != null) 'id': id,
|
||||
if (name != null) 'name': name,
|
||||
if (description != null) 'description': description,
|
||||
if (createdAt != null) 'created_at': createdAt,
|
||||
if (updatedAt != null) 'updated_at': updatedAt,
|
||||
if (ownerId != null) 'owner_id': ownerId,
|
||||
if (thumbnailAssetId != null) 'thumbnail_asset_id': thumbnailAssetId,
|
||||
if (isActivityEnabled != null) 'is_activity_enabled': isActivityEnabled,
|
||||
if (order != null) 'order': order,
|
||||
});
|
||||
}
|
||||
|
||||
i1.RemoteAlbumEntityCompanion copyWith(
|
||||
{i0.Value<String>? id,
|
||||
i0.Value<String>? name,
|
||||
i0.Value<String>? description,
|
||||
i0.Value<DateTime>? createdAt,
|
||||
i0.Value<DateTime>? updatedAt,
|
||||
i0.Value<String>? ownerId,
|
||||
i0.Value<String?>? thumbnailAssetId,
|
||||
i0.Value<bool>? isActivityEnabled,
|
||||
i0.Value<i2.AlbumAssetOrder>? order}) {
|
||||
return i1.RemoteAlbumEntityCompanion(
|
||||
id: id ?? this.id,
|
||||
name: name ?? this.name,
|
||||
description: description ?? this.description,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
updatedAt: updatedAt ?? this.updatedAt,
|
||||
ownerId: ownerId ?? this.ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId ?? this.thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled ?? this.isActivityEnabled,
|
||||
order: order ?? this.order,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
if (id.present) {
|
||||
map['id'] = i0.Variable<String>(id.value);
|
||||
}
|
||||
if (name.present) {
|
||||
map['name'] = i0.Variable<String>(name.value);
|
||||
}
|
||||
if (description.present) {
|
||||
map['description'] = i0.Variable<String>(description.value);
|
||||
}
|
||||
if (createdAt.present) {
|
||||
map['created_at'] = i0.Variable<DateTime>(createdAt.value);
|
||||
}
|
||||
if (updatedAt.present) {
|
||||
map['updated_at'] = i0.Variable<DateTime>(updatedAt.value);
|
||||
}
|
||||
if (ownerId.present) {
|
||||
map['owner_id'] = i0.Variable<String>(ownerId.value);
|
||||
}
|
||||
if (thumbnailAssetId.present) {
|
||||
map['thumbnail_asset_id'] = i0.Variable<String>(thumbnailAssetId.value);
|
||||
}
|
||||
if (isActivityEnabled.present) {
|
||||
map['is_activity_enabled'] = i0.Variable<bool>(isActivityEnabled.value);
|
||||
}
|
||||
if (order.present) {
|
||||
map['order'] = i0.Variable<int>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder.toSql(order.value));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumEntityCompanion(')
|
||||
..write('id: $id, ')
|
||||
..write('name: $name, ')
|
||||
..write('description: $description, ')
|
||||
..write('createdAt: $createdAt, ')
|
||||
..write('updatedAt: $updatedAt, ')
|
||||
..write('ownerId: $ownerId, ')
|
||||
..write('thumbnailAssetId: $thumbnailAssetId, ')
|
||||
..write('isActivityEnabled: $isActivityEnabled, ')
|
||||
..write('order: $order')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,17 +0,0 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class RemoteAlbumAssetEntity extends Table with DriftDefaultsMixin {
const RemoteAlbumAssetEntity();

TextColumn get assetId =>
text().references(RemoteAssetEntity, #id, onDelete: KeyAction.cascade)();

TextColumn get albumId =>
text().references(RemoteAlbumEntity, #id, onDelete: KeyAction.cascade)();

@override
Set<Column> get primaryKey => {assetId, albumId};
}
@@ -1,565 +0,0 @@
|
||||
// dart format width=80
|
||||
// ignore_for_file: type=lint
|
||||
import 'package:drift/drift.dart' as i0;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
|
||||
as i1;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.dart'
|
||||
as i2;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
|
||||
as i3;
|
||||
import 'package:drift/internal/modular.dart' as i4;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
as i5;
|
||||
|
||||
typedef $$RemoteAlbumAssetEntityTableCreateCompanionBuilder
|
||||
= i1.RemoteAlbumAssetEntityCompanion Function({
|
||||
required String assetId,
|
||||
required String albumId,
|
||||
});
|
||||
typedef $$RemoteAlbumAssetEntityTableUpdateCompanionBuilder
|
||||
= i1.RemoteAlbumAssetEntityCompanion Function({
|
||||
i0.Value<String> assetId,
|
||||
i0.Value<String> albumId,
|
||||
});
|
||||
|
||||
final class $$RemoteAlbumAssetEntityTableReferences extends i0.BaseReferences<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData> {
|
||||
$$RemoteAlbumAssetEntityTableReferences(
|
||||
super.$_db, super.$_table, super.$_typedResult);
|
||||
|
||||
static i3.$RemoteAssetEntityTable _assetIdTable(i0.GeneratedDatabase db) =>
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumAssetEntityTable>(
|
||||
'remote_album_asset_entity')
|
||||
.assetId,
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.id));
|
||||
|
||||
i3.$$RemoteAssetEntityTableProcessedTableManager get assetId {
|
||||
final $_column = $_itemColumn<String>('asset_id')!;
|
||||
|
||||
final manager = i3
|
||||
.$$RemoteAssetEntityTableTableManager(
|
||||
$_db,
|
||||
i4.ReadDatabaseContainer($_db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_assetIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
|
||||
static i5.$RemoteAlbumEntityTable _albumIdTable(i0.GeneratedDatabase db) =>
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumAssetEntityTable>(
|
||||
'remote_album_asset_entity')
|
||||
.albumId,
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.id));
|
||||
|
||||
i5.$$RemoteAlbumEntityTableProcessedTableManager get albumId {
|
||||
final $_column = $_itemColumn<String>('album_id')!;
|
||||
|
||||
final manager = i5
|
||||
.$$RemoteAlbumEntityTableTableManager(
|
||||
$_db,
|
||||
i4.ReadDatabaseContainer($_db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_albumIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableFilterComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumAssetEntityTable> {
|
||||
$$RemoteAlbumAssetEntityTableFilterComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i3.$$RemoteAssetEntityTableFilterComposer get assetId {
|
||||
final i3.$$RemoteAssetEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.assetId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i3.$$RemoteAssetEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i5.$$RemoteAlbumEntityTableFilterComposer get albumId {
|
||||
final i5.$$RemoteAlbumEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$RemoteAlbumEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableOrderingComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumAssetEntityTable> {
|
||||
$$RemoteAlbumAssetEntityTableOrderingComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i3.$$RemoteAssetEntityTableOrderingComposer get assetId {
|
||||
final i3.$$RemoteAssetEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.assetId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i3.$$RemoteAssetEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i5.$$RemoteAlbumEntityTableOrderingComposer get albumId {
|
||||
final i5.$$RemoteAlbumEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$RemoteAlbumEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>(
|
||||
'remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableAnnotationComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumAssetEntityTable> {
|
||||
$$RemoteAlbumAssetEntityTableAnnotationComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i3.$$RemoteAssetEntityTableAnnotationComposer get assetId {
|
||||
final i3.$$RemoteAssetEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.assetId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i3.$$RemoteAssetEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i5.$$RemoteAlbumEntityTableAnnotationComposer get albumId {
|
||||
final i5.$$RemoteAlbumEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$RemoteAlbumEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>(
|
||||
'remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i1.$$RemoteAlbumAssetEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumAssetEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumAssetEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumAssetEntityData, i1.$$RemoteAlbumAssetEntityTableReferences),
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i0.PrefetchHooks Function({bool assetId, bool albumId})> {
|
||||
$$RemoteAlbumAssetEntityTableTableManager(
|
||||
i0.GeneratedDatabase db, i1.$RemoteAlbumAssetEntityTable table)
|
||||
: super(i0.TableManagerState(
|
||||
db: db,
|
||||
table: table,
|
||||
createFilteringComposer: () =>
|
||||
i1.$$RemoteAlbumAssetEntityTableFilterComposer(
|
||||
$db: db, $table: table),
|
||||
createOrderingComposer: () =>
|
||||
i1.$$RemoteAlbumAssetEntityTableOrderingComposer(
|
||||
$db: db, $table: table),
|
||||
createComputedFieldComposer: () =>
|
||||
i1.$$RemoteAlbumAssetEntityTableAnnotationComposer(
|
||||
$db: db, $table: table),
|
||||
updateCompanionCallback: ({
|
||||
i0.Value<String> assetId = const i0.Value.absent(),
|
||||
i0.Value<String> albumId = const i0.Value.absent(),
|
||||
}) =>
|
||||
i1.RemoteAlbumAssetEntityCompanion(
|
||||
assetId: assetId,
|
||||
albumId: albumId,
|
||||
),
|
||||
createCompanionCallback: ({
|
||||
required String assetId,
|
||||
required String albumId,
|
||||
}) =>
|
||||
i1.RemoteAlbumAssetEntityCompanion.insert(
|
||||
assetId: assetId,
|
||||
albumId: albumId,
|
||||
),
|
||||
withReferenceMapper: (p0) => p0
|
||||
.map((e) => (
|
||||
e.readTable(table),
|
||||
i1.$$RemoteAlbumAssetEntityTableReferences(db, table, e)
|
||||
))
|
||||
.toList(),
|
||||
prefetchHooksCallback: ({assetId = false, albumId = false}) {
|
||||
return i0.PrefetchHooks(
|
||||
db: db,
|
||||
explicitlyWatchedTables: [],
|
||||
addJoins: <
|
||||
T extends i0.TableManagerState<
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic>>(state) {
|
||||
if (assetId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.assetId,
|
||||
referencedTable: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._assetIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._assetIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
if (albumId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.albumId,
|
||||
referencedTable: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._albumIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._albumIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
|
||||
return state;
|
||||
},
|
||||
getPrefetchedDataCallback: (items) async {
|
||||
return [];
|
||||
},
|
||||
);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
typedef $$RemoteAlbumAssetEntityTableProcessedTableManager
|
||||
= i0.ProcessedTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i1.$$RemoteAlbumAssetEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumAssetEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumAssetEntityTableUpdateCompanionBuilder,
|
||||
(
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
),
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i0.PrefetchHooks Function({bool assetId, bool albumId})>;
|
||||
|
||||
class $RemoteAlbumAssetEntityTable extends i2.RemoteAlbumAssetEntity
|
||||
with
|
||||
i0.TableInfo<$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData> {
|
||||
@override
|
||||
final i0.GeneratedDatabase attachedDatabase;
|
||||
final String? _alias;
|
||||
$RemoteAlbumAssetEntityTable(this.attachedDatabase, [this._alias]);
|
||||
static const i0.VerificationMeta _assetIdMeta =
|
||||
const i0.VerificationMeta('assetId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> assetId = i0.GeneratedColumn<String>(
|
||||
'asset_id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: true,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES remote_asset_entity (id) ON DELETE CASCADE'));
|
||||
static const i0.VerificationMeta _albumIdMeta =
|
||||
const i0.VerificationMeta('albumId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> albumId = i0.GeneratedColumn<String>(
|
||||
'album_id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: true,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES remote_album_entity (id) ON DELETE CASCADE'));
|
||||
@override
|
||||
List<i0.GeneratedColumn> get $columns => [assetId, albumId];
|
||||
@override
|
||||
String get aliasedName => _alias ?? actualTableName;
|
||||
@override
|
||||
String get actualTableName => $name;
|
||||
static const String $name = 'remote_album_asset_entity';
|
||||
@override
|
||||
i0.VerificationContext validateIntegrity(
|
||||
i0.Insertable<i1.RemoteAlbumAssetEntityData> instance,
|
||||
{bool isInserting = false}) {
|
||||
final context = i0.VerificationContext();
|
||||
final data = instance.toColumns(true);
|
||||
if (data.containsKey('asset_id')) {
|
||||
context.handle(_assetIdMeta,
|
||||
assetId.isAcceptableOrUnknown(data['asset_id']!, _assetIdMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_assetIdMeta);
|
||||
}
|
||||
if (data.containsKey('album_id')) {
|
||||
context.handle(_albumIdMeta,
|
||||
albumId.isAcceptableOrUnknown(data['album_id']!, _albumIdMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_albumIdMeta);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<i0.GeneratedColumn> get $primaryKey => {assetId, albumId};
|
||||
@override
|
||||
i1.RemoteAlbumAssetEntityData map(Map<String, dynamic> data,
|
||||
{String? tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
|
||||
return i1.RemoteAlbumAssetEntityData(
|
||||
assetId: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}asset_id'])!,
|
||||
albumId: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}album_id'])!,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
$RemoteAlbumAssetEntityTable createAlias(String alias) {
|
||||
return $RemoteAlbumAssetEntityTable(attachedDatabase, alias);
|
||||
}
|
||||
|
||||
@override
|
||||
bool get withoutRowId => true;
|
||||
@override
|
||||
bool get isStrict => true;
|
||||
}
|
||||
|
||||
class RemoteAlbumAssetEntityData extends i0.DataClass
|
||||
implements i0.Insertable<i1.RemoteAlbumAssetEntityData> {
|
||||
final String assetId;
|
||||
final String albumId;
|
||||
const RemoteAlbumAssetEntityData(
|
||||
{required this.assetId, required this.albumId});
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
map['asset_id'] = i0.Variable<String>(assetId);
|
||||
map['album_id'] = i0.Variable<String>(albumId);
|
||||
return map;
|
||||
}
|
||||
|
||||
factory RemoteAlbumAssetEntityData.fromJson(Map<String, dynamic> json,
|
||||
{i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return RemoteAlbumAssetEntityData(
|
||||
assetId: serializer.fromJson<String>(json['assetId']),
|
||||
albumId: serializer.fromJson<String>(json['albumId']),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return <String, dynamic>{
|
||||
'assetId': serializer.toJson<String>(assetId),
|
||||
'albumId': serializer.toJson<String>(albumId),
|
||||
};
|
||||
}
|
||||
|
||||
i1.RemoteAlbumAssetEntityData copyWith({String? assetId, String? albumId}) =>
|
||||
i1.RemoteAlbumAssetEntityData(
|
||||
assetId: assetId ?? this.assetId,
|
||||
albumId: albumId ?? this.albumId,
|
||||
);
|
||||
RemoteAlbumAssetEntityData copyWithCompanion(
|
||||
i1.RemoteAlbumAssetEntityCompanion data) {
|
||||
return RemoteAlbumAssetEntityData(
|
||||
assetId: data.assetId.present ? data.assetId.value : this.assetId,
|
||||
albumId: data.albumId.present ? data.albumId.value : this.albumId,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumAssetEntityData(')
|
||||
..write('assetId: $assetId, ')
|
||||
..write('albumId: $albumId')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => Object.hash(assetId, albumId);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
(other is i1.RemoteAlbumAssetEntityData &&
|
||||
other.assetId == this.assetId &&
|
||||
other.albumId == this.albumId);
|
||||
}
|
||||
|
||||
class RemoteAlbumAssetEntityCompanion
|
||||
extends i0.UpdateCompanion<i1.RemoteAlbumAssetEntityData> {
|
||||
final i0.Value<String> assetId;
|
||||
final i0.Value<String> albumId;
|
||||
const RemoteAlbumAssetEntityCompanion({
|
||||
this.assetId = const i0.Value.absent(),
|
||||
this.albumId = const i0.Value.absent(),
|
||||
});
|
||||
RemoteAlbumAssetEntityCompanion.insert({
|
||||
required String assetId,
|
||||
required String albumId,
|
||||
}) : assetId = i0.Value(assetId),
|
||||
albumId = i0.Value(albumId);
|
||||
static i0.Insertable<i1.RemoteAlbumAssetEntityData> custom({
|
||||
i0.Expression<String>? assetId,
|
||||
i0.Expression<String>? albumId,
|
||||
}) {
|
||||
return i0.RawValuesInsertable({
|
||||
if (assetId != null) 'asset_id': assetId,
|
||||
if (albumId != null) 'album_id': albumId,
|
||||
});
|
||||
}
|
||||
|
||||
i1.RemoteAlbumAssetEntityCompanion copyWith(
|
||||
{i0.Value<String>? assetId, i0.Value<String>? albumId}) {
|
||||
return i1.RemoteAlbumAssetEntityCompanion(
|
||||
assetId: assetId ?? this.assetId,
|
||||
albumId: albumId ?? this.albumId,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
if (assetId.present) {
|
||||
map['asset_id'] = i0.Variable<String>(assetId.value);
|
||||
}
|
||||
if (albumId.present) {
|
||||
map['album_id'] = i0.Variable<String>(albumId.value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumAssetEntityCompanion(')
|
||||
..write('assetId: $assetId, ')
|
||||
..write('albumId: $albumId')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,20 +0,0 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class RemoteAlbumUserEntity extends Table with DriftDefaultsMixin {
  const RemoteAlbumUserEntity();

  TextColumn get albumId =>
      text().references(RemoteAlbumEntity, #id, onDelete: KeyAction.cascade)();

  TextColumn get userId =>
      text().references(UserEntity, #id, onDelete: KeyAction.cascade)();

  IntColumn get role => intEnum<AlbumUserRole>()();

  @override
  Set<Column> get primaryKey => {albumId, userId};
}
@@ -1,618 +0,0 @@
|
||||
// dart format width=80
|
||||
// ignore_for_file: type=lint
|
||||
import 'package:drift/drift.dart' as i0;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
|
||||
as i1;
|
||||
import 'package:immich_mobile/domain/models/album/album.model.dart' as i2;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.dart'
|
||||
as i3;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
as i4;
|
||||
import 'package:drift/internal/modular.dart' as i5;
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'
|
||||
as i6;
|
||||
|
||||
typedef $$RemoteAlbumUserEntityTableCreateCompanionBuilder
|
||||
= i1.RemoteAlbumUserEntityCompanion Function({
|
||||
required String albumId,
|
||||
required String userId,
|
||||
required i2.AlbumUserRole role,
|
||||
});
|
||||
typedef $$RemoteAlbumUserEntityTableUpdateCompanionBuilder
|
||||
= i1.RemoteAlbumUserEntityCompanion Function({
|
||||
i0.Value<String> albumId,
|
||||
i0.Value<String> userId,
|
||||
i0.Value<i2.AlbumUserRole> role,
|
||||
});
|
||||
|
||||
final class $$RemoteAlbumUserEntityTableReferences extends i0.BaseReferences<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumUserEntityTable,
|
||||
i1.RemoteAlbumUserEntityData> {
|
||||
$$RemoteAlbumUserEntityTableReferences(
|
||||
super.$_db, super.$_table, super.$_typedResult);
|
||||
|
||||
static i4.$RemoteAlbumEntityTable _albumIdTable(i0.GeneratedDatabase db) =>
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumUserEntityTable>(
|
||||
'remote_album_user_entity')
|
||||
.albumId,
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.id));
|
||||
|
||||
i4.$$RemoteAlbumEntityTableProcessedTableManager get albumId {
|
||||
final $_column = $_itemColumn<String>('album_id')!;
|
||||
|
||||
final manager = i4
|
||||
.$$RemoteAlbumEntityTableTableManager(
|
||||
$_db,
|
||||
i5.ReadDatabaseContainer($_db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_albumIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
|
||||
static i6.$UserEntityTable _userIdTable(i0.GeneratedDatabase db) =>
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumUserEntityTable>(
|
||||
'remote_album_user_entity')
|
||||
.userId,
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity')
|
||||
.id));
|
||||
|
||||
i6.$$UserEntityTableProcessedTableManager get userId {
|
||||
final $_column = $_itemColumn<String>('user_id')!;
|
||||
|
||||
final manager = i6
|
||||
.$$UserEntityTableTableManager(
|
||||
$_db,
|
||||
i5.ReadDatabaseContainer($_db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_userIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumUserEntityTableFilterComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumUserEntityTable> {
|
||||
$$RemoteAlbumUserEntityTableFilterComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnWithTypeConverterFilters<i2.AlbumUserRole, i2.AlbumUserRole, int>
|
||||
get role => $composableBuilder(
|
||||
column: $table.role,
|
||||
builder: (column) => i0.ColumnWithTypeConverterFilters(column));
|
||||
|
||||
i4.$$RemoteAlbumEntityTableFilterComposer get albumId {
|
||||
final i4.$$RemoteAlbumEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i4.$$RemoteAlbumEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i6.$$UserEntityTableFilterComposer get userId {
|
||||
final i6.$$UserEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.userId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i6.$$UserEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumUserEntityTableOrderingComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumUserEntityTable> {
|
||||
$$RemoteAlbumUserEntityTableOrderingComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnOrderings<int> get role => $composableBuilder(
|
||||
column: $table.role, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i4.$$RemoteAlbumEntityTableOrderingComposer get albumId {
|
||||
final i4.$$RemoteAlbumEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i4.$$RemoteAlbumEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>(
|
||||
'remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i6.$$UserEntityTableOrderingComposer get userId {
|
||||
final i6.$$UserEntityTableOrderingComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.userId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i6.$$UserEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumUserEntityTableAnnotationComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumUserEntityTable> {
|
||||
$$RemoteAlbumUserEntityTableAnnotationComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.GeneratedColumnWithTypeConverter<i2.AlbumUserRole, int> get role =>
|
||||
$composableBuilder(column: $table.role, builder: (column) => column);
|
||||
|
||||
i4.$$RemoteAlbumEntityTableAnnotationComposer get albumId {
|
||||
final i4.$$RemoteAlbumEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i4.$$RemoteAlbumEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>(
|
||||
'remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i6.$$UserEntityTableAnnotationComposer get userId {
|
||||
final i6.$$UserEntityTableAnnotationComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.userId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i6.$$UserEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumUserEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumUserEntityTable,
|
||||
i1.RemoteAlbumUserEntityData,
|
||||
i1.$$RemoteAlbumUserEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumUserEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumUserEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumUserEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumUserEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumUserEntityData, i1.$$RemoteAlbumUserEntityTableReferences),
|
||||
i1.RemoteAlbumUserEntityData,
|
||||
i0.PrefetchHooks Function({bool albumId, bool userId})> {
|
||||
$$RemoteAlbumUserEntityTableTableManager(
|
||||
i0.GeneratedDatabase db, i1.$RemoteAlbumUserEntityTable table)
|
||||
: super(i0.TableManagerState(
|
||||
db: db,
|
||||
table: table,
|
||||
createFilteringComposer: () =>
|
||||
i1.$$RemoteAlbumUserEntityTableFilterComposer(
|
||||
$db: db, $table: table),
|
||||
createOrderingComposer: () =>
|
||||
i1.$$RemoteAlbumUserEntityTableOrderingComposer(
|
||||
$db: db, $table: table),
|
||||
createComputedFieldComposer: () =>
|
||||
i1.$$RemoteAlbumUserEntityTableAnnotationComposer(
|
||||
$db: db, $table: table),
|
||||
updateCompanionCallback: ({
|
||||
i0.Value<String> albumId = const i0.Value.absent(),
|
||||
i0.Value<String> userId = const i0.Value.absent(),
|
||||
i0.Value<i2.AlbumUserRole> role = const i0.Value.absent(),
|
||||
}) =>
|
||||
i1.RemoteAlbumUserEntityCompanion(
|
||||
albumId: albumId,
|
||||
userId: userId,
|
||||
role: role,
|
||||
),
|
||||
createCompanionCallback: ({
|
||||
required String albumId,
|
||||
required String userId,
|
||||
required i2.AlbumUserRole role,
|
||||
}) =>
|
||||
i1.RemoteAlbumUserEntityCompanion.insert(
|
||||
albumId: albumId,
|
||||
userId: userId,
|
||||
role: role,
|
||||
),
|
||||
withReferenceMapper: (p0) => p0
|
||||
.map((e) => (
|
||||
e.readTable(table),
|
||||
i1.$$RemoteAlbumUserEntityTableReferences(db, table, e)
|
||||
))
|
||||
.toList(),
|
||||
prefetchHooksCallback: ({albumId = false, userId = false}) {
|
||||
return i0.PrefetchHooks(
|
||||
db: db,
|
||||
explicitlyWatchedTables: [],
|
||||
addJoins: <
|
||||
T extends i0.TableManagerState<
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic>>(state) {
|
||||
if (albumId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.albumId,
|
||||
referencedTable: i1.$$RemoteAlbumUserEntityTableReferences
|
||||
._albumIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumUserEntityTableReferences
|
||||
._albumIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
if (userId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.userId,
|
||||
referencedTable: i1.$$RemoteAlbumUserEntityTableReferences
|
||||
._userIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumUserEntityTableReferences
|
||||
._userIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
|
||||
return state;
|
||||
},
|
||||
getPrefetchedDataCallback: (items) async {
|
||||
return [];
|
||||
},
|
||||
);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
typedef $$RemoteAlbumUserEntityTableProcessedTableManager
|
||||
= i0.ProcessedTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumUserEntityTable,
|
||||
i1.RemoteAlbumUserEntityData,
|
||||
i1.$$RemoteAlbumUserEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumUserEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumUserEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumUserEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumUserEntityTableUpdateCompanionBuilder,
|
||||
(
|
||||
i1.RemoteAlbumUserEntityData,
|
||||
i1.$$RemoteAlbumUserEntityTableReferences
|
||||
),
|
||||
i1.RemoteAlbumUserEntityData,
|
||||
i0.PrefetchHooks Function({bool albumId, bool userId})>;
|
||||
|
||||
class $RemoteAlbumUserEntityTable extends i3.RemoteAlbumUserEntity
|
||||
with
|
||||
i0
|
||||
.TableInfo<$RemoteAlbumUserEntityTable, i1.RemoteAlbumUserEntityData> {
|
||||
@override
|
||||
final i0.GeneratedDatabase attachedDatabase;
|
||||
final String? _alias;
|
||||
$RemoteAlbumUserEntityTable(this.attachedDatabase, [this._alias]);
|
||||
static const i0.VerificationMeta _albumIdMeta =
|
||||
const i0.VerificationMeta('albumId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> albumId = i0.GeneratedColumn<String>(
|
||||
'album_id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: true,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES remote_album_entity (id) ON DELETE CASCADE'));
|
||||
static const i0.VerificationMeta _userIdMeta =
|
||||
const i0.VerificationMeta('userId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> userId = i0.GeneratedColumn<String>(
|
||||
'user_id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: true,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES user_entity (id) ON DELETE CASCADE'));
|
||||
@override
|
||||
late final i0.GeneratedColumnWithTypeConverter<i2.AlbumUserRole, int> role =
|
||||
i0.GeneratedColumn<int>('role', aliasedName, false,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: true)
|
||||
.withConverter<i2.AlbumUserRole>(
|
||||
i1.$RemoteAlbumUserEntityTable.$converterrole);
|
||||
@override
|
||||
List<i0.GeneratedColumn> get $columns => [albumId, userId, role];
|
||||
@override
|
||||
String get aliasedName => _alias ?? actualTableName;
|
||||
@override
|
||||
String get actualTableName => $name;
|
||||
static const String $name = 'remote_album_user_entity';
|
||||
@override
|
||||
i0.VerificationContext validateIntegrity(
|
||||
i0.Insertable<i1.RemoteAlbumUserEntityData> instance,
|
||||
{bool isInserting = false}) {
|
||||
final context = i0.VerificationContext();
|
||||
final data = instance.toColumns(true);
|
||||
if (data.containsKey('album_id')) {
|
||||
context.handle(_albumIdMeta,
|
||||
albumId.isAcceptableOrUnknown(data['album_id']!, _albumIdMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_albumIdMeta);
|
||||
}
|
||||
if (data.containsKey('user_id')) {
|
||||
context.handle(_userIdMeta,
|
||||
userId.isAcceptableOrUnknown(data['user_id']!, _userIdMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_userIdMeta);
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<i0.GeneratedColumn> get $primaryKey => {albumId, userId};
|
||||
@override
|
||||
i1.RemoteAlbumUserEntityData map(Map<String, dynamic> data,
|
||||
{String? tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
|
||||
return i1.RemoteAlbumUserEntityData(
|
||||
albumId: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}album_id'])!,
|
||||
userId: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}user_id'])!,
|
||||
role: i1.$RemoteAlbumUserEntityTable.$converterrole.fromSql(
|
||||
attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}role'])!),
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
$RemoteAlbumUserEntityTable createAlias(String alias) {
|
||||
return $RemoteAlbumUserEntityTable(attachedDatabase, alias);
|
||||
}
|
||||
|
||||
static i0.JsonTypeConverter2<i2.AlbumUserRole, int, int> $converterrole =
|
||||
const i0.EnumIndexConverter<i2.AlbumUserRole>(i2.AlbumUserRole.values);
|
||||
@override
|
||||
bool get withoutRowId => true;
|
||||
@override
|
||||
bool get isStrict => true;
|
||||
}
|
||||
|
||||
class RemoteAlbumUserEntityData extends i0.DataClass
|
||||
implements i0.Insertable<i1.RemoteAlbumUserEntityData> {
|
||||
final String albumId;
|
||||
final String userId;
|
||||
final i2.AlbumUserRole role;
|
||||
const RemoteAlbumUserEntityData(
|
||||
{required this.albumId, required this.userId, required this.role});
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
map['album_id'] = i0.Variable<String>(albumId);
|
||||
map['user_id'] = i0.Variable<String>(userId);
|
||||
{
|
||||
map['role'] = i0.Variable<int>(
|
||||
i1.$RemoteAlbumUserEntityTable.$converterrole.toSql(role));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
factory RemoteAlbumUserEntityData.fromJson(Map<String, dynamic> json,
|
||||
{i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return RemoteAlbumUserEntityData(
|
||||
albumId: serializer.fromJson<String>(json['albumId']),
|
||||
userId: serializer.fromJson<String>(json['userId']),
|
||||
role: i1.$RemoteAlbumUserEntityTable.$converterrole
|
||||
.fromJson(serializer.fromJson<int>(json['role'])),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return <String, dynamic>{
|
||||
'albumId': serializer.toJson<String>(albumId),
|
||||
'userId': serializer.toJson<String>(userId),
|
||||
'role': serializer.toJson<int>(
|
||||
i1.$RemoteAlbumUserEntityTable.$converterrole.toJson(role)),
|
||||
};
|
||||
}
|
||||
|
||||
i1.RemoteAlbumUserEntityData copyWith(
|
||||
{String? albumId, String? userId, i2.AlbumUserRole? role}) =>
|
||||
i1.RemoteAlbumUserEntityData(
|
||||
albumId: albumId ?? this.albumId,
|
||||
userId: userId ?? this.userId,
|
||||
role: role ?? this.role,
|
||||
);
|
||||
RemoteAlbumUserEntityData copyWithCompanion(
|
||||
i1.RemoteAlbumUserEntityCompanion data) {
|
||||
return RemoteAlbumUserEntityData(
|
||||
albumId: data.albumId.present ? data.albumId.value : this.albumId,
|
||||
userId: data.userId.present ? data.userId.value : this.userId,
|
||||
role: data.role.present ? data.role.value : this.role,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumUserEntityData(')
|
||||
..write('albumId: $albumId, ')
|
||||
..write('userId: $userId, ')
|
||||
..write('role: $role')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => Object.hash(albumId, userId, role);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
(other is i1.RemoteAlbumUserEntityData &&
|
||||
other.albumId == this.albumId &&
|
||||
other.userId == this.userId &&
|
||||
other.role == this.role);
|
||||
}
|
||||
|
||||
class RemoteAlbumUserEntityCompanion
|
||||
extends i0.UpdateCompanion<i1.RemoteAlbumUserEntityData> {
|
||||
final i0.Value<String> albumId;
|
||||
final i0.Value<String> userId;
|
||||
final i0.Value<i2.AlbumUserRole> role;
|
||||
const RemoteAlbumUserEntityCompanion({
|
||||
this.albumId = const i0.Value.absent(),
|
||||
this.userId = const i0.Value.absent(),
|
||||
this.role = const i0.Value.absent(),
|
||||
});
|
||||
RemoteAlbumUserEntityCompanion.insert({
|
||||
required String albumId,
|
||||
required String userId,
|
||||
required i2.AlbumUserRole role,
|
||||
}) : albumId = i0.Value(albumId),
|
||||
userId = i0.Value(userId),
|
||||
role = i0.Value(role);
|
||||
static i0.Insertable<i1.RemoteAlbumUserEntityData> custom({
|
||||
i0.Expression<String>? albumId,
|
||||
i0.Expression<String>? userId,
|
||||
i0.Expression<int>? role,
|
||||
}) {
|
||||
return i0.RawValuesInsertable({
|
||||
if (albumId != null) 'album_id': albumId,
|
||||
if (userId != null) 'user_id': userId,
|
||||
if (role != null) 'role': role,
|
||||
});
|
||||
}
|
||||
|
||||
i1.RemoteAlbumUserEntityCompanion copyWith(
|
||||
{i0.Value<String>? albumId,
|
||||
i0.Value<String>? userId,
|
||||
i0.Value<i2.AlbumUserRole>? role}) {
|
||||
return i1.RemoteAlbumUserEntityCompanion(
|
||||
albumId: albumId ?? this.albumId,
|
||||
userId: userId ?? this.userId,
|
||||
role: role ?? this.role,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
if (albumId.present) {
|
||||
map['album_id'] = i0.Variable<String>(albumId.value);
|
||||
}
|
||||
if (userId.present) {
|
||||
map['user_id'] = i0.Variable<String>(userId.value);
|
||||
}
|
||||
if (role.present) {
|
||||
map['role'] = i0.Variable<int>(
|
||||
i1.$RemoteAlbumUserEntityTable.$converterrole.toSql(role.value));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumUserEntityCompanion(')
|
||||
..write('albumId: $albumId, ')
|
||||
..write('userId: $userId, ')
|
||||
..write('role: $role')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,5 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
@@ -35,22 +34,3 @@ class RemoteAssetEntity extends Table
  @override
  Set<Column> get primaryKey => {id};
}

extension RemoteAssetEntityDataDomainEx on RemoteAssetEntityData {
  RemoteAsset toDto() => RemoteAsset(
        id: id,
        name: name,
        ownerId: ownerId,
        checksum: checksum,
        type: type,
        createdAt: createdAt,
        updatedAt: updatedAt,
        durationInSeconds: durationInSeconds,
        isFavorite: isFavorite,
        height: height,
        width: width,
        thumbHash: thumbHash,
        visibility: visibility,
        localId: null,
      );
}

@@ -5,7 +5,7 @@ class ApiRepository {

  Future<T> checkNull<T>(Future<T?> future) async {
    final response = await future;
    if (response == null) throw const NoResponseDtoError();
    if (response == null) throw NoResponseDtoError();
    return response;
  }
}
@@ -8,9 +8,6 @@ import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.dart';
@@ -43,9 +40,6 @@ class IsarDatabaseRepository implements IDatabaseRepository {
    LocalAlbumAssetEntity,
    RemoteAssetEntity,
    RemoteExifEntity,
    RemoteAlbumEntity,
    RemoteAlbumAssetEntity,
    RemoteAlbumUserEntity,
  ],
  include: {
    'package:immich_mobile/infrastructure/entities/merged_asset.drift',
@@ -17,15 +17,9 @@ import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.d
|
||||
as i7;
|
||||
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
|
||||
as i8;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
as i9;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
|
||||
as i10;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
|
||||
as i11;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
as i12;
|
||||
import 'package:drift/internal/modular.dart' as i13;
|
||||
as i9;
|
||||
import 'package:drift/internal/modular.dart' as i10;
|
||||
|
||||
abstract class $Drift extends i0.GeneratedDatabase {
|
||||
$Drift(i0.QueryExecutor e) : super(e);
|
||||
@@ -45,14 +39,8 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
i7.$LocalAlbumAssetEntityTable(this);
|
||||
late final i8.$RemoteExifEntityTable remoteExifEntity =
|
||||
i8.$RemoteExifEntityTable(this);
|
||||
late final i9.$RemoteAlbumEntityTable remoteAlbumEntity =
|
||||
i9.$RemoteAlbumEntityTable(this);
|
||||
late final i10.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity =
|
||||
i10.$RemoteAlbumAssetEntityTable(this);
|
||||
late final i11.$RemoteAlbumUserEntityTable remoteAlbumUserEntity =
|
||||
i11.$RemoteAlbumUserEntityTable(this);
|
||||
i12.MergedAssetDrift get mergedAssetDrift => i13.ReadDatabaseContainer(this)
|
||||
.accessor<i12.MergedAssetDrift>(i12.MergedAssetDrift.new);
|
||||
i9.MergedAssetDrift get mergedAssetDrift => i10.ReadDatabaseContainer(this)
|
||||
.accessor<i9.MergedAssetDrift>(i9.MergedAssetDrift.new);
|
||||
@override
|
||||
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
|
||||
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
|
||||
@@ -68,10 +56,7 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
partnerEntity,
|
||||
localAlbumEntity,
|
||||
localAlbumAssetEntity,
|
||||
remoteExifEntity,
|
||||
remoteAlbumEntity,
|
||||
remoteAlbumAssetEntity,
|
||||
remoteAlbumUserEntity
|
||||
remoteExifEntity
|
||||
];
|
||||
@override
|
||||
i0.StreamQueryUpdateRules get streamUpdateRules =>
|
||||
@@ -129,52 +114,6 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
i0.TableUpdate('remote_exif_entity', kind: i0.UpdateKind.delete),
|
||||
],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName('user_entity',
|
||||
limitUpdateKind: i0.UpdateKind.delete),
|
||||
result: [
|
||||
i0.TableUpdate('remote_album_entity', kind: i0.UpdateKind.delete),
|
||||
],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName('remote_asset_entity',
|
||||
limitUpdateKind: i0.UpdateKind.delete),
|
||||
result: [
|
||||
i0.TableUpdate('remote_album_entity', kind: i0.UpdateKind.update),
|
||||
],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName('remote_asset_entity',
|
||||
limitUpdateKind: i0.UpdateKind.delete),
|
||||
result: [
|
||||
i0.TableUpdate('remote_album_asset_entity',
|
||||
kind: i0.UpdateKind.delete),
|
||||
],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName('remote_album_entity',
|
||||
limitUpdateKind: i0.UpdateKind.delete),
|
||||
result: [
|
||||
i0.TableUpdate('remote_album_asset_entity',
|
||||
kind: i0.UpdateKind.delete),
|
||||
],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName('remote_album_entity',
|
||||
limitUpdateKind: i0.UpdateKind.delete),
|
||||
result: [
|
||||
i0.TableUpdate('remote_album_user_entity',
|
||||
kind: i0.UpdateKind.delete),
|
||||
],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName('user_entity',
|
||||
limitUpdateKind: i0.UpdateKind.delete),
|
||||
result: [
|
||||
i0.TableUpdate('remote_album_user_entity',
|
||||
kind: i0.UpdateKind.delete),
|
||||
],
|
||||
),
|
||||
],
|
||||
);
|
||||
@override
|
||||
@@ -201,11 +140,4 @@ class $DriftManager {
|
||||
.$$LocalAlbumAssetEntityTableTableManager(_db, _db.localAlbumAssetEntity);
|
||||
i8.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
|
||||
i8.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
|
||||
i9.$$RemoteAlbumEntityTableTableManager get remoteAlbumEntity =>
|
||||
i9.$$RemoteAlbumEntityTableTableManager(_db, _db.remoteAlbumEntity);
|
||||
i10.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
|
||||
i10.$$RemoteAlbumAssetEntityTableTableManager(
|
||||
_db, _db.remoteAlbumAssetEntity);
|
||||
i11.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i11
|
||||
.$$RemoteAlbumUserEntityTableTableManager(_db, _db.remoteAlbumUserEntity);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/local_album.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart';
@@ -1,69 +0,0 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';

enum SortRemoteAlbumsBy { id }

class DriftRemoteAlbumRepository extends DriftDatabaseRepository {
  final Drift _db;
  const DriftRemoteAlbumRepository(this._db) : super(_db);

  Future<List<Album>> getAll({Set<SortRemoteAlbumsBy> sortBy = const {}}) {
    final assetCount = _db.remoteAlbumAssetEntity.assetId.count();

    final query = _db.remoteAlbumEntity.select().join([
      leftOuterJoin(
        _db.remoteAlbumAssetEntity,
        _db.remoteAlbumAssetEntity.albumId.equalsExp(_db.remoteAlbumEntity.id),
        useColumns: false,
      ),
      leftOuterJoin(
        _db.userEntity,
        _db.userEntity.id.equalsExp(_db.remoteAlbumEntity.ownerId),
      ),
    ]);
    query
      ..addColumns([assetCount])
      ..groupBy([_db.remoteAlbumEntity.id]);

    if (sortBy.isNotEmpty) {
      final orderings = <OrderingTerm>[];
      for (final sort in sortBy) {
        orderings.add(
          switch (sort) {
            SortRemoteAlbumsBy.id => OrderingTerm.asc(_db.remoteAlbumEntity.id),
          },
        );
      }
      query.orderBy(orderings);
    }

    return query
        .map(
          (row) => row.readTable(_db.remoteAlbumEntity).toDto(
                assetCount: row.read(assetCount) ?? 0,
                ownerName: row.readTable(_db.userEntity).name,
              ),
        )
        .get();
  }
}

extension on RemoteAlbumEntityData {
  Album toDto({int assetCount = 0, required String ownerName}) {
    return Album(
      id: id,
      name: name,
      ownerId: ownerId,
      createdAt: createdAt,
      updatedAt: updatedAt,
      description: description,
      thumbnailAssetId: thumbnailAssetId,
      isActivityEnabled: isActivityEnabled,
      order: order,
      assetCount: assetCount,
      ownerName: ownerName,
    );
  }
}
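// Illustrative usage sketch only, not part of this commit: assuming the
// Riverpod `driftProvider` used elsewhere in this codebase, the repository
// above could be consumed roughly like this.
//
//   final repository = DriftRemoteAlbumRepository(ref.watch(driftProvider));
//   final albums = await repository.getAll(sortBy: {SortRemoteAlbumsBy.id});
//   for (final album in albums) {
//     // assetCount comes from the COUNT aggregate over the left outer join,
//     // ownerName from the joined user_entity row.
//     print('${album.name}: ${album.assetCount} assets (${album.ownerName})');
//   }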
@@ -1,39 +0,0 @@
import 'package:drift/drift.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';

final remoteAssetRepositoryProvider = Provider<RemoteAssetRepository>(
  (ref) => RemoteAssetRepository(ref.watch(driftProvider)),
);

class RemoteAssetRepository extends DriftDatabaseRepository {
  final Drift _db;
  const RemoteAssetRepository(this._db) : super(_db);

  Future<void> updateFavorite(List<String> ids, bool isFavorite) {
    return _db.batch((batch) async {
      for (final id in ids) {
        batch.update(
          _db.remoteAssetEntity,
          RemoteAssetEntityCompanion(isFavorite: Value(isFavorite)),
          where: (e) => e.id.equals(id),
        );
      }
    });
  }

  Future<void> updateVisibility(List<String> ids, AssetVisibility visibility) {
    return _db.batch((batch) async {
      for (final id in ids) {
        batch.update(
          _db.remoteAssetEntity,
          RemoteAssetEntityCompanion(visibility: Value(visibility)),
          where: (e) => e.id.equals(id),
        );
      }
    });
  }
}
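// Illustrative usage sketch only, not part of this commit: both methods above
// wrap their per-id updates in a single drift batch, so a multi-select in the
// UI issues one batch instead of one statement round trip per asset. The
// AssetVisibility value below is assumed for illustration.
//
//   final repo = ref.read(remoteAssetRepositoryProvider);
//   await repo.updateFavorite(selectedIds, true);
//   await repo.updateVisibility(selectedIds, AssetVisibility.hidden);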
@@ -42,16 +42,11 @@ class SyncApiRepository {
      SyncStreamDto(
        types: [
          SyncRequestType.usersV1,
          SyncRequestType.assetsV1,
          SyncRequestType.assetExifsV1,
          SyncRequestType.partnersV1,
          SyncRequestType.assetsV1,
          SyncRequestType.partnerAssetsV1,
          SyncRequestType.assetExifsV1,
          SyncRequestType.partnerAssetExifsV1,
          SyncRequestType.albumsV1,
          SyncRequestType.albumUsersV1,
          SyncRequestType.albumAssetsV1,
          SyncRequestType.albumAssetExifsV1,
          SyncRequestType.albumToAssetsV1,
        ],
      ).toJson(),
    );
@@ -140,25 +135,6 @@ const _kResponseMap = <SyncEntityType, Function(Object)>{
  SyncEntityType.assetDeleteV1: SyncAssetDeleteV1.fromJson,
  SyncEntityType.assetExifV1: SyncAssetExifV1.fromJson,
  SyncEntityType.partnerAssetV1: SyncAssetV1.fromJson,
  SyncEntityType.partnerAssetBackfillV1: SyncAssetV1.fromJson,
  SyncEntityType.partnerAssetDeleteV1: SyncAssetDeleteV1.fromJson,
  SyncEntityType.partnerAssetExifV1: SyncAssetExifV1.fromJson,
  SyncEntityType.partnerAssetExifBackfillV1: SyncAssetExifV1.fromJson,
  SyncEntityType.albumV1: SyncAlbumV1.fromJson,
  SyncEntityType.albumDeleteV1: SyncAlbumDeleteV1.fromJson,
  SyncEntityType.albumUserV1: SyncAlbumUserV1.fromJson,
  SyncEntityType.albumUserBackfillV1: SyncAlbumUserV1.fromJson,
  SyncEntityType.albumUserDeleteV1: SyncAlbumUserDeleteV1.fromJson,
  SyncEntityType.albumAssetV1: SyncAssetV1.fromJson,
  SyncEntityType.albumAssetBackfillV1: SyncAssetV1.fromJson,
  SyncEntityType.albumAssetExifV1: SyncAssetExifV1.fromJson,
  SyncEntityType.albumAssetExifBackfillV1: SyncAssetExifV1.fromJson,
  SyncEntityType.albumToAssetV1: SyncAlbumToAssetV1.fromJson,
  SyncEntityType.albumToAssetBackfillV1: SyncAlbumToAssetV1.fromJson,
  SyncEntityType.albumToAssetDeleteV1: SyncAlbumToAssetDeleteV1.fromJson,
  SyncEntityType.syncAckV1: _SyncAckV1.fromJson,
};

class _SyncAckV1 {
  static _SyncAckV1? fromJson(dynamic _) => _SyncAckV1();
}
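A minimal sketch of how a converter map like _kResponseMap is typically consulted when decoding a sync event; the helper name and its parameters are illustrative assumptions, not taken from this diff.

// Illustrative sketch only: resolve and invoke the fromJson converter for a type.
Object? parseSyncData(SyncEntityType type, Object rawData) {
  final converter = _kResponseMap[type];
  if (converter == null) {
    return null; // unknown or unhandled entity type
  }
  return converter(rawData);
}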
@@ -1,17 +1,13 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart' as api show AssetVisibility, AlbumUserRole;
import 'package:openapi/api.dart' hide AssetVisibility, AlbumUserRole;
import 'package:openapi/api.dart' as api show AssetVisibility;
import 'package:openapi/api.dart' hide AssetVisibility;

class SyncStreamRepository extends DriftDatabaseRepository {
  final Logger _logger = Logger('DriftSyncStreamRepository');
@@ -21,10 +17,16 @@ class SyncStreamRepository extends DriftDatabaseRepository {

  Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
    try {
      await _db.userEntity
          .deleteWhere((row) => row.id.isIn(data.map((e) => e.userId)));
      await _db.batch((batch) {
        for (final user in data) {
          batch.delete(
            _db.userEntity,
            UserEntityCompanion(id: Value(user.userId)),
          );
        }
      });
    } catch (error, stack) {
      _logger.severe('Error: SyncUserDeleteV1', error, stack);
      _logger.severe('Error while processing SyncUserDeleteV1', error, stack);
      rethrow;
    }
  }
@@ -46,7 +48,7 @@ class SyncStreamRepository extends DriftDatabaseRepository {
        }
      });
    } catch (error, stack) {
      _logger.severe('Error: SyncUserV1', error, stack);
      _logger.severe('Error while processing SyncUserV1', error, stack);
      rethrow;
    }
  }
@@ -65,7 +67,7 @@ class SyncStreamRepository extends DriftDatabaseRepository {
        }
      });
    } catch (e, s) {
      _logger.severe('Error: SyncPartnerDeleteV1', e, s);
      _logger.severe('Error while processing SyncPartnerDeleteV1', e, s);
      rethrow;
    }
  }
@@ -88,30 +90,67 @@ class SyncStreamRepository extends DriftDatabaseRepository {
        }
      });
    } catch (e, s) {
      _logger.severe('Error: SyncPartnerV1', e, s);
      _logger.severe('Error while processing SyncPartnerV1', e, s);
      rethrow;
    }
  }

  Future<void> deleteAssetsV1(
    Iterable<SyncAssetDeleteV1> data, {
    String debugLabel = 'user',
  }) async {
  Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
    try {
      await _db.remoteAssetEntity
          .deleteWhere((row) => row.id.isIn(data.map((e) => e.assetId)));
      await _deleteAssetsV1(data);
    } catch (e, s) {
      _logger.severe('Error: deleteAssetsV1 - $debugLabel', e, s);
      _logger.severe('Error while processing deleteAssetsV1', e, s);
      rethrow;
    }
  }

  Future<void> updateAssetsV1(
    Iterable<SyncAssetV1> data, {
    String debugLabel = 'user',
  }) async {
  Future<void> updateAssetsV1(Iterable<SyncAssetV1> data) async {
    try {
      await _db.batch((batch) {
      await _updateAssetsV1(data);
    } catch (e, s) {
      _logger.severe('Error while processing updateAssetsV1', e, s);
      rethrow;
    }
  }

  Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
    try {
      await _deleteAssetsV1(data);
    } catch (e, s) {
      _logger.severe('Error while processing deletePartnerAssetsV1', e, s);
      rethrow;
    }
  }

  Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data) async {
    try {
      await _updateAssetsV1(data);
    } catch (e, s) {
      _logger.severe('Error while processing updatePartnerAssetsV1', e, s);
      rethrow;
    }
  }

  Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
    try {
      await _updateAssetExifV1(data);
    } catch (e, s) {
      _logger.severe('Error while processing updateAssetsExifV1', e, s);
      rethrow;
    }
  }

  Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
    try {
      await _updateAssetExifV1(data);
    } catch (e, s) {
      _logger.severe('Error while processing updatePartnerAssetsExifV1', e, s);
      rethrow;
    }
  }

  Future<void> _updateAssetsV1(Iterable<SyncAssetV1> data) =>
      _db.batch((batch) {
        for (final asset in data) {
          final companion = RemoteAssetEntityCompanion(
            name: Value(asset.originalFileName),
@@ -136,18 +175,19 @@ class SyncStreamRepository extends DriftDatabaseRepository {
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: updateAssetsV1 - $debugLabel', e, s);
      rethrow;
    }
  }

  Future<void> updateAssetsExifV1(
    Iterable<SyncAssetExifV1> data, {
    String debugLabel = 'user',
  }) async {
    try {
      await _db.batch((batch) {
  Future<void> _deleteAssetsV1(Iterable<SyncAssetDeleteV1> assets) =>
      _db.batch((batch) {
        for (final asset in assets) {
          batch.delete(
            _db.remoteAssetEntity,
            RemoteAssetEntityCompanion(id: Value(asset.assetId)),
          );
        }
      });

  Future<void> _updateAssetExifV1(Iterable<SyncAssetExifV1> data) =>
      _db.batch((batch) {
        for (final exif in data) {
          final companion = RemoteExifEntityCompanion(
            city: Value(exif.city),
@@ -179,141 +219,6 @@ class SyncStreamRepository extends DriftDatabaseRepository {
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: updateAssetsExifV1 - $debugLabel', e, s);
      rethrow;
    }
  }

  Future<void> deleteAlbumsV1(Iterable<SyncAlbumDeleteV1> data) async {
    try {
      await _db.remoteAlbumEntity
          .deleteWhere((row) => row.id.isIn(data.map((e) => e.albumId)));
    } catch (e, s) {
      _logger.severe('Error: deleteAlbumsV1', e, s);
      rethrow;
    }
  }

  Future<void> updateAlbumsV1(Iterable<SyncAlbumV1> data) async {
    try {
      await _db.batch((batch) {
        for (final album in data) {
          final companion = RemoteAlbumEntityCompanion(
            name: Value(album.name),
            description: Value(album.description),
            isActivityEnabled: Value(album.isActivityEnabled),
            order: Value(album.order.toAlbumAssetOrder()),
            thumbnailAssetId: Value(album.thumbnailAssetId),
            ownerId: Value(album.ownerId),
            createdAt: Value(album.createdAt),
            updatedAt: Value(album.updatedAt),
          );

          batch.insert(
            _db.remoteAlbumEntity,
            companion.copyWith(id: Value(album.id)),
            onConflict: DoUpdate((_) => companion),
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: updateAlbumsV1', e, s);
      rethrow;
    }
  }

  Future<void> deleteAlbumUsersV1(Iterable<SyncAlbumUserDeleteV1> data) async {
    try {
      await _db.batch((batch) {
        for (final album in data) {
          batch.delete(
            _db.remoteAlbumUserEntity,
            RemoteAlbumUserEntityCompanion(
              albumId: Value(album.albumId),
              userId: Value(album.userId),
            ),
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: deleteAlbumUsersV1', e, s);
      rethrow;
    }
  }

  Future<void> updateAlbumUsersV1(
    Iterable<SyncAlbumUserV1> data, {
    String debugLabel = 'user',
  }) async {
    try {
      await _db.batch((batch) {
        for (final album in data) {
          final companion = RemoteAlbumUserEntityCompanion(
            role: Value(album.role.toAlbumUserRole()),
          );

          batch.insert(
            _db.remoteAlbumUserEntity,
            companion.copyWith(
              albumId: Value(album.albumId),
              userId: Value(album.userId),
            ),
            onConflict: DoUpdate((_) => companion),
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: updateAlbumUsersV1 - $debugLabel', e, s);
      rethrow;
    }
  }

  Future<void> deleteAlbumToAssetsV1(
    Iterable<SyncAlbumToAssetDeleteV1> data,
  ) async {
    try {
      await _db.batch((batch) {
        for (final album in data) {
          batch.delete(
            _db.remoteAlbumAssetEntity,
            RemoteAlbumAssetEntityCompanion(
              albumId: Value(album.albumId),
              assetId: Value(album.assetId),
            ),
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: deleteAlbumToAssetsV1', e, s);
      rethrow;
    }
  }

  Future<void> updateAlbumToAssetsV1(
    Iterable<SyncAlbumToAssetV1> data, {
    String debugLabel = 'user',
  }) async {
    try {
      await _db.batch((batch) {
        for (final album in data) {
          final companion = RemoteAlbumAssetEntityCompanion(
            albumId: Value(album.albumId),
            assetId: Value(album.assetId),
          );

          batch.insert(
            _db.remoteAlbumAssetEntity,
            companion,
            onConflict: DoNothing(),
          );
        }
      });
    } catch (e, s) {
      _logger.severe('Error: updateAlbumToAssetsV1 - $debugLabel', e, s);
      rethrow;
    }
  }
}

extension on AssetTypeEnum {
@@ -326,22 +231,6 @@ extension on AssetTypeEnum {
      };
}

extension on AssetOrder {
  AlbumAssetOrder toAlbumAssetOrder() => switch (this) {
        AssetOrder.asc => AlbumAssetOrder.asc,
        AssetOrder.desc => AlbumAssetOrder.desc,
        _ => throw Exception('Unknown AssetOrder value: $this'),
      };
}

extension on api.AlbumUserRole {
  AlbumUserRole toAlbumUserRole() => switch (this) {
        api.AlbumUserRole.editor => AlbumUserRole.editor,
        api.AlbumUserRole.viewer => AlbumUserRole.viewer,
        _ => throw Exception('Unknown AlbumUserRole value: $this'),
      };
}

extension on api.AssetVisibility {
  AssetVisibility toAssetVisibility() => switch (this) {
        api.AssetVisibility.timeline => AssetVisibility.timeline,
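The API-to-domain enum mapping extensions above all follow the same switch-expression pattern; a self-contained sketch of that pattern follows, with illustrative enum names that are not part of the immich codebase.

// Illustrative sketch of the mapping-extension pattern (assumed enums).
enum ApiColor { red, green }

enum DomainColor { red, green }

extension ApiColorMapper on ApiColor {
  // Translates an API-layer enum value into its domain-layer counterpart.
  DomainColor toDomainColor() => switch (this) {
        ApiColor.red => DomainColor.red,
        ApiColor.green => DomainColor.green,
      };
}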
@@ -6,7 +6,6 @@ import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/timeline.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:stream_transform/stream_transform.dart';
@@ -15,21 +14,6 @@ class DriftTimelineRepository extends DriftDatabaseRepository {

  const DriftTimelineRepository(super._db) : _db = _db;

  Stream<List<String>> watchTimelineUserIds(String userId) {
    final query = _db.partnerEntity.selectOnly()
      ..addColumns([_db.partnerEntity.sharedById])
      ..where(
        _db.partnerEntity.inTimeline.equals(true) &
            _db.partnerEntity.sharedWithId.equals(userId),
      );

    return query
        .map((row) => row.read(_db.partnerEntity.sharedById)!)
        .watch()
        // Add current user ID to the list
        .map((users) => users..add(userId));
  }

  List<Bucket> _generateBuckets(int count) {
    final numBuckets = (count / kTimelineNoneSegmentSize).floor();
    final buckets = List.generate(
@@ -70,38 +54,36 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
    return _db.mergedAssetDrift
        .mergedAsset(userIds, limit: Limit(count, offset))
        .map(
          (row) {
            return row.remoteId != null && row.ownerId != null
                ? RemoteAsset(
                    id: row.remoteId!,
                    localId: row.localId,
                    name: row.name,
                    ownerId: row.ownerId!,
                    checksum: row.checksum,
                    type: row.type,
                    createdAt: row.createdAt,
                    updatedAt: row.updatedAt,
                    thumbHash: row.thumbHash,
                    width: row.width,
                    height: row.height,
                    isFavorite: row.isFavorite,
                    durationInSeconds: row.durationInSeconds,
                  )
                : LocalAsset(
                    id: row.localId!,
                    remoteId: row.remoteId,
                    name: row.name,
                    checksum: row.checksum,
                    type: row.type,
                    createdAt: row.createdAt,
                    updatedAt: row.updatedAt,
                    width: row.width,
                    height: row.height,
                    isFavorite: row.isFavorite,
                    durationInSeconds: row.durationInSeconds,
                  );
          },
        ).get();
          (row) => row.remoteId != null
              ? Asset(
                  id: row.remoteId!,
                  localId: row.localId,
                  name: row.name,
                  checksum: row.checksum,
                  type: row.type,
                  createdAt: row.createdAt,
                  updatedAt: row.updatedAt,
                  thumbHash: row.thumbHash,
                  width: row.width,
                  height: row.height,
                  isFavorite: row.isFavorite,
                  durationInSeconds: row.durationInSeconds,
                )
              : LocalAsset(
                  id: row.localId!,
                  remoteId: row.remoteId,
                  name: row.name,
                  checksum: row.checksum,
                  type: row.type,
                  createdAt: row.createdAt,
                  updatedAt: row.updatedAt,
                  width: row.width,
                  height: row.height,
                  isFavorite: row.isFavorite,
                  durationInSeconds: row.durationInSeconds,
                ),
        )
        .get();
  }

  Stream<List<Bucket>> watchLocalBucket(
@@ -157,62 +139,6 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
        .map((row) => row.readTable(_db.localAssetEntity).toDto())
        .get();
  }

  Stream<List<Bucket>> watchRemoteBucket(
    String albumId, {
    GroupAssetsBy groupBy = GroupAssetsBy.day,
  }) {
    if (groupBy == GroupAssetsBy.none) {
      return _db.remoteAlbumAssetEntity
          .count(where: (row) => row.albumId.equals(albumId))
          .map(_generateBuckets)
          .watchSingle();
    }

    final assetCountExp = _db.remoteAssetEntity.id.count();
    final dateExp = _db.remoteAssetEntity.createdAt.dateFmt(groupBy);

    final query = _db.remoteAssetEntity.selectOnly()
      ..addColumns([assetCountExp, dateExp])
      ..join([
        innerJoin(
          _db.remoteAlbumAssetEntity,
          _db.remoteAlbumAssetEntity.assetId
              .equalsExp(_db.remoteAssetEntity.id),
        ),
      ])
      ..where(_db.remoteAlbumAssetEntity.albumId.equals(albumId))
      ..groupBy([dateExp])
      ..orderBy([OrderingTerm.desc(dateExp)]);

    return query.map((row) {
      final timeline = row.read(dateExp)!.dateFmt(groupBy);
      final assetCount = row.read(assetCountExp)!;
      return TimeBucket(date: timeline, assetCount: assetCount);
    }).watch();
  }

  Future<List<BaseAsset>> getRemoteBucketAssets(
    String albumId, {
    required int offset,
    required int count,
  }) {
    final query = _db.remoteAssetEntity.select().join(
      [
        innerJoin(
          _db.remoteAlbumAssetEntity,
          _db.remoteAlbumAssetEntity.assetId
              .equalsExp(_db.remoteAssetEntity.id),
        ),
      ],
    )
      ..where(_db.remoteAlbumAssetEntity.albumId.equals(albumId))
      ..orderBy([OrderingTerm.desc(_db.remoteAssetEntity.createdAt)])
      ..limit(count, offset: offset);
    return query
        .map((row) => row.readTable(_db.remoteAssetEntity).toDto())
        .get();
  }
}

extension on Expression<DateTime> {
@@ -5,7 +5,7 @@ class AlbumViewerPageState {
  final String editTitleText;
  final String editDescriptionText;

  const AlbumViewerPageState({
  AlbumViewerPageState({
    required this.isEditAlbum,
    required this.editTitleText,
    required this.editDescriptionText,

@@ -4,7 +4,7 @@ import 'package:immich_mobile/entities/asset.entity.dart';
class AssetSelectionPageResult {
  final Set<Asset> selectedAssets;

  const AssetSelectionPageResult({
  AssetSelectionPageResult({
    required this.selectedAssets,
  });
  @override

@@ -7,7 +7,7 @@ class AuthState {
  final bool isAdmin;
  final String profileImagePath;

  const AuthState({
  AuthState({
    required this.deviceId,
    required this.userId,
    required this.userEmail,

@@ -5,7 +5,7 @@ class AuxilaryEndpoint {
  final String url;
  final AuxCheckStatus status;

  const AuxilaryEndpoint({
  AuxilaryEndpoint({
    required this.url,
    required this.status,
  });
@@ -55,7 +55,7 @@ class AuxilaryEndpoint {

class AuxCheckStatus {
  final String name;
  const AuxCheckStatus({
  AuxCheckStatus({
    required this.name,
  });
  const AuxCheckStatus._(this.name);

@@ -13,7 +13,7 @@ class LoginResponse {

  final String userId;

  const LoginResponse({
  LoginResponse({
    required this.accessToken,
    required this.isAdmin,
    required this.name,

@@ -4,7 +4,7 @@ class AvailableAlbum {
  final Album album;
  final int assetCount;
  final DateTime? lastBackup;
  const AvailableAlbum({
  AvailableAlbum({
    required this.album,
    required this.assetCount,
    this.lastBackup,

@@ -9,7 +9,7 @@ class CurrentUploadAsset {
  final int? fileSize;
  final bool? iCloudAsset;

  const CurrentUploadAsset({
  CurrentUploadAsset({
    required this.id,
    required this.fileCreatedAt,
    required this.fileName,

@@ -5,7 +5,7 @@ class SuccessUploadAsset {
  final String remoteAssetId;
  final bool isDuplicate;

  const SuccessUploadAsset({
  SuccessUploadAsset({
    required this.candidate,
    required this.remoteAssetId,
    required this.isDuplicate,

@@ -10,7 +10,7 @@ class DownloadInfo {
  // enum
  final TaskStatus status;

  const DownloadInfo({
  DownloadInfo({
    required this.fileName,
    required this.progress,
    required this.status,
@@ -71,7 +71,7 @@ class DownloadState {
  final TaskStatus downloadStatus;
  final Map<String, DownloadInfo> taskProgress;
  final bool showProgress;
  const DownloadState({
  DownloadState({
    required this.downloadStatus,
    required this.taskProgress,
    required this.showProgress,

@@ -3,7 +3,7 @@ import 'package:immich_mobile/models/folder/root_folder.model.dart';
class RecursiveFolder extends RootFolder {
  final String name;

  const RecursiveFolder({
  RecursiveFolder({
    required this.name,
    required super.path,
    required super.subfolders,

@@ -4,7 +4,7 @@ class RootFolder {
  final List<RecursiveFolder> subfolders;
  final String path;

  const RootFolder({
  RootFolder({
    required this.subfolders,
    required this.path,
  });

@@ -8,6 +8,4 @@ class MapAssetsInBoundsUpdated extends MapEvent {
  const MapAssetsInBoundsUpdated(this.assetRemoteIds);
}

class MapCloseBottomSheet extends MapEvent {
  const MapCloseBottomSheet();
}
class MapCloseBottomSheet extends MapEvent {}

@@ -4,7 +4,7 @@ import 'package:openapi/api.dart';
class MapMarker {
  final LatLng latLng;
  final String assetRemoteId;
  const MapMarker({
  MapMarker({
    required this.latLng,
    required this.assetRemoteId,
  });